mirror of
https://github.com/FoundationAgents/MetaGPT.git
synced 2026-05-01 11:56:24 +02:00
Merge branch 'main' into code_interpreter
This commit is contained in:
commit
a570c81ccf
37 changed files with 632 additions and 356 deletions
|
|
@ -6,16 +6,25 @@
|
|||
@File : llm_hello_world.py
|
||||
"""
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
from metagpt.llm import LLM
|
||||
from metagpt.logs import logger
|
||||
from metagpt.utils.common import encode_image
|
||||
|
||||
|
||||
async def main():
|
||||
llm = LLM()
|
||||
logger.info(await llm.aask("hello world"))
|
||||
# llm type check
|
||||
id_ques = "what's your name"
|
||||
logger.info(f"{id_ques}: ")
|
||||
logger.info(await llm.aask(id_ques))
|
||||
logger.info("\n\n")
|
||||
|
||||
logger.info(
|
||||
await llm.aask(
|
||||
"who are you", system_msgs=["act as a robot, answer 'I'am robot' if the question is 'who are you'"]
|
||||
)
|
||||
)
|
||||
|
||||
logger.info(await llm.aask_batch(["hi", "write python hello world."]))
|
||||
|
||||
hello_msg = [{"role": "user", "content": "count from 1 to 10. split by newline."}]
|
||||
|
|
@ -29,12 +38,6 @@ async def main():
|
|||
if hasattr(llm, "completion"):
|
||||
logger.info(llm.completion(hello_msg))
|
||||
|
||||
# check whether the configured LLM supports vision capability; if not, it will throw an error
|
||||
invoice_path = Path(__file__).parent.joinpath("..", "tests", "data", "invoices", "invoice-2.png")
|
||||
img_base64 = encode_image(invoice_path)
|
||||
res = await llm.aask(msg="if this is a invoice, just return True else return False", images=[img_base64])
|
||||
assert "true" in res.lower()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
|
|
|
|||
23
examples/llm_vision.py
Normal file
23
examples/llm_vision.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# @Desc : example demonstrating the LLM vision capability
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
from metagpt.llm import LLM
|
||||
from metagpt.utils.common import encode_image
|
||||
|
||||
|
||||
async def main():
    """Exercise the configured LLM's vision capability on a bundled sample invoice.

    Raises:
        An error from the underlying provider if the configured LLM does not
        support image (vision) input; AssertionError if the model fails to
        recognize the sample image as an invoice.
    """
    model = LLM()

    # Bundled test fixture: a sample invoice image shipped with the repo's test data.
    sample_png = Path(__file__).parent.joinpath("..", "tests", "data", "invoices", "invoice-2.png")
    encoded = encode_image(sample_png)

    # This call is the actual vision check — it fails if the LLM lacks vision support.
    answer = await model.aask(msg="if this is a invoice, just return True else return False", images=[encoded])
    assert "true" in answer.lower()
|
||||
|
||||
|
||||
# Script entry point: drive the async example through the asyncio event loop.
if __name__ == "__main__":
    asyncio.run(main())
|
||||
Loading…
Add table
Add a link
Reference in a new issue