mirror of
https://github.com/FoundationAgents/MetaGPT.git
synced 2026-05-01 20:03:28 +02:00
Merge branch 'geekan:main' into main
This commit is contained in:
commit
9e4e32e7c7
33 changed files with 321 additions and 140 deletions
|
|
@ -14,9 +14,9 @@ from metagpt.roles import Role
|
|||
from metagpt.team import Team
|
||||
|
||||
gpt35 = Config.default()
|
||||
gpt35.llm.model = "gpt-3.5-turbo-1106"
|
||||
gpt35.llm.model = "gpt-3.5-turbo"
|
||||
gpt4 = Config.default()
|
||||
gpt4.llm.model = "gpt-4-1106-preview"
|
||||
gpt4.llm.model = "gpt-4-turbo"
|
||||
action1 = Action(config=gpt4, name="AlexSay", instruction="Express your opinion with emotion and don't repeat it")
|
||||
action2 = Action(config=gpt35, name="BobSay", instruction="Express your opinion with emotion and don't repeat it")
|
||||
alex = Role(name="Alex", profile="Democratic candidate", goal="Win the election", actions=[action1], watch=[action2])
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
"""
|
||||
@Time : 2023/5/6 14:13
|
||||
@Author : alexanderwu
|
||||
@File : llm_hello_world.py
|
||||
@File : hello_world.py
|
||||
"""
|
||||
import asyncio
|
||||
|
||||
|
|
@ -11,20 +11,15 @@ from metagpt.llm import LLM
|
|||
from metagpt.logs import logger
|
||||
|
||||
|
||||
async def main():
|
||||
llm = LLM()
|
||||
# llm type check
|
||||
question = "what's your name"
|
||||
logger.info(f"{question}: ")
|
||||
logger.info(await llm.aask(question))
|
||||
logger.info("\n\n")
|
||||
async def ask_and_print(question: str, llm: LLM, system_prompt) -> str:
    """Ask `question` with `system_prompt` as the system message, logging both Q and A."""
    logger.info(f"Q: {question}")
    answer = await llm.aask(question, system_msgs=[system_prompt])
    logger.info(f"A: {answer}")
    return answer
|
||||
|
||||
logger.info(
|
||||
await llm.aask(
|
||||
"who are you", system_msgs=["act as a robot, just answer 'I'am robot' if the question is 'who are you'"]
|
||||
)
|
||||
)
|
||||
|
||||
async def lowlevel_api_example(llm: LLM):
|
||||
logger.info("low level api example")
|
||||
logger.info(await llm.aask_batch(["hi", "write python hello world."]))
|
||||
|
||||
hello_msg = [{"role": "user", "content": "count from 1 to 10. split by newline."}]
|
||||
|
|
@ -39,5 +34,12 @@ async def main():
|
|||
logger.info(llm.completion(hello_msg))
|
||||
|
||||
|
||||
async def main():
    """Run two Q&A round trips against the default LLM, then the low-level API demo."""
    model = LLM()
    # Same two questions/prompts as before, driven from a table instead of
    # repeated call statements.
    for question, prompt in (
        ("what's your name?", "I'm a helpful AI assistant."),
        ("who are you?", "just answer 'I am a robot' if the question is 'who are you'"),
    ):
        await ask_and_print(question, model, prompt)
    await lowlevel_api_example(model)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Entry point: run the async demo only when executed as a script.
    asyncio.run(main())
|
||||
29
examples/ping.py
Normal file
29
examples/ping.py
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
@Time : 2024/4/22 14:28
|
||||
@Author : alexanderwu
|
||||
@File : ping.py
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
|
||||
from metagpt.llm import LLM
|
||||
from metagpt.logs import logger
|
||||
|
||||
|
||||
async def ask_and_print(question: str, llm: LLM, system_prompt: str) -> str:
    """Send `question` to `llm` framed by `system_prompt`, logging the exchange.

    Args:
        question: The user question to send.
        llm: The LLM client used to answer (project type; assumed to expose `aask`).
        system_prompt: System message passed as the sole entry of `system_msgs`.

    Returns:
        The model's response text.
    """
    logger.info(f"Q: {question}")
    rsp = await llm.aask(question, system_msgs=[system_prompt])
    logger.info(f"A: {rsp}")
    # Blank line between exchanges keeps the log readable.
    logger.info("\n")
    return rsp
|
||||
|
||||
|
||||
async def main():
    """Smoke-test the default LLM with a single ping/pong round trip."""
    client = LLM()
    await ask_and_print("ping?", client, "Just answer pong when ping.")


if __name__ == "__main__":
    # Run the async smoke test only when executed directly.
    asyncio.run(main())
|
||||
|
|
@ -40,7 +40,10 @@ class Player(BaseModel):
|
|||
|
||||
|
||||
class RAGExample:
|
||||
"""Show how to use RAG."""
|
||||
"""Show how to use RAG.
|
||||
|
||||
Default engine use LLM Reranker, if the answer from the LLM is incorrect, may encounter `IndexError: list index out of range`.
|
||||
"""
|
||||
|
||||
def __init__(self, engine: SimpleEngine = None):
|
||||
self._engine = engine
|
||||
|
|
@ -59,6 +62,7 @@ class RAGExample:
|
|||
def engine(self, value: SimpleEngine):
|
||||
self._engine = value
|
||||
|
||||
@handle_exception
|
||||
async def run_pipeline(self, question=QUESTION, print_title=True):
|
||||
"""This example run rag pipeline, use faiss retriever and llm ranker, will print something like:
|
||||
|
||||
|
|
@ -79,6 +83,7 @@ class RAGExample:
|
|||
answer = await self.engine.aquery(question)
|
||||
self._print_query_result(answer)
|
||||
|
||||
@handle_exception
|
||||
async def add_docs(self):
|
||||
"""This example show how to add docs.
|
||||
|
||||
|
|
@ -148,6 +153,7 @@ class RAGExample:
|
|||
except Exception as e:
|
||||
logger.error(f"nodes is empty, llm don't answer correctly, exception: {e}")
|
||||
|
||||
@handle_exception
|
||||
async def init_objects(self):
|
||||
"""This example show how to from objs, will print something like:
|
||||
|
||||
|
|
@ -160,6 +166,7 @@ class RAGExample:
|
|||
await self.add_objects(print_title=False)
|
||||
self.engine = pre_engine
|
||||
|
||||
@handle_exception
|
||||
async def init_and_query_chromadb(self):
|
||||
"""This example show how to use chromadb. how to save and load index. will print something like:
|
||||
|
||||
|
|
@ -233,7 +240,7 @@ class RAGExample:
|
|||
|
||||
|
||||
async def main():
|
||||
"""RAG pipeline"""
|
||||
"""RAG pipeline."""
|
||||
e = RAGExample()
|
||||
await e.run_pipeline()
|
||||
await e.add_docs()
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue