Merge branch 'main' into main

This commit is contained in:
better629 2024-10-17 16:25:31 +08:00 committed by GitHub
commit d99054ab5e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
98 changed files with 1697 additions and 496 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

64
examples/rag/omniparse.py Normal file
View file

@ -0,0 +1,64 @@
import asyncio
from metagpt.config2 import config
from metagpt.const import EXAMPLE_DATA_PATH
from metagpt.logs import logger
from metagpt.rag.parsers import OmniParse
from metagpt.rag.schema import OmniParseOptions, OmniParseType, ParseResultType
from metagpt.utils.omniparse_client import OmniParseClient
TEST_DOCX = EXAMPLE_DATA_PATH / "omniparse/test01.docx"
TEST_PDF = EXAMPLE_DATA_PATH / "omniparse/test02.pdf"
TEST_VIDEO = EXAMPLE_DATA_PATH / "omniparse/test03.mp4"
TEST_AUDIO = EXAMPLE_DATA_PATH / "omniparse/test04.mp3"
async def omniparse_client_example():
    """Exercise OmniParseClient directly, once per supported media type."""
    client = OmniParseClient(base_url=config.omniparse.base_url)

    # A DOCX is uploaded as raw bytes, so read the file into memory first.
    with open(TEST_DOCX, "rb") as fh:
        docx_bytes = fh.read()
    result = await client.parse_document(file_input=docx_bytes, bytes_filename="test_01.docx")
    logger.info(result)

    # The remaining parsers accept a file path directly.
    result = await client.parse_pdf(file_input=TEST_PDF)
    logger.info(result)

    result = await client.parse_video(file_input=TEST_VIDEO)
    logger.info(result)

    result = await client.parse_audio(file_input=TEST_AUDIO)
    logger.info(result)
async def omniparse_example():
    """Exercise the higher-level OmniParse parser wrapper."""
    options = OmniParseOptions(
        parse_type=OmniParseType.PDF,
        result_type=ParseResultType.MD,
        max_timeout=120,
        num_workers=3,
    )
    parser = OmniParse(
        api_key=config.omniparse.api_key,
        base_url=config.omniparse.base_url,
        parse_options=options,
    )

    # Synchronous single-file parse of a PDF.
    ret = parser.load_data(file_path=TEST_PDF)
    logger.info(ret)

    # Switch to the generic DOCUMENT type, then parse a batch asynchronously.
    batch = [TEST_DOCX, TEST_PDF]
    parser.parse_type = OmniParseType.DOCUMENT
    ret = await parser.aload_data(file_path=batch)
    logger.info(ret)
async def main():
    """Run both OmniParse demos back to back."""
    await omniparse_client_example()
    await omniparse_example()


if __name__ == "__main__":
    asyncio.run(main())

View file

@ -2,7 +2,7 @@
import asyncio
from examples.rag_pipeline import DOC_PATH, QUESTION
from examples.rag.rag_pipeline import DOC_PATH, QUESTION
from metagpt.logs import logger
from metagpt.rag.engines import SimpleEngine
from metagpt.roles import Sales

View file

@ -1,82 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/9/13 12:36
@Author : femto Zheng
@File : sk_agent.py
"""
import asyncio
from semantic_kernel.core_skills import FileIOSkill, MathSkill, TextSkill, TimeSkill
from semantic_kernel.planning import SequentialPlanner
# from semantic_kernel.planning import SequentialPlanner
from semantic_kernel.planning.action_planner.action_planner import ActionPlanner
from metagpt.actions import UserRequirement
from metagpt.const import SKILL_DIRECTORY
from metagpt.roles.sk_agent import SkAgent
from metagpt.schema import Message
from metagpt.tools.search_engine import SkSearchEngine
async def main():
    """Demo entry point: run the web-search planner example.

    The other planner demos are kept commented out so they can be
    switched on by hand.
    """
    # await basic_planner_example()
    # await action_planner_example()
    # await sequential_planner_example()
    await basic_planner_web_search_example()
async def basic_planner_example():
    """Run SkAgent with its default BasicPlanner on a writing/uppercase task."""
    task = """
Tomorrow is Valentine's day. I need to come up with a few date ideas. She speaks French so write it in French.
Convert the text to uppercase"""
    agent = SkAgent()
    # Equip the agent: two semantic skill directories plus a native text skill.
    agent.import_semantic_skill_from_directory(SKILL_DIRECTORY, "SummarizeSkill")
    agent.import_semantic_skill_from_directory(SKILL_DIRECTORY, "WriterSkill")
    agent.import_skill(TextSkill(), "TextSkill")
    # The default planner (BasicPlanner) decides how to chain the skills.
    await agent.run(Message(content=task, cause_by=UserRequirement))
async def sequential_planner_example():
    """Run SkAgent driven by semantic-kernel's SequentialPlanner on the same task."""
    task = """
Tomorrow is Valentine's day. I need to come up with a few date ideas. She speaks French so write it in French.
Convert the text to uppercase"""
    role = SkAgent(planner_cls=SequentialPlanner)
    # let's give the agent some skills
    role.import_semantic_skill_from_directory(SKILL_DIRECTORY, "SummarizeSkill")
    role.import_semantic_skill_from_directory(SKILL_DIRECTORY, "WriterSkill")
    role.import_skill(TextSkill(), "TextSkill")
    # using SequentialPlanner (passed via planner_cls above)
    await role.run(Message(content=task, cause_by=UserRequirement))
async def basic_planner_web_search_example():
    """Answer a trivia question through SkAgent with a web-search skill."""
    task = """
Question: Who made the 1989 comic book, the film version of which Jon Raymond Polito appeared in?"""
    agent = SkAgent()
    agent.import_skill(SkSearchEngine(), "WebSearchSkill")
    # A semantic "QASkill" could additionally be imported from a skills directory.
    await agent.run(Message(content=task, cause_by=UserRequirement))
async def action_planner_example():
    """Let ActionPlanner pick a single skill (math) to solve a sum."""
    agent = SkAgent(planner_cls=ActionPlanner)
    # Register four native skills; the planner chooses among them.
    for skill, name in (
        (MathSkill(), "math"),
        (FileIOSkill(), "fileIO"),
        (TimeSkill(), "time"),
        (TextSkill(), "text"),
    ):
        agent.import_skill(skill, name)
    task = "What is the sum of 110 and 990?"
    await agent.run(Message(content=task, cause_by=UserRequirement))  # it will choose mathskill.Add
# Script entry point.
if __name__ == "__main__":
    asyncio.run(main())

3
examples/ui_with_chainlit/.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
*.chainlit
chainlit.md
.files

View file

@ -0,0 +1,34 @@
# MetaGPT in UI with Chainlit! 🤖
- MetaGPT functionality in UI using Chainlit.
- It also takes a **one line requirement** as input and outputs **user stories / competitive analysis / requirements / data structures / APIs / documents, etc.**, but `everything in the UI`.
## Install Chainlit
- Set up the initial MetaGPT config as described in the [main README](../../README.md).
```bash
pip install chainlit
```
## Usage
```bash
chainlit run app.py
```
- Now go to: http://localhost:8000
- Select one of:
- `Create a 2048 game`
- `Write a cli Blackjack Game`
- `Type your own message...`
- This will run a MetaGPT software company.
## To set up with your own application
- We can change `Environment.run`, `Team.run`, `Role.run`, `Role._act`, `Action.run`.
- In this code, `Environment.run` was changed, as it was the easiest place to hook in.
- We need to use `metagpt.logs.set_llm_stream_logfunc` to stream messages into the UI via Chainlit Messages.
- To use this elsewhere, call `chainlit.Message(content="").send()` with the desired content.

View file

View file

@ -0,0 +1,83 @@
import chainlit as cl
from init_setup import ChainlitEnv
from metagpt.roles import (
Architect,
Engineer,
ProductManager,
ProjectManager,
QaEngineer,
)
from metagpt.team import Team
# https://docs.chainlit.io/concepts/starters
@cl.set_chat_profiles
async def chat_profile() -> list[cl.ChatProfile]:
    """Build the single MetaGPT chat profile shown in the Chainlit UI.

    Returns:
        list[chainlit.ChatProfile]: one profile carrying the starter prompts.
    """
    # Starter buttons the user can click instead of typing a requirement.
    starters = [
        cl.Starter(
            label="Create a 2048 Game",
            message="Create a 2048 game",
            icon="/public/2048.jpg",
        ),
        cl.Starter(
            label="Write a cli Blackjack Game",
            message="Write a cli Blackjack Game",
            icon="/public/blackjack.jpg",
        ),
    ]
    profile = cl.ChatProfile(
        name="MetaGPT",
        icon="/public/MetaGPT-new-log.jpg",
        markdown_description="It takes a **one line requirement** as input and outputs **user stories / competitive analysis / requirements / data structures / APIs / documents, etc.**, But `everything in UI`.",
        starters=starters,
    )
    return [profile]
# https://docs.chainlit.io/concepts/message
@cl.on_message
async def startup(message: cl.Message) -> None:
    """Create and run a MetaGPT software company for an incoming UI message.

    Args:
        message (chainlit.Message): the user's message from the Chainlit UI;
            its content is used as the one-line requirement.
    """
    idea = message.content

    # Assemble the company inside the Chainlit-aware environment
    # (mirrors software_company.py).
    company = Team(env=ChainlitEnv())
    company.hire(
        [
            ProductManager(),
            Architect(),
            ProjectManager(),
            Engineer(n_borg=5, use_code_review=True),
            QaEngineer(),
        ]
    )
    company.invest(investment=3.0)
    company.run_project(idea=idea)
    await company.run(n_round=5)

    # List the generated files, skipping git internals, for display in the UI.
    workdir = company.env.context.git_repo.workdir
    repo_files = company.env.context.git_repo.get_files(workdir)
    listing = "\n".join(f"{workdir}/{name}" for name in repo_files if not name.startswith(".git"))

    await cl.Message(
        content=f"""
Codes can be found here:
{listing}
---
Total cost: `{company.cost_manager.total_cost}`
"""
    ).send()

View file

@ -0,0 +1,69 @@
import asyncio
import chainlit as cl
from metagpt.environment import Environment
from metagpt.logs import logger, set_llm_stream_logfunc
from metagpt.roles import Role
from metagpt.utils.common import any_to_name
def log_llm_stream_chainlit(msg):
    """Stream one LLM output token into the active Chainlit message.

    Relies on the module-global ``chainlit_message`` being assigned (in
    ``ChainlitEnv._chainlit_role_run``) before any tokens arrive.
    """
    # cl.run_sync bridges MetaGPT's synchronous logging hook into Chainlit's async API.
    cl.run_sync(chainlit_message.stream_token(msg))


# Route MetaGPT's LLM token stream through the Chainlit UI.
set_llm_stream_logfunc(func=log_llm_stream_chainlit)
class ChainlitEnv(Environment):
    """Environment subclass that mirrors role output into the Chainlit UI."""

    async def run(self, k=1):
        """Process all Role runs at once.

        Args:
            k (int): number of rounds; within each round every role's run
                is awaited concurrently via asyncio.gather.
        """
        for _ in range(k):
            futures = []
            for role in self.roles.values():
                # Call role.run with chainlit configuration
                future = self._chainlit_role_run(role=role)
                futures.append(future)
            await asyncio.gather(*futures)
            logger.debug(f"is idle: {self.is_idle}")

    async def _chainlit_role_run(self, role: Role) -> None:
        """Run one role while streaming its tokens into a fresh Chainlit message.

        Args:
            role (Role): the MetaGPT role to execute.
        """
        # NOTE(review): this module-level global is what log_llm_stream_chainlit
        # streams into. Since roles in a round run concurrently, each new role
        # overwrites it, so interleaved token streams may land in the wrong UI
        # message — confirm whether this is acceptable for this demo.
        global chainlit_message
        chainlit_message = cl.Message(content="")
        message = await role.run()
        # If message is from role._act() publish to UI.
        if message is not None and message.content != "No actions taken yet":
            # Reformat the *streamed* content (not the returned message):
            # ActionNode output starting with "[CONTENT]" is fenced as JSON.
            chainlit_message.content = await self._convert_message_to_markdownjson(message=chainlit_message.content)
            # Append which action was completed and by which role.
            chainlit_message.content += f"---\n\nAction: `{any_to_name(message.cause_by)}` done by `{role._setting}`."
            await chainlit_message.send()

    # for clean view in UI
    async def _convert_message_to_markdownjson(self, message: str) -> str:
        """Wrap MetaGPT ActionNode output in a fenced ``json`` block.

        Args:
            message (str): message text accumulated while the role acted

        Returns:
            str: the message fenced as markdown json when it starts with
                "[CONTENT]"; otherwise the message unchanged.
        """
        if message.startswith("[CONTENT]"):
            return f"```json\n{message}\n```\n"
        return message

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 89 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 58 KiB