diff --git a/.github/workflows/auto-unittest.yaml b/.github/workflows/auto-unittest.yaml new file mode 100644 index 000000000..a58163b4d --- /dev/null +++ b/.github/workflows/auto-unittest.yaml @@ -0,0 +1,61 @@ +name: Auto Unit Tests + +on: + pull_request: + push: + branches: + - 'main' + - 'dev' + - '*-release' + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + # python-version: ['3.9', '3.10', '3.11'] + python-version: ['3.9'] + + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + - name: Install dependencies + run: | + sh tests/scripts/run_install_deps.sh + - name: Test with pytest + run: | + export ALLOW_OPENAI_API_CALL=0 + mkdir -p ~/.metagpt && cp tests/config2.yaml ~/.metagpt/config2.yaml && cp tests/spark.yaml ~/.metagpt/spark.yaml + pytest tests/ --doctest-modules --cov=./metagpt/ --cov-report=xml:cov.xml --cov-report=html:htmlcov --durations=20 | tee unittest.txt + - name: Show coverage report + run: | + coverage report -m + - name: Show failed tests and overall summary + run: | + grep -E "FAILED tests|ERROR tests|[0-9]+ passed," unittest.txt + failed_count=$(grep -E "FAILED|ERROR" unittest.txt | wc -l) + if [[ "$failed_count" -gt 0 ]]; then + echo "$failed_count failed lines found! Task failed." 
+ exit 1 + fi + - name: Upload pytest test results + uses: actions/upload-artifact@v3 + with: + name: pytest-results-${{ matrix.python-version }} + path: | + ./unittest.txt + ./htmlcov/ + ./tests/data/rsp_cache_new.json + retention-days: 3 + if: ${{ always() }} + - name: Upload coverage reports to Codecov + uses: codecov/codecov-action@v3 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + if: ${{ always() }} diff --git a/.github/workflows/unittest.yaml b/.github/workflows/unittest.yaml index 87ccbf144..68d3c382f 100644 --- a/.github/workflows/unittest.yaml +++ b/.github/workflows/unittest.yaml @@ -5,6 +5,9 @@ on: pull_request_target: push: branches: + - 'main' + - 'dev' + - '*-release' - '*-debugger' jobs: @@ -51,6 +54,7 @@ jobs: export ALLOW_OPENAI_API_CALL=0 echo "${{ secrets.METAGPT_KEY_YAML }}" | base64 -d > config/key.yaml mkdir -p ~/.metagpt && echo "${{ secrets.METAGPT_CONFIG2_YAML }}" | base64 -d > ~/.metagpt/config2.yaml + echo "${{ secrets.SPARK_YAML }}" | base64 -d > ~/.metagpt/spark.yaml pytest tests/ --doctest-modules --cov=./metagpt/ --cov-report=xml:cov.xml --cov-report=html:htmlcov --durations=20 | tee unittest.txt - name: Show coverage report run: | diff --git a/README.md b/README.md index 39dde8208..b6f31901b 100644 --- a/README.md +++ b/README.md @@ -55,30 +55,21 @@ ## Install ### Pip installation +> Ensure that Python 3.9+ is installed on your system. You can check this by using: `python --version`. +> You can use conda like this: `conda create -n metagpt python=3.9 && conda activate metagpt` + ```bash -# Step 1: Ensure that Python 3.9+ is installed on your system. 
You can check this by using: -# You can use conda to initialize a new python env -# conda create -n metagpt python=3.9 -# conda activate metagpt -python3 --version +pip install metagpt +metagpt --init-config # create ~/.metagpt/config2.yaml, modify it to your own config +metagpt "Create a 2048 game" # this will create a repo in ./workspace +``` -# Step 2: Clone the repository to your local machine for latest version, and install it. -git clone https://github.com/geekan/MetaGPT.git -cd MetaGPT -pip3 install -e . # or pip3 install metagpt # for stable version +or you can use it as library -# Step 3: setup your LLM key in the config2.yaml file -mkdir ~/.metagpt -cp config/config2.yaml ~/.metagpt/config2.yaml -vim ~/.metagpt/config2.yaml - -# Step 4: run metagpt cli -metagpt "Create a 2048 game in python" - -# Step 5 [Optional]: If you want to save the artifacts like diagrams such as quadrant chart, system designs, sequence flow in the workspace, you can execute the step before Step 3. By default, the framework is compatible, and the entire process can be run completely without executing this step. -# If executing, ensure that NPM is installed on your system. Then install mermaid-js. (If you don't have npm in your computer, please go to the Node.js official website to install Node.js https://nodejs.org/ and then you will have npm tool in your computer.) 
-npm --version -sudo npm install -g @mermaid-js/mermaid-cli +```python +from metagpt.software_company import generate_repo, ProjectRepo +repo: ProjectRepo = generate_repo("Create a 2048 game") # or ProjectRepo("") +print(repo) # it will print the repo structure with files ``` detail installation please refer to [cli_install](https://docs.deepwisdom.ai/main/en/guide/get_started/installation.html#install-stable-version) @@ -99,7 +90,7 @@ # Step 2: Run metagpt demo with container -v /opt/metagpt/config/config2.yaml:/app/metagpt/config/config2.yaml \ -v /opt/metagpt/workspace:/app/metagpt/workspace \ metagpt/metagpt:latest \ - metagpt "Write a cli snake game" + metagpt "Create a 2048 game" ``` detail installation please refer to [docker_install](https://docs.deepwisdom.ai/main/en/guide/get_started/installation.html#install-with-docker) diff --git a/config/config2.yaml.example b/config/config2.yaml.example index 7c523fe7d..bead3c626 100644 --- a/config/config2.yaml.example +++ b/config/config2.yaml.example @@ -29,11 +29,13 @@ s3: bucket: "test" -AZURE_TTS_SUBSCRIPTION_KEY: "YOUR_SUBSCRIPTION_KEY" -AZURE_TTS_REGION: "eastus" +azure_tts_subscription_key: "YOUR_SUBSCRIPTION_KEY" +azure_tts_region: "eastus" -IFLYTEK_APP_ID: "YOUR_APP_ID" -IFLYTEK_API_KEY: "YOUR_API_KEY" -IFLYTEK_API_SECRET: "YOUR_API_SECRET" +iflytek_app_id: "YOUR_APP_ID" +iflytek_api_key: "YOUR_API_KEY" +iflytek_api_secret: "YOUR_API_SECRET" -METAGPT_TEXT_TO_IMAGE_MODEL_URL: "YOUR_MODEL_URL" +metagpt_tti_url: "YOUR_MODEL_URL" + +repair_llm_output: true diff --git a/docs/.well-known/metagpt_oas3_api.yaml b/docs/.well-known/metagpt_oas3_api.yaml index 0a702e8b6..1f370b62d 100644 --- a/docs/.well-known/metagpt_oas3_api.yaml +++ b/docs/.well-known/metagpt_oas3_api.yaml @@ -14,16 +14,16 @@ paths: /tts/azsure: x-prerequisite: configurations: - AZURE_TTS_SUBSCRIPTION_KEY: + azure_tts_subscription_key: type: string description: "For more details, check out: [Azure 
Text-to_Speech](https://learn.microsoft.com/en-us/azure/ai-services/speech-service/language-support?tabs=tts)" - AZURE_TTS_REGION: + azure_tts_region: type: string description: "For more details, check out: [Azure Text-to_Speech](https://learn.microsoft.com/en-us/azure/ai-services/speech-service/language-support?tabs=tts)" required: allOf: - - AZURE_TTS_SUBSCRIPTION_KEY - - AZURE_TTS_REGION + - azure_tts_subscription_key + - azure_tts_region post: summary: "Convert Text to Base64-encoded .wav File Stream" description: "For more details, check out: [Azure Text-to_Speech](https://learn.microsoft.com/en-us/azure/ai-services/speech-service/language-support?tabs=tts)" @@ -94,9 +94,9 @@ paths: description: "WebAPI argument, see: `https://console.xfyun.cn/services/tts`" required: allOf: - - IFLYTEK_APP_ID - - IFLYTEK_API_KEY - - IFLYTEK_API_SECRET + - iflytek_app_id + - iflytek_api_key + - iflytek_api_secret post: summary: "Convert Text to Base64-encoded .mp3 File Stream" description: "For more details, check out: [iFlyTek](https://console.xfyun.cn/services/tts)" @@ -242,12 +242,12 @@ paths: /txt2image/metagpt: x-prerequisite: configurations: - METAGPT_TEXT_TO_IMAGE_MODEL_URL: + metagpt_tti_url: type: string description: "Model url." required: allOf: - - METAGPT_TEXT_TO_IMAGE_MODEL_URL + - metagpt_tti_url post: summary: "Text to Image" description: "Generate an image from the provided text using the MetaGPT Text-to-Image API." 
diff --git a/docs/.well-known/skills.yaml b/docs/.well-known/skills.yaml index c19a9501e..30c215445 100644 --- a/docs/.well-known/skills.yaml +++ b/docs/.well-known/skills.yaml @@ -14,10 +14,10 @@ entities: id: text_to_speech.text_to_speech x-prerequisite: configurations: - AZURE_TTS_SUBSCRIPTION_KEY: + azure_tts_subscription_key: type: string description: "For more details, check out: [Azure Text-to_Speech](https://learn.microsoft.com/en-us/azure/ai-services/speech-service/language-support?tabs=tts)" - AZURE_TTS_REGION: + azure_tts_region: type: string description: "For more details, check out: [Azure Text-to_Speech](https://learn.microsoft.com/en-us/azure/ai-services/speech-service/language-support?tabs=tts)" IFLYTEK_APP_ID: @@ -32,12 +32,12 @@ entities: required: oneOf: - allOf: - - AZURE_TTS_SUBSCRIPTION_KEY - - AZURE_TTS_REGION + - azure_tts_subscription_key + - azure_tts_region - allOf: - - IFLYTEK_APP_ID - - IFLYTEK_API_KEY - - IFLYTEK_API_SECRET + - iflytek_app_id + - iflytek_api_key + - iflytek_api_secret parameters: text: description: 'The text used for voice conversion.' @@ -103,13 +103,13 @@ entities: OPENAI_API_KEY: type: string description: "OpenAI API key, For more details, checkout: `https://platform.openai.com/account/api-keys`" - METAGPT_TEXT_TO_IMAGE_MODEL_URL: + metagpt_tti_url: type: string description: "Model url." required: oneOf: - OPENAI_API_KEY - - METAGPT_TEXT_TO_IMAGE_MODEL_URL + - metagpt_tti_url parameters: text: description: 'The text used for image conversion.' diff --git a/docs/FAQ-EN.md b/docs/FAQ-EN.md index 88b5b0573..d3caa244e 100644 --- a/docs/FAQ-EN.md +++ b/docs/FAQ-EN.md @@ -1,161 +1,93 @@ Our vision is to [extend human life](https://github.com/geekan/HowToLiveLonger) and [reduce working hours](https://github.com/geekan/MetaGPT/). -1. 
### Convenient Link for Sharing this Document: +### Convenient Link for Sharing this Document: ``` -- MetaGPT-Index/FAQ https://deepwisdom.feishu.cn/wiki/MsGnwQBjiif9c3koSJNcYaoSnu4 +- MetaGPT-Index/FAQ-EN https://github.com/geekan/MetaGPT/blob/main/docs/FAQ-EN.md +- MetaGPT-Index/FAQ-CN https://deepwisdom.feishu.cn/wiki/MsGnwQBjiif9c3koSJNcYaoSnu4 ``` -2. ### Link - - +### Link 1. Code:https://github.com/geekan/MetaGPT - -1. Roadmap:https://github.com/geekan/MetaGPT/blob/main/docs/ROADMAP.md - -1. EN - - 1. Demo Video: [MetaGPT: Multi-Agent AI Programming Framework](https://www.youtube.com/watch?v=8RNzxZBTW8M) +2. Roadmap:https://github.com/geekan/MetaGPT/blob/main/docs/ROADMAP.md +3. EN + 1. Demo Video: [MetaGPT: Multi-Agent AI Programming Framework](https://www.youtube.com/watch?v=8RNzxZBTW8M) 2. Tutorial: [MetaGPT: Deploy POWERFUL Autonomous Ai Agents BETTER Than SUPERAGI!](https://www.youtube.com/watch?v=q16Gi9pTG_M&t=659s) 3. Author's thoughts video(EN): [MetaGPT Matthew Berman](https://youtu.be/uT75J_KG_aY?si=EgbfQNAwD8F5Y1Ak) +4. CN + 1. Demo Video: [MetaGPT:一行代码搭建你的虚拟公司_哔哩哔哩_bilibili](https://www.bilibili.com/video/BV1NP411C7GW/?spm_id_from=333.999.0.0&vd_source=735773c218b47da1b4bd1b98a33c5c77) + 1. Tutorial: [一个提示词写游戏 Flappy bird, 比AutoGPT强10倍的MetaGPT,最接近AGI的AI项目](https://youtu.be/Bp95b8yIH5c) + 2. Author's thoughts video(CN): [MetaGPT作者深度解析直播回放_哔哩哔哩_bilibili](https://www.bilibili.com/video/BV1Ru411V7XL/?spm_id_from=333.337.search-card.all.click) -1. CN - - 1. Demo Video: [MetaGPT:一行代码搭建你的虚拟公司_哔哩哔哩_bilibili](https://www.bilibili.com/video/BV1NP411C7GW/?spm_id_from=333.999.0.0&vd_source=735773c218b47da1b4bd1b98a33c5c77) - 1. Tutorial: [一个提示词写游戏 Flappy bird, 比AutoGPT强10倍的MetaGPT,最接近AGI的AI项目](https://youtu.be/Bp95b8yIH5c) - 2. Author's thoughts video(CN): [MetaGPT作者深度解析直播回放_哔哩哔哩_bilibili](https://www.bilibili.com/video/BV1Ru411V7XL/?spm_id_from=333.337.search-card.all.click) - - - -3. ### How to become a contributor? - - +### How to become a contributor? 1. 
Choose a task from the Roadmap (or you can propose one). By submitting a PR, you can become a contributor and join the dev team. -1. Current contributors come from backgrounds including ByteDance AI Lab/DingDong/Didi/Xiaohongshu, Tencent/Baidu/MSRA/TikTok/BloomGPT Infra/Bilibili/CUHK/HKUST/CMU/UCB +2. Current contributors come from backgrounds including ByteDance AI Lab/DingDong/Didi/Xiaohongshu, Tencent/Baidu/MSRA/TikTok/BloomGPT Infra/Bilibili/CUHK/HKUST/CMU/UCB - - -4. ### Chief Evangelist (Monthly Rotation) +### Chief Evangelist (Monthly Rotation) MetaGPT Community - The position of Chief Evangelist rotates on a monthly basis. The primary responsibilities include: 1. Maintaining community FAQ documents, announcements, and Github resources/READMEs. -1. Responding to, answering, and distributing community questions within an average of 30 minutes, including on platforms like Github Issues, Discord and WeChat. -1. Upholding a community atmosphere that is enthusiastic, genuine, and friendly. -1. Encouraging everyone to become contributors and participate in projects that are closely related to achieving AGI (Artificial General Intelligence). -1. (Optional) Organizing small-scale events, such as hackathons. +2. Responding to, answering, and distributing community questions within an average of 30 minutes, including on platforms like Github Issues, Discord and WeChat. +3. Upholding a community atmosphere that is enthusiastic, genuine, and friendly. +4. Encouraging everyone to become contributors and participate in projects that are closely related to achieving AGI (Artificial General Intelligence). +5. (Optional) Organizing small-scale events, such as hackathons. - - -5. ### FAQ - - - -1. Experience with the generated repo code: - - 1. https://github.com/geekan/MetaGPT/releases/tag/v0.1.0 +### FAQ 1. Code truncation/ Parsing failure: - - 1. Check if it's due to exceeding length. Consider using the gpt-3.5-turbo-16k or other long token versions. - -1. 
Success rate: - - 1. There hasn't been a quantitative analysis yet, but the success rate of code generated by GPT-4 is significantly higher than that of gpt-3.5-turbo. - -1. Support for incremental, differential updates (if you wish to continue a half-done task): - - 1. Several prerequisite tasks are listed on the ROADMAP. - -1. Can existing code be loaded? - - 1. It's not on the ROADMAP yet, but there are plans in place. It just requires some time. - -1. Support for multiple programming languages and natural languages? - - 1. It's listed on ROADMAP. - -1. Want to join the contributor team? How to proceed? - + 1. Check if it's due to exceeding length. Consider using the gpt-4-turbo-preview or other long token versions. +2. Success rate: + 1. There hasn't been a quantitative analysis yet, but the success rate of code generated by gpt-4-turbo-preview is significantly higher than that of gpt-3.5-turbo. +3. Support for incremental, differential updates (if you wish to continue a half-done task): + 1. There is now an experimental version. Specify `--inc --project-path ""` or `--inc --project-name ""` on the command line and enter the corresponding requirements to try it. +4. Can existing code be loaded? + 1. We are doing this, but it is very difficult, especially when the project is large, it is very difficult to achieve a high success rate. +5. Support for multiple programming languages and natural languages? + 1. It is now supported, but it is still in experimental version +6. Want to join the contributor team? How to proceed? 1. Merging a PR will get you into the contributor's team. The main ongoing tasks are all listed on the ROADMAP. - -1. PRD stuck / unable to access/ connection interrupted - +7. PRD stuck / unable to access/ connection interrupted 1. The official openai base_url address is `https://api.openai.com/v1` - 1. 
If the official openai base_url address is inaccessible in your environment (this can be verified with curl), it's recommended to configure using the reverse proxy openai base_url provided by libraries such as openai-forward. For instance, `openai base_url: "``https://api.openai-forward.com/v1``"` - 1. If the official openai base_url address is inaccessible in your environment (again, verifiable via curl), another option is to configure the llm.proxy parameter. This way, you can access the official openai base_url via a local proxy. If you don't need to access via a proxy, please do not enable this configuration; if accessing through a proxy is required, modify it to the correct proxy address. Note that when llm.proxy is enabled, don't set openai base_url. - 1. Note: OpenAI's default API design ends with a v1. An example of the correct configuration is: `openai base_url: "``https://api.openai.com/v1``"` - -1. Absolutely! How can I assist you today? - + 2. If the official openai base_url address is inaccessible in your environment (this can be verified with curl), it's recommended to configure using base_url to other "reverse-proxy" provider such as openai-forward. For instance, `openai base_url: "``https://api.openai-forward.com/v1``"` + 3. If the official openai base_url address is inaccessible in your environment (again, verifiable via curl), another option is to configure the llm.proxy in the `config2.yaml`. This way, you can access the official openai base_url via a local proxy. If you don't need to access via a proxy, please do not enable this configuration; if accessing through a proxy is required, modify it to the correct proxy address. + 4. Note: OpenAI's default API design ends with a v1. An example of the correct configuration is: `base_url: "https://api.openai.com/v1"` +8. Get reply: "Absolutely! How can I assist you today?" 1. Did you use Chi or a similar service? 
These services are prone to errors, and it seems that the error rate is higher when consuming 3.5k-4k tokens in GPT-4 - -1. What does Max token mean? - +9. What does Max token mean? 1. It's a configuration for OpenAI's maximum response length. If the response exceeds the max token, it will be truncated. - -1. How to change the investment amount? - +10. How to change the investment amount? 1. You can view all commands by typing `metagpt --help` - -1. Which version of Python is more stable? - +11. Which version of Python is more stable? 1. python3.9 / python3.10 - -1. Can't use GPT-4, getting the error "The model gpt-4 does not exist." - +12. Can't use GPT-4, getting the error "The model gpt-4 does not exist." 1. OpenAI's official requirement: You can use GPT-4 only after spending $1 on OpenAI. 1. Tip: Run some data with gpt-3.5-turbo (consume the free quota and $1), and then you should be able to use gpt-4. - -1. Can games whose code has never been seen before be written? - +13. Can games whose code has never been seen before be written? 1. Refer to the README. The recommendation system of Toutiao is one of the most complex systems in the world currently. Although it's not on GitHub, many discussions about it exist online. If it can visualize these, it suggests it can also summarize these discussions and convert them into code. The prompt would be something like "write a recommendation system similar to Toutiao". Note: this was approached in earlier versions of the software. The SOP of those versions was different; the current one adopts Elon Musk's five-step work method, emphasizing trimming down requirements as much as possible. - -1. Under what circumstances would there typically be errors? - +14. Under what circumstances would there typically be errors? 1. More than 500 lines of code: some function implementations may be left blank. - 1. 
When using a database, it often gets the implementation wrong — since the SQL database initialization process is usually not in the code. - 1. With more lines of code, there's a higher chance of false impressions, leading to calls to non-existent APIs. - -1. An error occurred during installation: "Another program is using this file...egg". - + 2. When using a database, it often gets the implementation wrong — since the SQL database initialization process is usually not in the code. + 3. With more lines of code, there's a higher chance of false impressions, leading to calls to non-existent APIs. +15. An error occurred during installation: "Another program is using this file...egg". 1. Delete the file and try again. - 1. Or manually execute`pip install -r requirements.txt` - -1. The origin of the name MetaGPT? - + 2. Or manually execute`pip install -r requirements.txt` +16. The origin of the name MetaGPT? 1. The name was derived after iterating with GPT-4 over a dozen rounds. GPT-4 scored and suggested it. - -1. Is there a more step-by-step installation tutorial? - - 1. Youtube(CN):[一个提示词写游戏 Flappy bird, 比AutoGPT强10倍的MetaGPT,最接近AGI的AI项目=一个软件公司产品经理+程序员](https://youtu.be/Bp95b8yIH5c) - 1. Youtube(EN)https://www.youtube.com/watch?v=q16Gi9pTG_M&t=659s - 2. video(EN): [MetaGPT Matthew Berman](https://youtu.be/uT75J_KG_aY?si=EgbfQNAwD8F5Y1Ak) - -1. openai.error.RateLimitError: You exceeded your current quota, please check your plan and billing details - +17. openai.error.RateLimitError: You exceeded your current quota, please check your plan and billing details 1. If you haven't exhausted your free quota, set RPM to 3 or lower in the settings. - 1. If your free quota is used up, consider adding funds to your account. - -1. What does "borg" mean in n_borg? - + 2. If your free quota is used up, consider adding funds to your account. +18. What does "borg" mean in n_borg? 1. [Wikipedia borg meaning ](https://en.wikipedia.org/wiki/Borg) - 1. 
The Borg civilization operates based on a hive or collective mentality, known as "the Collective." Every Borg individual is connected to the collective via a sophisticated subspace network, ensuring continuous oversight and guidance for every member. This collective consciousness allows them to not only "share the same thoughts" but also to adapt swiftly to new strategies. While individual members of the collective rarely communicate, the collective "voice" sometimes transmits aboard ships. - -1. How to use the Claude API? - + 2. The Borg civilization operates based on a hive or collective mentality, known as "the Collective." Every Borg individual is connected to the collective via a sophisticated subspace network, ensuring continuous oversight and guidance for every member. This collective consciousness allows them to not only "share the same thoughts" but also to adapt swiftly to new strategies. While individual members of the collective rarely communicate, the collective "voice" sometimes transmits aboard ships. +19. How to use the Claude API? 1. The full implementation of the Claude API is not provided in the current code. 1. You can use the Claude API through third-party API conversion projects like: https://github.com/jtsang4/claude-to-chatgpt - -1. Is Llama2 supported? - +20. Is Llama2 supported? 1. On the day Llama2 was released, some of the community members began experiments and found that output can be generated based on MetaGPT's structure. However, Llama2's context is too short to generate a complete project. Before regularly using Llama2, it's necessary to expand the context window to at least 8k. If anyone has good recommendations for expansion models or methods, please leave a comment. - -1. `mermaid-cli getElementsByTagName SyntaxError: Unexpected token '.'` - +21. `mermaid-cli getElementsByTagName SyntaxError: Unexpected token '.'` 1. Upgrade node to version 14.x or above: - 1. `npm install -g n` - 1. 
`n stable` to install the stable version of node(v18.x) + 2. `n stable` to install the stable version of node(v18.x) diff --git a/docs/README_CN.md b/docs/README_CN.md index ebf5dd408..7a0db4974 100644 --- a/docs/README_CN.md +++ b/docs/README_CN.md @@ -35,29 +35,24 @@ # MetaGPT: 多智能体框架 ## 安装 ### Pip安装 +> 确保您的系统已安装 Python 3.9 或更高版本。您可以使用以下命令来检查:`python --version`。 +> 您可以这样使用 conda:`conda create -n metagpt python=3.9 && conda activate metagpt` + ```bash -# 第 1 步:确保您的系统上安装了 Python 3.9+。您可以使用以下命令进行检查: -# 可以使用conda来初始化新的python环境 -# conda create -n metagpt python=3.9 -# conda activate metagpt -python3 --version - -# 第 2 步:克隆最新仓库到您的本地机器,并进行安装。 -git clone https://github.com/geekan/MetaGPT.git -cd MetaGPT -pip3 install -e. # 或者 pip3 install metagpt # 安装稳定版本 - -# 第 3 步:执行metagpt -# 拷贝config2.yaml为~/.metagpt/config2.yaml,并设置你自己的api_key -metagpt "Write a cli snake game" - -# 第 4 步【可选的】:如果你想在执行过程中保存像象限图、系统设计、序列流程等图表这些产物,可以在第3步前执行该步骤。默认的,框架做了兼容,在不执行该步的情况下,也可以完整跑完整个流程。 -# 如果执行,确保您的系统上安装了 NPM。并使用npm安装mermaid-js -npm --version -sudo npm install -g @mermaid-js/mermaid-cli +pip install metagpt +metagpt --init-config # 创建 ~/.metagpt/config2.yaml,根据您的需求修改它 +metagpt "创建一个 2048 游戏" # 这将在 ./workspace 创建一个仓库 ``` -详细的安装请安装 [cli_install](https://docs.deepwisdom.ai/guide/get_started/installation.html#install-stable-version) +或者您可以将其作为库使用 + +```python +from metagpt.software_company import generate_repo, ProjectRepo +repo: ProjectRepo = generate_repo("创建一个 2048 游戏") # 或 ProjectRepo("<路径>") +print(repo) # 它将打印出仓库结构及其文件 +``` + +详细的安装请参考 [cli_install](https://docs.deepwisdom.ai/guide/get_started/installation.html#install-stable-version) ### Docker安装 > 注意:在Windows中,你需要将 "/opt/metagpt" 替换为Docker具有创建权限的目录,比如"D:\Users\x\metagpt" @@ -78,7 +73,7 @@ # 步骤2: 使用容器运行metagpt演示 metagpt "Write a cli snake game" ``` -详细的安装请安装 [docker_install](https://docs.deepwisdom.ai/main/zh/guide/get_started/installation.html#%E4%BD%BF%E7%94%A8docker%E5%AE%89%E8%A3%85) +详细的安装请参考 
[docker_install](https://docs.deepwisdom.ai/main/zh/guide/get_started/installation.html#%E4%BD%BF%E7%94%A8docker%E5%AE%89%E8%A3%85) ### 快速开始的演示视频 - 在 [MetaGPT Huggingface Space](https://huggingface.co/spaces/deepwisdom/MetaGPT) 上进行体验 diff --git a/docs/README_JA.md b/docs/README_JA.md index 26db0498f..c6b99461c 100644 --- a/docs/README_JA.md +++ b/docs/README_JA.md @@ -57,24 +57,21 @@ ### インストールビデオガイド - [Matthew Berman: How To Install MetaGPT - Build A Startup With One Prompt!!](https://youtu.be/uT75J_KG_aY) ### 伝統的なインストール +> Python 3.9 以上がシステムにインストールされていることを確認してください。これは `python --version` を使ってチェックできます。 +> 以下のようにcondaを使うことができます:`conda create -n metagpt python=3.9 && conda activate metagpt` ```bash -# ステップ 1: Python 3.9+ がシステムにインストールされていることを確認してください。これを確認するには: -python3 --version +pip install metagpt +metagpt --init-config # ~/.metagpt/config2.yaml を作成し、自分の設定に合わせて変更してください +metagpt "2048ゲームを作成する" # これにより ./workspace にリポジトリが作成されます +``` -# ステップ 2: リポジトリをローカルマシンにクローンし、インストールする。 -git clone https://github.com/geekan/MetaGPT.git -cd MetaGPT -pip install -e. 
+または、ライブラリとして使用することもできます -# ステップ 3: metagpt を実行する -# config/config2.yaml を ~/.metagpt/config2.yaml にコピーし、独自の api_key を設定します -metagpt "Write a cli snake game" - -# ステップ 4 [オプション]: 実行中に PRD ファイルなどのアーティファクトを保存する場合は、ステップ 3 の前にこのステップを実行できます。デフォルトでは、フレームワークには互換性があり、この手順を実行しなくてもプロセス全体を完了できます。 -# NPM がシステムにインストールされていることを確認してください。次に mermaid-js をインストールします。(お使いのコンピューターに npm がない場合は、Node.js 公式サイトで Node.js https://nodejs.org/ をインストールしてください。) -npm --version -sudo npm install -g @mermaid-js/mermaid-cli +```python +from metagpt.software_company import generate_repo, ProjectRepo +repo: ProjectRepo = generate_repo("2048ゲームを作成する") # または ProjectRepo("<パス>") +print(repo) # リポジトリの構造とファイルを出力します ``` **注:** diff --git a/docs/install/cli_install.md b/docs/install/cli_install.md index 33d759758..b79ad9cb7 100644 --- a/docs/install/cli_install.md +++ b/docs/install/cli_install.md @@ -9,17 +9,29 @@ ### Support System and version ### Detail Installation ```bash -# Step 1: Ensure that NPM is installed on your system. Then install mermaid-js. (If you don't have npm in your computer, please go to the Node.js official website to install Node.js https://nodejs.org/ and then you will have npm tool in your computer.) -npm --version -sudo npm install -g @mermaid-js/mermaid-cli - -# Step 2: Ensure that Python 3.9+ is installed on your system. You can check this by using: +# Step 1: Ensure that Python 3.9+ is installed on your system. You can check this by using: +# You can use conda to initialize a new python env +# conda create -n metagpt python=3.9 +# conda activate metagpt python3 --version -# Step 3: Clone the repository to your local machine, and install it. +# Step 2: Clone the repository to your local machine for latest version, and install it. git clone https://github.com/geekan/MetaGPT.git cd MetaGPT -pip install -e. +pip3 install -e . 
# or pip3 install metagpt # for stable version + +# Step 3: setup your LLM key in the config2.yaml file +mkdir ~/.metagpt +cp config/config2.yaml ~/.metagpt/config2.yaml +vim ~/.metagpt/config2.yaml + +# Step 4: run metagpt cli +metagpt "Create a 2048 game in python" + +# Step 5 [Optional]: If you want to save the artifacts like diagrams such as quadrant chart, system designs, sequence flow in the workspace, you can execute the step before Step 3. By default, the framework is compatible, and the entire process can be run completely without executing this step. +# If executing, ensure that NPM is installed on your system. Then install mermaid-js. (If you don't have npm in your computer, please go to the Node.js official website to install Node.js https://nodejs.org/ and then you will have npm tool in your computer.) +npm --version +sudo npm install -g @mermaid-js/mermaid-cli ``` **Note:** @@ -33,11 +45,12 @@ # Step 3: Clone the repository to your local machine, and install it. npm install @mermaid-js/mermaid-cli ``` -- don't forget to the configuration for mmdc in config.yml +- don't forget to the configuration for mmdc path in config.yml - ```yml - puppeteer_config: "./config/puppeteer-config.json" - path: "./node_modules/.bin/mmdc" + ```yaml + mermaid: + puppeteer_config: "./config/puppeteer-config.json" + path: "./node_modules/.bin/mmdc" ``` - if `pip install -e.` fails with error `[Errno 13] Permission denied: '/usr/local/lib/python3.11/dist-packages/test-easy-install-13129.write-test'`, try instead running `pip install -e. --user` @@ -61,7 +74,7 @@ # Step 3: Clone the repository to your local machine, and install it. - **modify `config2.yaml`** - uncomment mermaid.engine from config2.yaml and change it to `playwright` + change mermaid.engine to `playwright` ```yaml mermaid: @@ -91,7 +104,7 @@ # Step 3: Clone the repository to your local machine, and install it. 
- **modify `config2.yaml`** - uncomment mermaid.engine from config2.yaml and change it to `pyppeteer` + change mermaid.engine to `pyppeteer` ```yaml mermaid: @@ -100,8 +113,8 @@ # Step 3: Clone the repository to your local machine, and install it. - mermaid.ink - **modify `config2.yaml`** - - uncomment mermaid.engine from config2.yaml and change it to `ink` + + change mermaid.engine to `ink` ```yaml mermaid: @@ -109,4 +122,4 @@ # Step 3: Clone the repository to your local machine, and install it. ``` Note: this method does not support pdf export. - \ No newline at end of file + diff --git a/docs/install/cli_install_cn.md b/docs/install/cli_install_cn.md index 891b72d24..1ee18d9a6 100644 --- a/docs/install/cli_install_cn.md +++ b/docs/install/cli_install_cn.md @@ -10,17 +10,29 @@ ### 支持的系统和版本 ### 详细安装 ```bash -# 第 1 步:确保您的系统上安装了 NPM。并使用npm安装mermaid-js -npm --version -sudo npm install -g @mermaid-js/mermaid-cli - -# 第 2 步:确保您的系统上安装了 Python 3.9+。您可以使用以下命令进行检查: +# 步骤 1: 确保您的系统安装了 Python 3.9 或更高版本。您可以使用以下命令来检查: +# 您可以使用 conda 来初始化一个新的 Python 环境 +# conda create -n metagpt python=3.9 +# conda activate metagpt python3 --version -# 第 3 步:克隆仓库到您的本地机器,并进行安装。 +# 步骤 2: 克隆仓库到您的本地机器以获取最新版本,并安装它。 git clone https://github.com/geekan/MetaGPT.git cd MetaGPT -pip install -e. +pip3 install -e . 
# 或 pip3 install metagpt # 用于稳定版本 + +# 步骤 3: 在 config2.yaml 文件中设置您的 LLM 密钥 +mkdir ~/.metagpt +cp config/config2.yaml ~/.metagpt/config2.yaml +vim ~/.metagpt/config2.yaml + +# 步骤 4: 运行 metagpt 命令行界面 +metagpt "用 python 创建一个 2048 游戏" + +# 步骤 5 [可选]: 如果您想保存诸如象限图、系统设计、序列流等图表作为工作空间的工件,您可以在执行步骤 3 之前执行此步骤。默认情况下,该框架是兼容的,整个过程可以完全不执行此步骤而运行。 +# 如果执行此步骤,请确保您的系统上安装了 NPM。然后安装 mermaid-js。(如果您的计算机中没有 npm,请访问 Node.js 官方网站 https://nodejs.org/ 安装 Node.js,然后您将在计算机中拥有 npm 工具。) +npm --version +sudo npm install -g @mermaid-js/mermaid-cli ``` **注意:** @@ -33,11 +45,12 @@ # 第 3 步:克隆仓库到您的本地机器,并进行安装。 npm install @mermaid-js/mermaid-cli ``` -- 不要忘记在config.yml中为mmdc配置配置, +- 不要忘记在config.yml中为mmdc配置 ```yml - puppeteer_config: "./config/puppeteer-config.json" - path: "./node_modules/.bin/mmdc" + mermaid: + puppeteer_config: "./config/puppeteer-config.json" + path: "./node_modules/.bin/mmdc" ``` - 如果`pip install -e.`失败并显示错误`[Errno 13] Permission denied: '/usr/local/lib/python3.11/dist-packages/test-easy-install-13129.write-test'`,请尝试使用`pip install -e. --user`运行。 diff --git a/docs/tutorial/usage.md b/docs/tutorial/usage.md index 809f91e1f..1128e98a5 100644 --- a/docs/tutorial/usage.md +++ b/docs/tutorial/usage.md @@ -2,7 +2,7 @@ ## MetaGPT Usage ### Configuration -- Configure your `key` in any of `~/.metagpt/config2.yaml / config/config2.yaml` +- Configure your `api_key` in any of `~/.metagpt/config2.yaml / config/config2.yaml` - Priority order: `~/.metagpt/config2.yaml > config/config2.yaml` ```bash @@ -34,29 +34,28 @@ ### Preference of Platform or Tool ### Usage ``` -NAME - metagpt - We are a software startup comprised of AI. By investing in us, you are empowering a future filled with limitless possibilities. - -SYNOPSIS - metagpt IDEA - -DESCRIPTION - We are a software startup comprised of AI. By investing in us, you are empowering a future filled with limitless possibilities. - -POSITIONAL ARGUMENTS - IDEA - Type: str - Your innovative idea, such as "Creating a snake game." 
- -FLAGS - --investment=INVESTMENT - Type: float - Default: 3.0 - As an investor, you have the opportunity to contribute a certain dollar amount to this AI company. - --n_round=N_ROUND - Type: int - Default: 5 - -NOTES - You can also use flags syntax for POSITIONAL ARGUMENTS + Usage: metagpt [OPTIONS] [IDEA] + + Start a new project. + +╭─ Arguments ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ idea [IDEA] Your innovative idea, such as 'Create a 2048 game.' [default: None] │ +╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ --investment FLOAT Dollar amount to invest in the AI company. [default: 3.0] │ +│ --n-round INTEGER Number of rounds for the simulation. [default: 5] │ +│ --code-review --no-code-review Whether to use code review. [default: code-review] │ +│ --run-tests --no-run-tests Whether to enable QA for adding & running tests. [default: no-run-tests] │ +│ --implement --no-implement Enable or disable code implementation. [default: implement] │ +│ --project-name TEXT Unique project name, such as 'game_2048'. │ +│ --inc --no-inc Incremental mode. Use it to coop with existing repo. [default: no-inc] │ +│ --project-path TEXT Specify the directory path of the old version project to fulfill the incremental requirements. │ +│ --reqa-file TEXT Specify the source file name for rewriting the quality assurance code. 
│ +│ --max-auto-summarize-code INTEGER The maximum number of times the 'SummarizeCode' action is automatically invoked, with -1 indicating unlimited. This parameter is used for debugging the │ +│ workflow. │ +│ [default: 0] │ +│ --recover-path TEXT recover the project from existing serialized storage [default: None] │ +│ --init-config --no-init-config Initialize the configuration file for MetaGPT. [default: no-init-config] │ +│ --help Show this message and exit. │ +╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ``` \ No newline at end of file diff --git a/docs/tutorial/usage_cn.md b/docs/tutorial/usage_cn.md index 709ec9968..3b0c86279 100644 --- a/docs/tutorial/usage_cn.md +++ b/docs/tutorial/usage_cn.md @@ -2,7 +2,7 @@ ## MetaGPT 使用 ### 配置 -- 在 `~/.metagpt/config2.yaml / config/config2.yaml` 中配置您的 `key` +- 在 `~/.metagpt/config2.yaml / config/config2.yaml` 中配置您的 `api_key` - 优先级顺序:`~/.metagpt/config2.yaml > config/config2.yaml` ```bash @@ -30,29 +30,28 @@ ### 平台或工具的倾向性 ### 使用 ``` -名称 - metagpt - 我们是一家AI软件创业公司。通过投资我们,您将赋能一个充满无限可能的未来。 - -概要 - metagpt IDEA - -描述 - 我们是一家AI软件创业公司。通过投资我们,您将赋能一个充满无限可能的未来。 - -位置参数 - IDEA - 类型: str - 您的创新想法,例如"写一个命令行贪吃蛇。" - -标志 - --investment=INVESTMENT - 类型: float - 默认值: 3.0 - 作为投资者,您有机会向这家AI公司投入一定的美元金额。 - --n_round=N_ROUND - 类型: int - 默认值: 5 - -备注 - 您也可以用`标志`的语法,来处理`位置参数` + Usage: metagpt [OPTIONS] [IDEA] + + Start a new project. + +╭─ Arguments ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ idea [IDEA] Your innovative idea, such as 'Create a 2048 game.' 
[default: None] │ +╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ --investment FLOAT Dollar amount to invest in the AI company. [default: 3.0] │ +│ --n-round INTEGER Number of rounds for the simulation. [default: 5] │ +│ --code-review --no-code-review Whether to use code review. [default: code-review] │ +│ --run-tests --no-run-tests Whether to enable QA for adding & running tests. [default: no-run-tests] │ +│ --implement --no-implement Enable or disable code implementation. [default: implement] │ +│ --project-name TEXT Unique project name, such as 'game_2048'. │ +│ --inc --no-inc Incremental mode. Use it to coop with existing repo. [default: no-inc] │ +│ --project-path TEXT Specify the directory path of the old version project to fulfill the incremental requirements. │ +│ --reqa-file TEXT Specify the source file name for rewriting the quality assurance code. │ +│ --max-auto-summarize-code INTEGER The maximum number of times the 'SummarizeCode' action is automatically invoked, with -1 indicating unlimited. This parameter is used for debugging the │ +│ workflow. │ +│ [default: 0] │ +│ --recover-path TEXT recover the project from existing serialized storage [default: None] │ +│ --init-config --no-init-config Initialize the configuration file for MetaGPT. [default: no-init-config] │ +│ --help Show this message and exit. 
│ +╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ``` diff --git a/examples/write_novel.py b/examples/write_novel.py new file mode 100644 index 000000000..b272a56e6 --- /dev/null +++ b/examples/write_novel.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +@Time : 2024/2/1 12:01 +@Author : alexanderwu +@File : write_novel.py +""" +import asyncio +from typing import List + +from pydantic import BaseModel, Field + +from metagpt.actions.action_node import ActionNode +from metagpt.llm import LLM + + +class Novel(BaseModel): + name: str = Field(default="The Lord of the Rings", description="The name of the novel.") + user_group: str = Field(default="...", description="The user group of the novel.") + outlines: List[str] = Field( + default=["Chapter 1: ...", "Chapter 2: ...", "Chapter 3: ..."], + description="The outlines of the novel. No more than 10 chapters.", + ) + background: str = Field(default="...", description="The background of the novel.") + character_names: List[str] = Field(default=["Frodo", "Gandalf", "Sauron"], description="The characters.") + conflict: str = Field(default="...", description="The conflict of the characters.") + plot: str = Field(default="...", description="The plot of the novel.") + ending: str = Field(default="...", description="The ending of the novel.") + + +class Chapter(BaseModel): + name: str = Field(default="Chapter 1", description="The name of the chapter.") + content: str = Field(default="...", description="The content of the chapter. No more than 1000 words.") + + +async def generate_novel(): + instruction = ( + "Write a novel named 'Harry Potter in The Lord of the Rings'. " + "Fill the empty nodes with your own ideas. Be creative! Use your own words!" + "I will tip you $100,000 if you write a good novel." 
+ ) + novel_node = await ActionNode.from_pydantic(Novel).fill(context=instruction, llm=LLM()) + chap_node = await ActionNode.from_pydantic(Chapter).fill( + context=f"### instruction\n{instruction}\n### novel\n{novel_node.content}", llm=LLM() + ) + print(chap_node.content) + + +asyncio.run(generate_novel()) diff --git a/metagpt/actions/run_code.py b/metagpt/actions/run_code.py index 3b84cc9f2..b2c33c19b 100644 --- a/metagpt/actions/run_code.py +++ b/metagpt/actions/run_code.py @@ -42,8 +42,8 @@ Determine the ONE file to rewrite in order to fix the error, for example, xyz.py Determine if all of the code works fine, if so write PASS, else FAIL, WRITE ONLY ONE WORD, PASS OR FAIL, IN THIS SECTION ## Send To: -Please write Engineer if the errors are due to problematic development codes, and QaEngineer to problematic test codes, and NoOne if there are no errors, -WRITE ONLY ONE WORD, Engineer OR QaEngineer OR NoOne, IN THIS SECTION. +Please write NoOne if there are no errors, Engineer if the errors are due to problematic development codes, else QaEngineer, +WRITE ONLY ONE WORD, NoOne OR Engineer OR QaEngineer, IN THIS SECTION. --- You should fill in necessary instruction, status, send to, and finally return all content between the --- segment line. """ diff --git a/metagpt/actions/write_docstring.py b/metagpt/actions/write_docstring.py index 79204e6a4..5cc4cafb8 100644 --- a/metagpt/actions/write_docstring.py +++ b/metagpt/actions/write_docstring.py @@ -16,7 +16,7 @@ Options: Default: 'google' Example: - python3 -m metagpt.actions.write_docstring ./metagpt/startup.py --overwrite False --style=numpy + python3 -m metagpt.actions.write_docstring ./metagpt/software_company.py --overwrite False --style=numpy This script uses the 'fire' library to create a command-line interface. It generates docstrings for the given Python code using the specified docstring style and adds them to the code. 
diff --git a/metagpt/config2.py b/metagpt/config2.py index 8457dd78f..bbf574818 100644 --- a/metagpt/config2.py +++ b/metagpt/config2.py @@ -67,17 +67,16 @@ class Config(CLIParams, YamlModel): code_review_k_times: int = 2 # Will be removed in the future - METAGPT_TEXT_TO_IMAGE_MODEL_URL: str = "" + metagpt_tti_url: str = "" language: str = "English" redis_key: str = "placeholder" - IFLYTEK_APP_ID: str = "" - IFLYTEK_API_SECRET: str = "" - IFLYTEK_API_KEY: str = "" - AZURE_TTS_SUBSCRIPTION_KEY: str = "" - AZURE_TTS_REGION: str = "" - mermaid_engine: str = "nodejs" - OPENAI_VISION_MODEL: str = "gpt-4-vision-preview" - VISION_MAX_TOKENS: int = 4096 + iflytek_app_id: str = "" + iflytek_api_secret: str = "" + iflytek_api_key: str = "" + azure_tts_subscription_key: str = "" + azure_tts_region: str = "" + openai_vision_model: str = "gpt-4-vision-preview" + vision_max_tokens: int = 4096 @classmethod def from_home(cls, path): diff --git a/metagpt/learn/text_to_image.py b/metagpt/learn/text_to_image.py index e2fac7647..163859fc0 100644 --- a/metagpt/learn/text_to_image.py +++ b/metagpt/learn/text_to_image.py @@ -27,7 +27,7 @@ async def text_to_image(text, size_type: str = "512x512", config: Config = metag """ image_declaration = "data:image/png;base64," - model_url = config.METAGPT_TEXT_TO_IMAGE_MODEL_URL + model_url = config.metagpt_tti_url if model_url: binary_data = await oas3_metagpt_text_to_image(text, size_type, model_url) elif config.get_openai_llm(): diff --git a/metagpt/learn/text_to_speech.py b/metagpt/learn/text_to_speech.py index 37e56eaff..8dbd6d243 100644 --- a/metagpt/learn/text_to_speech.py +++ b/metagpt/learn/text_to_speech.py @@ -39,8 +39,8 @@ async def text_to_speech( """ - subscription_key = config.AZURE_TTS_SUBSCRIPTION_KEY - region = config.AZURE_TTS_REGION + subscription_key = config.azure_tts_subscription_key + region = config.azure_tts_region if subscription_key and region: audio_declaration = "data:audio/wav;base64," base64_data = await 
oas3_azsure_tts(text, lang, voice, style, role, subscription_key, region) @@ -50,9 +50,9 @@ async def text_to_speech( return f"[{text}]({url})" return audio_declaration + base64_data if base64_data else base64_data - iflytek_app_id = config.IFLYTEK_APP_ID - iflytek_api_key = config.IFLYTEK_API_KEY - iflytek_api_secret = config.IFLYTEK_API_SECRET + iflytek_app_id = config.iflytek_app_id + iflytek_api_key = config.iflytek_api_key + iflytek_api_secret = config.iflytek_api_secret if iflytek_app_id and iflytek_api_key and iflytek_api_secret: audio_declaration = "data:audio/mp3;base64," base64_data = await oas3_iflytek_tts( @@ -65,5 +65,5 @@ async def text_to_speech( return audio_declaration + base64_data if base64_data else base64_data raise ValueError( - "AZURE_TTS_SUBSCRIPTION_KEY, AZURE_TTS_REGION, IFLYTEK_APP_ID, IFLYTEK_API_KEY, IFLYTEK_API_SECRET error" + "azure_tts_subscription_key, azure_tts_region, iflytek_app_id, iflytek_api_key, iflytek_api_secret error" ) diff --git a/metagpt/provider/base_llm.py b/metagpt/provider/base_llm.py index 65e70af00..23278479c 100644 --- a/metagpt/provider/base_llm.py +++ b/metagpt/provider/base_llm.py @@ -108,7 +108,7 @@ class BaseLLM(ABC): def get_choice_delta_text(self, rsp: dict) -> str: """Required to provide the first text of stream choice""" - return rsp.get("choices")[0]["delta"]["content"] + return rsp.get("choices", [{}])[0].get("delta", {}).get("content", "") def get_choice_function(self, rsp: dict) -> dict: """Required to provide the first function of choice diff --git a/metagpt/startup.py b/metagpt/software_company.py similarity index 95% rename from metagpt/startup.py rename to metagpt/software_company.py index 4a077cab7..26bb29cd1 100644 --- a/metagpt/startup.py +++ b/metagpt/software_company.py @@ -17,17 +17,17 @@ app = typer.Typer(add_completion=False, pretty_exceptions_show_locals=False) def generate_repo( idea, - investment, - n_round, - code_review, - run_tests, - implement, - project_name, - inc, - 
project_path, - reqa_file, - max_auto_summarize_code, - recover_path, + investment=3.0, + n_round=5, + code_review=True, + run_tests=False, + implement=True, + project_name="", + inc=False, + project_path="", + reqa_file="", + max_auto_summarize_code=0, + recover_path=None, ) -> ProjectRepo: """Run the startup logic. Can be called from CLI or other Python scripts.""" from metagpt.roles import ( diff --git a/metagpt/tools/libs/gpt_v_generator.py b/metagpt/tools/libs/gpt_v_generator.py index e079a8eef..bae8bcbc0 100644 --- a/metagpt/tools/libs/gpt_v_generator.py +++ b/metagpt/tools/libs/gpt_v_generator.py @@ -35,14 +35,10 @@ class GPTvGenerator: def __init__(self): from metagpt.config2 import config - OPENAI_API_BASE = config.llm.base_url - API_KEY = config.llm.api_key - MODEL = config.OPENAI_VISION_MODEL - MAX_TOKENS = config.VISION_MAX_TOKENS - self.api_key = API_KEY - self.api_base = OPENAI_API_BASE - self.model = MODEL - self.max_tokens = MAX_TOKENS + self.api_key = config.llm.api_key + self.api_base = config.llm.base_url + self.model = config.openai_vision_model + self.max_tokens = config.vision_max_tokens def analyze_layout(self, image_path): return self.get_result(image_path, ANALYZE_LAYOUT_PROMPT) diff --git a/metagpt/utils/project_repo.py b/metagpt/utils/project_repo.py index 72bca7ea0..c1f98e1ec 100644 --- a/metagpt/utils/project_repo.py +++ b/metagpt/utils/project_repo.py @@ -99,6 +99,13 @@ class ProjectRepo(FileRepository): self.tests = self._git_repo.new_file_repository(relative_path=TEST_CODES_FILE_REPO) self.test_outputs = self._git_repo.new_file_repository(relative_path=TEST_OUTPUTS_FILE_REPO) self._srcs_path = None + self.code_files_exists() + + def __str__(self): + repo_str = f"ProjectRepo({self._git_repo.workdir})" + docs_str = f"Docs({self.docs.all_files})" + srcs_str = f"Srcs({self.srcs.all_files})" + return f"{repo_str}\n{docs_str}\n{srcs_str}" @property async def requirement(self): diff --git a/metagpt/utils/repair_llm_raw_output.py 
b/metagpt/utils/repair_llm_raw_output.py index 82b2dd5b1..06484f71d 100644 --- a/metagpt/utils/repair_llm_raw_output.py +++ b/metagpt/utils/repair_llm_raw_output.py @@ -119,15 +119,22 @@ def repair_json_format(output: str) -> str: logger.info(f"repair_json_format: {'}]'}") elif output.startswith("{") and output.endswith("]"): output = output[:-1] + "}" - - # remove `#` in output json str, usually appeared in `glm-4` + # remove comments in output json string, after json value content, maybe start with #, maybe start with // arr = output.split("\n") new_arr = [] - for line in arr: - idx = line.find("#") - if idx >= 0: - line = line[:idx] - new_arr.append(line) + for json_line in arr: + # look for # or // comments and make sure they are not inside the string value + comment_index = -1 + for match in re.finditer(r"(\".*?\"|\'.*?\')|(#|//)", json_line): + if match.group(1): # if the string value + continue + if match.group(2): # if comments + comment_index = match.start(2) + break + # if comments, then delete them + if comment_index != -1: + json_line = json_line[:comment_index].rstrip() + new_arr.append(json_line) output = "\n".join(new_arr) return output diff --git a/setup.py b/setup.py index d1445e3f8..b16d978cf 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( }, entry_points={ "console_scripts": [ - "metagpt=metagpt.startup:app", + "metagpt=metagpt.software_company:app", ], }, ) diff --git a/tests/config2.yaml b/tests/config2.yaml new file mode 100644 index 000000000..58314eaed --- /dev/null +++ b/tests/config2.yaml @@ -0,0 +1,27 @@ +llm: + base_url: "https://api.openai.com/v1" + api_key: "sk-xxx" + model: "gpt-3.5-turbo-1106" + +search: + api_type: "serpapi" + api_key: "xxx" + +s3: + access_key: "MOCK_S3_ACCESS_KEY" + secret_key: "MOCK_S3_SECRET_KEY" + endpoint: "http://mock:9000" + secure: false + bucket: "mock" + +azure_tts_subscription_key: "xxx" +azure_tts_region: "eastus" + +iflytek_app_id: "xxx" +iflytek_api_key: "xxx" +iflytek_api_secret: "xxx" 
+ +metagpt_tti_url: "http://mock.com" + +repair_llm_output: true + diff --git a/tests/metagpt/actions/test_rebuild_class_view.py b/tests/metagpt/actions/test_rebuild_class_view.py index 04b7d91fc..403109cc0 100644 --- a/tests/metagpt/actions/test_rebuild_class_view.py +++ b/tests/metagpt/actions/test_rebuild_class_view.py @@ -29,9 +29,9 @@ async def test_rebuild(context): @pytest.mark.parametrize( ("path", "direction", "diff", "want"), [ - ("metagpt/startup.py", "=", ".", "metagpt/startup.py"), - ("metagpt/startup.py", "+", "MetaGPT", "MetaGPT/metagpt/startup.py"), - ("metagpt/startup.py", "-", "metagpt", "startup.py"), + ("metagpt/software_company.py", "=", ".", "metagpt/software_company.py"), + ("metagpt/software_company.py", "+", "MetaGPT", "MetaGPT/metagpt/software_company.py"), + ("metagpt/software_company.py", "-", "metagpt", "software_company.py"), ], ) def test_align_path(path, direction, diff, want): diff --git a/tests/metagpt/actions/test_skill_action.py b/tests/metagpt/actions/test_skill_action.py index 2ebe79b30..d667d6d70 100644 --- a/tests/metagpt/actions/test_skill_action.py +++ b/tests/metagpt/actions/test_skill_action.py @@ -23,9 +23,9 @@ class TestSkillAction: "type": "string", "description": "OpenAI API key, For more details, checkout: `https://platform.openai.com/account/api-keys`", }, - "METAGPT_TEXT_TO_IMAGE_MODEL_URL": {"type": "string", "description": "Model url."}, + "metagpt_tti_url": {"type": "string", "description": "Model url."}, }, - "required": {"oneOf": ["OPENAI_API_KEY", "METAGPT_TEXT_TO_IMAGE_MODEL_URL"]}, + "required": {"oneOf": ["OPENAI_API_KEY", "metagpt_tti_url"]}, }, parameters={ "text": Parameter(type="string", description="The text used for image conversion."), diff --git a/tests/metagpt/learn/test_text_to_image.py b/tests/metagpt/learn/test_text_to_image.py index 167a35891..d3272dadd 100644 --- a/tests/metagpt/learn/test_text_to_image.py +++ b/tests/metagpt/learn/test_text_to_image.py @@ -27,7 +27,7 @@ async def 
test_text_to_image(mocker): mocker.patch.object(S3, "cache", return_value="http://mock/s3") config = Config.default() - assert config.METAGPT_TEXT_TO_IMAGE_MODEL_URL + assert config.metagpt_tti_url data = await text_to_image("Panda emoji", size_type="512x512", config=config) assert "base64" in data or "http" in data @@ -52,7 +52,7 @@ async def test_openai_text_to_image(mocker): mocker.patch.object(S3, "cache", return_value="http://mock.s3.com/0.png") config = Config.default() - config.METAGPT_TEXT_TO_IMAGE_MODEL_URL = None + config.metagpt_tti_url = None assert config.get_openai_llm() data = await text_to_image("Panda emoji", size_type="512x512", config=config) diff --git a/tests/metagpt/learn/test_text_to_speech.py b/tests/metagpt/learn/test_text_to_speech.py index 38e051cc6..f01e5d132 100644 --- a/tests/metagpt/learn/test_text_to_speech.py +++ b/tests/metagpt/learn/test_text_to_speech.py @@ -20,9 +20,9 @@ from metagpt.utils.s3 import S3 async def test_azure_text_to_speech(mocker): # mock config = Config.default() - config.IFLYTEK_API_KEY = None - config.IFLYTEK_API_SECRET = None - config.IFLYTEK_APP_ID = None + config.iflytek_api_key = None + config.iflytek_api_secret = None + config.iflytek_app_id = None mock_result = mocker.Mock() mock_result.audio_data = b"mock audio data" mock_result.reason = ResultReason.SynthesizingAudioCompleted @@ -32,11 +32,11 @@ async def test_azure_text_to_speech(mocker): mocker.patch.object(S3, "cache", return_value="http://mock.s3.com/1.wav") # Prerequisites - assert not config.IFLYTEK_APP_ID - assert not config.IFLYTEK_API_KEY - assert not config.IFLYTEK_API_SECRET - assert config.AZURE_TTS_SUBSCRIPTION_KEY and config.AZURE_TTS_SUBSCRIPTION_KEY != "YOUR_API_KEY" - assert config.AZURE_TTS_REGION + assert not config.iflytek_app_id + assert not config.iflytek_api_key + assert not config.iflytek_api_secret + assert config.azure_tts_subscription_key and config.azure_tts_subscription_key != "YOUR_API_KEY" + assert config.azure_tts_region 
config.copy() # test azure @@ -48,8 +48,8 @@ async def test_azure_text_to_speech(mocker): async def test_iflytek_text_to_speech(mocker): # mock config = Config.default() - config.AZURE_TTS_SUBSCRIPTION_KEY = None - config.AZURE_TTS_REGION = None + config.azure_tts_subscription_key = None + config.azure_tts_region = None mocker.patch.object(IFlyTekTTS, "synthesize_speech", return_value=None) mock_data = mocker.AsyncMock() mock_data.read.return_value = b"mock iflytek" @@ -58,11 +58,11 @@ async def test_iflytek_text_to_speech(mocker): mocker.patch.object(S3, "cache", return_value="http://mock.s3.com/1.mp3") # Prerequisites - assert config.IFLYTEK_APP_ID - assert config.IFLYTEK_API_KEY - assert config.IFLYTEK_API_SECRET - assert not config.AZURE_TTS_SUBSCRIPTION_KEY or config.AZURE_TTS_SUBSCRIPTION_KEY == "YOUR_API_KEY" - assert not config.AZURE_TTS_REGION + assert config.iflytek_app_id + assert config.iflytek_api_key + assert config.iflytek_api_secret + assert not config.azure_tts_subscription_key or config.azure_tts_subscription_key == "YOUR_API_KEY" + assert not config.azure_tts_region # test azure data = await text_to_speech("panda emoji", config=config) diff --git a/tests/metagpt/test_incremental_dev.py b/tests/metagpt/test_incremental_dev.py index 3e4a1b901..964d4c757 100644 --- a/tests/metagpt/test_incremental_dev.py +++ b/tests/metagpt/test_incremental_dev.py @@ -14,7 +14,7 @@ from typer.testing import CliRunner from metagpt.const import TEST_DATA_PATH from metagpt.logs import logger -from metagpt.startup import app +from metagpt.software_company import app runner = CliRunner() diff --git a/tests/metagpt/test_startup.py b/tests/metagpt/test_software_company.py similarity index 86% rename from tests/metagpt/test_startup.py rename to tests/metagpt/test_software_company.py index 095a74e3b..1b6477260 100644 --- a/tests/metagpt/test_startup.py +++ b/tests/metagpt/test_software_company.py @@ -3,13 +3,13 @@ """ @Time : 2023/5/15 11:40 @Author : alexanderwu -@File : 
test_startup.py +@File : test_software_company.py """ import pytest from typer.testing import CliRunner from metagpt.logs import logger -from metagpt.startup import app +from metagpt.software_company import app from metagpt.team import Team runner = CliRunner() @@ -23,7 +23,7 @@ async def test_empty_team(new_filename): logger.info(history) -def test_startup(new_filename): +def test_software_company(new_filename): args = ["Make a cli snake game"] result = runner.invoke(app, args) logger.info(result) diff --git a/tests/metagpt/tools/test_azure_tts.py b/tests/metagpt/tools/test_azure_tts.py index 74d23e439..f72b5663b 100644 --- a/tests/metagpt/tools/test_azure_tts.py +++ b/tests/metagpt/tools/test_azure_tts.py @@ -28,10 +28,10 @@ async def test_azure_tts(mocker): mocker.patch.object(Path, "exists", return_value=True) # Prerequisites - assert config.AZURE_TTS_SUBSCRIPTION_KEY and config.AZURE_TTS_SUBSCRIPTION_KEY != "YOUR_API_KEY" - assert config.AZURE_TTS_REGION + assert config.azure_tts_subscription_key and config.azure_tts_subscription_key != "YOUR_API_KEY" + assert config.azure_tts_region - azure_tts = AzureTTS(subscription_key=config.AZURE_TTS_SUBSCRIPTION_KEY, region=config.AZURE_TTS_REGION) + azure_tts = AzureTTS(subscription_key=config.azure_tts_subscription_key, region=config.azure_tts_region) text = """ 女儿看见父亲走了进来,问道: diff --git a/tests/metagpt/tools/test_iflytek_tts.py b/tests/metagpt/tools/test_iflytek_tts.py index 8e4c0cf54..c51f62b8e 100644 --- a/tests/metagpt/tools/test_iflytek_tts.py +++ b/tests/metagpt/tools/test_iflytek_tts.py @@ -15,8 +15,8 @@ from metagpt.tools.iflytek_tts import IFlyTekTTS, oas3_iflytek_tts async def test_iflytek_tts(mocker): # mock config = Config.default() - config.AZURE_TTS_SUBSCRIPTION_KEY = None - config.AZURE_TTS_REGION = None + config.azure_tts_subscription_key = None + config.azure_tts_region = None mocker.patch.object(IFlyTekTTS, "synthesize_speech", return_value=None) mock_data = mocker.AsyncMock() 
mock_data.read.return_value = b"mock iflytek" @@ -24,15 +24,15 @@ async def test_iflytek_tts(mocker): mock_reader.return_value.__aenter__.return_value = mock_data # Prerequisites - assert config.IFLYTEK_APP_ID - assert config.IFLYTEK_API_KEY - assert config.IFLYTEK_API_SECRET + assert config.iflytek_app_id + assert config.iflytek_api_key + assert config.iflytek_api_secret result = await oas3_iflytek_tts( text="你好,hello", - app_id=config.IFLYTEK_APP_ID, - api_key=config.IFLYTEK_API_KEY, - api_secret=config.IFLYTEK_API_SECRET, + app_id=config.iflytek_app_id, + api_key=config.iflytek_api_key, + api_secret=config.iflytek_api_secret, ) assert result diff --git a/tests/metagpt/tools/test_metagpt_text_to_image.py b/tests/metagpt/tools/test_metagpt_text_to_image.py index 0dcad20d2..d3797a460 100644 --- a/tests/metagpt/tools/test_metagpt_text_to_image.py +++ b/tests/metagpt/tools/test_metagpt_text_to_image.py @@ -24,7 +24,7 @@ async def test_draw(mocker): mock_post.return_value.__aenter__.return_value = mock_response # Prerequisites - assert config.METAGPT_TEXT_TO_IMAGE_MODEL_URL + assert config.metagpt_tti_url binary_data = await oas3_metagpt_text_to_image("Panda emoji") assert binary_data diff --git a/tests/metagpt/tools/test_ut_writer.py b/tests/metagpt/tools/test_ut_writer.py index 29b6572c2..3cc7e86bb 100644 --- a/tests/metagpt/tools/test_ut_writer.py +++ b/tests/metagpt/tools/test_ut_writer.py @@ -8,6 +8,17 @@ from pathlib import Path import pytest +from openai.resources.chat.completions import AsyncCompletions +from openai.types import CompletionUsage +from openai.types.chat.chat_completion import ( + ChatCompletion, + ChatCompletionMessage, + Choice, +) +from openai.types.chat.chat_completion_message_tool_call import ( + ChatCompletionMessageToolCall, + Function, +) from metagpt.config2 import config from metagpt.const import API_QUESTIONS_PATH, UT_PY_PATH @@ -16,7 +27,43 @@ from metagpt.tools.ut_writer import YFT_PROMPT_PREFIX, UTGenerator class TestUTWriter: 
@pytest.mark.asyncio - async def test_api_to_ut_sample(self): + async def test_api_to_ut_sample(self, mocker): + async def mock_create(*args, **kwargs): + return ChatCompletion( + id="chatcmpl-8n5fAd21w2J1IIFkI4qxWlNfM7QRC", + choices=[ + Choice( + finish_reason="stop", + index=0, + logprobs=None, + message=ChatCompletionMessage( + content=None, + role="assistant", + function_call=None, + tool_calls=[ + ChatCompletionMessageToolCall( + id="call_EjjmIY7GMspHu3r9mx8gPA2k", + function=Function( + arguments='{"code":"import string\\nimport random\\n\\ndef random_string' + "(length=10):\\n return ''.join(random.choice(string.ascii_" + 'lowercase) for i in range(length))"}', + name="execute", + ), + type="function", + ) + ], + ), + ) + ], + created=1706710532, + model="gpt-3.5-turbo-1106", + object="chat.completion", + system_fingerprint="fp_04f9a1eebf", + usage=CompletionUsage(completion_tokens=35, prompt_tokens=1982, total_tokens=2017), + ) + + mocker.patch.object(AsyncCompletions, "create", mock_create) + # Prerequisites swagger_file = Path(__file__).parent / "../../data/ut_writer/yft_swaggerApi.json" assert swagger_file.exists() diff --git a/tests/metagpt/utils/test_repair_llm_raw_output.py b/tests/metagpt/utils/test_repair_llm_raw_output.py index 3ccca3e06..9eec24727 100644 --- a/tests/metagpt/utils/test_repair_llm_raw_output.py +++ b/tests/metagpt/utils/test_repair_llm_raw_output.py @@ -141,6 +141,32 @@ def test_repair_json_format(): output = repair_llm_raw_output(output=raw_output, req_keys=[None], repair_type=RepairType.JSON) assert output == target_output + raw_output = """ +{ + "Language": "en_us", // define language + "Programming Language": "Python" # define code language +} +""" + target_output = """{ + "Language": "en_us", + "Programming Language": "Python" +}""" + output = repair_llm_raw_output(output=raw_output, req_keys=[None], repair_type=RepairType.JSON) + assert output == target_output + + raw_output = """ + { + "Language": "#en_us#", // define 
language + "Programming Language": "//Python # Code // Language//" # define code language + } + """ + target_output = """{ + "Language": "#en_us#", + "Programming Language": "//Python # Code // Language//" + }""" + output = repair_llm_raw_output(output=raw_output, req_keys=[None], repair_type=RepairType.JSON) + assert output == target_output + def test_repair_invalid_json(): from metagpt.utils.repair_llm_raw_output import repair_invalid_json diff --git a/tests/spark.yaml b/tests/spark.yaml new file mode 100644 index 000000000..a5bbd98bd --- /dev/null +++ b/tests/spark.yaml @@ -0,0 +1,7 @@ +llm: + api_type: "spark" + app_id: "xxx" + api_key: "xxx" + api_secret: "xxx" + domain: "generalv2" + base_url: "wss://spark-api.xf-yun.com/v3.1/chat" \ No newline at end of file