mirror of
https://github.com/FoundationAgents/MetaGPT.git
synced 2026-05-15 11:02:36 +02:00
Merge branch 'main' into upgrade-oi
This commit is contained in:
commit
efbddbf257
10 changed files with 241 additions and 7 deletions
|
|
@ -1,7 +1,7 @@
|
|||
# MetaGPT: The Multi-Agent Framework
|
||||
|
||||
<p align="center">
|
||||
<a href=""><img src="docs/resources/MetaGPT-logo.jpeg" alt="MetaGPT logo: Enable GPT to work in software company, collaborating to tackle more complex tasks." width="150px"></a>
|
||||
<a href=""><img src="docs/resources/MetaGPT-new-log.png" alt="MetaGPT logo: Enable GPT to work in software company, collaborating to tackle more complex tasks." width="150px"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
|
|
@ -60,7 +60,7 @@ ### Installation Video Guide
|
|||
### Traditional Installation
|
||||
|
||||
```bash
|
||||
# Step 1: Ensure that NPM is installed on your system. Then install mermaid-js. (If you don't have npm in your computer, please go to the Node.js offical website to install Node.js https://nodejs.org/ and then you will have npm tool in your computer.)
|
||||
# Step 1: Ensure that NPM is installed on your system. Then install mermaid-js. (If you don't have npm in your computer, please go to the Node.js official website to install Node.js https://nodejs.org/ and then you will have npm tool in your computer.)
|
||||
npm --version
|
||||
sudo npm install -g @mermaid-js/mermaid-cli
|
||||
|
||||
|
|
@ -127,7 +127,7 @@ # Step 3: Clone the repository to your local machine, and install it.
|
|||
|
||||
- **Use your own Browsers**
|
||||
|
||||
pyppeteer alow you use installed browsers, please set the following envirment
|
||||
pyppeteer allows you to use installed browsers; please set the following environment variable
|
||||
|
||||
```bash
|
||||
export PUPPETEER_EXECUTABLE_PATH=/path/to/your/chromium  # or edge or chrome
|
||||
|
|
|
|||
|
|
@ -12,6 +12,13 @@ OPENAI_API_MODEL: "gpt-4"
|
|||
MAX_TOKENS: 1500
|
||||
RPM: 10
|
||||
|
||||
#### if Spark
|
||||
#SPARK_APPID : "YOUR_APPID"
|
||||
#SPARK_API_SECRET : "YOUR_APISecret"
|
||||
#SPARK_API_KEY : "YOUR_APIKey"
|
||||
#DOMAIN : "generalv2"
|
||||
#SPARK_URL : "ws://spark-api.xf-yun.com/v2.1/chat"
|
||||
|
||||
#### if Anthropic
|
||||
#Anthropic_API_KEY: "YOUR_API_KEY"
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# MetaGPT: 多智能体框架
|
||||
|
||||
<p align="center">
|
||||
<a href=""><img src="resources/MetaGPT-logo.jpeg" alt="MetaGPT logo: 使 GPT 以软件公司的形式工作,协作处理更复杂的任务" width="150px"></a>
|
||||
<a href=""><img src="resources/MetaGPT-new-log.png" alt="MetaGPT logo: 使 GPT 以软件公司的形式工作,协作处理更复杂的任务" width="150px"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# MetaGPT: マルチエージェントフレームワーク
|
||||
|
||||
<p align="center">
|
||||
<a href=""><img src="resources/MetaGPT-logo.jpeg" alt="MetaGPT ロゴ: GPT がソフトウェア会社で働けるようにし、協力してより複雑な仕事に取り組む。" width="150px"></a>
|
||||
<a href=""><img src="resources/MetaGPT-new-log.png" alt="MetaGPT ロゴ: GPT がソフトウェア会社で働けるようにし、協力してより複雑な仕事に取り組む。" width="150px"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
|
|
|
|||
BIN
docs/resources/MetaGPT-new-log.png
Normal file
BIN
docs/resources/MetaGPT-new-log.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 61 KiB |
|
|
@ -91,7 +91,7 @@ if __name__ == "__main__":
|
|||
|
||||
msg = """
|
||||
Write an agent called SimpleTester that will take any code snippet (str) and do the following:
|
||||
1. write a testing code (str) for testing the given code snippet, save the testing code as a .py file in the current working diretory;
|
||||
1. write a testing code (str) for testing the given code snippet, save the testing code as a .py file in the current working directory;
|
||||
2. run the testing code.
|
||||
You can use pytest as the testing framework.
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@ class Config(metaclass=Singleton):
|
|||
self.openai_api_key = self._get("OPENAI_API_KEY")
|
||||
self.anthropic_api_key = self._get("Anthropic_API_KEY")
|
||||
if (not self.openai_api_key or "YOUR_API_KEY" == self.openai_api_key) and (
|
||||
not self.anthropic_api_key or "YOUR_API_KEY" == self.anthropic_api_key
|
||||
not self.anthropic_api_key or "YOUR_API_KEY" == self.anthropic_api_key
|
||||
):
|
||||
raise NotConfiguredException("Set OPENAI_API_KEY or Anthropic_API_KEY first")
|
||||
self.openai_api_base = self._get("OPENAI_API_BASE")
|
||||
|
|
@ -62,6 +62,12 @@ class Config(metaclass=Singleton):
|
|||
self.deployment_name = self._get("DEPLOYMENT_NAME")
|
||||
self.deployment_id = self._get("DEPLOYMENT_ID")
|
||||
|
||||
self.spark_appid = self._get("SPARK_APPID")
|
||||
self.spark_api_secret = self._get("SPARK_API_SECRET")
|
||||
self.spark_api_key = self._get("SPARK_API_KEY")
|
||||
self.domain = self._get("DOMAIN")
|
||||
self.spark_url = self._get("SPARK_URL")
|
||||
|
||||
self.claude_api_key = self._get("Anthropic_API_KEY")
|
||||
self.serpapi_api_key = self._get("SERPAPI_API_KEY")
|
||||
self.serper_api_key = self._get("SERPER_API_KEY")
|
||||
|
|
|
|||
205
metagpt/provider/spark_api.py
Normal file
205
metagpt/provider/spark_api.py
Normal file
|
|
@ -0,0 +1,205 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
@Time : 2023/7/21 11:15
|
||||
@Author : Leo Xiao
|
||||
@File    : spark_api.py
|
||||
"""
|
||||
import _thread as thread
|
||||
import base64
|
||||
import datetime
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import ssl
|
||||
from time import mktime
|
||||
from typing import Optional
|
||||
from urllib.parse import urlencode
|
||||
from urllib.parse import urlparse
|
||||
from wsgiref.handlers import format_date_time
|
||||
|
||||
import websocket # 使用websocket_client
|
||||
|
||||
from metagpt.config import CONFIG
|
||||
from metagpt.logs import logger
|
||||
from metagpt.provider.base_gpt_api import BaseGPTAPI
|
||||
|
||||
|
||||
class SparkAPI(BaseGPTAPI):
    """iFLYTEK Spark LLM provider.

    Every completion is delegated to GetMessageFromWeb, which performs a
    blocking websocket exchange with the Spark endpoint; the async entry
    points therefore do not actually run concurrently (see the warning
    emitted in __init__).
    """

    def __init__(self):
        # The underlying websocket client is synchronous, so "async" calls
        # cannot be parallelized.
        logger.warning('当前方法无法支持异步运行。当你使用acompletion时,并不能并行访问。')

    def ask(self, msg: str) -> str:
        """Send one user message (with the default system prompt) and return the reply."""
        return self.completion([self._default_system_msg(), self._user_msg(msg)])

    async def aask(self, msg: str, system_msgs: Optional[list[str]] = None) -> str:
        """Async variant of ask(); optionally prepends caller-supplied system messages."""
        if system_msgs:
            prompt = self._system_msgs(system_msgs) + [self._user_msg(msg)]
        else:
            prompt = [self._default_system_msg(), self._user_msg(msg)]
        reply = await self.acompletion(prompt)
        logger.debug(prompt)
        return reply

    def get_choice_text(self, rsp: dict) -> str:
        """Extract the text of the last choice from a Spark response payload."""
        return rsp["payload"]["choices"]["text"][-1]["content"]

    async def acompletion_text(self, messages: list[dict], stream=False) -> str:
        # Streaming is not supported; falls back to one blocking completion.
        logger.error('该功能禁用。')
        return GetMessageFromWeb(messages).run()

    async def acompletion(self, messages: list[dict]):
        # No real async support: runs the blocking websocket exchange inline.
        return GetMessageFromWeb(messages).run()

    def completion(self, messages: list[dict]):
        """Run one blocking request/response round-trip against the Spark API."""
        return GetMessageFromWeb(messages).run()
|
||||
|
||||
|
||||
class GetMessageFromWeb:
    """Blocking websocket client for a single chat exchange with the iFLYTEK Spark API.

    Construct with the chat message payload, then call run(): it opens a
    websocket to the configured Spark endpoint, accumulates the streamed
    answer chunks into self.ret, and returns the concatenated text once the
    server marks the stream complete.
    """

    class WsParam:
        """
        Suitable for building authenticated URLs for most iFLYTEK Spark endpoints.

        Initialize with app_id, api_key, api_secret and spark_url;
        create_url() returns the signed endpoint URL.
        """

        # Store credentials and split the endpoint URL into host/path for signing.
        def __init__(self, app_id, api_key, api_secret, spark_url, message=None):
            self.app_id = app_id
            self.api_key = api_key
            self.api_secret = api_secret
            self.host = urlparse(spark_url).netloc
            self.path = urlparse(spark_url).path
            self.spark_url = spark_url
            self.message = message

        # Build the signed websocket URL.
        def create_url(self):
            # RFC 1123 formatted timestamp, required by the signature scheme.
            now = datetime.datetime.now()
            date = format_date_time(mktime(now.timetuple()))

            # Canonical string to sign: host, date, and the request line.
            signature_origin = "host: " + self.host + "\n"
            signature_origin += "date: " + date + "\n"
            signature_origin += "GET " + self.path + " HTTP/1.1"

            # Sign the canonical string with HMAC-SHA256 using the API secret.
            signature_sha = hmac.new(self.api_secret.encode('utf-8'), signature_origin.encode('utf-8'),
                                     digestmod=hashlib.sha256).digest()

            signature_sha_base64 = base64.b64encode(signature_sha).decode(encoding='utf-8')

            authorization_origin = f'api_key="{self.api_key}", algorithm="hmac-sha256", headers="host date request-line", signature="{signature_sha_base64}"'

            authorization = base64.b64encode(authorization_origin.encode('utf-8')).decode(encoding='utf-8')

            # Collect the auth parameters into a query-string dict.
            v = {
                "authorization": authorization,
                "date": date,
                "host": self.host
            }
            # Append the auth parameters to form the final URL.
            url = self.spark_url + '?' + urlencode(v)
            # When debugging, print this URL and compare it against the one the
            # official demo generates from the same parameters.
            return url

    def __init__(self, text):
        # text: the chat messages payload embedded by gen_params().
        self.text = text
        # Accumulates the streamed answer chunks received in on_message().
        self.ret = ''
        self.spark_appid = CONFIG.spark_appid
        self.spark_api_secret = CONFIG.spark_api_secret
        self.spark_api_key = CONFIG.spark_api_key
        self.domain = CONFIG.domain
        self.spark_url = CONFIG.spark_url

    def on_message(self, ws, message):
        # Handle one streamed frame from the server.
        data = json.loads(message)
        code = data['header']['code']

        if code != 0:
            ws.close()  # Request failed: close the socket.
            logger.critical(f'回答获取失败,响应信息反序列化之后为: {data}')
            return
        else:
            choices = data["payload"]["choices"]
            seq = choices["seq"]  # Sequence number of this streamed frame.
            status = choices["status"]  # Stream status; 2 marks the final frame.
            content = choices["text"][0]["content"]  # Answer text carried by this frame.
            self.ret += content
            if status == 2:
                ws.close()

    # Called on any websocket error.
    def on_error(self, ws, error):
        # Any error raised while on_message processes a frame also lands here.
        logger.critical(f'通讯连接出错,【错误提示: {error}】')

    # Called when the websocket is closed.
    def on_close(self, ws, one, two):
        pass

    # Build the request body.
    def gen_params(self):

        data = {
            "header": {
                "app_id": self.spark_appid,
                "uid": "1234"
            },
            "parameter": {
                "chat": {
                    # "domain" is a mandatory parameter.
                    "domain": self.domain,

                    # The following are optional tuning parameters.
                    # Note: the vendor recommends adjusting only one of
                    # temperature and top_k.
                    "max_tokens": 2048,  # Default 2048; upper bound on the answer length in tokens.
                    "temperature": 0.5,  # In [0,1], default 0.5; higher means more random/divergent answers.
                    "top_k": 4,  # In [1,6], default 4; sample (non-uniformly) one of the top k candidates.
                }
            },
            "payload": {
                "message": {
                    "text": self.text
                }
            }
        }
        return data

    def send(self, ws, *args):
        # Serialize and send the request body over the open websocket.
        data = json.dumps(self.gen_params())
        ws.send(data)

    # Called when the websocket connection is established.
    def on_open(self, ws):
        thread.start_new_thread(self.send, (ws,))

    # Run the blocking exchange; any error in on_message triggers on_error.
    def run(self):
        return self._run(self.text)

    def _run(self, text_list):

        ws_param = self.WsParam(
            self.spark_appid,
            self.spark_api_key,
            self.spark_api_secret,
            self.spark_url,
            text_list)
        ws_url = ws_param.create_url()

        websocket.enableTrace(False)  # Websocket tracing disabled by default.
        ws = websocket.WebSocketApp(ws_url, on_message=self.on_message, on_error=self.on_error, on_close=self.on_close,
                                    on_open=self.on_open)
        ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})
        return self.ret
|
||||
|
|
@ -43,3 +43,8 @@ open-interpreter==0.1.7; python_version>"3.9"
|
|||
ta==0.10.2
|
||||
semantic-kernel
|
||||
wrapt==1.15.0
|
||||
websocket-client==0.58.0
|
||||
aiofiles~=23.2.1
|
||||
pygments~=2.16.1
|
||||
requests~=2.31.0
|
||||
yaml~=0.2.5
|
||||
|
|
|
|||
11
tests/metagpt/provider/test_spark_api.py
Normal file
11
tests/metagpt/provider/test_spark_api.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
from metagpt.logs import logger
|
||||
from metagpt.provider.spark_api import SparkAPI
|
||||
|
||||
|
||||
def test_message():
    """Live smoke test: ask the Spark API two questions and expect a long answer."""
    # NOTE(review): hits the real Spark endpoint — needs valid SPARK_* config.
    llm = SparkAPI()

    logger.info(llm.ask('只回答"收到了"这三个字。'))
    answer = llm.ask('写一篇五百字的日记')
    logger.info(answer)
    assert len(answer) > 100
|
||||
Loading…
Add table
Add a link
Reference in a new issue