diff --git a/config/config.yaml b/config/config.yaml index 274cdf469..c1f3abd28 100644 --- a/config/config.yaml +++ b/config/config.yaml @@ -15,6 +15,19 @@ RPM: 10 #### if Anthropic #Anthropic_API_KEY: "YOUR_API_KEY" +#### if xinghuo +#xinghuo_appid : "APPID" +#xinghuo_api_secret : "APISecret" +#xinghuo_api_key : "APIKey" + +#domain : "generalv2" + +#Spark_url : "ws://spark-api.xf-yun.com/v2.1/chat" + +#### 如果不能使用api + +#no_api_mode: "true" + #### if AZURE, check https://github.com/openai/openai-cookbook/blob/main/examples/azure/chat.ipynb #OPENAI_API_TYPE: "azure" diff --git a/examples/llm_hello_world.py b/examples/llm_hello_world.py index 3ba03eea0..d6d24b688 100644 --- a/examples/llm_hello_world.py +++ b/examples/llm_hello_world.py @@ -7,12 +7,11 @@ """ import asyncio -from metagpt.llm import LLM, Claude -from metagpt.logs import logger +import metagpt.llm as LLM async def main(): - llm = LLM() + llm=LLM.DEFAULT_LLM claude = Claude() logger.info(await claude.aask('你好,请进行自我介绍')) logger.info(await llm.aask('hello world')) diff --git a/metagpt/actions/action.py b/metagpt/actions/action.py index fa0d592a3..748300561 100644 --- a/metagpt/actions/action.py +++ b/metagpt/actions/action.py @@ -11,15 +11,16 @@ from typing import Optional from tenacity import retry, stop_after_attempt, wait_fixed from metagpt.actions.action_output import ActionOutput -from metagpt.llm import LLM +import metagpt.llm as LLM from metagpt.utils.common import OutputParser from metagpt.logs import logger +from metagpt.config import CONFIG class Action(ABC): def __init__(self, name: str = '', context=None, llm: LLM = None): self.name: str = name if llm is None: - llm = LLM() + llm=LLM.DEFAULT_LLM self.llm = llm self.context = context self.prefix = "" @@ -54,13 +55,42 @@ class Action(ABC): if not system_msgs: system_msgs = [] system_msgs.append(self.prefix) - content = await self.llm.aask(prompt, system_msgs) - logger.debug(content) - output_class = 
ActionOutput.create_model_class(output_class_name, output_data_mapping) - parsed_data = OutputParser.parse_data_with_mapping(content, output_data_mapping) - logger.debug(parsed_data) - instruct_content = output_class(**parsed_data) - return ActionOutput(content, instruct_content) + if not CONFIG.no_api_mode: + content = await self.llm.aask(prompt, system_msgs) + logger.debug(content) + output_class = ActionOutput.create_model_class(output_class_name, output_data_mapping) + parsed_data = OutputParser.parse_data_with_mapping(content, output_data_mapping) + logger.debug(parsed_data) + try: + instruct_content = output_class(**parsed_data) + return ActionOutput(content, instruct_content) + except Exception as e: + print('Error:',e) + print('自动运行出错,切换为手动运行') + print('prompt为') + print('\n'.join( system_msgs)+prompt) + print('输入格式:') + print(output_data_mapping) + print('请准备输入,输入完成按ctrl+Z') + while True: + try: + lines=[] + while True: + try: + lines.append(input()) + except: + break + + content ='\n'.join(lines) + output_class = ActionOutput.create_model_class(output_class_name, output_data_mapping) + parsed_data = OutputParser.parse_data_with_mapping(content, output_data_mapping) + logger.debug(parsed_data) + instruct_content = output_class(**parsed_data) + return ActionOutput(content, instruct_content) + except Exception as e: + print('Error:',e) + print('输入错误,请重试') + async def run(self, *args, **kwargs): """Run action""" diff --git a/metagpt/actions/write_prd.py b/metagpt/actions/write_prd.py index 0edd24d55..bc2e175e8 100644 --- a/metagpt/actions/write_prd.py +++ b/metagpt/actions/write_prd.py @@ -143,4 +143,5 @@ class WritePRD(Action): format_example=FORMAT_EXAMPLE) logger.debug(prompt) prd = await self._aask_v1(prompt, "prd", OUTPUT_MAPPING) + return prd diff --git a/metagpt/config.py b/metagpt/config.py index 2c1096877..e9e39cb22 100644 --- a/metagpt/config.py +++ b/metagpt/config.py @@ -45,8 +45,18 @@ class Config(metaclass=Singleton): self.global_proxy = 
self._get("GLOBAL_PROXY") self.openai_api_key = self._get("OPENAI_API_KEY") self.anthropic_api_key = self._get("Anthropic_API_KEY") + + #星火大模型相关 + self.xinghuo_appid = self._get("xinghuo_appid") + self.xinghuo_api_secret = self._get("xinghuo_api_secret") + self.xinghuo_api_key = self._get("xinghuo_api_key") + self.domain=self._get("domain") + self.Spark_url=self._get("Spark_url") + self.no_api_mode=self._get("no_api_mode") if (not self.openai_api_key or "YOUR_API_KEY" == self.openai_api_key) and ( not self.anthropic_api_key or "YOUR_API_KEY" == self.anthropic_api_key + )and ( + not self.xinghuo_api_key or "APIKey" == self.xinghuo_api_key ): raise NotConfiguredException("Set OPENAI_API_KEY or Anthropic_API_KEY first") self.openai_api_base = self._get("OPENAI_API_BASE") diff --git a/metagpt/llm.py b/metagpt/llm.py index 6a9a9132f..e523e7698 100644 --- a/metagpt/llm.py +++ b/metagpt/llm.py @@ -8,10 +8,11 @@ from metagpt.provider.anthropic_api import Claude2 as Claude from metagpt.provider.openai_api import OpenAIGPTAPI as LLM +from metagpt.provider.spark_api import Spark -DEFAULT_LLM = LLM() +DEFAULT_LLM = Spark() CLAUDE_LLM = Claude() - +SPARK_LLM = Spark() async def ai_func(prompt): """使用LLM进行QA diff --git a/metagpt/management/skill_manager.py b/metagpt/management/skill_manager.py index f067e6df6..dbe8d1545 100644 --- a/metagpt/management/skill_manager.py +++ b/metagpt/management/skill_manager.py @@ -8,7 +8,7 @@ from metagpt.actions import Action from metagpt.const import PROMPT_PATH from metagpt.document_store.chromadb_store import ChromaStore -from metagpt.llm import LLM +import metagpt.llm as LLM from metagpt.logs import logger Skill = Action @@ -18,7 +18,7 @@ class SkillManager: """用来管理所有技能""" def __init__(self): - self._llm = LLM() + self._llm=LLM.DEFAULT_LLM self._store = ChromaStore('skill_manager') self._skills: dict[str: Skill] = {} diff --git a/metagpt/manager.py b/metagpt/manager.py index 9d238c621..3f6c115f3 100644 --- a/metagpt/manager.py +++ 
b/metagpt/manager.py @@ -5,13 +5,13 @@ @Author : alexanderwu @File : manager.py """ -from metagpt.llm import LLM +import metagpt.llm as LLM from metagpt.logs import logger from metagpt.schema import Message class Manager: - def __init__(self, llm: LLM = LLM()): + def __init__(self, llm: llm=LLM.DEFAULT_LLM): self.llm = llm # Large Language Model self.role_directions = { "BOSS": "Product Manager", diff --git a/metagpt/prompts/generate_skill.md b/metagpt/prompts/generate_skill.md index fd950c143..dc1365733 100644 --- a/metagpt/prompts/generate_skill.md +++ b/metagpt/prompts/generate_skill.md @@ -10,10 +10,10 @@ ```python from typing import Optional from abc import ABC -from metagpt.llm import LLM # 大语言模型,类似GPT +import metagpt.llm as LLM # 大语言模型,类似GPT class Action(ABC): - def __init__(self, name='', context=None, llm: LLM = LLM()): + def __init__(self, name='', context=None, llm: llm=LLM.DEFAULT_LLM): self.name = name self.llm = llm self.context = context diff --git a/metagpt/provider/SparkApi.py b/metagpt/provider/SparkApi.py new file mode 100644 index 000000000..330420439 --- /dev/null +++ b/metagpt/provider/SparkApi.py @@ -0,0 +1,137 @@ +import _thread as thread +import base64 +import datetime +import hashlib +import hmac +import json +from urllib.parse import urlparse +import ssl +from datetime import datetime +from time import mktime +from urllib.parse import urlencode +from wsgiref.handlers import format_date_time + +import websocket # 使用websocket_client +answer = "" + +class Ws_Param(object): + # 初始化 + def __init__(self, APPID, APIKey, APISecret, Spark_url): + self.APPID = APPID + self.APIKey = APIKey + self.APISecret = APISecret + self.host = urlparse(Spark_url).netloc + self.path = urlparse(Spark_url).path + self.Spark_url = Spark_url + + # 生成url + def create_url(self): + # 生成RFC1123格式的时间戳 + now = datetime.now() + date = format_date_time(mktime(now.timetuple())) + + # 拼接字符串 + signature_origin = "host: " + self.host + "\n" + signature_origin += "date: " + date 
+ "\n" + signature_origin += "GET " + self.path + " HTTP/1.1" + + # 进行hmac-sha256进行加密 + signature_sha = hmac.new(self.APISecret.encode('utf-8'), signature_origin.encode('utf-8'), + digestmod=hashlib.sha256).digest() + + signature_sha_base64 = base64.b64encode(signature_sha).decode(encoding='utf-8') + + authorization_origin = f'api_key="{self.APIKey}", algorithm="hmac-sha256", headers="host date request-line", signature="{signature_sha_base64}"' + + authorization = base64.b64encode(authorization_origin.encode('utf-8')).decode(encoding='utf-8') + + # 将请求的鉴权参数组合为字典 + v = { + "authorization": authorization, + "date": date, + "host": self.host + } + # 拼接鉴权参数,生成url + url = self.Spark_url + '?' + urlencode(v) + # 此处打印出建立连接时候的url,参考本demo的时候可取消上方打印的注释,比对相同参数时生成的url与自己代码生成的url是否一致 + return url + + +# 收到websocket错误的处理 +def on_error(ws, error): + print("### error:", error) + + +# 收到websocket关闭的处理 +def on_close(ws,one,two): + print(" ") + + +# 收到websocket连接建立的处理 +def on_open(ws): + thread.start_new_thread(run, (ws,)) + + +def run(ws, *args): + data = json.dumps(gen_params(appid=ws.appid, domain= ws.domain,question=ws.question)) + ws.send(data) + + +# 收到websocket消息的处理 +def on_message(ws, message): + # print(message) + data = json.loads(message) + code = data['header']['code'] + if code != 0: + print(f'请求错误: {code}, {data}') + ws.close() + else: + choices = data["payload"]["choices"] + status = choices["status"] + content = choices["text"][0]["content"] + print(content,end ="") + global answer + answer += content + # print(1) + if status == 2: + ws.close() + + +def gen_params(appid, domain,question): + """ + 通过appid和用户的提问来生成请参数 + """ + data = { + "header": { + "app_id": appid, + "uid": "1234" + }, + "parameter": { + "chat": { + "domain": domain, + "random_threshold": 0.5, + "max_tokens": 2048, + "auditing": "default" + } + }, + "payload": { + "message": { + "text": question + } + } + } + return data + + +def main(appid, api_key, api_secret, Spark_url,domain, question): + # 
print("星火:") + wsParam = Ws_Param(appid, api_key, api_secret, Spark_url) + websocket.enableTrace(False) + wsUrl = wsParam.create_url() + ws = websocket.WebSocketApp(wsUrl, on_message=on_message, on_error=on_error, on_close=on_close, on_open=on_open) + ws.appid = appid + ws.question = question + ws.domain = domain + ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE}) + + diff --git a/metagpt/provider/spark_api.py b/metagpt/provider/spark_api.py new file mode 100644 index 000000000..2f75208c8 --- /dev/null +++ b/metagpt/provider/spark_api.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +@Time : 2023/7/21 11:15 +@Author : Leo Xiao +@File : anthropic_api.py +""" + +from typing import Optional +from metagpt.provider import SparkApi + +from metagpt.config import CONFIG + +def getlength(text): + length = 0 + for content in text: + temp = content["content"] + leng = len(temp) + length += leng + return length + +def checklen(text): + while (getlength(text) > 8000): + del text[0] + return text + +class Spark: + system_prompt = 'You are a helpful assistant.' 
+ + def _user_msg(self, msg: str) -> dict[str, str]: + return {"role": "user", "content": msg} + + def _assistant_msg(self, msg: str) -> dict[str, str]: + return {"role": "assistant", "content": msg} + + def _system_msg(self, msg: str) -> dict[str, str]: + return {"role": "system", "content": msg} + + def _system_msgs(self, msgs: list[str]) -> list[dict[str, str]]: + return [self._system_msg(msg) for msg in msgs] + + def _default_system_msg(self): + return self._system_msg(self.system_prompt) + def ask(self, msg: str): + message = [self._user_msg(msg)] + SparkApi.main(CONFIG.xinghuo_appid,CONFIG.xinghuo_api_key,CONFIG.xinghuo_api_secret,"ws://spark-api.xf-yun.com/v2.1/chat","generalv2",message) + rsp = SparkApi.answer + return rsp + + async def aask(self, msg: str, system_msgs: Optional[list[str]] = None) -> str: + if system_msgs: + message = self._system_msgs(system_msgs) + [self._user_msg(msg)] + else: + message = [self._user_msg(msg)] + SparkApi.main(CONFIG.xinghuo_appid,CONFIG.xinghuo_api_key,CONFIG.xinghuo_api_secret,"ws://spark-api.xf-yun.com/v2.1/chat","generalv2",message) + rsp = SparkApi.answer + return rsp diff --git a/metagpt/roles/role.py b/metagpt/roles/role.py index d3750495f..add0a339c 100644 --- a/metagpt/roles/role.py +++ b/metagpt/roles/role.py @@ -14,7 +14,7 @@ from pydantic import BaseModel, Field # from metagpt.environment import Environment from metagpt.config import CONFIG from metagpt.actions import Action, ActionOutput -from metagpt.llm import LLM +from metagpt import llm as LLM from metagpt.logs import logger from metagpt.memory import Memory, LongTermMemory from metagpt.schema import Message @@ -94,7 +94,7 @@ class Role: """角色/代理""" def __init__(self, name="", profile="", goal="", constraints="", desc=""): - self._llm = LLM() + self._llm=LLM.DEFAULT_LLM self._setting = RoleSetting(name=name, profile=profile, goal=goal, constraints=constraints, desc=desc) self._states = [] self._actions = [] diff --git a/requirements.txt b/requirements.txt 
index efc2ea3e7..c202a8f17 100644 --- a/requirements.txt +++ b/requirements.txt @@ -33,7 +33,7 @@ tqdm==4.64.0 # selenium>4 # webdriver_manager<3.9 anthropic==0.3.6 -typing-inspect==0.8.0 +typing-inspect typing_extensions==4.5.0 -libcst==1.0.1 +libcst qdrant-client==1.4.0 \ No newline at end of file diff --git a/startup.py b/startup.py index 03b2149c4..94e2788e3 100644 --- a/startup.py +++ b/startup.py @@ -24,7 +24,7 @@ async def startup(idea: str, investment: float = 3.0, n_round: int = 5, await company.run(n_round=n_round) -def main(idea: str, investment: float = 3.0, n_round: int = 5, code_review: bool = False, run_tests: bool = False): +def main(idea: str ='写一个贪吃蛇命令行游戏', investment: float = 3.0, n_round: int = 5, code_review: bool = False, run_tests: bool = False): """ We are a software startup comprised of AI. By investing in us, you are empowering a future filled with limitless possibilities. :param idea: Your innovative idea, such as "Creating a snake game." diff --git a/tests/metagpt/actions/test_write_code.py b/tests/metagpt/actions/test_write_code.py index 7bb18ddf2..00d2c504e 100644 --- a/tests/metagpt/actions/test_write_code.py +++ b/tests/metagpt/actions/test_write_code.py @@ -8,7 +8,7 @@ import pytest from metagpt.actions.write_code import WriteCode -from metagpt.llm import LLM +import metagpt.llm as LLM from metagpt.logs import logger from tests.metagpt.actions.mock import TASKS_2, WRITE_CODE_PROMPT_SAMPLE @@ -29,6 +29,6 @@ async def test_write_code(): @pytest.mark.asyncio async def test_write_code_directly(): prompt = WRITE_CODE_PROMPT_SAMPLE + '\n' + TASKS_2[0] - llm = LLM() + llm=LLM.DEFAULT_LLM rsp = await llm.aask(prompt) logger.info(rsp) diff --git a/tests/metagpt/roles/mock.py b/tests/metagpt/roles/mock.py index 52fc4a3c1..9567be603 100644 --- a/tests/metagpt/roles/mock.py +++ b/tests/metagpt/roles/mock.py @@ -16,7 +16,7 @@ DETAIL_REQUIREMENT = """需求:开发一个基于LLM(大语言模型)与 3. 私有知识库支持pdf、word、txt等各种文件格式上传,上传后可以在服务端解析为文本,存储ES 资源: -1. 
大语言模型已经有前置的抽象、部署,可以通过 `from metagpt.llm import LLM`,再使用`LLM().ask(prompt)`直接调用 +1. 大语言模型已经有前置的抽象、部署,可以通过 `import metagpt.llm as LLM`,再使用`LLM.DEFAULT_LLM.ask(prompt)`直接调用 2. Elastic已有[部署](http://192.168.50.82:9200/),代码可以直接使用这个部署""" diff --git a/tests/metagpt/test_llm.py b/tests/metagpt/test_llm.py index 11503af1d..1986f3f22 100644 --- a/tests/metagpt/test_llm.py +++ b/tests/metagpt/test_llm.py @@ -8,7 +8,7 @@ import pytest -from metagpt.llm import LLM +import metagpt.llm as LLM @pytest.fixture() diff --git a/webui.py b/webui.py new file mode 100644 index 000000000..e69de29bb