format code

This commit is contained in:
seehi 2024-04-10 15:54:08 +08:00
parent 60d34f4a50
commit 549cb2d90b
3 changed files with 27 additions and 26 deletions

View file

@@ -6,20 +6,18 @@
@File : stream_output_via_api.py
@Description : Stream log information and communicate over the network via web api.
"""
import asyncio
import json
import socket
import asyncio
import threading
from contextvars import ContextVar
from flask import Flask, Response
from flask import request, jsonify, send_from_directory
from metagpt.logs import logger
from flask import Flask, Response, jsonify, request, send_from_directory
from metagpt.const import TUTORIAL_PATH
from metagpt.logs import set_llm_stream_logfunc
from metagpt.utils.stream_pipe import StreamPipe
from metagpt.logs import logger, set_llm_stream_logfunc
from metagpt.roles.tutorial_assistant import TutorialAssistant
from metagpt.utils.stream_pipe import StreamPipe
app = Flask(__name__)
@@ -39,12 +37,18 @@ def write_tutorial(message):
def thread_run(idea: str, stream_pipe: StreamPipe = None):
    """Run the async `main` coroutine to completion on a worker thread.

    Bridges the threaded Flask handler and the asyncio-based agent code:
    `asyncio.run` creates a fresh event loop for this thread, runs
    `main(idea, stream_pipe)`, and tears the loop down when it finishes.

    Args:
        idea: The user prompt forwarded to the tutorial agent.
        stream_pipe: Optional pipe used to stream intermediate log output
            back to the HTTP response generator.
    """
    # NOTE: the duplicated docstring line from the diff (old + new copies of
    # "Convert asynchronous function to thread function") is collapsed here.
    asyncio.run(main(idea, stream_pipe))
stream_pipe = StreamPipe()
thread = threading.Thread(target=thread_run, args=(message["content"], stream_pipe,))
thread = threading.Thread(
target=thread_run,
args=(
message["content"],
stream_pipe,
),
)
thread.start()
while thread.is_alive():
@@ -52,7 +56,7 @@ def write_tutorial(message):
yield stream_pipe.msg2stream(msg)
@app.route('/v1/chat/completions', methods=['POST'])
@app.route("/v1/chat/completions", methods=["POST"])
def completions():
"""
data: {
@@ -87,7 +91,7 @@ def completions():
return jsonify({"status": 400, "msg": "No suitable agent found."})
@app.route("/download/<path:filename>")
def download_file(filename):
    """Serve a generated tutorial file from TUTORIAL_PATH as an attachment.

    Args:
        filename: Path of the file relative to TUTORIAL_PATH; the
            `<path:...>` converter allows slashes in the name.

    Returns:
        A Flask response that triggers a browser download
        (as_attachment=True) of the requested file.
    """
    # Only the post-format (double-quoted) decorator is kept; the diff showed
    # both the old and new copies of the same route.
    return send_from_directory(TUTORIAL_PATH, filename, as_attachment=True)

View file

@@ -45,7 +45,13 @@ class ZhiPuAILLM(BaseLLM):
def _const_kwargs(self, messages: list[dict], stream: bool = False) -> dict:
max_tokens = self.config.max_token if self.config.max_token > 0 else 1024
temperature = self.config.temperature if self.config.temperature > 0.0 else 0.3
kwargs = {"model": self.model, "max_tokens": max_tokens, "messages": messages, "stream": stream, "temperature": temperature}
kwargs = {
"model": self.model,
"max_tokens": max_tokens,
"messages": messages,
"stream": stream,
"temperature": temperature,
}
return kwargs
def completion(self, messages: list[dict], timeout=USE_CONFIG_TIMEOUT) -> dict:

View file

@@ -5,8 +5,8 @@
# @Version : None
# @Description : None
import time
import json
import time
from multiprocessing import Pipe
@@ -21,17 +8,8 @@ class StreamPipe:
"model": "gpt-3.5-turbo-0125",
"system_fingerprint": "fp_3bc1b5746c",
"choices": [
{
"index": 0,
"delta":
{
"role": "assistant",
"content": "content"
},
"logprobs": None,
"finish_reason": None
}
]
{"index": 0, "delta": {"role": "assistant", "content": "content"}, "logprobs": None, "finish_reason": None}
],
}
def set_message(self, msg):
@@ -44,6 +35,6 @@ class StreamPipe:
return None
def msg2stream(self, msg):
    """Wrap a log message as one OpenAI-style SSE chunk.

    Mutates the shared `self.format_data` template in place: stamps the
    current Unix time into "created" and puts `msg` into the first
    choice's delta content.

    Args:
        msg: The text fragment to stream to the client.

    Returns:
        A UTF-8 encoded `data: {...}\n` server-sent-event line
        (non-ASCII preserved via ensure_ascii=False).
    """
    # Only the post-format (double-quoted) assignments are kept; the diff
    # showed both the old and new copies of the same two statements.
    self.format_data["created"] = int(time.time())
    self.format_data["choices"][0]["delta"]["content"] = msg
    return f"data: {json.dumps(self.format_data, ensure_ascii=False)}\n".encode("utf-8")