diff --git a/config/config.yaml b/config/config.yaml index 249552693..9ef923366 100644 --- a/config/config.yaml +++ b/config/config.yaml @@ -2,10 +2,10 @@ # The configuration of key.yaml has a higher priority and will not enter git #### if OpenAI -## The official OPENAI_BASE_URL is https://api.openai.com/v1/ +## The official OPENAI_BASE_URL is https://api.openai.com/v1 ## If the official OPENAI_BASE_URL is not available, we recommend using the [openai-forward](https://github.com/beidongjiedeguang/openai-forward). ## Or, you can configure OPENAI_PROXY to access official OPENAI_BASE_URL. -OPENAI_BASE_URL: "https://api.openai.com/v1/" +OPENAI_BASE_URL: "https://api.openai.com/v1" #OPENAI_PROXY: "http://127.0.0.1:8118" #OPENAI_API_KEY: "YOUR_API_KEY" # set the value to sk-xxx if you host the openai interface for open llm model OPENAI_API_MODEL: "gpt-4" diff --git a/docs/FAQ-EN.md b/docs/FAQ-EN.md index 1c5b4a86a..fe2def1e1 100644 --- a/docs/FAQ-EN.md +++ b/docs/FAQ-EN.md @@ -83,10 +83,10 @@ 1. PRD stuck / unable to access/ connection interrupted - 1. The official OPENAI_BASE_URL address is `https://api.openai.com/v1/` - 1. If the official OPENAI_BASE_URL address is inaccessible in your environment (this can be verified with curl), it's recommended to configure using the reverse proxy OPENAI_BASE_URL provided by libraries such as openai-forward. For instance, `OPENAI_BASE_URL: "``https://api.openai-forward.com/v1/``"` + 1. The official OPENAI_BASE_URL address is `https://api.openai.com/v1` + 1. If the official OPENAI_BASE_URL address is inaccessible in your environment (this can be verified with curl), it's recommended to configure using the reverse proxy OPENAI_BASE_URL provided by libraries such as openai-forward. For instance, `OPENAI_BASE_URL: "``https://api.openai-forward.com/v1``"` 1. If the official OPENAI_BASE_URL address is inaccessible in your environment (again, verifiable via curl), another option is to configure the OPENAI_PROXY parameter. 
This way, you can access the official OPENAI_BASE_URL via a local proxy. If you don't need to access via a proxy, please do not enable this configuration; if accessing through a proxy is required, modify it to the correct proxy address. Note that when OPENAI_PROXY is enabled, don't set OPENAI_BASE_URL. - 1. Note: OpenAI's default API design ends with a v1. An example of the correct configuration is: `OPENAI_BASE_URL: "``https://api.openai.com/v1/``"` + 1. Note: OpenAI's default API design ends with a v1. An example of the correct configuration is: `OPENAI_BASE_URL: "``https://api.openai.com/v1``"` 1. Absolutely! How can I assist you today? diff --git a/docs/README_JA.md b/docs/README_JA.md index 33b08b770..14e7c3111 100644 --- a/docs/README_JA.md +++ b/docs/README_JA.md @@ -219,7 +219,7 @@ # 設定ファイルをコピーし、必要な修正を加える。 | 変数名 | config/key.yaml | env | | --------------------------------------- | ----------------------------------------- | ----------------------------------------------- | | OPENAI_API_KEY # 自分のキーに置き換える | OPENAI_API_KEY: "sk-..." | export OPENAI_API_KEY="sk-..." | -| OPENAI_BASE_URL # オプション | OPENAI_BASE_URL: "https:///v1/" | export OPENAI_BASE_URL="https:///v1/" | +| OPENAI_BASE_URL # オプション | OPENAI_BASE_URL: "https:///v1" | export OPENAI_BASE_URL="https:///v1" | ## チュートリアル: スタートアップの開始 diff --git a/docs/tutorial/usage.md b/docs/tutorial/usage.md index f8a25c84f..e6b4a7cc5 100644 --- a/docs/tutorial/usage.md +++ b/docs/tutorial/usage.md @@ -13,7 +13,7 @@ # Copy the configuration file and make the necessary modifications. | Variable Name | config/key.yaml | env | | ------------------------------------------ | ----------------------------------------- | ----------------------------------------------- | | OPENAI_API_KEY # Replace with your own key | OPENAI_API_KEY: "sk-..." | export OPENAI_API_KEY="sk-..." 
| -| OPENAI_BASE_URL # Optional | OPENAI_BASE_URL: "https:///v1/" | export OPENAI_BASE_URL="https:///v1/" | +| OPENAI_BASE_URL # Optional | OPENAI_BASE_URL: "https:///v1" | export OPENAI_BASE_URL="https:///v1" | ### Initiating a startup diff --git a/docs/tutorial/usage_cn.md b/docs/tutorial/usage_cn.md index ddd1c2267..195eec674 100644 --- a/docs/tutorial/usage_cn.md +++ b/docs/tutorial/usage_cn.md @@ -13,7 +13,7 @@ # 复制配置文件并进行必要的修改 | 变量名 | config/key.yaml | env | | ----------------------------------- | ----------------------------------------- | ----------------------------------------------- | | OPENAI_API_KEY # 用您自己的密钥替换 | OPENAI_API_KEY: "sk-..." | export OPENAI_API_KEY="sk-..." | -| OPENAI_BASE_URL # 可选 | OPENAI_BASE_URL: "https:///v1/" | export OPENAI_BASE_URL="https:///v1/" | +| OPENAI_BASE_URL # 可选 | OPENAI_BASE_URL: "https:///v1" | export OPENAI_BASE_URL="https:///v1" | ### 示例：启动一个创业公司 diff --git a/metagpt/provider/openai_api.py b/metagpt/provider/openai_api.py index 3853e0ea6..98551c370 100644 @@ -26,6 +26,7 @@ from metagpt.logs import logger from metagpt.provider.base_gpt_api import BaseGPTAPI from metagpt.provider.constant import GENERAL_FUNCTION_SCHEMA, GENERAL_TOOL_CHOICE from metagpt.schema import Message +from metagpt.utils.common import ensure_trailing_slash from metagpt.utils.singleton import Singleton from metagpt.utils.token_counter import ( TOKEN_COSTS, @@ -153,27 +154,37 @@ class OpenAIGPTAPI(BaseGPTAPI, RateLimiter): RateLimiter.__init__(self, rpm=self.rpm) def __init_openai(self, config: Config): - client_kwargs, async_client_kwargs = self.__make_client_args(config) + client_kwargs, async_client_kwargs = self._make_client_kwargs(config) self.client = OpenAI(**client_kwargs) self.async_client = AsyncOpenAI(**async_client_kwargs) self.rpm = int(config.get("RPM", 10)) - def __make_client_args(self, config: Config): + def _make_client_kwargs(self, config: Config) -> tuple[dict,
dict]: mapping = { "api_key": "openai_api_key", "base_url": "openai_base_url", } + kwargs = {} + for key, attr in mapping.items(): + value = getattr(config, attr, None) + if value: + kwargs[key] = value + + if config.openai_base_url: + kwargs["base_url"] = ensure_trailing_slash(config.openai_base_url) - kwargs = {key: getattr(config, mapping[key]) for key in mapping if getattr(config, mapping[key], None)} async_kwargs = kwargs.copy() - # need http_client to support proxy + # Create http_client if proxy is specified if config.openai_proxy: - httpx_args = dict(base_url=kwargs["base_url"], proxies=config.openai_proxy) - kwargs["http_client"] = httpx.Client(**httpx_args) - async_kwargs["http_client"] = httpx.AsyncClient(**httpx_args) + params = {"proxies": config.openai_proxy} + if config.openai_base_url: + params["base_url"] = config.openai_base_url + + kwargs["http_client"] = httpx.Client(**params) + async_kwargs["http_client"] = httpx.AsyncClient(**params) return kwargs, async_kwargs diff --git a/metagpt/utils/common.py b/metagpt/utils/common.py index f09666beb..c69a0fe10 100644 --- a/metagpt/utils/common.py +++ b/metagpt/utils/common.py @@ -305,3 +305,9 @@ def parse_recipient(text): pattern = r"## Send To:\s*([A-Za-z]+)\s*?"
# hard code for now recipient = re.search(pattern, text) return recipient.group(1) if recipient else "" + + +def ensure_trailing_slash(url): + if not url: + return url + return url if url.endswith("/") else url + "/" diff --git a/tests/metagpt/provider/test_openai.py b/tests/metagpt/provider/test_openai.py index 2b0af37b5..3e8dbf7e7 100644 --- a/tests/metagpt/provider/test_openai.py +++ b/tests/metagpt/provider/test_openai.py @@ -1,4 +1,5 @@ import pytest +from httpx import AsyncClient, Client from metagpt.provider.openai_api import OpenAIGPTAPI from metagpt.schema import UserMessage @@ -78,3 +79,43 @@ def test_ask_code_list_str(): assert "language" in rsp assert "code" in rsp assert len(rsp["code"]) > 0 + + +def test_make_client_kwargs(): + class Config: + openai_api_key = "test_key" + openai_base_url = "test_url" + openai_proxy = "http://test_proxy" + + config = Config() + obj = OpenAIGPTAPI() + kwargs, async_kwargs = obj._make_client_kwargs(config) + + assert kwargs["api_key"] == "test_key" + assert kwargs["base_url"] == "test_url/" + assert isinstance(kwargs["http_client"], Client) + assert kwargs["http_client"].base_url == "test_url/" + + assert async_kwargs["api_key"] == "test_key" + assert async_kwargs["base_url"] == "test_url/" + assert isinstance(async_kwargs["http_client"], AsyncClient) + assert async_kwargs["http_client"].base_url == "test_url/" + + +def test_make_client_kwargs_no_proxy(): + class Config: + openai_api_key = "test_key" + openai_base_url = "test_url" + openai_proxy = None + + config = Config() + obj = OpenAIGPTAPI() + kwargs, async_kwargs = obj._make_client_kwargs(config) + + assert kwargs["api_key"] == "test_key" + assert kwargs["base_url"] == "test_url/" + assert "http_client" not in kwargs + + assert async_kwargs["api_key"] == "test_key" + assert async_kwargs["base_url"] == "test_url/" + assert "http_client" not in async_kwargs