diff --git a/config/config.yaml b/config/config.yaml
index 355413d1f..ceab18854 100644
--- a/config/config.yaml
+++ b/config/config.yaml
@@ -61,4 +61,8 @@ SD_T2I_API: "/sdapi/v1/txt2img"
 #### for Mermaid CLI
 ## If you installed mmdc (Mermaid CLI) only for metagpt then enable the following configuration.
 #PUPPETEER_CONFIG: "./config/puppeteer-config.json"
-#MMDC: "./node_modules/.bin/mmdc"
\ No newline at end of file
+#MMDC: "./node_modules/.bin/mmdc"
+
+### Cost tracking & token-usage calculation are enabled by default; uncomment to disable.
+#UPDATE_COSTS: false
+#CALC_USAGE: false
\ No newline at end of file
diff --git a/metagpt/config.py b/metagpt/config.py
index 0f8ed37d6..d53571468 100644
--- a/metagpt/config.py
+++ b/metagpt/config.py
@@ -79,6 +79,8 @@ class Config(metaclass=Singleton):
         self.total_cost = 0.0
         self.puppeteer_config = self._get("PUPPETEER_CONFIG","")
         self.mmdc = self._get("MMDC","mmdc")
+        self.update_costs = self._get("UPDATE_COSTS",True)
+        self.calc_usage = self._get("CALC_USAGE",True)
 
     def _init_with_config_files_and_env(self, configs: dict, yaml_file):
         """从config/key.yaml / config/config.yaml / env三处按优先级递减加载"""
diff --git a/metagpt/provider/openai_api.py b/metagpt/provider/openai_api.py
index f6499c643..fe9532d43 100644
--- a/metagpt/provider/openai_api.py
+++ b/metagpt/provider/openai_api.py
@@ -221,10 +221,11 @@ class OpenAIGPTAPI(BaseGPTAPI, RateLimiter):
 
     def _calc_usage(self, messages: list[dict], rsp: str) -> dict:
         usage = {}
-        prompt_tokens = count_message_tokens(messages, self.model)
-        completion_tokens = count_string_tokens(rsp, self.model)
-        usage['prompt_tokens'] = prompt_tokens
-        usage['completion_tokens'] = completion_tokens
+        if CONFIG.calc_usage:
+            prompt_tokens = count_message_tokens(messages, self.model)
+            completion_tokens = count_string_tokens(rsp, self.model)
+            usage['prompt_tokens'] = prompt_tokens
+            usage['completion_tokens'] = completion_tokens
         return usage
 
     async def acompletion_batch(self, batch: list[list[dict]]) -> list[dict]:
@@ -254,9 +255,10 @@ class OpenAIGPTAPI(BaseGPTAPI, RateLimiter):
         return results
 
     def _update_costs(self, usage: dict):
-        prompt_tokens = int(usage['prompt_tokens'])
-        completion_tokens = int(usage['completion_tokens'])
-        self._cost_manager.update_cost(prompt_tokens, completion_tokens, self.model)
+        if CONFIG.update_costs and usage:
+            prompt_tokens = int(usage['prompt_tokens'])
+            completion_tokens = int(usage['completion_tokens'])
+            self._cost_manager.update_cost(prompt_tokens, completion_tokens, self.model)
 
     def get_costs(self) -> Costs:
         return self._cost_manager.get_costs()