feat: merge geekan:env_refactor

This commit is contained in:
莘权 马 2023-12-15 17:13:56 +08:00
parent be832c9995
commit 1a36361691
5 changed files with 62 additions and 57 deletions

View file

@@ -5,7 +5,8 @@
import openai
from metagpt.config import CONFIG
from metagpt.provider.openai_api import CostManager, OpenAIGPTAPI, RateLimiter
from metagpt.provider.openai_api import OpenAIGPTAPI, RateLimiter
from metagpt.utils.cost_manager import CostManager
class FireWorksGPTAPI(OpenAIGPTAPI):

View file

@@ -6,7 +6,8 @@ import openai
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.provider.openai_api import CostManager, OpenAIGPTAPI, RateLimiter
from metagpt.provider.openai_api import OpenAIGPTAPI, RateLimiter
from metagpt.utils.cost_manager import CostManager
class OpenLLMCostManager(CostManager):

View file

@@ -118,7 +118,11 @@ class OpenAIGPTAPI(BaseGPTAPI, RateLimiter):
kwargs["model"] = CONFIG.deployment_id
else:
kwargs["model"] = self.model
kwargs["timeout"] = max(CONFIG.TIMEOUT, timeout) if CONFIG.TIMEOUT is not None else timeout
try:
default_timeout = int(CONFIG.TIMEOUT) if CONFIG.TIMEOUT else 0
except ValueError:
default_timeout = 0
kwargs["timeout"] = max(default_timeout, timeout)
return kwargs

View file

@@ -45,6 +45,7 @@ class ProductManager(Role):
self._init_actions([PrepareDocuments, WritePRD])
self._watch([UserRequirement, PrepareDocuments])
self._todo = any_to_name(PrepareDocuments)
async def _think(self) -> None:
"""Decide what to do"""
@@ -52,6 +53,7 @@ class ProductManager(Role):
self._set_state(1)
else:
self._set_state(0)
self._todo = any_to_name(WritePRD)
return self._rc.todo
async def _observe(self, ignore_memory=False) -> int:
@@ -59,7 +61,4 @@ class ProductManager(Role):
@property
def todo(self) -> str:
if self._rc.state == 0:
return any_to_name(WritePRD)
else:
return any_to_name(PrepareDocuments)
return self._todo

View file

@@ -93,57 +93,57 @@ async def mermaid_to_file(mermaid_code, output_file_without_suffix, width=2048,
return 0
MMC1 = """classDiagram
class Main {
-SearchEngine search_engine
+main() str
}
class SearchEngine {
-Index index
-Ranking ranking
-Summary summary
+search(query: str) str
}
class Index {
-KnowledgeBase knowledge_base
+create_index(data: dict)
+query_index(query: str) list
}
class Ranking {
+rank_results(results: list) list
}
class Summary {
+summarize_results(results: list) str
}
class KnowledgeBase {
+update(data: dict)
+fetch_data(query: str) dict
}
Main --> SearchEngine
SearchEngine --> Index
SearchEngine --> Ranking
SearchEngine --> Summary
Index --> KnowledgeBase"""
MMC2 = """sequenceDiagram
participant M as Main
participant SE as SearchEngine
participant I as Index
participant R as Ranking
participant S as Summary
participant KB as KnowledgeBase
M->>SE: search(query)
SE->>I: query_index(query)
I->>KB: fetch_data(query)
KB-->>I: return data
I-->>SE: return results
SE->>R: rank_results(results)
R-->>SE: return ranked_results
SE->>S: summarize_results(ranked_results)
S-->>SE: return summary
SE-->>M: return summary"""
if __name__ == "__main__":
MMC1 = """classDiagram
class Main {
-SearchEngine search_engine
+main() str
}
class SearchEngine {
-Index index
-Ranking ranking
-Summary summary
+search(query: str) str
}
class Index {
-KnowledgeBase knowledge_base
+create_index(data: dict)
+query_index(query: str) list
}
class Ranking {
+rank_results(results: list) list
}
class Summary {
+summarize_results(results: list) str
}
class KnowledgeBase {
+update(data: dict)
+fetch_data(query: str) dict
}
Main --> SearchEngine
SearchEngine --> Index
SearchEngine --> Ranking
SearchEngine --> Summary
Index --> KnowledgeBase"""
MMC2 = """sequenceDiagram
participant M as Main
participant SE as SearchEngine
participant I as Index
participant R as Ranking
participant S as Summary
participant KB as KnowledgeBase
M->>SE: search(query)
SE->>I: query_index(query)
I->>KB: fetch_data(query)
KB-->>I: return data
I-->>SE: return results
SE->>R: rank_results(results)
R-->>SE: return ranked_results
SE->>S: summarize_results(ranked_results)
S-->>SE: return summary
SE-->>M: return summary"""
loop = asyncio.new_event_loop()
result = loop.run_until_complete(mermaid_to_file(MMC1, METAGPT_ROOT / f"{CONFIG.mermaid_engine}/1"))
result = loop.run_until_complete(mermaid_to_file(MMC2, METAGPT_ROOT / f"{CONFIG.mermaid_engine}/2"))