Merge pull request #481 from garylin2099/human_roleplay

allow a human to play any role
This commit is contained in:
garylin2099 2023-11-17 16:50:46 +08:00 committed by GitHub
commit 820fee5df6
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 218 additions and 19 deletions

View file

@ -8,6 +8,7 @@
from metagpt.provider.anthropic_api import Claude2 as Claude
from metagpt.provider.openai_api import OpenAIGPTAPI as LLM
from metagpt.provider.human_provider import HumanProvider
DEFAULT_LLM = LLM()
CLAUDE_LLM = Claude()

View file

@ -0,0 +1,35 @@
'''
Filename: MetaGPT/metagpt/provider/human_provider.py
Created Date: Wednesday, November 8th 2023, 11:55:46 pm
Author: garylin2099
'''
from typing import Optional
from metagpt.provider.base_gpt_api import BaseGPTAPI
from metagpt.logs import logger
class HumanProvider(BaseGPTAPI):
    """Humans provide themselves as a 'model', which actually takes in human input as its response.
    This enables replacing LLM anywhere in the framework with a human, thus introducing human interaction.
    """

    def ask(self, msg: str) -> str:
        """Prompt the human on stdin and return their reply.

        Typing "exit" or "quit" terminates the program (raises SystemExit).
        """
        logger.info("It's your turn, please type in your response. You may also refer to the context below")
        rsp = input(msg)
        if rsp in ["exit", "quit"]:
            exit()
        return rsp

    async def aask(self, msg: str, system_msgs: Optional[list[str]] = None) -> str:
        """Async counterpart of ask(); system_msgs is accepted for interface parity but ignored."""
        return self.ask(msg)

    def completion(self, messages: list[dict]) -> list:
        """dummy implementation of abstract method in base"""
        return []

    async def acompletion(self, messages: list[dict]) -> list:
        """dummy implementation of abstract method in base"""
        return []

    async def acompletion_text(self, messages: list[dict], stream=False) -> str:
        """dummy implementation of abstract method in base.

        Fixed to return an empty string rather than an empty list: the
        declared return type is ``str``, and callers performing string
        operations on the result would otherwise fail on a ``list``.
        """
        return ""

View file

@ -15,7 +15,7 @@ from pydantic import BaseModel, Field
# from metagpt.environment import Environment
from metagpt.config import CONFIG
from metagpt.actions import Action, ActionOutput
from metagpt.llm import LLM
from metagpt.llm import LLM, HumanProvider
from metagpt.logs import logger
from metagpt.memory import Memory, LongTermMemory
from metagpt.schema import Message
@ -65,6 +65,7 @@ class RoleSetting(BaseModel):
goal: str
constraints: str
desc: str
is_human: bool
def __str__(self):
return f"{self.name}({self.profile})"
@ -106,9 +107,10 @@ class RoleContext(BaseModel):
class Role:
"""Role/Agent"""
def __init__(self, name="", profile="", goal="", constraints="", desc=""):
self._llm = LLM()
self._setting = RoleSetting(name=name, profile=profile, goal=goal, constraints=constraints, desc=desc)
def __init__(self, name="", profile="", goal="", constraints="", desc="", is_human=False):
self._llm = LLM() if not is_human else HumanProvider()
self._setting = RoleSetting(name=name, profile=profile, goal=goal,
constraints=constraints, desc=desc, is_human=is_human)
self._states = []
self._actions = []
self._role_id = str(self._setting)
@ -122,8 +124,11 @@ class Role:
self._reset()
for idx, action in enumerate(actions):
if not isinstance(action, Action):
i = action("")
i = action("", llm=self._llm)
else:
if self._setting.is_human and not isinstance(action.llm, HumanProvider):
logger.warning(f"is_human attribute does not take effect,"
f"as Role's {str(action)} was initialized using LLM, try passing in Action classes instead of initialized instances")
i = action
i.set_prefix(self._get_prefix(), self.profile)
self._actions.append(i)