compress type to enum

This commit is contained in:
garylin2099 2024-08-02 18:43:55 +08:00
parent 987d90f6ff
commit 25e67db1af
5 changed files with 55 additions and 25 deletions

View file

@@ -8,6 +8,7 @@
import pytest
from metagpt.configs.compress_msg_config import CompressType
from metagpt.configs.llm_config import LLMConfig
from metagpt.provider.base_llm import BaseLLM
from metagpt.schema import Message
@@ -106,9 +107,7 @@ async def test_async_base_llm():
# assert resp == default_resp_cont
@pytest.mark.parametrize(
"compress_type", ["post_cut_by_msg", "post_cut_by_token", "pre_cut_by_msg", "pre_cut_by_token"]
)
@pytest.mark.parametrize("compress_type", list(CompressType))
def test_compress_messages_no_effect(compress_type):
base_llm = MockBaseLLM()
messages = [
@@ -123,9 +122,7 @@ def test_compress_messages_no_effect(compress_type):
assert compressed == messages
@pytest.mark.parametrize(
"compress_type", ["post_cut_by_msg", "post_cut_by_token", "pre_cut_by_msg", "pre_cut_by_token"]
)
@pytest.mark.parametrize("compress_type", CompressType.cut_types())
def test_compress_messages_long(compress_type):
base_llm = MockBaseLLM()
base_llm.config.model = "test_llm"
@@ -142,7 +139,7 @@ def test_compress_messages_long(compress_type):
print(compressed)
print(len(compressed))
assert len(compressed) < len(messages)
assert 3 <= len(compressed) < len(messages)
assert compressed[0]["role"] == "system" and compressed[1]["role"] == "system"
assert compressed[2]["role"] != "system"
@@ -154,9 +151,7 @@ def test_long_messages_no_compress():
assert len(compressed) == len(messages)
@pytest.mark.parametrize(
"compress_type", ["post_cut_by_msg", "post_cut_by_token", "pre_cut_by_msg", "pre_cut_by_token"]
)
@pytest.mark.parametrize("compress_type", CompressType.cut_types())
def test_compress_messages_long_no_sys_msg(compress_type):
base_llm = MockBaseLLM()
base_llm.config.model = "test_llm"

View file

@@ -9,6 +9,7 @@ from openai.types.chat.chat_completion import Choice, CompletionUsage
from openai.types.chat.chat_completion_message_tool_call import Function
from PIL import Image
from metagpt.configs.compress_msg_config import CompressType
from metagpt.const import TEST_DATA_PATH
from metagpt.llm import LLM
from metagpt.logs import logger
@@ -202,7 +203,7 @@ def test_count_tokens_long():
async def test_aask_long():
llm = LLM()
llm.config.model = "deepseek-ai/DeepSeek-Coder-V2-Instruct" # deepseek-coder on siliconflow, limit 32k
llm.config.compress_type = "post_cut_by_token"
llm.config.compress_type = CompressType.POST_CUT_BY_TOKEN
test_msg_content = " ".join([str(i) for i in range(100000)]) # corresponds to ~300k tokens
messages = [
llm._system_msg("You are a helpful assistant"),