test(e2e): wire Slack fake into harness

This commit is contained in:
Anish Sarkar 2026-05-08 03:08:36 +05:30
parent bf23cb2381
commit 73b6375688
4 changed files with 61 additions and 1 deletion

View file

@ -27,6 +27,8 @@ LINEAR_CANARY_TITLE = "E2E Canary Linear Issue"
JIRA_CANARY_TOKEN = "SURFSENSE_E2E_CANARY_TOKEN_JIRA_001"
JIRA_CANARY_SUMMARY = "E2E Canary Jira Issue"
JIRA_CANARY_KEY = "E2E-101"
SLACK_CANARY_TOKEN = "SURFSENSE_E2E_CANARY_TOKEN_SLACK_001"
SLACK_CANARY_CHANNEL = "slack-e2e-canary"
NO_RELEVANT_CONTENT_SENTINEL = "No relevant indexed content found."
NO_RELEVANT_CONTENT_QUERY = "E2E_NO_RELEVANT_CONTENT_SMOKE"
@ -111,6 +113,11 @@ class FakeChatLLM(BaseChatModel):
and JIRA_CANARY_TOKEN in latest_tool_text
):
return f"Jira live tool content found: {JIRA_CANARY_TOKEN}"
if (
latest_tool_name == "slack_search_channels"
and SLACK_CANARY_TOKEN in latest_tool_text
):
return f"Slack live tool content found: {SLACK_CANARY_TOKEN}"
wants_gmail = _contains_any(
latest_human,
@ -147,6 +154,10 @@ class FakeChatLLM(BaseChatModel):
"fake-jira-cloud-001",
),
)
wants_slack = _contains_any(
latest_human,
("slack", SLACK_CANARY_TOKEN),
)
has_gmail_evidence = (
GMAIL_CANARY_SUBJECT in prompt_text
or GMAIL_CANARY_MESSAGE_ID in prompt_text
@ -183,7 +194,15 @@ class FakeChatLLM(BaseChatModel):
or "fake-jira-cloud-001" in prompt_text
or "surfsense-e2e.atlassian.net" in prompt_text
)
has_slack_evidence = (
SLACK_CANARY_CHANNEL in prompt_text
or SLACK_CANARY_TOKEN in prompt_text
or "C_FAKE_SLACK_CANARY" in prompt_text
or "T_FAKE_SLACK_TEAM" in prompt_text
)
if wants_slack and has_slack_evidence:
return f"Slack content found: {SLACK_CANARY_TOKEN}"
if wants_jira and has_jira_evidence:
return f"Jira content found: {JIRA_CANARY_TOKEN}"
if wants_linear and has_linear_evidence:
@ -206,6 +225,7 @@ class FakeChatLLM(BaseChatModel):
and not has_calendar_evidence
and not has_gmail_evidence
and not has_drive_evidence
and not has_slack_evidence
):
return f"Notion content found: {NOTION_CANARY_TOKEN}"
if (
@ -216,6 +236,7 @@ class FakeChatLLM(BaseChatModel):
and not has_calendar_evidence
and not has_gmail_evidence
and not has_drive_evidence
and not has_slack_evidence
):
return f"Confluence content found: {CONFLUENCE_CANARY_TOKEN}"
if (
@ -226,6 +247,7 @@ class FakeChatLLM(BaseChatModel):
and not has_calendar_evidence
and not has_gmail_evidence
and not has_drive_evidence
and not has_slack_evidence
):
return f"Jira content found: {JIRA_CANARY_TOKEN}"
if (
@ -236,6 +258,7 @@ class FakeChatLLM(BaseChatModel):
and not has_calendar_evidence
and not has_gmail_evidence
and not has_drive_evidence
and not has_slack_evidence
):
return f"Linear content found: {LINEAR_CANARY_TOKEN}"
if (
@ -246,6 +269,7 @@ class FakeChatLLM(BaseChatModel):
and not has_notion_evidence
and not has_gmail_evidence
and not has_drive_evidence
and not has_slack_evidence
):
return f"Calendar content found: {CALENDAR_CANARY_TOKEN}"
if (
@ -255,6 +279,7 @@ class FakeChatLLM(BaseChatModel):
and not has_linear_evidence
and not has_notion_evidence
and not has_drive_evidence
and not has_slack_evidence
):
return f"Gmail content found: {GMAIL_CANARY_TOKEN}"
if (
@ -264,8 +289,20 @@ class FakeChatLLM(BaseChatModel):
and not has_linear_evidence
and not has_notion_evidence
and not has_gmail_evidence
and not has_slack_evidence
):
return f"Drive content found: {DRIVE_CANARY_TOKEN}"
if (
has_slack_evidence
and not has_confluence_evidence
and not has_jira_evidence
and not has_linear_evidence
and not has_notion_evidence
and not has_calendar_evidence
and not has_gmail_evidence
and not has_drive_evidence
):
return f"Slack content found: {SLACK_CANARY_TOKEN}"
return NO_RELEVANT_CONTENT_SENTINEL
def _tool_call_message_for(self, messages: list[BaseMessage]) -> AIMessage | None:
@ -373,6 +410,21 @@ class FakeChatLLM(BaseChatModel):
],
)
if latest_tool is None and _contains_any(
latest_human,
("slack", SLACK_CANARY_TOKEN),
):
return AIMessage(
content="",
tool_calls=[
{
"name": "slack_search_channels",
"args": {"query": SLACK_CANARY_CHANNEL, "limit": 5},
"id": "call_e2e_search_slack_channels",
}
],
)
return None
def _generate(

View file

@ -5,7 +5,7 @@
},
"channel": {
"id": "C_FAKE_SLACK_CANARY",
"name": "e2e-canary",
"name": "slack-e2e-canary",
"purpose": "SurfSense E2E Slack canary channel"
},
"messages": [

View file

@ -69,6 +69,8 @@ os.environ.setdefault(
"NOTION_REDIRECT_URI",
"http://localhost:8000/api/v1/auth/notion/connector/callback",
)
os.environ["SLACK_CLIENT_ID"] = "fake-slack-mcp-client-id"
os.environ["SLACK_CLIENT_SECRET"] = "fake-slack-mcp-client-secret"
logging.basicConfig(
level=logging.INFO,
@ -104,6 +106,7 @@ from tests.e2e.fakes import ( # noqa: E402
mcp_runtime as _fake_mcp_runtime,
native_google as _fake_native_google,
notion_module as _fake_notion_module,
slack_module as _fake_slack_module,
)
from tests.e2e.fakes.chat_llm import ( # noqa: E402
fake_create_chat_litellm_from_agent_config,
@ -181,6 +184,7 @@ _fake_linear_module.install(_active_patches)
_fake_jira_module.install(_active_patches)
_fake_mcp_runtime.install(_active_patches)
_fake_mcp_oauth_runtime.install(_active_patches)
_fake_slack_module.install(_active_patches)
# ---------------------------------------------------------------------------

View file

@ -56,6 +56,8 @@ os.environ.setdefault(
"NOTION_REDIRECT_URI",
"http://localhost:8000/api/v1/auth/notion/connector/callback",
)
os.environ["SLACK_CLIENT_ID"] = "fake-slack-mcp-client-id"
os.environ["SLACK_CLIENT_SECRET"] = "fake-slack-mcp-client-secret"
logging.basicConfig(
level=logging.INFO,
@ -89,6 +91,7 @@ from tests.e2e.fakes import ( # noqa: E402
mcp_runtime as _fake_mcp_runtime,
native_google as _fake_native_google,
notion_module as _fake_notion_module,
slack_module as _fake_slack_module,
)
from tests.e2e.fakes.chat_llm import ( # noqa: E402
fake_create_chat_litellm_from_agent_config,
@ -165,6 +168,7 @@ _fake_linear_module.install(_active_patches)
_fake_jira_module.install(_active_patches)
_fake_mcp_runtime.install(_active_patches)
_fake_mcp_oauth_runtime.install(_active_patches)
_fake_slack_module.install(_active_patches)
# ---------------------------------------------------------------------------