mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-06 22:32:39 +02:00
Add OneDrive, Slack, and Teams connector route slices.
This commit is contained in:
parent
6e54fb00e3
commit
745bcec390
24 changed files with 1297 additions and 0 deletions
|
|
@ -0,0 +1,54 @@
|
||||||
|
"""`onedrive` route: ``SubAgent`` spec for deepagents."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from deepagents import SubAgent
|
||||||
|
from langchain_core.language_models import BaseChatModel
|
||||||
|
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.md_file_reader import (
|
||||||
|
read_md_file,
|
||||||
|
)
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.permissions import (
|
||||||
|
ToolsPermissions,
|
||||||
|
merge_tools_permissions,
|
||||||
|
)
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.subagent_builder import (
|
||||||
|
pack_subagent,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .tools.index import load_tools
|
||||||
|
|
||||||
|
NAME = "onedrive"


def build_subagent(
    *,
    dependencies: dict[str, Any],
    model: BaseChatModel | None = None,
    extra_middleware: Sequence[Any] | None = None,
    extra_tools_bucket: ToolsPermissions | None = None,
) -> SubAgent:
    """Assemble the OneDrive ``SubAgent`` spec for deepagents.

    Loads this route's tool buckets, merges in any caller-supplied extra
    bucket, and packs the result together with the markdown description and
    system prompt shipped alongside this package.
    """
    merged = merge_tools_permissions(
        load_tools(dependencies=dependencies), extra_tools_bucket
    )

    # Flatten both permission buckets into one tool list, skipping rows that
    # carry no concrete tool object.
    tools = []
    for row in (*merged["allow"], *merged["ask"]):
        tool_obj = row.get("tool")
        if tool_obj is not None:
            tools.append(tool_obj)

    # Every named tool in the "ask" bucket requires a human-in-the-loop
    # interrupt before it may run.
    interrupt_on = {}
    for row in merged["ask"]:
        if row.get("name"):
            interrupt_on[row["name"]] = True

    description = read_md_file(__package__, "description").strip() or (
        "Handles onedrive tasks for this workspace."
    )
    system_prompt = read_md_file(__package__, "system_prompt").strip()

    return pack_subagent(
        name=NAME,
        description=description,
        system_prompt=system_prompt,
        tools=tools,
        interrupt_on=interrupt_on,
        model=model,
        extra_middleware=extra_middleware,
    )
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
Use for OneDrive file storage tasks: create new Word documents and delete (trash) existing OneDrive files.
|
||||||
|
|
@ -0,0 +1,52 @@
|
||||||
|
You are the Microsoft OneDrive operations sub-agent.
|
||||||
|
You receive delegated instructions from a supervisor agent and return structured results for supervisor synthesis.
|
||||||
|
|
||||||
|
<goal>
|
||||||
|
Execute OneDrive file create/delete actions accurately in the connected account.
|
||||||
|
</goal>
|
||||||
|
|
||||||
|
<available_tools>
|
||||||
|
- `create_onedrive_file`
|
||||||
|
- `delete_onedrive_file`
|
||||||
|
</available_tools>
|
||||||
|
|
||||||
|
<tool_policy>
|
||||||
|
- Use only tools in `<available_tools>`.
|
||||||
|
- Ensure file identity/path is explicit before mutate actions.
|
||||||
|
- If ambiguous, return `status=blocked` with candidate paths and supervisor next step.
|
||||||
|
- Never invent IDs/paths or mutation results.
|
||||||
|
</tool_policy>
|
||||||
|
|
||||||
|
<out_of_scope>
|
||||||
|
- Do not perform non-OneDrive tasks.
|
||||||
|
</out_of_scope>
|
||||||
|
|
||||||
|
<safety>
|
||||||
|
- Never claim file mutation success without tool confirmation.
|
||||||
|
</safety>
|
||||||
|
|
||||||
|
<failure_policy>
|
||||||
|
- On tool failure, return `status=error` with concise recovery `next_step`.
|
||||||
|
- On ambiguous targets, return `status=blocked` with candidate paths.
|
||||||
|
</failure_policy>
|
||||||
|
|
||||||
|
<output_contract>
|
||||||
|
Return **only** one JSON object (no markdown/prose):
|
||||||
|
{
|
||||||
|
"status": "success" | "partial" | "blocked" | "error",
|
||||||
|
"action_summary": string,
|
||||||
|
"evidence": {
|
||||||
|
"file_id": string | null,
|
||||||
|
"file_path": string | null,
|
||||||
|
"operation": "create" | "delete" | null,
|
||||||
|
"matched_candidates": string[] | null
|
||||||
|
},
|
||||||
|
"next_step": string | null,
|
||||||
|
"missing_fields": string[] | null,
|
||||||
|
"assumptions": string[] | null
|
||||||
|
}
|
||||||
|
Rules:
|
||||||
|
- `status=success` -> `next_step=null`, `missing_fields=null`.
|
||||||
|
- `status=partial|blocked|error` -> `next_step` must be non-null.
|
||||||
|
- `status=blocked` due to missing required inputs -> `missing_fields` must be non-null.
|
||||||
|
</output_contract>
|
||||||
|
|
@ -0,0 +1,11 @@
|
||||||
|
from app.agents.new_chat.tools.onedrive.create_file import (
|
||||||
|
create_create_onedrive_file_tool,
|
||||||
|
)
|
||||||
|
from app.agents.new_chat.tools.onedrive.trash_file import (
|
||||||
|
create_delete_onedrive_file_tool,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"create_create_onedrive_file_tool",
|
||||||
|
"create_delete_onedrive_file_tool",
|
||||||
|
]
|
||||||
|
|
@ -0,0 +1,252 @@
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from langchain_core.tools import tool
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy.future import select
|
||||||
|
|
||||||
|
from app.agents.new_chat.tools.hitl import request_approval
|
||||||
|
from app.connectors.onedrive.client import OneDriveClient
|
||||||
|
from app.db import SearchSourceConnector, SearchSourceConnectorType
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
DOCX_MIME = "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_docx_extension(name: str) -> str:
|
||||||
|
"""Strip any existing extension and append .docx."""
|
||||||
|
stem = Path(name).stem
|
||||||
|
return f"{stem}.docx"
|
||||||
|
|
||||||
|
|
||||||
|
def _markdown_to_docx(markdown_text: str) -> bytes:
    """Render a GitHub-flavored markdown string to DOCX bytes via pypandoc."""
    import pypandoc

    # pypandoc can only emit DOCX to a file, so round-trip through a
    # temporary path that is cleaned up regardless of conversion outcome.
    with tempfile.TemporaryDirectory() as tmp_dir:
        out_path = Path(tmp_dir) / "converted.docx"
        pypandoc.convert_text(
            markdown_text,
            "docx",
            format="gfm",
            extra_args=["--standalone"],
            outputfile=str(out_path),
        )
        return out_path.read_bytes()
|
||||||
|
|
||||||
|
|
||||||
|
def create_create_onedrive_file_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Factory: bind DB session, workspace, and user into a `create_onedrive_file` tool.

    If any binding is left as None, the returned tool reports a configuration
    error instead of attempting OneDrive calls.
    """

    @tool
    async def create_onedrive_file(
        name: str,
        content: str | None = None,
    ) -> dict[str, Any]:
        """Create a new Word document (.docx) in Microsoft OneDrive.

        Use this tool when the user explicitly asks to create a new document
        in OneDrive. The user MUST specify a topic before you call this tool.

        The file is always saved as a .docx Word document. Provide content as
        markdown and it will be automatically converted to a formatted Word file.

        Args:
            name: The document title (without extension). Extension will be set to .docx automatically.
            content: Optional initial content as markdown. Will be converted to a formatted Word document.

        Returns:
            Dictionary with status, file_id, name, web_url, and message.
        """
        logger.info(f"create_onedrive_file called: name='{name}'")

        if db_session is None or search_space_id is None or user_id is None:
            return {
                "status": "error",
                "message": "OneDrive tool not properly configured.",
            }

        try:
            # Find every OneDrive connector this user has in the workspace.
            result = await db_session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.search_space_id == search_space_id,
                    SearchSourceConnector.user_id == user_id,
                    SearchSourceConnector.connector_type
                    == SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
                )
            )
            connectors = result.scalars().all()

            if not connectors:
                return {
                    "status": "error",
                    "message": "No OneDrive connector found. Please connect OneDrive in your workspace settings.",
                }

            # Summarize each account for the approval UI.
            accounts = []
            for c in connectors:
                cfg = c.config or {}
                accounts.append(
                    {
                        "id": c.id,
                        "name": c.name,
                        "user_email": cfg.get("user_email"),
                        "auth_expired": cfg.get("auth_expired", False),
                    }
                )

            if all(a.get("auth_expired") for a in accounts):
                return {
                    "status": "auth_error",
                    "message": "All connected OneDrive accounts need re-authentication.",
                    "connector_type": "onedrive",
                }

            # Best-effort: list each account's root folders so the approval
            # prompt can offer a destination; failures degrade to an empty list.
            parent_folders: dict[int, list[dict[str, str]]] = {}
            for acc in accounts:
                cid = acc["id"]
                if acc.get("auth_expired"):
                    parent_folders[cid] = []
                    continue
                try:
                    client = OneDriveClient(session=db_session, connector_id=cid)
                    items, err = await client.list_children("root")
                    if err:
                        logger.warning(
                            "Failed to list folders for connector %s: %s", cid, err
                        )
                        parent_folders[cid] = []
                    else:
                        # Keep only folder entries that have both id and name.
                        parent_folders[cid] = [
                            {"folder_id": item["id"], "name": item["name"]}
                            for item in items
                            if item.get("folder") is not None
                            and item.get("id")
                            and item.get("name")
                        ]
                except Exception:
                    logger.warning(
                        "Error fetching folders for connector %s", cid, exc_info=True
                    )
                    parent_folders[cid] = []

            context: dict[str, Any] = {
                "accounts": accounts,
                "parent_folders": parent_folders,
            }

            # Human-in-the-loop gate: the user may edit name/content and pick
            # a connector/folder; edited values come back in result.params.
            result = request_approval(
                action_type="onedrive_file_creation",
                tool_name="create_onedrive_file",
                params={
                    "name": name,
                    "content": content,
                    "connector_id": None,
                    "parent_folder_id": None,
                },
                context=context,
            )

            if result.rejected:
                return {
                    "status": "rejected",
                    "message": "User declined. Do not retry or suggest alternatives.",
                }

            final_name = result.params.get("name", name)
            final_content = result.params.get("content", content)
            final_connector_id = result.params.get("connector_id")
            final_parent_folder_id = result.params.get("parent_folder_id")

            if not final_name or not final_name.strip():
                return {"status": "error", "message": "File name cannot be empty."}

            final_name = _ensure_docx_extension(final_name)

            # Re-validate a user-chosen connector against this workspace/user;
            # otherwise fall back to the first discovered connector.
            if final_connector_id is not None:
                result = await db_session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == final_connector_id,
                        SearchSourceConnector.search_space_id == search_space_id,
                        SearchSourceConnector.user_id == user_id,
                        SearchSourceConnector.connector_type
                        == SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
                    )
                )
                connector = result.scalars().first()
            else:
                connector = connectors[0]

            if not connector:
                return {
                    "status": "error",
                    "message": "Selected OneDrive connector is invalid.",
                }

            docx_bytes = _markdown_to_docx(final_content or "")

            client = OneDriveClient(session=db_session, connector_id=connector.id)
            created = await client.create_file(
                name=final_name,
                parent_id=final_parent_folder_id,
                content=docx_bytes,
                mime_type=DOCX_MIME,
            )

            logger.info(
                f"OneDrive file created: id={created.get('id')}, name={created.get('name')}"
            )

            # Best-effort knowledge-base sync; failure never fails the create.
            kb_message_suffix = ""
            try:
                from app.services.onedrive import OneDriveKBSyncService

                kb_service = OneDriveKBSyncService(db_session)
                kb_result = await kb_service.sync_after_create(
                    file_id=created.get("id"),
                    file_name=created.get("name", final_name),
                    mime_type=DOCX_MIME,
                    web_url=created.get("webUrl"),
                    content=final_content,
                    connector_id=connector.id,
                    search_space_id=search_space_id,
                    user_id=user_id,
                )
                if kb_result["status"] == "success":
                    kb_message_suffix = " Your knowledge base has also been updated."
                else:
                    kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
            except Exception as kb_err:
                logger.warning(f"KB sync after create failed: {kb_err}")
                kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."

            return {
                "status": "success",
                "file_id": created.get("id"),
                "name": created.get("name"),
                "web_url": created.get("webUrl"),
                "message": f"Successfully created '{created.get('name')}' in OneDrive.{kb_message_suffix}",
            }

        except Exception as e:
            # GraphInterrupt is LangGraph control flow (the HITL pause), not a
            # failure — it must propagate untouched.
            from langgraph.errors import GraphInterrupt

            if isinstance(e, GraphInterrupt):
                raise
            logger.error(f"Error creating OneDrive file: {e}", exc_info=True)
            return {
                "status": "error",
                "message": "Something went wrong while creating the file. Please try again.",
            }

    return create_onedrive_file
|
||||||
|
|
@ -0,0 +1,28 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.permissions import (
|
||||||
|
ToolsPermissions,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .create_file import create_create_onedrive_file_tool
|
||||||
|
from .trash_file import create_delete_onedrive_file_tool
|
||||||
|
|
||||||
|
|
||||||
|
def load_tools(*, dependencies: dict[str, Any] | None = None, **kwargs: Any) -> ToolsPermissions:
    """Instantiate the OneDrive route's tools and sort them into permission buckets.

    Keyword overrides in ``kwargs`` take precedence over ``dependencies``.
    Both mutating tools land in the "ask" bucket so every invocation is gated
    by human approval; the "allow" bucket stays empty.
    """
    merged_deps = {**(dependencies or {}), **kwargs}
    factory_kwargs = {
        key: merged_deps[key] for key in ("db_session", "search_space_id", "user_id")
    }

    ask_rows = []
    for factory in (create_create_onedrive_file_tool, create_delete_onedrive_file_tool):
        tool_obj = factory(**factory_kwargs)
        ask_rows.append({"name": getattr(tool_obj, "name", "") or "", "tool": tool_obj})

    return {"allow": [], "ask": ask_rows}
|
||||||
|
|
@ -0,0 +1,281 @@
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from langchain_core.tools import tool
|
||||||
|
from sqlalchemy import String, and_, cast, func
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy.future import select
|
||||||
|
|
||||||
|
from app.agents.new_chat.tools.hitl import request_approval
|
||||||
|
from app.connectors.onedrive.client import OneDriveClient
|
||||||
|
from app.db import (
|
||||||
|
Document,
|
||||||
|
DocumentType,
|
||||||
|
SearchSourceConnector,
|
||||||
|
SearchSourceConnectorType,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def create_delete_onedrive_file_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Factory: bind DB session, workspace, and user into a `delete_onedrive_file` tool.

    If any binding is left as None, the returned tool reports a configuration
    error instead of attempting OneDrive calls.
    """

    @tool
    async def delete_onedrive_file(
        file_name: str,
        delete_from_kb: bool = False,
    ) -> dict[str, Any]:
        """Move a OneDrive file to the recycle bin.

        Use this tool when the user explicitly asks to delete, remove, or trash
        a file in OneDrive.

        Args:
            file_name: The exact name of the file to trash.
            delete_from_kb: Whether to also remove the file from the knowledge base.
                Default is False.
                Set to True to remove from both OneDrive and knowledge base.

        Returns:
            Dictionary with:
            - status: "success", "rejected", "not_found", or "error"
            - file_id: OneDrive file ID (if success)
            - deleted_from_kb: whether the document was removed from the knowledge base
            - message: Result message

        IMPORTANT:
        - If status is "rejected", the user explicitly declined. Respond with a brief
          acknowledgment and do NOT retry or suggest alternatives.
        - If status is "not_found", relay the exact message to the user and ask them
          to verify the file name or check if it has been indexed.
        """
        logger.info(
            f"delete_onedrive_file called: file_name='{file_name}', delete_from_kb={delete_from_kb}"
        )

        if db_session is None or search_space_id is None or user_id is None:
            return {
                "status": "error",
                "message": "OneDrive tool not properly configured.",
            }

        try:
            # Resolve the file by case-insensitive match on the indexed
            # document title, newest document first.
            doc_result = await db_session.execute(
                select(Document)
                .join(
                    SearchSourceConnector,
                    Document.connector_id == SearchSourceConnector.id,
                )
                .filter(
                    and_(
                        Document.search_space_id == search_space_id,
                        Document.document_type == DocumentType.ONEDRIVE_FILE,
                        func.lower(Document.title) == func.lower(file_name),
                        SearchSourceConnector.user_id == user_id,
                    )
                )
                .order_by(Document.updated_at.desc().nullslast())
                .limit(1)
            )
            document = doc_result.scalars().first()

            if not document:
                # Fallback: match against the original OneDrive file name kept
                # in document metadata (the title may differ from it).
                doc_result = await db_session.execute(
                    select(Document)
                    .join(
                        SearchSourceConnector,
                        Document.connector_id == SearchSourceConnector.id,
                    )
                    .filter(
                        and_(
                            Document.search_space_id == search_space_id,
                            Document.document_type == DocumentType.ONEDRIVE_FILE,
                            func.lower(
                                cast(
                                    Document.document_metadata["onedrive_file_name"],
                                    String,
                                )
                            )
                            == func.lower(file_name),
                            SearchSourceConnector.user_id == user_id,
                        )
                    )
                    .order_by(Document.updated_at.desc().nullslast())
                    .limit(1)
                )
                document = doc_result.scalars().first()

            if not document:
                return {
                    "status": "not_found",
                    "message": (
                        f"File '{file_name}' not found in your indexed OneDrive files. "
                        "This could mean: (1) the file doesn't exist, (2) it hasn't been indexed yet, "
                        "or (3) the file name is different."
                    ),
                }

            if not document.connector_id:
                return {
                    "status": "error",
                    "message": "Document has no associated connector.",
                }

            meta = document.document_metadata or {}
            file_id = meta.get("onedrive_file_id")
            document_id = document.id

            if not file_id:
                return {
                    "status": "error",
                    "message": "File ID is missing. Please re-index the file.",
                }

            # Verify the document's connector still belongs to this
            # user/workspace and is a OneDrive connector.
            conn_result = await db_session.execute(
                select(SearchSourceConnector).filter(
                    and_(
                        SearchSourceConnector.id == document.connector_id,
                        SearchSourceConnector.search_space_id == search_space_id,
                        SearchSourceConnector.user_id == user_id,
                        SearchSourceConnector.connector_type
                        == SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
                    )
                )
            )
            connector = conn_result.scalars().first()
            if not connector:
                return {
                    "status": "error",
                    "message": "OneDrive connector not found or access denied.",
                }

            cfg = connector.config or {}
            if cfg.get("auth_expired"):
                return {
                    "status": "auth_error",
                    "message": "OneDrive account needs re-authentication. Please re-authenticate in your connector settings.",
                    "connector_type": "onedrive",
                }

            # Context shown to the user in the approval prompt.
            context = {
                "file": {
                    "file_id": file_id,
                    "name": file_name,
                    "document_id": document_id,
                    "web_url": meta.get("web_url"),
                },
                "account": {
                    "id": connector.id,
                    "name": connector.name,
                    "user_email": cfg.get("user_email"),
                },
            }

            # Human-in-the-loop gate; the user may override params.
            result = request_approval(
                action_type="onedrive_file_trash",
                tool_name="delete_onedrive_file",
                params={
                    "file_id": file_id,
                    "connector_id": connector.id,
                    "delete_from_kb": delete_from_kb,
                },
                context=context,
            )

            if result.rejected:
                return {
                    "status": "rejected",
                    "message": "User declined. Do not retry or suggest alternatives.",
                }

            final_file_id = result.params.get("file_id", file_id)
            final_connector_id = result.params.get("connector_id", connector.id)
            final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)

            # If the user switched connectors during approval, re-validate the
            # chosen one against this workspace/user before acting on it.
            if final_connector_id != connector.id:
                result = await db_session.execute(
                    select(SearchSourceConnector).filter(
                        and_(
                            SearchSourceConnector.id == final_connector_id,
                            SearchSourceConnector.search_space_id == search_space_id,
                            SearchSourceConnector.user_id == user_id,
                            SearchSourceConnector.connector_type
                            == SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
                        )
                    )
                )
                validated_connector = result.scalars().first()
                if not validated_connector:
                    return {
                        "status": "error",
                        "message": "Selected OneDrive connector is invalid or has been disconnected.",
                    }
                actual_connector_id = validated_connector.id
            else:
                actual_connector_id = connector.id

            logger.info(
                f"Deleting OneDrive file: file_id='{final_file_id}', connector={actual_connector_id}"
            )

            client = OneDriveClient(
                session=db_session, connector_id=actual_connector_id
            )
            await client.trash_file(final_file_id)

            logger.info(
                f"OneDrive file deleted (moved to recycle bin): file_id={final_file_id}"
            )

            trash_result: dict[str, Any] = {
                "status": "success",
                "file_id": final_file_id,
                "message": f"Successfully moved '{file_name}' to the recycle bin.",
            }

            # Optional knowledge-base cleanup; failure here only adds a
            # warning — the OneDrive trash already succeeded.
            deleted_from_kb = False
            if final_delete_from_kb and document_id:
                try:
                    doc_result = await db_session.execute(
                        select(Document).filter(Document.id == document_id)
                    )
                    doc = doc_result.scalars().first()
                    if doc:
                        await db_session.delete(doc)
                        await db_session.commit()
                        deleted_from_kb = True
                        logger.info(
                            f"Deleted document {document_id} from knowledge base"
                        )
                    else:
                        logger.warning(f"Document {document_id} not found in KB")
                except Exception as e:
                    logger.error(f"Failed to delete document from KB: {e}")
                    await db_session.rollback()
                    trash_result["warning"] = (
                        f"File moved to recycle bin, but failed to remove from knowledge base: {e!s}"
                    )

            trash_result["deleted_from_kb"] = deleted_from_kb
            if deleted_from_kb:
                trash_result["message"] = (
                    f"{trash_result.get('message', '')} (also removed from knowledge base)"
                )

            return trash_result

        except Exception as e:
            # GraphInterrupt is LangGraph control flow (the HITL pause), not a
            # failure — it must propagate untouched.
            from langgraph.errors import GraphInterrupt

            if isinstance(e, GraphInterrupt):
                raise
            logger.error(f"Error deleting OneDrive file: {e}", exc_info=True)
            return {
                "status": "error",
                "message": "Something went wrong while trashing the file. Please try again.",
            }

    return delete_onedrive_file
|
||||||
|
|
@ -0,0 +1,54 @@
|
||||||
|
"""`slack` route: ``SubAgent`` spec for deepagents."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from deepagents import SubAgent
|
||||||
|
from langchain_core.language_models import BaseChatModel
|
||||||
|
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.md_file_reader import (
|
||||||
|
read_md_file,
|
||||||
|
)
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.permissions import (
|
||||||
|
ToolsPermissions,
|
||||||
|
merge_tools_permissions,
|
||||||
|
)
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.subagent_builder import (
|
||||||
|
pack_subagent,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .tools.index import load_tools
|
||||||
|
|
||||||
|
NAME = "slack"


def build_subagent(
    *,
    dependencies: dict[str, Any],
    model: BaseChatModel | None = None,
    extra_middleware: Sequence[Any] | None = None,
    extra_tools_bucket: ToolsPermissions | None = None,
) -> SubAgent:
    """Build the Slack ``SubAgent`` spec from packaged prompts and loaded tools."""
    buckets = load_tools(dependencies=dependencies)
    merged_tools_bucket = merge_tools_permissions(buckets, extra_tools_bucket)
    # Collect concrete tool objects from both permission buckets.
    tools = [
        row["tool"]
        for row in (*merged_tools_bucket["allow"], *merged_tools_bucket["ask"])
        if row.get("tool") is not None
    ]
    # Named "ask"-bucket tools require a human-in-the-loop interrupt.
    interrupt_on = {r["name"]: True for r in merged_tools_bucket["ask"] if r.get("name")}
    description = read_md_file(__package__, "description").strip()
    if not description:
        # Fallback if the packaged description.md is missing or empty.
        description = "Handles slack tasks for this workspace."
    system_prompt = read_md_file(__package__, "system_prompt").strip()
    return pack_subagent(
        name=NAME,
        description=description,
        system_prompt=system_prompt,
        tools=tools,
        interrupt_on=interrupt_on,
        model=model,
        extra_middleware=extra_middleware,
    )
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
Use for Slack channel communication: read channel/thread history, summarize conversations, and post replies.
|
||||||
|
|
@ -0,0 +1,45 @@
|
||||||
|
You are the Slack MCP operations sub-agent.
|
||||||
|
You receive delegated instructions from a supervisor agent and return structured results for supervisor synthesis.
|
||||||
|
|
||||||
|
<goal>
|
||||||
|
Execute Slack MCP reads/actions accurately in the connected workspace.
|
||||||
|
</goal>
|
||||||
|
|
||||||
|
<available_tools>
|
||||||
|
- Runtime-provided Slack MCP tools for search, channel/thread reads, and related actions.
|
||||||
|
</available_tools>
|
||||||
|
|
||||||
|
<tool_policy>
|
||||||
|
- Use only runtime-provided MCP tools and their documented arguments.
|
||||||
|
- If channel/thread target is ambiguous, return `status=blocked` with candidate options.
|
||||||
|
- Never invent message content, sender identity, timestamps, or delivery outcomes.
|
||||||
|
</tool_policy>
|
||||||
|
|
||||||
|
<out_of_scope>
|
||||||
|
- Do not execute non-Slack tasks.
|
||||||
|
</out_of_scope>
|
||||||
|
|
||||||
|
<safety>
|
||||||
|
- Never claim send/read success without tool evidence.
|
||||||
|
</safety>
|
||||||
|
|
||||||
|
<failure_policy>
|
||||||
|
- On tool failure, return `status=error` with concise recovery `next_step`.
|
||||||
|
- On unresolved channel/thread ambiguity, return `status=blocked` with candidates.
|
||||||
|
</failure_policy>
|
||||||
|
|
||||||
|
<output_contract>
|
||||||
|
Return **only** one JSON object (no markdown/prose):
|
||||||
|
{
|
||||||
|
"status": "success" | "partial" | "blocked" | "error",
|
||||||
|
"action_summary": string,
|
||||||
|
"evidence": { "items": object | null },
|
||||||
|
"next_step": string | null,
|
||||||
|
"missing_fields": string[] | null,
|
||||||
|
"assumptions": string[] | null
|
||||||
|
}
|
||||||
|
Rules:
|
||||||
|
- `status=success` -> `next_step=null`, `missing_fields=null`.
|
||||||
|
- `status=partial|blocked|error` -> `next_step` must be non-null.
|
||||||
|
- `status=blocked` due to missing required inputs -> `missing_fields` must be non-null.
|
||||||
|
</output_contract>
|
||||||
|
|
@ -0,0 +1,3 @@
|
||||||
|
"""Slack route: native tool factories are empty; MCP supplies tools when configured."""
|
||||||
|
|
||||||
|
__all__: list[str] = []
|
||||||
|
|
@ -0,0 +1,12 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.permissions import (
|
||||||
|
ToolsPermissions,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def load_tools(*, dependencies: dict[str, Any] | None = None, **kwargs: Any) -> ToolsPermissions:
    """Return empty permission buckets: Slack tools arrive via MCP at runtime.

    Arguments are accepted (and discarded) only so this loader matches the
    signature shared by the other route tool loaders.
    """
    del dependencies, kwargs  # interface compatibility only
    return {"allow": [], "ask": []}
|
||||||
|
|
@ -0,0 +1,54 @@
|
||||||
|
"""`teams` route: ``SubAgent`` spec for deepagents."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from deepagents import SubAgent
|
||||||
|
from langchain_core.language_models import BaseChatModel
|
||||||
|
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.md_file_reader import (
|
||||||
|
read_md_file,
|
||||||
|
)
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.permissions import (
|
||||||
|
ToolsPermissions,
|
||||||
|
merge_tools_permissions,
|
||||||
|
)
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.subagent_builder import (
|
||||||
|
pack_subagent,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .tools.index import load_tools
|
||||||
|
|
||||||
|
NAME = "teams"
|
||||||
|
|
||||||
|
|
||||||
|
def build_subagent(
    *,
    dependencies: dict[str, Any],
    model: BaseChatModel | None = None,
    extra_middleware: Sequence[Any] | None = None,
    extra_tools_bucket: ToolsPermissions | None = None,
) -> SubAgent:
    """Assemble the ``teams`` SubAgent spec for deepagents.

    Loads the route's tool buckets, merges in any caller-supplied bucket,
    collects the concrete tool objects, marks every "ask" tool as requiring
    an interrupt, and packs the result together with the markdown-sourced
    description and system prompt.
    """
    merged = merge_tools_permissions(load_tools(dependencies=dependencies), extra_tools_bucket)

    combined_rows = list(merged["allow"]) + list(merged["ask"])
    tools = [entry["tool"] for entry in combined_rows if entry.get("tool") is not None]

    # Tools in the "ask" bucket require a human-in-the-loop interrupt.
    interrupt_on: dict[str, bool] = {}
    for entry in merged["ask"]:
        if entry.get("name"):
            interrupt_on[entry["name"]] = True

    # Fall back to a generic description if the markdown file is empty/missing.
    description = (
        read_md_file(__package__, "description").strip()
        or "Handles teams tasks for this workspace."
    )

    return pack_subagent(
        name=NAME,
        description=description,
        system_prompt=read_md_file(__package__, "system_prompt").strip(),
        tools=tools,
        interrupt_on=interrupt_on,
        model=model,
        extra_middleware=extra_middleware,
    )
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
Use for Microsoft Teams communication: read channel/thread messages, gather context, and post replies.
|
||||||
|
|
@ -0,0 +1,55 @@
|
||||||
|
You are the Microsoft Teams operations sub-agent.
|
||||||
|
You receive delegated instructions from a supervisor agent and return structured results for supervisor synthesis.
|
||||||
|
|
||||||
|
<goal>
|
||||||
|
Execute Teams channel discovery, message reads, and sends accurately.
|
||||||
|
</goal>
|
||||||
|
|
||||||
|
<available_tools>
|
||||||
|
- `list_teams_channels`
|
||||||
|
- `read_teams_messages`
|
||||||
|
- `send_teams_message`
|
||||||
|
</available_tools>
|
||||||
|
|
||||||
|
<tool_policy>
|
||||||
|
- Use only tools in `<available_tools>`.
|
||||||
|
- Resolve team/channel targets before read/send operations.
|
||||||
|
- If ambiguous, return `status=blocked` with candidate channels and `next_step`.
|
||||||
|
- Never invent message content, sender identity, timestamps, or delivery outcomes.
|
||||||
|
</tool_policy>
|
||||||
|
|
||||||
|
<out_of_scope>
|
||||||
|
- Do not perform non-Teams tasks.
|
||||||
|
</out_of_scope>
|
||||||
|
|
||||||
|
<safety>
|
||||||
|
- Never claim send success without tool confirmation.
|
||||||
|
</safety>
|
||||||
|
|
||||||
|
<failure_policy>
|
||||||
|
- On tool failure, return `status=error` with concise recovery `next_step`.
|
||||||
|
- On unresolved destination ambiguity, return `status=blocked` with candidates.
|
||||||
|
</failure_policy>
|
||||||
|
|
||||||
|
<output_contract>
|
||||||
|
Return **only** one JSON object (no markdown/prose):
|
||||||
|
{
|
||||||
|
"status": "success" | "partial" | "blocked" | "error",
|
||||||
|
"action_summary": string,
|
||||||
|
"evidence": {
|
||||||
|
"team_id": string | null,
|
||||||
|
"channel_id": string | null,
|
||||||
|
"message_id": string | null,
|
||||||
|
"matched_candidates": [
|
||||||
|
{ "team_id": string | null, "channel_id": string, "label": string | null }
|
||||||
|
] | null
|
||||||
|
},
|
||||||
|
"next_step": string | null,
|
||||||
|
"missing_fields": string[] | null,
|
||||||
|
"assumptions": string[] | null
|
||||||
|
}
|
||||||
|
Rules:
|
||||||
|
- `status=success` -> `next_step=null`, `missing_fields=null`.
|
||||||
|
- `status=partial|blocked|error` -> `next_step` must be non-null.
|
||||||
|
- `status=blocked` due to missing required inputs -> `missing_fields` must be non-null.
|
||||||
|
</output_contract>
|
||||||
|
|
@ -0,0 +1,15 @@
|
||||||
|
"""Public re-exports of the Microsoft Teams tool factories."""

from app.agents.new_chat.tools.teams.list_channels import (
    create_list_teams_channels_tool,
)
from app.agents.new_chat.tools.teams.read_messages import (
    create_read_teams_messages_tool,
)
from app.agents.new_chat.tools.teams.send_message import (
    create_send_teams_message_tool,
)

__all__ = [
    "create_list_teams_channels_tool",
    "create_read_teams_messages_tool",
    "create_send_teams_message_tool",
]
|
||||||
|
|
@ -0,0 +1,38 @@
|
||||||
|
"""Builds Microsoft Graph auth headers for Teams connector tools."""
|
||||||
|
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy.future import select
|
||||||
|
|
||||||
|
from app.db import SearchSourceConnector, SearchSourceConnectorType
|
||||||
|
|
||||||
|
GRAPH_API = "https://graph.microsoft.com/v1.0"
|
||||||
|
|
||||||
|
|
||||||
|
async def get_teams_connector(
    db_session: AsyncSession,
    search_space_id: int,
    user_id: str,
) -> SearchSourceConnector | None:
    """Return the user's Teams connector for a search space, or ``None``."""
    stmt = select(SearchSourceConnector).where(
        SearchSourceConnector.search_space_id == search_space_id,
        SearchSourceConnector.user_id == user_id,
        SearchSourceConnector.connector_type
        == SearchSourceConnectorType.TEAMS_CONNECTOR,
    )
    rows = await db_session.execute(stmt)
    return rows.scalars().first()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_access_token(
    db_session: AsyncSession,
    connector: SearchSourceConnector,
) -> str:
    """Get a valid Microsoft Graph access token, refreshing if expired."""
    # Imported here rather than at module level — presumably to avoid a
    # circular import between this module and the connector package (TODO confirm).
    from app.connectors.teams_connector import TeamsConnector

    tc = TeamsConnector(
        session=db_session,
        connector_id=connector.id,
    )
    # NOTE(review): depends on TeamsConnector's private ``_get_valid_token``;
    # confirm no public accessor exists before refactoring.
    return await tc._get_valid_token()
|
||||||
|
|
@ -0,0 +1,30 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from app.agents.multi_agent_with_deepagents.subagents.shared.permissions import (
|
||||||
|
ToolsPermissions,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .list_channels import create_list_teams_channels_tool
|
||||||
|
from .read_messages import create_read_teams_messages_tool
|
||||||
|
from .send_message import create_send_teams_message_tool
|
||||||
|
|
||||||
|
|
||||||
|
def load_tools(*, dependencies: dict[str, Any] | None = None, **kwargs: Any) -> ToolsPermissions:
    """Build the Teams tool permission buckets.

    Read-only tools (list channels, read messages) go in ``allow``; sending
    a message goes in ``ask`` so it requires user approval.
    """
    deps = dict(dependencies or {})
    deps.update(kwargs)
    shared = {
        "db_session": deps["db_session"],
        "search_space_id": deps["search_space_id"],
        "user_id": deps["user_id"],
    }

    channels_tool = create_list_teams_channels_tool(**shared)
    messages_tool = create_read_teams_messages_tool(**shared)
    send_tool = create_send_teams_message_tool(**shared)

    def _entry(t: Any) -> dict[str, Any]:
        # Tool objects normally expose ``.name``; fall back to "" defensively.
        return {"name": getattr(t, "name", "") or "", "tool": t}

    return {
        "allow": [_entry(channels_tool), _entry(messages_tool)],
        "ask": [_entry(send_tool)],
    }
|
||||||
|
|
@ -0,0 +1,92 @@
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from langchain_core.tools import tool
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from ._auth import GRAPH_API, get_access_token, get_teams_connector
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def create_list_teams_channels_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Create the ``list_teams_channels`` tool bound to a session/space/user.

    Any missing binding makes the tool return a configuration error at call
    time instead of raising.
    """

    @tool
    async def list_teams_channels() -> dict[str, Any]:
        """List all Microsoft Teams and their channels the user has access to.

        Returns:
            Dictionary with status and a list of teams, each containing
            team_id, team_name, and a list of channels (id, name).
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            token = await get_access_token(db_session, connector)
            headers = {"Authorization": f"Bearer {token}"}

            # One client for the team listing AND the per-team channel calls,
            # so the connection pool is reused instead of being rebuilt between
            # the two phases (the original opened a second AsyncClient).
            async with httpx.AsyncClient(timeout=20.0) as client:
                teams_resp = await client.get(
                    f"{GRAPH_API}/me/joinedTeams", headers=headers
                )

                if teams_resp.status_code == 401:
                    return {
                        "status": "auth_error",
                        "message": "Teams token expired. Please re-authenticate.",
                        "connector_type": "teams",
                    }
                if teams_resp.status_code != 200:
                    return {
                        "status": "error",
                        "message": f"Graph API error: {teams_resp.status_code}",
                    }

                teams_data = teams_resp.json().get("value", [])
                result_teams = []
                for team in teams_data:
                    team_id = team["id"]
                    ch_resp = await client.get(
                        f"{GRAPH_API}/teams/{team_id}/channels",
                        headers=headers,
                    )
                    # A failed channel fetch yields an empty channel list rather
                    # than failing the whole listing.
                    channels = []
                    if ch_resp.status_code == 200:
                        channels = [
                            {"id": ch["id"], "name": ch.get("displayName", "")}
                            for ch in ch_resp.json().get("value", [])
                        ]
                    result_teams.append(
                        {
                            "team_id": team_id,
                            "team_name": team.get("displayName", ""),
                            "channels": channels,
                        }
                    )

            return {
                "status": "success",
                "teams": result_teams,
                "total_teams": len(result_teams),
            }

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            # Human-in-the-loop interrupts must propagate to LangGraph untouched.
            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error listing Teams channels: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to list Teams channels."}

    return list_teams_channels
|
||||||
|
|
@ -0,0 +1,103 @@
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from langchain_core.tools import tool
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from ._auth import GRAPH_API, get_access_token, get_teams_connector
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def create_read_teams_messages_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Create the ``read_teams_messages`` tool bound to a session/space/user.

    Any missing binding makes the tool return a configuration error at call
    time instead of raising.
    """

    @tool
    async def read_teams_messages(
        team_id: str,
        channel_id: str,
        limit: int = 25,
    ) -> dict[str, Any]:
        """Read recent messages from a Microsoft Teams channel.

        Args:
            team_id: The team ID (from list_teams_channels).
            channel_id: The channel ID (from list_teams_channels).
            limit: Number of messages to fetch (default 25, clamped to 1..50).

        Returns:
            Dictionary with status and a list of messages including
            id, sender, content, timestamp.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        # Clamp to [1, 50]: the original only capped at 50, letting zero or
        # negative values through to the $top query parameter.
        limit = max(1, min(limit, 50))

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            token = await get_access_token(db_session, connector)

            async with httpx.AsyncClient(timeout=20.0) as client:
                resp = await client.get(
                    f"{GRAPH_API}/teams/{team_id}/channels/{channel_id}/messages",
                    headers={"Authorization": f"Bearer {token}"},
                    params={"$top": limit},
                )

            if resp.status_code == 401:
                return {
                    "status": "auth_error",
                    "message": "Teams token expired. Please re-authenticate.",
                    "connector_type": "teams",
                }
            if resp.status_code == 403:
                return {
                    "status": "error",
                    "message": "Insufficient permissions to read this channel.",
                }
            if resp.status_code != 200:
                return {
                    "status": "error",
                    "message": f"Graph API error: {resp.status_code}",
                }

            raw_msgs = resp.json().get("value", [])
            messages = []
            for m in raw_msgs:
                sender = m.get("from", {})
                # "from" may be null in the payload; guard before .get().
                user_info = sender.get("user", {}) if sender else {}
                body = m.get("body", {})
                messages.append(
                    {
                        "id": m.get("id"),
                        "sender": user_info.get("displayName", "Unknown"),
                        "content": body.get("content", ""),
                        "content_type": body.get("contentType", "text"),
                        "timestamp": m.get("createdDateTime", ""),
                    }
                )

            return {
                "status": "success",
                "team_id": team_id,
                "channel_id": channel_id,
                "messages": messages,
                "total": len(messages),
            }

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            # Human-in-the-loop interrupts must propagate to LangGraph untouched.
            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error reading Teams messages: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to read Teams messages."}

    return read_teams_messages
|
||||||
|
|
@ -0,0 +1,115 @@
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from langchain_core.tools import tool
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from app.agents.new_chat.tools.hitl import request_approval
|
||||||
|
|
||||||
|
from ._auth import GRAPH_API, get_access_token, get_teams_connector
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def create_send_teams_message_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Create the ``send_teams_message`` tool bound to a session/space/user."""

    @tool
    async def send_teams_message(
        team_id: str,
        channel_id: str,
        content: str,
    ) -> dict[str, Any]:
        """Send a message to a Microsoft Teams channel.

        Requires the ChannelMessage.Send OAuth scope. If the user gets a
        permission error, they may need to re-authenticate with updated scopes.

        Args:
            team_id: The team ID (from list_teams_channels).
            channel_id: The channel ID (from list_teams_channels).
            content: The message text (HTML supported).

        Returns:
            Dictionary with status, message_id on success.

        IMPORTANT:
        - If status is "rejected", the user explicitly declined. Do NOT retry.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            # Human-in-the-loop gate: the user can approve, edit, or reject.
            approval = request_approval(
                action_type="teams_send_message",
                tool_name="send_teams_message",
                params={
                    "team_id": team_id,
                    "channel_id": channel_id,
                    "content": content,
                },
                context={"connector_id": connector.id},
            )

            if approval.rejected:
                return {
                    "status": "rejected",
                    "message": "User declined. Message was not sent.",
                }

            # The approval step may have edited any of the parameters.
            final_content = approval.params.get("content", content)
            final_team = approval.params.get("team_id", team_id)
            final_channel = approval.params.get("channel_id", channel_id)

            token = await get_access_token(db_session, connector)

            async with httpx.AsyncClient(timeout=20.0) as client:
                resp = await client.post(
                    f"{GRAPH_API}/teams/{final_team}/channels/{final_channel}/messages",
                    headers={
                        "Authorization": f"Bearer {token}",
                        "Content-Type": "application/json",
                    },
                    json={"body": {"content": final_content}},
                )

            status = resp.status_code
            if status == 401:
                return {
                    "status": "auth_error",
                    "message": "Teams token expired. Please re-authenticate.",
                    "connector_type": "teams",
                }
            if status == 403:
                return {
                    "status": "insufficient_permissions",
                    "message": "Missing ChannelMessage.Send permission. Please re-authenticate with updated scopes.",
                }
            if status not in (200, 201):
                return {
                    "status": "error",
                    "message": f"Graph API error: {status} — {resp.text[:200]}",
                }

            msg_data = resp.json()
            return {
                "status": "success",
                "message_id": msg_data.get("id"),
                "message": "Message sent to Teams channel.",
            }

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            # Human-in-the-loop interrupts must propagate to LangGraph untouched.
            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error sending Teams message: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to send Teams message."}

    return send_teams_message
|
||||||
Loading…
Add table
Add a link
Reference in a new issue