Merge upstream/dev into feature/multi-agent

commit 5119915f4f by CREDO23, 2026-05-05 01:44:46 +02:00
278 changed files with 34669 additions and 8970 deletions


@@ -7,6 +7,7 @@ from sqlalchemy.orm.attributes import flag_modified
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.confluence_history import ConfluenceHistoryConnector
from app.db import async_session_maker
from app.services.confluence import ConfluenceToolMetadataService
logger = logging.getLogger(__name__)
@@ -18,6 +19,23 @@ def create_create_confluence_page_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""
Factory function to create the create_confluence_page tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured create_confluence_page tool
"""
del db_session # per-call session — see docstring
@tool
async def create_confluence_page(
title: str,
@@ -42,160 +60,163 @@ def create_create_confluence_page_tool(
"""
logger.info(f"create_confluence_page called: title='{title}'")
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Confluence tool not properly configured.",
}
try:
metadata_service = ConfluenceToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
async with async_session_maker() as db_session:
metadata_service = ConfluenceToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
if "error" in context:
return {"status": "error", "message": context["error"]}
if "error" in context:
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
return {
"status": "auth_error",
"message": "All connected Confluence accounts need re-authentication.",
"connector_type": "confluence",
}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
return {
"status": "auth_error",
"message": "All connected Confluence accounts need re-authentication.",
"connector_type": "confluence",
}
result = request_approval(
action_type="confluence_page_creation",
tool_name="create_confluence_page",
params={
"title": title,
"content": content,
"space_id": space_id,
"connector_id": connector_id,
},
context=context,
)
result = request_approval(
action_type="confluence_page_creation",
tool_name="create_confluence_page",
params={
"title": title,
"content": content,
"space_id": space_id,
"connector_id": connector_id,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_title = result.params.get("title", title)
final_content = result.params.get("content", content) or ""
final_space_id = result.params.get("space_id", space_id)
final_connector_id = result.params.get("connector_id", connector_id)
final_title = result.params.get("title", title)
final_content = result.params.get("content", content) or ""
final_space_id = result.params.get("space_id", space_id)
final_connector_id = result.params.get("connector_id", connector_id)
if not final_title or not final_title.strip():
return {"status": "error", "message": "Page title cannot be empty."}
if not final_space_id:
return {"status": "error", "message": "A space must be selected."}
if not final_title or not final_title.strip():
return {"status": "error", "message": "Page title cannot be empty."}
if not final_space_id:
return {"status": "error", "message": "A space must be selected."}
from sqlalchemy.future import select
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
from app.db import SearchSourceConnector, SearchSourceConnectorType
actual_connector_id = final_connector_id
if actual_connector_id is None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
actual_connector_id = final_connector_id
if actual_connector_id is None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
)
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Confluence connector found.",
}
actual_connector_id = connector.id
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == actual_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Confluence connector is invalid.",
}
try:
client = ConfluenceHistoryConnector(
session=db_session, connector_id=actual_connector_id
)
api_result = await client.create_page(
space_id=final_space_id,
title=final_title,
body=final_content,
)
await client.close()
except Exception as api_err:
if (
"http 403" in str(api_err).lower()
or "status code 403" in str(api_err).lower()
):
try:
_conn = connector
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
pass
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Confluence account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
page_id = str(api_result.get("id", ""))
page_links = (
api_result.get("_links", {}) if isinstance(api_result, dict) else {}
)
page_url = ""
if page_links.get("base") and page_links.get("webui"):
page_url = f"{page_links['base']}{page_links['webui']}"
kb_message_suffix = ""
try:
from app.services.confluence import ConfluenceKBSyncService
kb_service = ConfluenceKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
page_id=page_id,
page_title=final_title,
space_id=final_space_id,
body_content=final_content,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Confluence connector found.",
}
actual_connector_id = connector.id
else:
kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync."
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == actual_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Confluence connector is invalid.",
}
return {
"status": "success",
"page_id": page_id,
"page_url": page_url,
"message": f"Confluence page '{final_title}' created successfully.{kb_message_suffix}",
}
try:
client = ConfluenceHistoryConnector(
session=db_session, connector_id=actual_connector_id
)
api_result = await client.create_page(
space_id=final_space_id,
title=final_title,
body=final_content,
)
await client.close()
except Exception as api_err:
if (
"http 403" in str(api_err).lower()
or "status code 403" in str(api_err).lower()
):
try:
_conn = connector
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
pass
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Confluence account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
page_id = str(api_result.get("id", ""))
page_links = (
api_result.get("_links", {}) if isinstance(api_result, dict) else {}
)
page_url = ""
if page_links.get("base") and page_links.get("webui"):
page_url = f"{page_links['base']}{page_links['webui']}"
kb_message_suffix = ""
try:
from app.services.confluence import ConfluenceKBSyncService
kb_service = ConfluenceKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
page_id=page_id,
page_title=final_title,
space_id=final_space_id,
body_content=final_content,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"page_id": page_id,
"page_url": page_url,
"message": f"Confluence page '{final_title}' created successfully.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
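
Every tool factory touched by this merge follows the same refactor: the request-scoped session passed into the factory is discarded, and the tool body opens its own short-lived session per invocation, exactly as the new docstrings describe. A minimal sketch of that pattern, assuming a SQLAlchemy async_sessionmaker; the engine DSN and tool name are illustrative, not from the diff.

```python
# Minimal sketch of the session-per-call factory pattern used throughout
# this merge. The DSN and `example_tool` are illustrative stand-ins.
from langchain_core.tools import tool
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine

engine = create_async_engine("postgresql+asyncpg://localhost/app")  # illustrative DSN
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)

def create_example_tool(db_session: AsyncSession | None = None):
    # The factory-level session is accepted only for registry compatibility;
    # capturing it in the closure would surface a stale/closed session when
    # the compiled-agent cache reuses this closure across HTTP requests.
    del db_session

    @tool
    async def example_tool(query: str) -> dict:
        """Run a query with a short-lived session opened per call."""
        async with async_session_maker() as session:
            result = await session.execute(text("SELECT 1"))
            return {"status": "success", "value": result.scalar_one()}

    return example_tool
```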


@@ -7,6 +7,7 @@ from sqlalchemy.orm.attributes import flag_modified
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.confluence_history import ConfluenceHistoryConnector
from app.db import async_session_maker
from app.services.confluence import ConfluenceToolMetadataService
logger = logging.getLogger(__name__)
@@ -18,6 +19,23 @@ def create_delete_confluence_page_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""
Factory function to create the delete_confluence_page tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured delete_confluence_page tool
"""
del db_session # per-call session — see docstring
@tool
async def delete_confluence_page(
page_title_or_id: str,
@@ -43,137 +61,143 @@ def create_delete_confluence_page_tool(
f"delete_confluence_page called: page_title_or_id='{page_title_or_id}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Confluence tool not properly configured.",
}
try:
metadata_service = ConfluenceToolMetadataService(db_session)
context = await metadata_service.get_deletion_context(
search_space_id, user_id, page_title_or_id
)
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "confluence",
}
if "not found" in error_msg.lower():
return {"status": "not_found", "message": error_msg}
return {"status": "error", "message": error_msg}
page_data = context["page"]
page_id = page_data["page_id"]
page_title = page_data.get("page_title", "")
document_id = page_data["document_id"]
connector_id_from_context = context.get("account", {}).get("id")
result = request_approval(
action_type="confluence_page_deletion",
tool_name="delete_confluence_page",
params={
"page_id": page_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_page_id = result.params.get("page_id", page_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this page.",
}
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
async with async_session_maker() as db_session:
metadata_service = ConfluenceToolMetadataService(db_session)
context = await metadata_service.get_deletion_context(
search_space_id, user_id, page_title_or_id
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Confluence connector is invalid.",
}
try:
client = ConfluenceHistoryConnector(
session=db_session, connector_id=final_connector_id
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "confluence",
}
if "not found" in error_msg.lower():
return {"status": "not_found", "message": error_msg}
return {"status": "error", "message": error_msg}
page_data = context["page"]
page_id = page_data["page_id"]
page_title = page_data.get("page_title", "")
document_id = page_data["document_id"]
connector_id_from_context = context.get("account", {}).get("id")
result = request_approval(
action_type="confluence_page_deletion",
tool_name="delete_confluence_page",
params={
"page_id": page_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
await client.delete_page(final_page_id)
await client.close()
except Exception as api_err:
if (
"http 403" in str(api_err).lower()
or "status code 403" in str(api_err).lower()
):
try:
connector.config = {**connector.config, "auth_expired": True}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
pass
if result.rejected:
return {
"status": "insufficient_permissions",
"connector_id": final_connector_id,
"message": "This Confluence account needs additional permissions. Please re-authenticate in connector settings.",
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
raise
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
final_page_id = result.params.get("page_id", page_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this page.",
}
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Confluence connector is invalid.",
}
message = f"Confluence page '{page_title}' deleted successfully."
if deleted_from_kb:
message += " Also removed from the knowledge base."
try:
client = ConfluenceHistoryConnector(
session=db_session, connector_id=final_connector_id
)
await client.delete_page(final_page_id)
await client.close()
except Exception as api_err:
if (
"http 403" in str(api_err).lower()
or "status code 403" in str(api_err).lower()
):
try:
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
pass
return {
"status": "insufficient_permissions",
"connector_id": final_connector_id,
"message": "This Confluence account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
return {
"status": "success",
"page_id": final_page_id,
"deleted_from_kb": deleted_from_kb,
"message": message,
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
message = f"Confluence page '{page_title}' deleted successfully."
if deleted_from_kb:
message += " Also removed from the knowledge base."
return {
"status": "success",
"page_id": final_page_id,
"deleted_from_kb": deleted_from_kb,
"message": message,
}
except Exception as e:
from langgraph.errors import GraphInterrupt
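
The 403 handling in these Confluence tools persists an auth_expired flag on the connector's JSON config column. Because SQLAlchemy does not change-track in-place mutation of JSON values, the code builds a new dict and calls flag_modified before committing. A minimal sketch of that step; the connector argument stands in for the SearchSourceConnector row loaded in the diff.

```python
# Sketch of marking a connector's JSON config dirty so SQLAlchemy persists it.
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm.attributes import flag_modified

async def mark_auth_expired(db_session: AsyncSession, connector) -> None:
    # Replace the dict rather than mutating it in place, then tell the ORM
    # the attribute changed; a plain `connector.config["auth_expired"] = True`
    # on a JSON column is not change-tracked by default.
    connector.config = {**(connector.config or {}), "auth_expired": True}
    flag_modified(connector, "config")
    await db_session.commit()
```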


@@ -7,6 +7,7 @@ from sqlalchemy.orm.attributes import flag_modified
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.confluence_history import ConfluenceHistoryConnector
from app.db import async_session_maker
from app.services.confluence import ConfluenceToolMetadataService
logger = logging.getLogger(__name__)
@@ -18,6 +19,23 @@ def create_update_confluence_page_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""
Factory function to create the update_confluence_page tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured update_confluence_page tool
"""
del db_session # per-call session — see docstring
@tool
async def update_confluence_page(
page_title_or_id: str,
@@ -45,164 +63,168 @@ def create_update_confluence_page_tool(
f"update_confluence_page called: page_title_or_id='{page_title_or_id}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Confluence tool not properly configured.",
}
try:
metadata_service = ConfluenceToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, page_title_or_id
)
async with async_session_maker() as db_session:
metadata_service = ConfluenceToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, page_title_or_id
)
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "confluence",
}
if "not found" in error_msg.lower():
return {"status": "not_found", "message": error_msg}
return {"status": "error", "message": error_msg}
page_data = context["page"]
page_id = page_data["page_id"]
current_title = page_data["page_title"]
current_body = page_data.get("body", "")
current_version = page_data.get("version", 1)
document_id = page_data.get("document_id")
connector_id_from_context = context.get("account", {}).get("id")
result = request_approval(
action_type="confluence_page_update",
tool_name="update_confluence_page",
params={
"page_id": page_id,
"document_id": document_id,
"new_title": new_title,
"new_content": new_content,
"version": current_version,
"connector_id": connector_id_from_context,
},
context=context,
)
if result.rejected:
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "confluence",
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
if "not found" in error_msg.lower():
return {"status": "not_found", "message": error_msg}
return {"status": "error", "message": error_msg}
page_data = context["page"]
page_id = page_data["page_id"]
current_title = page_data["page_title"]
current_body = page_data.get("body", "")
current_version = page_data.get("version", 1)
document_id = page_data.get("document_id")
connector_id_from_context = context.get("account", {}).get("id")
result = request_approval(
action_type="confluence_page_update",
tool_name="update_confluence_page",
params={
"page_id": page_id,
"document_id": document_id,
"new_title": new_title,
"new_content": new_content,
"version": current_version,
"connector_id": connector_id_from_context,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_page_id = result.params.get("page_id", page_id)
final_title = result.params.get("new_title", new_title) or current_title
final_content = result.params.get("new_content", new_content)
if final_content is None:
final_content = current_body
final_version = result.params.get("version", current_version)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_document_id = result.params.get("document_id", document_id)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this page.",
}
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
final_page_id = result.params.get("page_id", page_id)
final_title = result.params.get("new_title", new_title) or current_title
final_content = result.params.get("new_content", new_content)
if final_content is None:
final_content = current_body
final_version = result.params.get("version", current_version)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Confluence connector is invalid.",
}
final_document_id = result.params.get("document_id", document_id)
try:
client = ConfluenceHistoryConnector(
session=db_session, connector_id=final_connector_id
)
api_result = await client.update_page(
page_id=final_page_id,
title=final_title,
body=final_content,
version_number=final_version + 1,
)
await client.close()
except Exception as api_err:
if (
"http 403" in str(api_err).lower()
or "status code 403" in str(api_err).lower()
):
try:
connector.config = {**connector.config, "auth_expired": True}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
pass
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if not final_connector_id:
return {
"status": "insufficient_permissions",
"connector_id": final_connector_id,
"message": "This Confluence account needs additional permissions. Please re-authenticate in connector settings.",
"status": "error",
"message": "No connector found for this page.",
}
raise
page_links = (
api_result.get("_links", {}) if isinstance(api_result, dict) else {}
)
page_url = ""
if page_links.get("base") and page_links.get("webui"):
page_url = f"{page_links['base']}{page_links['webui']}"
kb_message_suffix = ""
if final_document_id:
try:
from app.services.confluence import ConfluenceKBSyncService
kb_service = ConfluenceKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=final_document_id,
page_id=final_page_id,
user_id=user_id,
search_space_id=search_space_id,
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Confluence connector is invalid.",
}
try:
client = ConfluenceHistoryConnector(
session=db_session, connector_id=final_connector_id
)
api_result = await client.update_page(
page_id=final_page_id,
title=final_title,
body=final_content,
version_number=final_version + 1,
)
await client.close()
except Exception as api_err:
if (
"http 403" in str(api_err).lower()
or "status code 403" in str(api_err).lower()
):
try:
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
pass
return {
"status": "insufficient_permissions",
"connector_id": final_connector_id,
"message": "This Confluence account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
page_links = (
api_result.get("_links", {}) if isinstance(api_result, dict) else {}
)
page_url = ""
if page_links.get("base") and page_links.get("webui"):
page_url = f"{page_links['base']}{page_links['webui']}"
kb_message_suffix = ""
if final_document_id:
try:
from app.services.confluence import ConfluenceKBSyncService
kb_service = ConfluenceKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=final_document_id,
page_id=final_page_id,
user_id=user_id,
search_space_id=search_space_id,
)
else:
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = (
" The knowledge base will be updated in the next sync."
)
except Exception as kb_err:
logger.warning(f"KB sync after update failed: {kb_err}")
kb_message_suffix = (
" The knowledge base will be updated in the next sync."
)
except Exception as kb_err:
logger.warning(f"KB sync after update failed: {kb_err}")
kb_message_suffix = (
" The knowledge base will be updated in the next sync."
)
return {
"status": "success",
"page_id": final_page_id,
"page_url": page_url,
"message": f"Confluence page '{final_title}' updated successfully.{kb_message_suffix}",
}
return {
"status": "success",
"page_id": final_page_id,
"page_url": page_url,
"message": f"Confluence page '{final_title}' updated successfully.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
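
update_confluence_page reads the page's current version from the metadata context and sends version_number=final_version + 1, which the Confluence API requires and which doubles as optimistic concurrency control: a stale reader fails rather than silently overwriting a newer edit. A sketch of just that step; `client` stands in for the ConfluenceHistoryConnector used in the diff.

```python
# Sketch of the version bump used by update_confluence_page. Confluence
# rejects an update whose version is not exactly current + 1.
async def bump_and_update(client, page_id: str, title: str, body: str,
                          current_version: int) -> dict:
    return await client.update_page(
        page_id=page_id,
        title=title,
        body=body,
        version_number=current_version + 1,
    )
```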


@@ -17,7 +17,7 @@ from pydantic import BaseModel, Field
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
from app.db import SearchSourceConnector, SearchSourceConnectorType, async_session_maker
from app.services.mcp_oauth.registry import MCP_SERVICES
logger = logging.getLogger(__name__)
@@ -53,6 +53,23 @@ def create_get_connected_accounts_tool(
search_space_id: int,
user_id: str,
) -> StructuredTool:
"""Factory function to create the get_connected_accounts tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to scope account discovery to.
user_id: User ID to scope account discovery to.
Returns:
Configured StructuredTool for connected-accounts discovery.
"""
del db_session # per-call session — see docstring
async def _run(service: str) -> list[dict[str, Any]]:
svc_cfg = MCP_SERVICES.get(service)
@@ -68,40 +85,41 @@ def create_get_connected_accounts_tool(
except ValueError:
return [{"error": f"Connector type '{svc_cfg.connector_type}' not found."}]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type == connector_type,
async with async_session_maker() as db_session:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type == connector_type,
)
)
)
connectors = result.scalars().all()
connectors = result.scalars().all()
if not connectors:
return [
{
"error": f"No {svc_cfg.name} accounts connected. Ask the user to connect one in settings."
if not connectors:
return [
{
"error": f"No {svc_cfg.name} accounts connected. Ask the user to connect one in settings."
}
]
is_multi = len(connectors) > 1
accounts: list[dict[str, Any]] = []
for conn in connectors:
cfg = conn.config or {}
entry: dict[str, Any] = {
"connector_id": conn.id,
"display_name": _extract_display_name(conn),
"service": service,
}
]
if is_multi:
entry["tool_prefix"] = f"{service}_{conn.id}"
for key in svc_cfg.account_metadata_keys:
if key in cfg:
entry[key] = cfg[key]
accounts.append(entry)
is_multi = len(connectors) > 1
accounts: list[dict[str, Any]] = []
for conn in connectors:
cfg = conn.config or {}
entry: dict[str, Any] = {
"connector_id": conn.id,
"display_name": _extract_display_name(conn),
"service": service,
}
if is_multi:
entry["tool_prefix"] = f"{service}_{conn.id}"
for key in svc_cfg.account_metadata_keys:
if key in cfg:
entry[key] = cfg[key]
accounts.append(entry)
return accounts
return accounts
return StructuredTool(
name="get_connected_accounts",


@@ -5,6 +5,8 @@ import httpx
from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import async_session_maker
from ._auth import DISCORD_API, get_bot_token, get_discord_connector, get_guild_id
logger = logging.getLogger(__name__)
@@ -15,6 +17,23 @@ def create_list_discord_channels_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the list_discord_channels tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured list_discord_channels tool
"""
del db_session # per-call session — see docstring
@tool
async def list_discord_channels() -> dict[str, Any]:
"""List text channels in the connected Discord server.
@@ -22,59 +41,60 @@ def create_list_discord_channels_tool(
Returns:
Dictionary with status and a list of channels (id, name).
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Discord tool not properly configured.",
}
try:
connector = await get_discord_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Discord connector found."}
guild_id = get_guild_id(connector)
if not guild_id:
return {
"status": "error",
"message": "No guild ID in Discord connector config.",
}
token = get_bot_token(connector)
async with httpx.AsyncClient() as client:
resp = await client.get(
f"{DISCORD_API}/guilds/{guild_id}/channels",
headers={"Authorization": f"Bot {token}"},
timeout=15.0,
async with async_session_maker() as db_session:
connector = await get_discord_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Discord connector found."}
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Discord bot token is invalid.",
"connector_type": "discord",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Discord API error: {resp.status_code}",
}
guild_id = get_guild_id(connector)
if not guild_id:
return {
"status": "error",
"message": "No guild ID in Discord connector config.",
}
# Type 0 = text channel
channels = [
{"id": ch["id"], "name": ch["name"]}
for ch in resp.json()
if ch.get("type") == 0
]
return {
"status": "success",
"guild_id": guild_id,
"channels": channels,
"total": len(channels),
}
token = get_bot_token(connector)
async with httpx.AsyncClient() as client:
resp = await client.get(
f"{DISCORD_API}/guilds/{guild_id}/channels",
headers={"Authorization": f"Bot {token}"},
timeout=15.0,
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Discord bot token is invalid.",
"connector_type": "discord",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Discord API error: {resp.status_code}",
}
# Type 0 = text channel
channels = [
{"id": ch["id"], "name": ch["name"]}
for ch in resp.json()
if ch.get("type") == 0
]
return {
"status": "success",
"guild_id": guild_id,
"channels": channels,
"total": len(channels),
}
except Exception as e:
from langgraph.errors import GraphInterrupt
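
All three Discord tools in this merge triage httpx responses the same way: 401 means the bot token is invalid, 403 means the bot lacks channel permissions, and any other non-success code becomes a generic API error. A hedged sketch of that shared shape as a helper; the helper itself is hypothetical, only the status mapping is from the diff.

```python
# Hypothetical helper capturing the response triage repeated across the
# Discord tools. Returns None when the response is usable.
import httpx

def triage_discord_response(resp: httpx.Response) -> dict | None:
    if resp.status_code == 401:
        return {"status": "auth_error",
                "message": "Discord bot token is invalid.",
                "connector_type": "discord"}
    if resp.status_code == 403:
        return {"status": "error",
                "message": "Bot lacks permission for this channel."}
    if resp.status_code not in (200, 201):
        return {"status": "error",
                "message": f"Discord API error: {resp.status_code}"}
    return None
```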


@@ -5,6 +5,8 @@ import httpx
from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import async_session_maker
from ._auth import DISCORD_API, get_bot_token, get_discord_connector
logger = logging.getLogger(__name__)
@@ -15,6 +17,23 @@ def create_read_discord_messages_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the read_discord_messages tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured read_discord_messages tool
"""
del db_session # per-call session — see docstring
@tool
async def read_discord_messages(
channel_id: str,
@@ -30,7 +49,7 @@ def create_read_discord_messages_tool(
Dictionary with status and a list of messages including
id, author, content, timestamp.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Discord tool not properly configured.",
@@ -39,55 +58,56 @@ def create_read_discord_messages_tool(
limit = min(limit, 50)
try:
connector = await get_discord_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Discord connector found."}
token = get_bot_token(connector)
async with httpx.AsyncClient() as client:
resp = await client.get(
f"{DISCORD_API}/channels/{channel_id}/messages",
headers={"Authorization": f"Bot {token}"},
params={"limit": limit},
timeout=15.0,
async with async_session_maker() as db_session:
connector = await get_discord_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Discord connector found."}
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Discord bot token is invalid.",
"connector_type": "discord",
}
if resp.status_code == 403:
return {
"status": "error",
"message": "Bot lacks permission to read this channel.",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Discord API error: {resp.status_code}",
}
token = get_bot_token(connector)
messages = [
{
"id": m["id"],
"author": m.get("author", {}).get("username", "Unknown"),
"content": m.get("content", ""),
"timestamp": m.get("timestamp", ""),
}
for m in resp.json()
]
async with httpx.AsyncClient() as client:
resp = await client.get(
f"{DISCORD_API}/channels/{channel_id}/messages",
headers={"Authorization": f"Bot {token}"},
params={"limit": limit},
timeout=15.0,
)
return {
"status": "success",
"channel_id": channel_id,
"messages": messages,
"total": len(messages),
}
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Discord bot token is invalid.",
"connector_type": "discord",
}
if resp.status_code == 403:
return {
"status": "error",
"message": "Bot lacks permission to read this channel.",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Discord API error: {resp.status_code}",
}
messages = [
{
"id": m["id"],
"author": m.get("author", {}).get("username", "Unknown"),
"content": m.get("content", ""),
"timestamp": m.get("timestamp", ""),
}
for m in resp.json()
]
return {
"status": "success",
"channel_id": channel_id,
"messages": messages,
"total": len(messages),
}
except Exception as e:
from langgraph.errors import GraphInterrupt


@@ -6,6 +6,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from ._auth import DISCORD_API, get_bot_token, get_discord_connector
@@ -17,6 +18,23 @@ def create_send_discord_message_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the send_discord_message tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured send_discord_message tool
"""
del db_session # per-call session — see docstring
@tool
async def send_discord_message(
channel_id: str,
@@ -34,7 +52,7 @@ def create_send_discord_message_tool(
IMPORTANT:
- If status is "rejected", the user explicitly declined. Do NOT retry.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Discord tool not properly configured.",
@@ -47,64 +65,65 @@ def create_send_discord_message_tool(
}
try:
connector = await get_discord_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Discord connector found."}
async with async_session_maker() as db_session:
connector = await get_discord_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Discord connector found."}
result = request_approval(
action_type="discord_send_message",
tool_name="send_discord_message",
params={"channel_id": channel_id, "content": content},
context={"connector_id": connector.id},
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Message was not sent.",
}
final_content = result.params.get("content", content)
final_channel = result.params.get("channel_id", channel_id)
token = get_bot_token(connector)
async with httpx.AsyncClient() as client:
resp = await client.post(
f"{DISCORD_API}/channels/{final_channel}/messages",
headers={
"Authorization": f"Bot {token}",
"Content-Type": "application/json",
},
json={"content": final_content},
timeout=15.0,
result = request_approval(
action_type="discord_send_message",
tool_name="send_discord_message",
params={"channel_id": channel_id, "content": content},
context={"connector_id": connector.id},
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Discord bot token is invalid.",
"connector_type": "discord",
}
if resp.status_code == 403:
return {
"status": "error",
"message": "Bot lacks permission to send messages in this channel.",
}
if resp.status_code not in (200, 201):
return {
"status": "error",
"message": f"Discord API error: {resp.status_code}",
}
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Message was not sent.",
}
msg_data = resp.json()
return {
"status": "success",
"message_id": msg_data.get("id"),
"message": f"Message sent to channel {final_channel}.",
}
final_content = result.params.get("content", content)
final_channel = result.params.get("channel_id", channel_id)
token = get_bot_token(connector)
async with httpx.AsyncClient() as client:
resp = await client.post(
f"{DISCORD_API}/channels/{final_channel}/messages",
headers={
"Authorization": f"Bot {token}",
"Content-Type": "application/json",
},
json={"content": final_content},
timeout=15.0,
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Discord bot token is invalid.",
"connector_type": "discord",
}
if resp.status_code == 403:
return {
"status": "error",
"message": "Bot lacks permission to send messages in this channel.",
}
if resp.status_code not in (200, 201):
return {
"status": "error",
"message": f"Discord API error: {resp.status_code}",
}
msg_data = resp.json()
return {
"status": "success",
"message_id": msg_data.get("id"),
"message": f"Message sent to channel {final_channel}.",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
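
send_discord_message illustrates the human-in-the-loop contract every write tool in this merge follows: propose the parameters via request_approval, short-circuit on rejection, and otherwise re-read every parameter from result.params because the user may have edited them in the approval UI. A minimal sketch of that contract; request_approval and the result's .rejected and .params fields are from the diff, the surrounding function is illustrative.

```python
# Sketch of the HITL approval contract used by the write tools above.
from app.agents.new_chat.tools.hitl import request_approval

async def send_with_approval(channel_id: str, content: str) -> dict:
    result = request_approval(
        action_type="discord_send_message",
        tool_name="send_discord_message",
        params={"channel_id": channel_id, "content": content},
        context={},
    )
    if result.rejected:
        # The user explicitly declined; callers must not retry.
        return {"status": "rejected", "message": "User declined. Message was not sent."}
    # Prefer the (possibly user-edited) approved params, with the original
    # arguments as fallback.
    final_channel = result.params.get("channel_id", channel_id)
    final_content = result.params.get("content", content)
    return {"status": "approved", "channel_id": final_channel, "content": final_content}
```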


@@ -10,7 +10,7 @@ from sqlalchemy.future import select
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.dropbox.client import DropboxClient
from app.db import SearchSourceConnector, SearchSourceConnectorType
from app.db import SearchSourceConnector, SearchSourceConnectorType, async_session_maker
logger = logging.getLogger(__name__)
@@ -59,6 +59,23 @@ def create_create_dropbox_file_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the create_dropbox_file tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured create_dropbox_file tool
"""
del db_session # per-call session — see docstring
@tool
async def create_dropbox_file(
name: str,
@@ -82,184 +99,191 @@ def create_create_dropbox_file_tool(
f"create_dropbox_file called: name='{name}', file_type='{file_type}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Dropbox tool not properly configured.",
}
try:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.DROPBOX_CONNECTOR,
)
)
connectors = result.scalars().all()
if not connectors:
return {
"status": "error",
"message": "No Dropbox connector found. Please connect Dropbox in your workspace settings.",
}
accounts = []
for c in connectors:
cfg = c.config or {}
accounts.append(
{
"id": c.id,
"name": c.name,
"user_email": cfg.get("user_email"),
"auth_expired": cfg.get("auth_expired", False),
}
)
if all(a.get("auth_expired") for a in accounts):
return {
"status": "auth_error",
"message": "All connected Dropbox accounts need re-authentication.",
"connector_type": "dropbox",
}
parent_folders: dict[int, list[dict[str, str]]] = {}
for acc in accounts:
cid = acc["id"]
if acc.get("auth_expired"):
parent_folders[cid] = []
continue
try:
client = DropboxClient(session=db_session, connector_id=cid)
items, err = await client.list_folder("")
if err:
logger.warning(
"Failed to list folders for connector %s: %s", cid, err
)
parent_folders[cid] = []
else:
parent_folders[cid] = [
{
"folder_path": item.get("path_lower", ""),
"name": item["name"],
}
for item in items
if item.get(".tag") == "folder" and item.get("name")
]
except Exception:
logger.warning(
"Error fetching folders for connector %s", cid, exc_info=True
)
parent_folders[cid] = []
context: dict[str, Any] = {
"accounts": accounts,
"parent_folders": parent_folders,
"supported_types": _SUPPORTED_TYPES,
}
result = request_approval(
action_type="dropbox_file_creation",
tool_name="create_dropbox_file",
params={
"name": name,
"file_type": file_type,
"content": content,
"connector_id": None,
"parent_folder_path": None,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_name = result.params.get("name", name)
final_file_type = result.params.get("file_type", file_type)
final_content = result.params.get("content", content)
final_connector_id = result.params.get("connector_id")
final_parent_folder_path = result.params.get("parent_folder_path")
if not final_name or not final_name.strip():
return {"status": "error", "message": "File name cannot be empty."}
final_name = _ensure_extension(final_name, final_file_type)
if final_connector_id is not None:
async with async_session_maker() as db_session:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.DROPBOX_CONNECTOR,
)
)
connector = result.scalars().first()
else:
connector = connectors[0]
connectors = result.scalars().all()
if not connector:
return {
"status": "error",
"message": "Selected Dropbox connector is invalid.",
if not connectors:
return {
"status": "error",
"message": "No Dropbox connector found. Please connect Dropbox in your workspace settings.",
}
accounts = []
for c in connectors:
cfg = c.config or {}
accounts.append(
{
"id": c.id,
"name": c.name,
"user_email": cfg.get("user_email"),
"auth_expired": cfg.get("auth_expired", False),
}
)
if all(a.get("auth_expired") for a in accounts):
return {
"status": "auth_error",
"message": "All connected Dropbox accounts need re-authentication.",
"connector_type": "dropbox",
}
parent_folders: dict[int, list[dict[str, str]]] = {}
for acc in accounts:
cid = acc["id"]
if acc.get("auth_expired"):
parent_folders[cid] = []
continue
try:
client = DropboxClient(session=db_session, connector_id=cid)
items, err = await client.list_folder("")
if err:
logger.warning(
"Failed to list folders for connector %s: %s", cid, err
)
parent_folders[cid] = []
else:
parent_folders[cid] = [
{
"folder_path": item.get("path_lower", ""),
"name": item["name"],
}
for item in items
if item.get(".tag") == "folder" and item.get("name")
]
except Exception:
logger.warning(
"Error fetching folders for connector %s",
cid,
exc_info=True,
)
parent_folders[cid] = []
context: dict[str, Any] = {
"accounts": accounts,
"parent_folders": parent_folders,
"supported_types": _SUPPORTED_TYPES,
}
client = DropboxClient(session=db_session, connector_id=connector.id)
parent_path = final_parent_folder_path or ""
file_path = (
f"{parent_path}/{final_name}" if parent_path else f"/{final_name}"
)
if final_file_type == "paper":
created = await client.create_paper_doc(file_path, final_content or "")
file_id = created.get("file_id", "")
web_url = created.get("url", "")
else:
docx_bytes = _markdown_to_docx(final_content or "")
created = await client.upload_file(
file_path, docx_bytes, mode="add", autorename=True
result = request_approval(
action_type="dropbox_file_creation",
tool_name="create_dropbox_file",
params={
"name": name,
"file_type": file_type,
"content": content,
"connector_id": None,
"parent_folder_path": None,
},
context=context,
)
file_id = created.get("id", "")
web_url = ""
logger.info(f"Dropbox file created: id={file_id}, name={final_name}")
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
kb_message_suffix = ""
try:
from app.services.dropbox import DropboxKBSyncService
final_name = result.params.get("name", name)
final_file_type = result.params.get("file_type", file_type)
final_content = result.params.get("content", content)
final_connector_id = result.params.get("connector_id")
final_parent_folder_path = result.params.get("parent_folder_path")
kb_service = DropboxKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
file_id=file_id,
file_name=final_name,
file_path=file_path,
web_url=web_url,
content=final_content,
connector_id=connector.id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
if not final_name or not final_name.strip():
return {"status": "error", "message": "File name cannot be empty."}
final_name = _ensure_extension(final_name, final_file_type)
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.DROPBOX_CONNECTOR,
)
)
connector = result.scalars().first()
else:
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
connector = connectors[0]
return {
"status": "success",
"file_id": file_id,
"name": final_name,
"web_url": web_url,
"message": f"Successfully created '{final_name}' in Dropbox.{kb_message_suffix}",
}
if not connector:
return {
"status": "error",
"message": "Selected Dropbox connector is invalid.",
}
client = DropboxClient(session=db_session, connector_id=connector.id)
parent_path = final_parent_folder_path or ""
file_path = (
f"{parent_path}/{final_name}" if parent_path else f"/{final_name}"
)
if final_file_type == "paper":
created = await client.create_paper_doc(
file_path, final_content or ""
)
file_id = created.get("file_id", "")
web_url = created.get("url", "")
else:
docx_bytes = _markdown_to_docx(final_content or "")
created = await client.upload_file(
file_path, docx_bytes, mode="add", autorename=True
)
file_id = created.get("id", "")
web_url = ""
logger.info(f"Dropbox file created: id={file_id}, name={final_name}")
kb_message_suffix = ""
try:
from app.services.dropbox import DropboxKBSyncService
kb_service = DropboxKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
file_id=file_id,
file_name=final_name,
file_path=file_path,
web_url=web_url,
content=final_content,
connector_id=connector.id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"file_id": file_id,
"name": final_name,
"web_url": web_url,
"message": f"Successfully created '{final_name}' in Dropbox.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
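
create_dropbox_file builds the target path from the approved folder and name, then either creates a Paper doc or uploads a .docx rendered from markdown. A sketch of that write step, using the DropboxClient calls shown in the diff; markdown_to_docx is a trivial stand-in for the module's _markdown_to_docx helper.

```python
# Sketch of the Dropbox write step above. `create_paper_doc` and
# `upload_file(..., mode="add", autorename=True)` mirror the diff's calls.
def markdown_to_docx(md: str) -> bytes:
    # Stand-in for `_markdown_to_docx`; a real implementation renders the
    # markdown into a .docx byte stream.
    return md.encode("utf-8")

async def write_dropbox_file(client, name: str, file_type: str,
                             content: str, parent_folder_path: str | None):
    # Dropbox paths are rooted at "/", so an empty parent means top level.
    parent = parent_folder_path or ""
    file_path = f"{parent}/{name}" if parent else f"/{name}"
    if file_type == "paper":
        created = await client.create_paper_doc(file_path, content or "")
        return created.get("file_id", ""), created.get("url", "")
    docx_bytes = markdown_to_docx(content or "")
    created = await client.upload_file(file_path, docx_bytes,
                                       mode="add", autorename=True)
    return created.get("id", ""), ""  # regular uploads get no web URL here
```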


@@ -13,6 +13,7 @@ from app.db import (
DocumentType,
SearchSourceConnector,
SearchSourceConnectorType,
async_session_maker,
)
logger = logging.getLogger(__name__)
@@ -23,6 +24,23 @@ def create_delete_dropbox_file_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the delete_dropbox_file tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured delete_dropbox_file tool
"""
del db_session # per-call session — see docstring
@tool
async def delete_dropbox_file(
file_name: str,
@@ -55,33 +73,14 @@ def create_delete_dropbox_file_tool(
f"delete_dropbox_file called: file_name='{file_name}', delete_from_kb={delete_from_kb}"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Dropbox tool not properly configured.",
}
try:
doc_result = await db_session.execute(
select(Document)
.join(
SearchSourceConnector,
Document.connector_id == SearchSourceConnector.id,
)
.filter(
and_(
Document.search_space_id == search_space_id,
Document.document_type == DocumentType.DROPBOX_FILE,
func.lower(Document.title) == func.lower(file_name),
SearchSourceConnector.user_id == user_id,
)
)
.order_by(Document.updated_at.desc().nullslast())
.limit(1)
)
document = doc_result.scalars().first()
if not document:
async with async_session_maker() as db_session:
doc_result = await db_session.execute(
select(Document)
.join(
@@ -92,13 +91,7 @@ def create_delete_dropbox_file_tool(
and_(
Document.search_space_id == search_space_id,
Document.document_type == DocumentType.DROPBOX_FILE,
func.lower(
cast(
Document.document_metadata["dropbox_file_name"],
String,
)
)
== func.lower(file_name),
func.lower(Document.title) == func.lower(file_name),
SearchSourceConnector.user_id == user_id,
)
)
@@ -107,99 +100,63 @@ def create_delete_dropbox_file_tool(
)
document = doc_result.scalars().first()
if not document:
return {
"status": "not_found",
"message": (
f"File '{file_name}' not found in your indexed Dropbox files. "
"This could mean: (1) the file doesn't exist, (2) it hasn't been indexed yet, "
"or (3) the file name is different."
),
}
if not document.connector_id:
return {
"status": "error",
"message": "Document has no associated connector.",
}
meta = document.document_metadata or {}
file_path = meta.get("dropbox_path")
file_id = meta.get("dropbox_file_id")
document_id = document.id
if not file_path:
return {
"status": "error",
"message": "File path is missing. Please re-index the file.",
}
conn_result = await db_session.execute(
select(SearchSourceConnector).filter(
and_(
SearchSourceConnector.id == document.connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.DROPBOX_CONNECTOR,
if not document:
doc_result = await db_session.execute(
select(Document)
.join(
SearchSourceConnector,
Document.connector_id == SearchSourceConnector.id,
)
.filter(
and_(
Document.search_space_id == search_space_id,
Document.document_type == DocumentType.DROPBOX_FILE,
func.lower(
cast(
Document.document_metadata["dropbox_file_name"],
String,
)
)
== func.lower(file_name),
SearchSourceConnector.user_id == user_id,
)
)
.order_by(Document.updated_at.desc().nullslast())
.limit(1)
)
)
)
connector = conn_result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Dropbox connector not found or access denied.",
}
document = doc_result.scalars().first()
cfg = connector.config or {}
if cfg.get("auth_expired"):
return {
"status": "auth_error",
"message": "Dropbox account needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "dropbox",
}
if not document:
return {
"status": "not_found",
"message": (
f"File '{file_name}' not found in your indexed Dropbox files. "
"This could mean: (1) the file doesn't exist, (2) it hasn't been indexed yet, "
"or (3) the file name is different."
),
}
context = {
"file": {
"file_id": file_id,
"file_path": file_path,
"name": file_name,
"document_id": document_id,
},
"account": {
"id": connector.id,
"name": connector.name,
"user_email": cfg.get("user_email"),
},
}
if not document.connector_id:
return {
"status": "error",
"message": "Document has no associated connector.",
}
result = request_approval(
action_type="dropbox_file_trash",
tool_name="delete_dropbox_file",
params={
"file_path": file_path,
"connector_id": connector.id,
"delete_from_kb": delete_from_kb,
},
context=context,
)
meta = document.document_metadata or {}
file_path = meta.get("dropbox_path")
file_id = meta.get("dropbox_file_id")
document_id = document.id
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
if not file_path:
return {
"status": "error",
"message": "File path is missing. Please re-index the file.",
}
final_file_path = result.params.get("file_path", file_path)
final_connector_id = result.params.get("connector_id", connector.id)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
if final_connector_id != connector.id:
result = await db_session.execute(
conn_result = await db_session.execute(
select(SearchSourceConnector).filter(
and_(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.id == document.connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
@@ -207,61 +164,128 @@ def create_delete_dropbox_file_tool(
)
)
)
validated_connector = result.scalars().first()
if not validated_connector:
connector = conn_result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Dropbox connector is invalid or has been disconnected.",
"message": "Dropbox connector not found or access denied.",
}
actual_connector_id = validated_connector.id
else:
actual_connector_id = connector.id
logger.info(
f"Deleting Dropbox file: path='{final_file_path}', connector={actual_connector_id}"
)
cfg = connector.config or {}
if cfg.get("auth_expired"):
return {
"status": "auth_error",
"message": "Dropbox account needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "dropbox",
}
client = DropboxClient(session=db_session, connector_id=actual_connector_id)
await client.delete_file(final_file_path)
context = {
"file": {
"file_id": file_id,
"file_path": file_path,
"name": file_name,
"document_id": document_id,
},
"account": {
"id": connector.id,
"name": connector.name,
"user_email": cfg.get("user_email"),
},
}
logger.info(f"Dropbox file deleted: path={final_file_path}")
trash_result: dict[str, Any] = {
"status": "success",
"file_id": file_id,
"message": f"Successfully deleted '{file_name}' from Dropbox.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
doc = doc_result.scalars().first()
if doc:
await db_session.delete(doc)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
trash_result["warning"] = (
f"File deleted, but failed to remove from knowledge base: {e!s}"
)
trash_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
trash_result["message"] = (
f"{trash_result.get('message', '')} (also removed from knowledge base)"
result = request_approval(
action_type="dropbox_file_trash",
tool_name="delete_dropbox_file",
params={
"file_path": file_path,
"connector_id": connector.id,
"delete_from_kb": delete_from_kb,
},
context=context,
)
return trash_result
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_file_path = result.params.get("file_path", file_path)
final_connector_id = result.params.get("connector_id", connector.id)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
if final_connector_id != connector.id:
result = await db_session.execute(
select(SearchSourceConnector).filter(
and_(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id
== search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.DROPBOX_CONNECTOR,
)
)
)
validated_connector = result.scalars().first()
if not validated_connector:
return {
"status": "error",
"message": "Selected Dropbox connector is invalid or has been disconnected.",
}
actual_connector_id = validated_connector.id
else:
actual_connector_id = connector.id
logger.info(
f"Deleting Dropbox file: path='{final_file_path}', connector={actual_connector_id}"
)
client = DropboxClient(
session=db_session, connector_id=actual_connector_id
)
await client.delete_file(final_file_path)
logger.info(f"Dropbox file deleted: path={final_file_path}")
trash_result: dict[str, Any] = {
"status": "success",
"file_id": file_id,
"message": f"Successfully deleted '{file_name}' from Dropbox.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
doc = doc_result.scalars().first()
if doc:
await db_session.delete(doc)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
trash_result["warning"] = (
f"File deleted, but failed to remove from knowledge base: {e!s}"
)
trash_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
trash_result["message"] = (
f"{trash_result.get('message', '')} (also removed from knowledge base)"
)
return trash_result
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@@ -31,6 +31,7 @@ from app.services.image_gen_router_service import (
ImageGenRouterService,
is_image_gen_auto_mode,
)
from app.services.provider_api_base import resolve_api_base
from app.utils.signed_image_urls import generate_image_token
logger = logging.getLogger(__name__)
@@ -49,12 +50,16 @@ _PROVIDER_MAP = {
}
def _resolve_provider_prefix(provider: str, custom_provider: str | None) -> str:
if custom_provider:
return custom_provider
return _PROVIDER_MAP.get(provider.upper(), provider.lower())
def _build_model_string(
provider: str, model_name: str, custom_provider: str | None
) -> str:
if custom_provider:
return f"{custom_provider}/{model_name}"
prefix = _PROVIDER_MAP.get(provider.upper(), provider.lower())
prefix = _resolve_provider_prefix(provider, custom_provider)
return f"{prefix}/{model_name}"
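With the prefix lookup factored out, both call sites compose the same way; for example (illustrative values, assuming "OPENAI" resolves through _PROVIDER_MAP or the lower() fallback):
_build_model_string("OPENAI", "gpt-image-1", None)        # -> "openai/gpt-image-1"
_build_model_string("OPENAI", "gpt-image-1", "my-proxy")  # -> "my-proxy/gpt-image-1" (custom provider wins)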
@@ -146,14 +151,18 @@ def create_generate_image_tool(
"error": f"Image generation config {config_id} not found"
}
model_string = _build_model_string(
cfg.get("provider", ""),
cfg["model_name"],
cfg.get("custom_provider"),
provider_prefix = _resolve_provider_prefix(
cfg.get("provider", ""), cfg.get("custom_provider")
)
model_string = f"{provider_prefix}/{cfg['model_name']}"
gen_kwargs["api_key"] = cfg.get("api_key")
if cfg.get("api_base"):
gen_kwargs["api_base"] = cfg["api_base"]
api_base = resolve_api_base(
provider=cfg.get("provider"),
provider_prefix=provider_prefix,
config_api_base=cfg.get("api_base"),
)
if api_base:
gen_kwargs["api_base"] = api_base
if cfg.get("api_version"):
gen_kwargs["api_version"] = cfg["api_version"]
if cfg.get("litellm_params"):
@@ -175,14 +184,18 @@ def create_generate_image_tool(
"error": f"Image generation config {config_id} not found"
}
model_string = _build_model_string(
db_cfg.provider.value,
db_cfg.model_name,
db_cfg.custom_provider,
provider_prefix = _resolve_provider_prefix(
db_cfg.provider.value, db_cfg.custom_provider
)
model_string = f"{provider_prefix}/{db_cfg.model_name}"
gen_kwargs["api_key"] = db_cfg.api_key
if db_cfg.api_base:
gen_kwargs["api_base"] = db_cfg.api_base
api_base = resolve_api_base(
provider=db_cfg.provider.value,
provider_prefix=provider_prefix,
config_api_base=db_cfg.api_base,
)
if api_base:
gen_kwargs["api_base"] = api_base
if db_cfg.api_version:
gen_kwargs["api_version"] = db_cfg.api_version
if db_cfg.litellm_params:

View file

@@ -0,0 +1,41 @@
from typing import Any
from app.db import SearchSourceConnector
from app.services.composio_service import ComposioService
def split_recipients(value: str | None) -> list[str]:
if not value:
return []
return [recipient.strip() for recipient in value.split(",") if recipient.strip()]
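For example, stray whitespace and empty segments are dropped:
split_recipients("a@example.com, b@example.com, ")  # -> ["a@example.com", "b@example.com"]
split_recipients(None)                              # -> []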
def unwrap_composio_data(data: Any) -> Any:
if isinstance(data, dict):
inner = data.get("data", data)
if isinstance(inner, dict):
return inner.get("response_data", inner)
return inner
return data
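The unwrapping peels at most two envelope layers and passes anything else through unchanged:
unwrap_composio_data({"data": {"response_data": {"id": "r1"}}})  # -> {"id": "r1"}
unwrap_composio_data({"data": [1, 2, 3]})                        # -> [1, 2, 3]
unwrap_composio_data("already plain")                            # -> "already plain"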
async def execute_composio_gmail_tool(
connector: SearchSourceConnector,
user_id: str,
tool_name: str,
params: dict[str, Any],
) -> tuple[Any, str | None]:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return None, "Composio connected account ID not found for this Gmail connector."
result = await ComposioService().execute_tool(
connected_account_id=cca_id,
tool_name=tool_name,
params=params,
entity_id=f"surfsense_{user_id}",
)
if not result.get("success"):
return None, result.get("error", "Unknown Composio Gmail error")
return unwrap_composio_data(result.get("data")), None
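Callers receive a (data, error) pair and branch on the error, as the Gmail tools below do; a hypothetical invocation (recipient and body are illustrative, the tool slug mirrors the send tool):
data, error = await execute_composio_gmail_tool(
    connector,
    user_id,
    "GMAIL_SEND_EMAIL",
    {"user_id": "me", "recipient_email": "a@example.com", "subject": "Hi", "body": "Hello"},
)
if error:
    raise RuntimeError(error)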

View file

@@ -9,6 +9,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from app.services.gmail import GmailToolMetadataService
logger = logging.getLogger(__name__)
@@ -19,6 +20,23 @@ def create_create_gmail_draft_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the create_gmail_draft tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured create_gmail_draft tool
"""
del db_session # per-call session — see docstring
@tool
async def create_gmail_draft(
to: str,
@@ -57,246 +75,276 @@ def create_create_gmail_draft_tool(
"""
logger.info(f"create_gmail_draft called: to='{to}', subject='{subject}'")
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Gmail tool not properly configured. Please contact support.",
}
try:
metadata_service = GmailToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
if "error" in context:
logger.error(f"Failed to fetch creation context: {context['error']}")
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning("All Gmail accounts have expired authentication")
return {
"status": "auth_error",
"message": "All connected Gmail accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "gmail",
}
logger.info(
f"Requesting approval for creating Gmail draft: to='{to}', subject='{subject}'"
)
result = request_approval(
action_type="gmail_draft_creation",
tool_name="create_gmail_draft",
params={
"to": to,
"subject": subject,
"body": body,
"cc": cc,
"bcc": bcc,
"connector_id": None,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The draft was not created. Do not ask again or suggest alternatives.",
}
final_to = result.params.get("to", to)
final_subject = result.params.get("subject", subject)
final_body = result.params.get("body", body)
final_cc = result.params.get("cc", cc)
final_bcc = result.params.get("bcc", bcc)
final_connector_id = result.params.get("connector_id")
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_gmail_types = [
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR,
]
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
async with async_session_maker() as db_session:
metadata_service = GmailToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Gmail connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
if "error" in context:
logger.error(
f"Failed to fetch creation context: {context['error']}"
)
)
connector = result.scalars().first()
if not connector:
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning("All Gmail accounts have expired authentication")
return {
"status": "error",
"message": "No Gmail connector found. Please connect Gmail in your workspace settings.",
"status": "auth_error",
"message": "All connected Gmail accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "gmail",
}
actual_connector_id = connector.id
logger.info(
f"Creating Gmail draft: to='{final_to}', subject='{final_subject}', connector={actual_connector_id}"
)
logger.info(
f"Requesting approval for creating Gmail draft: to='{to}', subject='{subject}'"
)
result = request_approval(
action_type="gmail_draft_creation",
tool_name="create_gmail_draft",
params={
"to": to,
"subject": subject,
"body": body,
"cc": cc,
"bcc": bcc,
"connector_id": None,
},
context=context,
)
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The draft was not created. Do not ask again or suggest alternatives.",
}
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
creds = build_composio_credentials(cca_id)
final_to = result.params.get("to", to)
final_subject = result.params.get("subject", subject)
final_body = result.params.get("body", body)
final_cc = result.params.get("cc", cc)
final_bcc = result.params.get("bcc", bcc)
final_connector_id = result.params.get("connector_id")
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_gmail_types = [
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR,
]
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Gmail connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
else:
return {
"status": "error",
"message": "Composio connected account ID not found for this Gmail connector.",
}
else:
from google.oauth2.credentials import Credentials
from app.config import config
from app.utils.oauth_security import TokenEncryption
config_data = dict(connector.config)
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
if config_data.get("client_secret"):
config_data["client_secret"] = token_encryption.decrypt_token(
config_data["client_secret"]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
from googleapiclient.discovery import build
gmail_service = build("gmail", "v1", credentials=creds)
message = MIMEText(final_body)
message["to"] = final_to
message["subject"] = final_subject
if final_cc:
message["cc"] = final_cc
if final_bcc:
message["bcc"] = final_bcc
raw = base64.urlsafe_b64encode(message.as_bytes()).decode()
try:
created = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
gmail_service.users()
.drafts()
.create(userId="me", body={"message": {"raw": raw}})
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Gmail connector found. Please connect Gmail in your workspace settings.",
}
actual_connector_id = connector.id
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
logger.info(
f"Creating Gmail draft: to='{final_to}', subject='{final_subject}', connector={actual_connector_id}"
)
is_composio_gmail = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
)
if is_composio_gmail:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this Gmail connector.",
}
else:
from google.oauth2.credentials import Credentials
from app.config import config
from app.utils.oauth_security import TokenEncryption
config_data = dict(connector.config)
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = (
token_encryption.decrypt_token(
config_data["refresh_token"]
)
)
if config_data.get("client_secret"):
config_data["client_secret"] = (
token_encryption.decrypt_token(
config_data["client_secret"]
)
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
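# The trailing "Z" is stripped above because datetime.fromisoformat() only
# accepts it from Python 3.11 onward; on older versions it raises:
#   datetime.fromisoformat("2026-01-01T00:00:00Z")  # ValueError before 3.11
#   datetime.fromisoformat("2026-01-01T00:00:00")   # accepted everywhere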
message = MIMEText(final_body)
message["to"] = final_to
message["subject"] = final_subject
if final_cc:
message["cc"] = final_cc
if final_bcc:
message["bcc"] = final_bcc
raw = base64.urlsafe_b64encode(message.as_bytes()).decode()
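# The Gmail API expects the complete RFC 2822 message as a base64url string,
# which is exactly what the construction above produces, e.g.:
#   msg = MIMEText("Hello"); msg["to"] = "someone@example.com"
#   base64.urlsafe_b64encode(msg.as_bytes()).decode()
#   # -> the value placed in {"message": {"raw": ...}} for drafts.create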
try:
if is_composio_gmail:
from app.agents.new_chat.tools.gmail.composio_helpers import (
execute_composio_gmail_tool,
split_recipients,
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
created, error = await execute_composio_gmail_tool(
connector,
user_id,
"GMAIL_CREATE_EMAIL_DRAFT",
{
"user_id": "me",
"recipient_email": final_to,
"subject": final_subject,
"body": final_body,
"cc": split_recipients(final_cc),
"bcc": split_recipients(final_bcc),
"is_html": False,
},
)
if error:
raise RuntimeError(error)
if not isinstance(created, dict):
created = {}
else:
from googleapiclient.discovery import build
gmail_service = build("gmail", "v1", credentials=creds)
created = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
gmail_service.users()
.drafts()
.create(userId="me", body={"message": {"raw": raw}})
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Gmail account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
try:
from sqlalchemy.orm.attributes import flag_modified
logger.info(f"Gmail draft created: id={created.get('id')}")
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Gmail account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
kb_message_suffix = ""
try:
from app.services.gmail import GmailKBSyncService
logger.info(f"Gmail draft created: id={created.get('id')}")
kb_service = GmailKBSyncService(db_session)
draft_message = created.get("message", {})
kb_result = await kb_service.sync_after_create(
message_id=draft_message.get("id", ""),
thread_id=draft_message.get("threadId", ""),
subject=final_subject,
sender="me",
date_str=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
body_text=final_body,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
draft_id=created.get("id"),
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
else:
kb_message_suffix = ""
try:
from app.services.gmail import GmailKBSyncService
kb_service = GmailKBSyncService(db_session)
draft_message = created.get("message", {})
kb_result = await kb_service.sync_after_create(
message_id=draft_message.get("id", ""),
thread_id=draft_message.get("threadId", ""),
subject=final_subject,
sender="me",
date_str=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
body_text=final_body,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
draft_id=created.get("id"),
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This draft will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This draft will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This draft will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"draft_id": created.get("id"),
"message": f"Successfully created Gmail draft with subject '{final_subject}'.{kb_message_suffix}",
}
return {
"status": "success",
"draft_id": created.get("id"),
"message": f"Successfully created Gmail draft with subject '{final_subject}'.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@@ -5,7 +5,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
from app.db import SearchSourceConnector, SearchSourceConnectorType, async_session_maker
logger = logging.getLogger(__name__)
@@ -20,6 +20,23 @@ def create_read_gmail_email_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the read_gmail_email tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured read_gmail_email tool
"""
del db_session # per-call session — see docstring
@tool
async def read_gmail_email(message_id: str) -> dict[str, Any]:
"""Read the full content of a specific Gmail email by its message ID.
@@ -32,60 +49,115 @@ def create_read_gmail_email_tool(
Returns:
Dictionary with status and the full email content formatted as markdown.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Gmail tool not properly configured."}
try:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_GMAIL_TYPES),
async with async_session_maker() as db_session:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_GMAIL_TYPES),
)
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Gmail connector found. Please connect Gmail in your workspace settings.",
}
from app.agents.new_chat.tools.gmail.search_emails import _build_credentials
creds = _build_credentials(connector)
from app.connectors.google_gmail_connector import GoogleGmailConnector
gmail = GoogleGmailConnector(
credentials=creds,
session=db_session,
user_id=user_id,
connector_id=connector.id,
)
detail, error = await gmail.get_message_details(message_id)
if error:
if (
"re-authenticate" in error.lower()
or "authentication failed" in error.lower()
):
connector = result.scalars().first()
if not connector:
return {
"status": "auth_error",
"message": error,
"connector_type": "gmail",
"status": "error",
"message": "No Gmail connector found. Please connect Gmail in your workspace settings.",
}
return {"status": "error", "message": error}
if not detail:
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
):
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found.",
}
from app.agents.new_chat.tools.gmail.search_emails import (
_format_gmail_summary,
)
from app.services.composio_service import ComposioService
service = ComposioService()
detail, error = await service.get_gmail_message_detail(
connected_account_id=cca_id,
entity_id=f"surfsense_{user_id}",
message_id=message_id,
)
if error:
return {"status": "error", "message": error}
if not detail:
return {
"status": "not_found",
"message": f"Email with ID '{message_id}' not found.",
}
summary = _format_gmail_summary(detail)
content = (
f"# {summary['subject']}\n\n"
f"**From:** {summary['from']}\n"
f"**To:** {summary['to']}\n"
f"**Date:** {summary['date']}\n\n"
f"## Message Content\n\n"
f"{detail.get('messageText') or detail.get('snippet') or ''}\n\n"
f"## Message Details\n\n"
f"- **Message ID:** {summary['message_id']}\n"
f"- **Thread ID:** {summary['thread_id']}\n"
)
return {
"status": "success",
"message_id": summary["message_id"] or message_id,
"content": content,
}
from app.agents.new_chat.tools.gmail.search_emails import (
_build_credentials,
)
creds = _build_credentials(connector)
from app.connectors.google_gmail_connector import GoogleGmailConnector
gmail = GoogleGmailConnector(
credentials=creds,
session=db_session,
user_id=user_id,
connector_id=connector.id,
)
detail, error = await gmail.get_message_details(message_id)
if error:
if (
"re-authenticate" in error.lower()
or "authentication failed" in error.lower()
):
return {
"status": "auth_error",
"message": error,
"connector_type": "gmail",
}
return {"status": "error", "message": error}
if not detail:
return {
"status": "not_found",
"message": f"Email with ID '{message_id}' not found.",
}
content = gmail.format_message_to_markdown(detail)
return {
"status": "not_found",
"message": f"Email with ID '{message_id}' not found.",
"status": "success",
"message_id": message_id,
"content": content,
}
content = gmail.format_message_to_markdown(detail)
return {"status": "success", "message_id": message_id, "content": content}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@@ -6,7 +6,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
from app.db import SearchSourceConnector, SearchSourceConnectorType, async_session_maker
logger = logging.getLogger(__name__)
@@ -39,12 +39,7 @@ def _build_credentials(connector: SearchSourceConnector):
from app.utils.google_credentials import COMPOSIO_GOOGLE_CONNECTOR_TYPES
if connector.connector_type in COMPOSIO_GOOGLE_CONNECTOR_TYPES:
from app.utils.google_credentials import build_composio_credentials
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
raise ValueError("Composio connected account ID not found.")
return build_composio_credentials(cca_id)
raise ValueError("Composio connectors must use Composio tool execution.")
from google.oauth2.credentials import Credentials
@@ -67,11 +62,85 @@ def _build_credentials(connector: SearchSourceConnector):
)
def _gmail_headers(message: dict[str, Any]) -> dict[str, str]:
headers = message.get("payload", {}).get("headers", [])
return {
header.get("name", "").lower(): header.get("value", "")
for header in headers
if isinstance(header, dict)
}
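Header names are lower-cased so later lookups are case-insensitive; for example:
_gmail_headers({"payload": {"headers": [{"name": "From", "value": "a@example.com"}]}})
# -> {"from": "a@example.com"}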
def _format_gmail_summary(message: dict[str, Any]) -> dict[str, Any]:
headers = _gmail_headers(message)
return {
"message_id": message.get("id") or message.get("messageId"),
"thread_id": message.get("threadId"),
"subject": message.get("subject") or headers.get("subject", "No Subject"),
"from": message.get("sender") or headers.get("from", "Unknown"),
"to": message.get("to") or headers.get("to", ""),
"date": message.get("messageTimestamp") or headers.get("date", ""),
"snippet": message.get("snippet") or message.get("messageText", "")[:300],
"labels": message.get("labelIds", []),
}
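The fallbacks let one formatter serve both message shapes; for example:
# Native Gmail API shape: subject comes out of the payload headers.
_format_gmail_summary({
    "id": "m1",
    "threadId": "t1",
    "payload": {"headers": [{"name": "Subject", "value": "Hi"}]},
    "snippet": "Hi there",
})
# Composio shape: top-level fields win, snippet falls back to messageText[:300].
_format_gmail_summary({
    "messageId": "m2",
    "subject": "Re: Hi",
    "sender": "a@example.com",
    "messageText": "long body ...",
})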
async def _search_composio_gmail(
connector: SearchSourceConnector,
user_id: str,
query: str,
max_results: int,
) -> dict[str, Any]:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found.",
}
from app.services.composio_service import ComposioService
service = ComposioService()
messages, _next_token, _estimate, error = await service.get_gmail_messages(
connected_account_id=cca_id,
entity_id=f"surfsense_{user_id}",
query=query,
max_results=max_results,
)
if error:
return {"status": "error", "message": error}
emails = [_format_gmail_summary(message) for message in messages]
return {
"status": "success",
"emails": emails,
"total": len(emails),
"message": "No emails found." if not emails else None,
}
def create_search_gmail_tool(
db_session: AsyncSession | None = None,
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the search_gmail tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured search_gmail tool
"""
del db_session # per-call session — see docstring
@tool
async def search_gmail(
query: str,
@@ -90,83 +159,92 @@ def create_search_gmail_tool(
Dictionary with status and a list of email summaries including
message_id, subject, from, date, snippet.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Gmail tool not properly configured."}
max_results = min(max_results, 20)
try:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_GMAIL_TYPES),
async with async_session_maker() as db_session:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_GMAIL_TYPES),
)
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Gmail connector found. Please connect Gmail in your workspace settings.",
}
creds = _build_credentials(connector)
from app.connectors.google_gmail_connector import GoogleGmailConnector
gmail = GoogleGmailConnector(
credentials=creds,
session=db_session,
user_id=user_id,
connector_id=connector.id,
)
messages_list, error = await gmail.get_messages_list(
max_results=max_results, query=query
)
if error:
if (
"re-authenticate" in error.lower()
or "authentication failed" in error.lower()
):
connector = result.scalars().first()
if not connector:
return {
"status": "auth_error",
"message": error,
"connector_type": "gmail",
"status": "error",
"message": "No Gmail connector found. Please connect Gmail in your workspace settings.",
}
return {"status": "error", "message": error}
if not messages_list:
return {
"status": "success",
"emails": [],
"total": 0,
"message": "No emails found.",
}
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
):
return await _search_composio_gmail(
connector, str(user_id), query, max_results
)
emails = []
for msg in messages_list:
detail, err = await gmail.get_message_details(msg["id"])
if err:
continue
headers = {
h["name"].lower(): h["value"]
for h in detail.get("payload", {}).get("headers", [])
}
emails.append(
{
"message_id": detail.get("id"),
"thread_id": detail.get("threadId"),
"subject": headers.get("subject", "No Subject"),
"from": headers.get("from", "Unknown"),
"to": headers.get("to", ""),
"date": headers.get("date", ""),
"snippet": detail.get("snippet", ""),
"labels": detail.get("labelIds", []),
}
creds = _build_credentials(connector)
from app.connectors.google_gmail_connector import GoogleGmailConnector
gmail = GoogleGmailConnector(
credentials=creds,
session=db_session,
user_id=user_id,
connector_id=connector.id,
)
return {"status": "success", "emails": emails, "total": len(emails)}
messages_list, error = await gmail.get_messages_list(
max_results=max_results, query=query
)
if error:
if (
"re-authenticate" in error.lower()
or "authentication failed" in error.lower()
):
return {
"status": "auth_error",
"message": error,
"connector_type": "gmail",
}
return {"status": "error", "message": error}
if not messages_list:
return {
"status": "success",
"emails": [],
"total": 0,
"message": "No emails found.",
}
emails = []
for msg in messages_list:
detail, err = await gmail.get_message_details(msg["id"])
if err:
continue
headers = {
h["name"].lower(): h["value"]
for h in detail.get("payload", {}).get("headers", [])
}
emails.append(
{
"message_id": detail.get("id"),
"thread_id": detail.get("threadId"),
"subject": headers.get("subject", "No Subject"),
"from": headers.get("from", "Unknown"),
"to": headers.get("to", ""),
"date": headers.get("date", ""),
"snippet": detail.get("snippet", ""),
"labels": detail.get("labelIds", []),
}
)
return {"status": "success", "emails": emails, "total": len(emails)}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@@ -9,6 +9,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from app.services.gmail import GmailToolMetadataService
logger = logging.getLogger(__name__)
@@ -19,6 +20,23 @@ def create_send_gmail_email_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the send_gmail_email tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured send_gmail_email tool
"""
del db_session # per-call session — see docstring
@tool
async def send_gmail_email(
to: str,
@@ -58,247 +76,277 @@ def create_send_gmail_email_tool(
"""
logger.info(f"send_gmail_email called: to='{to}', subject='{subject}'")
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Gmail tool not properly configured. Please contact support.",
}
try:
metadata_service = GmailToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
if "error" in context:
logger.error(f"Failed to fetch creation context: {context['error']}")
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning("All Gmail accounts have expired authentication")
return {
"status": "auth_error",
"message": "All connected Gmail accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "gmail",
}
logger.info(
f"Requesting approval for sending Gmail email: to='{to}', subject='{subject}'"
)
result = request_approval(
action_type="gmail_email_send",
tool_name="send_gmail_email",
params={
"to": to,
"subject": subject,
"body": body,
"cc": cc,
"bcc": bcc,
"connector_id": None,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The email was not sent. Do not ask again or suggest alternatives.",
}
final_to = result.params.get("to", to)
final_subject = result.params.get("subject", subject)
final_body = result.params.get("body", body)
final_cc = result.params.get("cc", cc)
final_bcc = result.params.get("bcc", bcc)
final_connector_id = result.params.get("connector_id")
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_gmail_types = [
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR,
]
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
async with async_session_maker() as db_session:
metadata_service = GmailToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Gmail connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
if "error" in context:
logger.error(
f"Failed to fetch creation context: {context['error']}"
)
)
connector = result.scalars().first()
if not connector:
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning("All Gmail accounts have expired authentication")
return {
"status": "error",
"message": "No Gmail connector found. Please connect Gmail in your workspace settings.",
"status": "auth_error",
"message": "All connected Gmail accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "gmail",
}
actual_connector_id = connector.id
logger.info(
f"Sending Gmail email: to='{final_to}', subject='{final_subject}', connector={actual_connector_id}"
)
logger.info(
f"Requesting approval for sending Gmail email: to='{to}', subject='{subject}'"
)
result = request_approval(
action_type="gmail_email_send",
tool_name="send_gmail_email",
params={
"to": to,
"subject": subject,
"body": body,
"cc": cc,
"bcc": bcc,
"connector_id": None,
},
context=context,
)
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The email was not sent. Do not ask again or suggest alternatives.",
}
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
creds = build_composio_credentials(cca_id)
final_to = result.params.get("to", to)
final_subject = result.params.get("subject", subject)
final_body = result.params.get("body", body)
final_cc = result.params.get("cc", cc)
final_bcc = result.params.get("bcc", bcc)
final_connector_id = result.params.get("connector_id")
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_gmail_types = [
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR,
]
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Gmail connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
else:
return {
"status": "error",
"message": "Composio connected account ID not found for this Gmail connector.",
}
else:
from google.oauth2.credentials import Credentials
from app.config import config
from app.utils.oauth_security import TokenEncryption
config_data = dict(connector.config)
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
if config_data.get("client_secret"):
config_data["client_secret"] = token_encryption.decrypt_token(
config_data["client_secret"]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
from googleapiclient.discovery import build
gmail_service = build("gmail", "v1", credentials=creds)
message = MIMEText(final_body)
message["to"] = final_to
message["subject"] = final_subject
if final_cc:
message["cc"] = final_cc
if final_bcc:
message["bcc"] = final_bcc
raw = base64.urlsafe_b64encode(message.as_bytes()).decode()
try:
sent = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
gmail_service.users()
.messages()
.send(userId="me", body={"raw": raw})
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Gmail connector found. Please connect Gmail in your workspace settings.",
}
actual_connector_id = connector.id
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
logger.info(
f"Sending Gmail email: to='{final_to}', subject='{final_subject}', connector={actual_connector_id}"
)
is_composio_gmail = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
)
if is_composio_gmail:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this Gmail connector.",
}
else:
from google.oauth2.credentials import Credentials
from app.config import config
from app.utils.oauth_security import TokenEncryption
config_data = dict(connector.config)
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = (
token_encryption.decrypt_token(
config_data["refresh_token"]
)
)
if config_data.get("client_secret"):
config_data["client_secret"] = (
token_encryption.decrypt_token(
config_data["client_secret"]
)
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
message = MIMEText(final_body)
message["to"] = final_to
message["subject"] = final_subject
if final_cc:
message["cc"] = final_cc
if final_bcc:
message["bcc"] = final_bcc
raw = base64.urlsafe_b64encode(message.as_bytes()).decode()
try:
if is_composio_gmail:
from app.agents.new_chat.tools.gmail.composio_helpers import (
execute_composio_gmail_tool,
split_recipients,
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
sent, error = await execute_composio_gmail_tool(
connector,
user_id,
"GMAIL_SEND_EMAIL",
{
"user_id": "me",
"recipient_email": final_to,
"subject": final_subject,
"body": final_body,
"cc": split_recipients(final_cc),
"bcc": split_recipients(final_bcc),
"is_html": False,
},
)
if error:
raise RuntimeError(error)
if not isinstance(sent, dict):
sent = {}
else:
from googleapiclient.discovery import build
gmail_service = build("gmail", "v1", credentials=creds)
sent = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
gmail_service.users()
.messages()
.send(userId="me", body={"raw": raw})
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Gmail account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
try:
from sqlalchemy.orm.attributes import flag_modified
logger.info(
f"Gmail email sent: id={sent.get('id')}, threadId={sent.get('threadId')}"
)
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Gmail account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
kb_message_suffix = ""
try:
from app.services.gmail import GmailKBSyncService
kb_service = GmailKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
message_id=sent.get("id", ""),
thread_id=sent.get("threadId", ""),
subject=final_subject,
sender="me",
date_str=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
body_text=final_body,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
logger.info(
f"Gmail email sent: id={sent.get('id')}, threadId={sent.get('threadId')}"
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
else:
kb_message_suffix = " This email will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after send failed: {kb_err}")
kb_message_suffix = " This email will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"message_id": sent.get("id"),
"thread_id": sent.get("threadId"),
"message": f"Successfully sent email to '{final_to}' with subject '{final_subject}'.{kb_message_suffix}",
}
kb_message_suffix = ""
try:
from app.services.gmail import GmailKBSyncService
kb_service = GmailKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
message_id=sent.get("id", ""),
thread_id=sent.get("threadId", ""),
subject=final_subject,
sender="me",
date_str=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
body_text=final_body,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This email will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after send failed: {kb_err}")
kb_message_suffix = " This email will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"message_id": sent.get("id"),
"thread_id": sent.get("threadId"),
"message": f"Successfully sent email to '{final_to}' with subject '{final_subject}'.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@@ -7,6 +7,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from app.services.gmail import GmailToolMetadataService
logger = logging.getLogger(__name__)
@@ -17,6 +18,23 @@ def create_trash_gmail_email_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the trash_gmail_email tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured trash_gmail_email tool
"""
del db_session # per-call session — see docstring
@tool
async def trash_gmail_email(
email_subject_or_id: str,
@@ -55,244 +73,261 @@ def create_trash_gmail_email_tool(
f"trash_gmail_email called: email_subject_or_id='{email_subject_or_id}', delete_from_kb={delete_from_kb}"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Gmail tool not properly configured. Please contact support.",
}
try:
metadata_service = GmailToolMetadataService(db_session)
context = await metadata_service.get_trash_context(
search_space_id, user_id, email_subject_or_id
)
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"Email not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch trash context: {error_msg}")
return {"status": "error", "message": error_msg}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Gmail account %s has expired authentication",
account.get("id"),
async with async_session_maker() as db_session:
metadata_service = GmailToolMetadataService(db_session)
context = await metadata_service.get_trash_context(
search_space_id, user_id, email_subject_or_id
)
return {
"status": "auth_error",
"message": "The Gmail account for this email needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "gmail",
}
email = context["email"]
message_id = email["message_id"]
document_id = email.get("document_id")
connector_id_from_context = context["account"]["id"]
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"Email not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch trash context: {error_msg}")
return {"status": "error", "message": error_msg}
if not message_id:
return {
"status": "error",
"message": "Message ID is missing from the indexed document. Please re-index the email and try again.",
}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Gmail account %s has expired authentication",
account.get("id"),
)
return {
"status": "auth_error",
"message": "The Gmail account for this email needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "gmail",
}
logger.info(
f"Requesting approval for trashing Gmail email: '{email_subject_or_id}' (message_id={message_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="gmail_email_trash",
tool_name="trash_gmail_email",
params={
"message_id": message_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
email = context["email"]
message_id = email["message_id"]
document_id = email.get("document_id")
connector_id_from_context = context["account"]["id"]
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The email was not trashed. Do not ask again or suggest alternatives.",
}
final_message_id = result.params.get("message_id", message_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this email.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_gmail_types = [
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR,
]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Gmail connector is invalid or has been disconnected.",
}
logger.info(
f"Trashing Gmail email: message_id='{final_message_id}', connector={final_connector_id}"
)
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
creds = build_composio_credentials(cca_id)
else:
if not message_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this Gmail connector.",
"message": "Message ID is missing from the indexed document. Please re-index the email and try again.",
}
else:
from google.oauth2.credentials import Credentials
from app.config import config
from app.utils.oauth_security import TokenEncryption
config_data = dict(connector.config)
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
if config_data.get("client_secret"):
config_data["client_secret"] = token_encryption.decrypt_token(
config_data["client_secret"]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
logger.info(
f"Requesting approval for trashing Gmail email: '{email_subject_or_id}' (message_id={message_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="gmail_email_trash",
tool_name="trash_gmail_email",
params={
"message_id": message_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
from googleapiclient.discovery import build
gmail_service = build("gmail", "v1", credentials=creds)
try:
await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
gmail_service.users()
.messages()
.trash(userId="me", id=final_message_id)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {connector.id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
if not connector.config.get("auth_expired"):
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
connector.id,
exc_info=True,
)
if result.rejected:
return {
"status": "insufficient_permissions",
"connector_id": connector.id,
"message": "This Gmail account needs additional permissions. Please re-authenticate in connector settings.",
"status": "rejected",
"message": "User declined. The email was not trashed. Do not ask again or suggest alternatives.",
}
raise
logger.info(f"Gmail email trashed: message_id={final_message_id}")
trash_result: dict[str, Any] = {
"status": "success",
"message_id": final_message_id,
"message": f"Successfully moved email '{email.get('subject', email_subject_or_id)}' to trash.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
trash_result["warning"] = (
f"Email trashed, but failed to remove from knowledge base: {e!s}"
)
trash_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
trash_result["message"] = (
f"{trash_result.get('message', '')} (also removed from knowledge base)"
final_message_id = result.params.get("message_id", message_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
return trash_result
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this email.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_gmail_types = [
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR,
]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Gmail connector is invalid or has been disconnected.",
}
logger.info(
f"Trashing Gmail email: message_id='{final_message_id}', connector={final_connector_id}"
)
is_composio_gmail = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
)
if is_composio_gmail:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this Gmail connector.",
}
else:
from google.oauth2.credentials import Credentials
from app.config import config
from app.utils.oauth_security import TokenEncryption
config_data = dict(connector.config)
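                    # Shallow-copy the config so token decryption below never
                    # mutates the ORM-backed dict on the connector row.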
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = (
token_encryption.decrypt_token(
config_data["refresh_token"]
)
)
if config_data.get("client_secret"):
config_data["client_secret"] = (
token_encryption.decrypt_token(
config_data["client_secret"]
)
)
exp = config_data.get("expiry", "")
if exp:
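                        # fromisoformat() rejects a trailing "Z" before Python
                        # 3.11; strip it and treat the expiry as naive UTC,
                        # which is what google-auth expects.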
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
try:
if is_composio_gmail:
from app.agents.new_chat.tools.gmail.composio_helpers import (
execute_composio_gmail_tool,
)
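                    # The helper returns a (result, error) pair rather than
                    # raising; promote errors to exceptions so they propagate
                    # like the raw-API path.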
_trashed, error = await execute_composio_gmail_tool(
connector,
user_id,
"GMAIL_MOVE_TO_TRASH",
{"user_id": "me", "message_id": final_message_id},
)
if error:
raise RuntimeError(error)
else:
from googleapiclient.discovery import build
gmail_service = build("gmail", "v1", credentials=creds)
await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
gmail_service.users()
.messages()
.trash(userId="me", id=final_message_id)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
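                # A 403 here usually means the granted OAuth scopes do not
                # include gmail.modify; flag the connector so the UI can
                # prompt for re-authentication.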
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {connector.id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
if not connector.config.get("auth_expired"):
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
connector.id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": connector.id,
"message": "This Gmail account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
logger.info(f"Gmail email trashed: message_id={final_message_id}")
trash_result: dict[str, Any] = {
"status": "success",
"message_id": final_message_id,
"message": f"Successfully moved email '{email.get('subject', email_subject_or_id)}' to trash.",
}
deleted_from_kb = False
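                # KB cleanup is best-effort: the Gmail trash already
                # succeeded, so a failure here downgrades to a warning on the
                # result instead of failing the call.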
if final_delete_from_kb and document_id:
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
trash_result["warning"] = (
f"Email trashed, but failed to remove from knowledge base: {e!s}"
)
trash_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
trash_result["message"] = (
f"{trash_result.get('message', '')} (also removed from knowledge base)"
)
return trash_result
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@ -9,6 +9,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from app.services.gmail import GmailToolMetadataService
logger = logging.getLogger(__name__)
@ -19,6 +20,23 @@ def create_update_gmail_draft_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
    Factory function to create the update_gmail_draft tool.

    The tool acquires its own short-lived ``AsyncSession`` per call via
    :data:`async_session_maker` so the closure is safe to share across
    HTTP requests by the compiled-agent cache. Capturing a per-request
    session here would surface stale/closed sessions on cache hits.

    Args:
        db_session: Reserved for registry compatibility. Per-call sessions
            are opened via :data:`async_session_maker` inside the tool body.

    Returns:
        Configured update_gmail_draft tool
"""
del db_session # per-call session — see docstring
@tool
async def update_gmail_draft(
draft_subject_or_id: str,
@ -76,294 +94,329 @@ def create_update_gmail_draft_tool(
f"update_gmail_draft called: draft_subject_or_id='{draft_subject_or_id}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Gmail tool not properly configured. Please contact support.",
}
try:
metadata_service = GmailToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, draft_subject_or_id
)
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"Draft not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch update context: {error_msg}")
return {"status": "error", "message": error_msg}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Gmail account %s has expired authentication",
account.get("id"),
)
return {
"status": "auth_error",
"message": "The Gmail account for this draft needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "gmail",
}
email = context["email"]
message_id = email["message_id"]
document_id = email.get("document_id")
connector_id_from_context = account["id"]
draft_id_from_context = context.get("draft_id")
original_subject = email.get("subject", draft_subject_or_id)
final_subject_default = subject if subject else original_subject
final_to_default = to if to else ""
logger.info(
f"Requesting approval for updating Gmail draft: '{original_subject}' "
f"(message_id={message_id}, draft_id={draft_id_from_context})"
)
result = request_approval(
action_type="gmail_draft_update",
tool_name="update_gmail_draft",
params={
"message_id": message_id,
"draft_id": draft_id_from_context,
"to": final_to_default,
"subject": final_subject_default,
"body": body,
"cc": cc,
"bcc": bcc,
"connector_id": connector_id_from_context,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The draft was not updated. Do not ask again or suggest alternatives.",
}
final_to = result.params.get("to", final_to_default)
final_subject = result.params.get("subject", final_subject_default)
final_body = result.params.get("body", body)
final_cc = result.params.get("cc", cc)
final_bcc = result.params.get("bcc", bcc)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_draft_id = result.params.get("draft_id", draft_id_from_context)
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this draft.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_gmail_types = [
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR,
]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Gmail connector is invalid or has been disconnected.",
}
logger.info(
f"Updating Gmail draft: subject='{final_subject}', connector={final_connector_id}"
)
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
creds = build_composio_credentials(cca_id)
else:
return {
"status": "error",
"message": "Composio connected account ID not found for this Gmail connector.",
}
else:
from google.oauth2.credentials import Credentials
from app.config import config
from app.utils.oauth_security import TokenEncryption
config_data = dict(connector.config)
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
if config_data.get("client_secret"):
config_data["client_secret"] = token_encryption.decrypt_token(
config_data["client_secret"]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
async with async_session_maker() as db_session:
metadata_service = GmailToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, draft_subject_or_id
)
from googleapiclient.discovery import build
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"Draft not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch update context: {error_msg}")
return {"status": "error", "message": error_msg}
gmail_service = build("gmail", "v1", credentials=creds)
# Resolve draft_id if not already available
if not final_draft_id:
logger.info(
f"draft_id not in metadata, looking up via drafts.list for message_id={message_id}"
)
final_draft_id = await _find_draft_id_by_message(
gmail_service, message_id
)
if not final_draft_id:
return {
"status": "error",
"message": (
"Could not find this draft in Gmail. "
"It may have already been sent or deleted."
),
}
message = MIMEText(final_body)
if final_to:
message["to"] = final_to
message["subject"] = final_subject
if final_cc:
message["cc"] = final_cc
if final_bcc:
message["bcc"] = final_bcc
raw = base64.urlsafe_b64encode(message.as_bytes()).decode()
try:
updated = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
gmail_service.users()
.drafts()
.update(
userId="me",
id=final_draft_id,
body={"message": {"raw": raw}},
)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
f"Insufficient permissions for connector {connector.id}: {api_err}"
"Gmail account %s has expired authentication",
account.get("id"),
)
try:
from sqlalchemy.orm.attributes import flag_modified
if not connector.config.get("auth_expired"):
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
connector.id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": connector.id,
"message": "This Gmail account needs additional permissions. Please re-authenticate in connector settings.",
"status": "auth_error",
"message": "The Gmail account for this draft needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "gmail",
}
if isinstance(api_err, HttpError) and api_err.resp.status == 404:
email = context["email"]
message_id = email["message_id"]
document_id = email.get("document_id")
connector_id_from_context = account["id"]
draft_id_from_context = context.get("draft_id")
original_subject = email.get("subject", draft_subject_or_id)
final_subject_default = subject if subject else original_subject
final_to_default = to if to else ""
logger.info(
f"Requesting approval for updating Gmail draft: '{original_subject}' "
f"(message_id={message_id}, draft_id={draft_id_from_context})"
)
result = request_approval(
action_type="gmail_draft_update",
tool_name="update_gmail_draft",
params={
"message_id": message_id,
"draft_id": draft_id_from_context,
"to": final_to_default,
"subject": final_subject_default,
"body": body,
"cc": cc,
"bcc": bcc,
"connector_id": connector_id_from_context,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The draft was not updated. Do not ask again or suggest alternatives.",
}
final_to = result.params.get("to", final_to_default)
final_subject = result.params.get("subject", final_subject_default)
final_body = result.params.get("body", body)
final_cc = result.params.get("cc", cc)
final_bcc = result.params.get("bcc", bcc)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_draft_id = result.params.get("draft_id", draft_id_from_context)
if not final_connector_id:
return {
"status": "error",
"message": "Draft no longer exists in Gmail. It may have been sent or deleted.",
"message": "No connector found for this draft.",
}
raise
logger.info(f"Gmail draft updated: id={updated.get('id')}")
from sqlalchemy.future import select
kb_message_suffix = ""
if document_id:
try:
from sqlalchemy.future import select as sa_select
from sqlalchemy.orm.attributes import flag_modified
from app.db import SearchSourceConnector, SearchSourceConnectorType
from app.db import Document
_gmail_types = [
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR,
]
doc_result = await db_session.execute(
sa_select(Document).filter(Document.id == document_id)
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_gmail_types),
)
document = doc_result.scalars().first()
if document:
document.source_markdown = final_body
document.title = final_subject
meta = dict(document.document_metadata or {})
meta["subject"] = final_subject
meta["draft_id"] = updated.get("id", final_draft_id)
updated_msg = updated.get("message", {})
if updated_msg.get("id"):
meta["message_id"] = updated_msg["id"]
document.document_metadata = meta
flag_modified(document, "document_metadata")
await db_session.commit()
kb_message_suffix = (
" Your knowledge base has also been updated."
)
logger.info(
f"KB document {document_id} updated for draft {final_draft_id}"
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Gmail connector is invalid or has been disconnected.",
}
logger.info(
f"Updating Gmail draft: subject='{final_subject}', connector={final_connector_id}"
)
is_composio_gmail = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR
)
if is_composio_gmail:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this Gmail connector.",
}
else:
from google.oauth2.credentials import Credentials
from app.config import config
from app.utils.oauth_security import TokenEncryption
config_data = dict(connector.config)
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = (
token_encryption.decrypt_token(
config_data["refresh_token"]
)
)
if config_data.get("client_secret"):
config_data["client_secret"] = (
token_encryption.decrypt_token(
config_data["client_secret"]
)
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
# Resolve draft_id if not already available
if not final_draft_id:
logger.info(
f"draft_id not in metadata, looking up via drafts.list for message_id={message_id}"
)
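                    # Older indexed drafts may predate draft_id capture, so
                    # fall back to scanning the drafts list for the draft that
                    # wraps this message id, on the matching path.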
if is_composio_gmail:
final_draft_id = await _find_composio_draft_id_by_message(
connector, user_id, message_id
)
else:
kb_message_suffix = " This draft will be fully updated in your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB update after draft edit failed: {kb_err}")
await db_session.rollback()
kb_message_suffix = " This draft will be fully updated in your knowledge base in the next scheduled sync."
from googleapiclient.discovery import build
return {
"status": "success",
"draft_id": updated.get("id"),
"message": f"Successfully updated Gmail draft with subject '{final_subject}'.{kb_message_suffix}",
}
gmail_service = build("gmail", "v1", credentials=creds)
final_draft_id = await _find_draft_id_by_message(
gmail_service, message_id
)
if not final_draft_id:
return {
"status": "error",
"message": (
"Could not find this draft in Gmail. "
"It may have already been sent or deleted."
),
}
message = MIMEText(final_body)
if final_to:
message["to"] = final_to
message["subject"] = final_subject
if final_cc:
message["cc"] = final_cc
if final_bcc:
message["bcc"] = final_bcc
raw = base64.urlsafe_b64encode(message.as_bytes()).decode()
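                # raw is the full RFC 822 message, base64url-encoded, which
                # drafts.update expects under message.raw; the Composio branch
                # sends structured fields instead, so raw goes unused there.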
try:
if is_composio_gmail:
from app.agents.new_chat.tools.gmail.composio_helpers import (
execute_composio_gmail_tool,
split_recipients,
)
updated, error = await execute_composio_gmail_tool(
connector,
user_id,
"GMAIL_UPDATE_DRAFT",
{
"user_id": "me",
"draft_id": final_draft_id,
"recipient_email": final_to,
"subject": final_subject,
"body": final_body,
"cc": split_recipients(final_cc),
"bcc": split_recipients(final_bcc),
"is_html": False,
},
)
if error:
raise RuntimeError(error)
if not isinstance(updated, dict):
updated = {}
else:
from googleapiclient.discovery import build
gmail_service = build("gmail", "v1", credentials=creds)
updated = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
gmail_service.users()
.drafts()
.update(
userId="me",
id=final_draft_id,
body={"message": {"raw": raw}},
)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {connector.id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
if not connector.config.get("auth_expired"):
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
connector.id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": connector.id,
"message": "This Gmail account needs additional permissions. Please re-authenticate in connector settings.",
}
if isinstance(api_err, HttpError) and api_err.resp.status == 404:
return {
"status": "error",
"message": "Draft no longer exists in Gmail. It may have been sent or deleted.",
}
raise
logger.info(f"Gmail draft updated: id={updated.get('id')}")
kb_message_suffix = ""
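                # Mirror the edit into the indexed Document so KB search stays
                # consistent; any failure falls back to the next scheduled sync.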
if document_id:
try:
from sqlalchemy.future import select as sa_select
from sqlalchemy.orm.attributes import flag_modified
from app.db import Document
doc_result = await db_session.execute(
sa_select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
document.source_markdown = final_body
document.title = final_subject
meta = dict(document.document_metadata or {})
meta["subject"] = final_subject
meta["draft_id"] = updated.get("id", final_draft_id)
updated_msg = updated.get("message", {})
if updated_msg.get("id"):
meta["message_id"] = updated_msg["id"]
document.document_metadata = meta
flag_modified(document, "document_metadata")
await db_session.commit()
kb_message_suffix = (
" Your knowledge base has also been updated."
)
logger.info(
f"KB document {document_id} updated for draft {final_draft_id}"
)
else:
kb_message_suffix = " This draft will be fully updated in your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB update after draft edit failed: {kb_err}")
await db_session.rollback()
kb_message_suffix = " This draft will be fully updated in your knowledge base in the next scheduled sync."
return {
"status": "success",
"draft_id": updated.get("id"),
"message": f"Successfully updated Gmail draft with subject '{final_subject}'.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
@ -408,3 +461,35 @@ async def _find_draft_id_by_message(gmail_service: Any, message_id: str) -> str
except Exception as e:
logger.warning(f"Failed to look up draft by message_id: {e}")
return None
async def _find_composio_draft_id_by_message(
connector: Any, user_id: str, message_id: str
) -> str | None:
from app.agents.new_chat.tools.gmail.composio_helpers import (
execute_composio_gmail_tool,
)
page_token = ""
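    # Walk GMAIL_LIST_DRAFTS page by page until a draft wrapping this message
    # id appears; responses have used both camelCase and snake_case page
    # tokens, so both are checked below.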
while True:
params: dict[str, Any] = {
"user_id": "me",
"max_results": 100,
"verbose": False,
}
if page_token:
params["page_token"] = page_token
data, error = await execute_composio_gmail_tool(
connector, user_id, "GMAIL_LIST_DRAFTS", params
)
if error or not isinstance(data, dict):
return None
for draft in data.get("drafts", []):
if draft.get("message", {}).get("id") == message_id:
return draft.get("id")
page_token = data.get("nextPageToken") or data.get("next_page_token") or ""
if not page_token:
return None

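The `_token_encrypted` branches above imply a symmetric encrypt/decrypt round-trip keyed from config.SECRET_KEY. A sketch of such a scheme using cryptography's Fernet follows; this is an illustration under that assumption only, and TokenEncryption's real construction may differ.

import base64
import hashlib

from cryptography.fernet import Fernet

def fernet_for(secret_key: str) -> Fernet:
    # Derive a stable 32-byte urlsafe key from the application secret.
    digest = hashlib.sha256(secret_key.encode()).digest()
    return Fernet(base64.urlsafe_b64encode(digest))

f = fernet_for("example-secret")  # stands in for config.SECRET_KEY
ciphertext = f.encrypt(b"ya29.example-oauth-token")
assert f.decrypt(ciphertext) == b"ya29.example-oauth-token"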
View file

@ -9,6 +9,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from app.services.google_calendar import GoogleCalendarToolMetadataService
logger = logging.getLogger(__name__)
@ -19,6 +20,23 @@ def create_create_calendar_event_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
    Factory function to create the create_calendar_event tool.

    The tool acquires its own short-lived ``AsyncSession`` per call via
    :data:`async_session_maker` so the closure is safe to share across
    HTTP requests by the compiled-agent cache. Capturing a per-request
    session here would surface stale/closed sessions on cache hits.

    Args:
        db_session: Reserved for registry compatibility. Per-call sessions
            are opened via :data:`async_session_maker` inside the tool body.

    Returns:
        Configured create_calendar_event tool
"""
del db_session # per-call session — see docstring
@tool
async def create_calendar_event(
summary: str,
@ -60,254 +78,294 @@ def create_create_calendar_event_tool(
f"create_calendar_event called: summary='{summary}', start='{start_datetime}', end='{end_datetime}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Google Calendar tool not properly configured. Please contact support.",
}
try:
metadata_service = GoogleCalendarToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
if "error" in context:
logger.error(f"Failed to fetch creation context: {context['error']}")
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning(
"All Google Calendar accounts have expired authentication"
async with async_session_maker() as db_session:
metadata_service = GoogleCalendarToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
return {
"status": "auth_error",
"message": "All connected Google Calendar accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_calendar",
}
logger.info(
f"Requesting approval for creating calendar event: summary='{summary}'"
)
result = request_approval(
action_type="google_calendar_event_creation",
tool_name="create_calendar_event",
params={
"summary": summary,
"start_datetime": start_datetime,
"end_datetime": end_datetime,
"description": description,
"location": location,
"attendees": attendees,
"timezone": context.get("timezone"),
"connector_id": None,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The event was not created. Do not ask again or suggest alternatives.",
}
final_summary = result.params.get("summary", summary)
final_start_datetime = result.params.get("start_datetime", start_datetime)
final_end_datetime = result.params.get("end_datetime", end_datetime)
final_description = result.params.get("description", description)
final_location = result.params.get("location", location)
final_attendees = result.params.get("attendees", attendees)
final_connector_id = result.params.get("connector_id")
if not final_summary or not final_summary.strip():
return {"status": "error", "message": "Event summary cannot be empty."}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_calendar_types = [
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR,
]
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_calendar_types),
if "error" in context:
logger.error(
f"Failed to fetch creation context: {context['error']}"
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Calendar connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_calendar_types),
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning(
"All Google Calendar accounts have expired authentication"
)
return {
"status": "auth_error",
"message": "All connected Google Calendar accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_calendar",
}
logger.info(
f"Requesting approval for creating calendar event: summary='{summary}'"
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Google Calendar connector found. Please connect Google Calendar in your workspace settings.",
}
actual_connector_id = connector.id
logger.info(
f"Creating calendar event: summary='{final_summary}', connector={actual_connector_id}"
)
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
creds = build_composio_credentials(cca_id)
else:
return {
"status": "error",
"message": "Composio connected account ID not found for this connector.",
}
else:
config_data = dict(connector.config)
from app.config import config as app_config
from app.utils.oauth_security import TokenEncryption
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and app_config.SECRET_KEY:
token_encryption = TokenEncryption(app_config.SECRET_KEY)
for key in ("token", "refresh_token", "client_secret"):
if config_data.get(key):
config_data[key] = token_encryption.decrypt_token(
config_data[key]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
result = request_approval(
action_type="google_calendar_event_creation",
tool_name="create_calendar_event",
params={
"summary": summary,
"start_datetime": start_datetime,
"end_datetime": end_datetime,
"description": description,
"location": location,
"attendees": attendees,
"timezone": context.get("timezone"),
"connector_id": None,
},
context=context,
)
service = await asyncio.get_event_loop().run_in_executor(
None, lambda: build("calendar", "v3", credentials=creds)
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The event was not created. Do not ask again or suggest alternatives.",
}
tz = context.get("timezone", "UTC")
event_body: dict[str, Any] = {
"summary": final_summary,
"start": {"dateTime": final_start_datetime, "timeZone": tz},
"end": {"dateTime": final_end_datetime, "timeZone": tz},
}
if final_description:
event_body["description"] = final_description
if final_location:
event_body["location"] = final_location
if final_attendees:
event_body["attendees"] = [
{"email": e.strip()} for e in final_attendees if e.strip()
final_summary = result.params.get("summary", summary)
final_start_datetime = result.params.get(
"start_datetime", start_datetime
)
final_end_datetime = result.params.get("end_datetime", end_datetime)
final_description = result.params.get("description", description)
final_location = result.params.get("location", location)
final_attendees = result.params.get("attendees", attendees)
final_connector_id = result.params.get("connector_id")
if not final_summary or not final_summary.strip():
return {
"status": "error",
"message": "Event summary cannot be empty.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_calendar_types = [
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR,
]
try:
created = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
service.events()
.insert(calendarId="primary", body=event_body)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_calendar_types),
)
)
try:
from sqlalchemy.orm.attributes import flag_modified
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Google Calendar account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
logger.info(
f"Calendar event created: id={created.get('id')}, summary={created.get('summary')}"
)
kb_message_suffix = ""
try:
from app.services.google_calendar import GoogleCalendarKBSyncService
kb_service = GoogleCalendarKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
event_id=created.get("id"),
event_summary=final_summary,
calendar_id="primary",
start_time=final_start_datetime,
end_time=final_end_datetime,
location=final_location,
html_link=created.get("htmlLink"),
description=final_description,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Calendar connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
else:
kb_message_suffix = " This event will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This event will be added to your knowledge base in the next scheduled sync."
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_calendar_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Google Calendar connector found. Please connect Google Calendar in your workspace settings.",
}
actual_connector_id = connector.id
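                    # No connector was chosen during approval; default to the
                    # first Calendar connector in this search space.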
return {
"status": "success",
"event_id": created.get("id"),
"html_link": created.get("htmlLink"),
"message": f"Successfully created '{final_summary}' on Google Calendar.{kb_message_suffix}",
}
logger.info(
f"Creating calendar event: summary='{final_summary}', connector={actual_connector_id}"
)
is_composio_calendar = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR
)
if is_composio_calendar:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this connector.",
}
else:
config_data = dict(connector.config)
from app.config import config as app_config
from app.utils.oauth_security import TokenEncryption
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and app_config.SECRET_KEY:
token_encryption = TokenEncryption(app_config.SECRET_KEY)
for key in ("token", "refresh_token", "client_secret"):
if config_data.get(key):
config_data[key] = token_encryption.decrypt_token(
config_data[key]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
tz = context.get("timezone", "UTC")
event_body: dict[str, Any] = {
"summary": final_summary,
"start": {"dateTime": final_start_datetime, "timeZone": tz},
"end": {"dateTime": final_end_datetime, "timeZone": tz},
}
if final_description:
event_body["description"] = final_description
if final_location:
event_body["location"] = final_location
if final_attendees:
event_body["attendees"] = [
{"email": e.strip()} for e in final_attendees if e.strip()
]
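                # event_body feeds the raw Calendar API insert below; the
                # Composio branch builds its own params dict instead.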
try:
if is_composio_calendar:
from app.services.composio_service import ComposioService
composio_params = {
"calendar_id": "primary",
"summary": final_summary,
"start_datetime": final_start_datetime,
"end_datetime": final_end_datetime,
"timezone": tz,
"attendees": final_attendees or [],
}
if final_description:
composio_params["description"] = final_description
if final_location:
composio_params["location"] = final_location
composio_result = await ComposioService().execute_tool(
connected_account_id=cca_id,
tool_name="GOOGLECALENDAR_CREATE_EVENT",
params=composio_params,
entity_id=f"surfsense_{user_id}",
)
if not composio_result.get("success"):
raise RuntimeError(
composio_result.get(
"error", "Unknown Composio Calendar error"
)
)
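                    # Composio wraps the Calendar response in nested
                    # data/response_data envelopes; unwrap defensively so
                    # created.get("id") works for either shape.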
created = composio_result.get("data", {})
if isinstance(created, dict):
created = created.get("data", created)
if isinstance(created, dict):
created = created.get("response_data", created)
else:
service = await asyncio.get_event_loop().run_in_executor(
None, lambda: build("calendar", "v3", credentials=creds)
)
created = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
service.events()
.insert(calendarId="primary", body=event_body)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Google Calendar account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
logger.info(
f"Calendar event created: id={created.get('id')}, summary={created.get('summary')}"
)
kb_message_suffix = ""
try:
from app.services.google_calendar import GoogleCalendarKBSyncService
kb_service = GoogleCalendarKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
event_id=created.get("id"),
event_summary=final_summary,
calendar_id="primary",
start_time=final_start_datetime,
end_time=final_end_datetime,
location=final_location,
html_link=created.get("htmlLink"),
description=final_description,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This event will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This event will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"event_id": created.get("id"),
"html_link": created.get("htmlLink"),
"message": f"Successfully created '{final_summary}' on Google Calendar.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt

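Every `.execute()` above goes through run_in_executor because googleapiclient is synchronous. A self-contained sketch of the same idea using asyncio.to_thread, where blocking_insert is a stand-in for service.events().insert(...).execute():

import asyncio
import time

def blocking_insert(body: dict) -> dict:
    # Stand-in for the synchronous googleapiclient call.
    time.sleep(0.1)
    return {"id": "evt_123", **body}

async def create_event(body: dict) -> dict:
    # Runs on the default thread pool, like run_in_executor(None, ...),
    # so the event loop keeps serving other requests meanwhile.
    return await asyncio.to_thread(blocking_insert, body)

print(asyncio.run(create_event({"summary": "demo"})))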
View file

@ -9,6 +9,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from app.services.google_calendar import GoogleCalendarToolMetadataService
logger = logging.getLogger(__name__)
@ -19,6 +20,23 @@ def create_delete_calendar_event_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
    Factory function to create the delete_calendar_event tool.

    The tool acquires its own short-lived ``AsyncSession`` per call via
    :data:`async_session_maker` so the closure is safe to share across
    HTTP requests by the compiled-agent cache. Capturing a per-request
    session here would surface stale/closed sessions on cache hits.

    Args:
        db_session: Reserved for registry compatibility. Per-call sessions
            are opened via :data:`async_session_maker` inside the tool body.

    Returns:
        Configured delete_calendar_event tool
"""
del db_session # per-call session — see docstring
@tool
async def delete_calendar_event(
event_title_or_id: str,
@ -54,240 +72,258 @@ def create_delete_calendar_event_tool(
f"delete_calendar_event called: event_ref='{event_title_or_id}', delete_from_kb={delete_from_kb}"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Google Calendar tool not properly configured. Please contact support.",
}
try:
metadata_service = GoogleCalendarToolMetadataService(db_session)
context = await metadata_service.get_deletion_context(
search_space_id, user_id, event_title_or_id
)
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"Event not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch deletion context: {error_msg}")
return {"status": "error", "message": error_msg}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Google Calendar account %s has expired authentication",
account.get("id"),
async with async_session_maker() as db_session:
metadata_service = GoogleCalendarToolMetadataService(db_session)
context = await metadata_service.get_deletion_context(
search_space_id, user_id, event_title_or_id
)
return {
"status": "auth_error",
"message": "The Google Calendar account for this event needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_calendar",
}
event = context["event"]
event_id = event["event_id"]
document_id = event.get("document_id")
connector_id_from_context = context["account"]["id"]
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"Event not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch deletion context: {error_msg}")
return {"status": "error", "message": error_msg}
if not event_id:
return {
"status": "error",
"message": "Event ID is missing from the indexed document. Please re-index the event and try again.",
}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Google Calendar account %s has expired authentication",
account.get("id"),
)
return {
"status": "auth_error",
"message": "The Google Calendar account for this event needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_calendar",
}
logger.info(
f"Requesting approval for deleting calendar event: '{event_title_or_id}' (event_id={event_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="google_calendar_event_deletion",
tool_name="delete_calendar_event",
params={
"event_id": event_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
event = context["event"]
event_id = event["event_id"]
document_id = event.get("document_id")
connector_id_from_context = context["account"]["id"]
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The event was not deleted. Do not ask again or suggest alternatives.",
}
final_event_id = result.params.get("event_id", event_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this event.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_calendar_types = [
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR,
]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_calendar_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Calendar connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
logger.info(
f"Deleting calendar event: event_id='{final_event_id}', connector={actual_connector_id}"
)
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
creds = build_composio_credentials(cca_id)
else:
if not event_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this connector.",
"message": "Event ID is missing from the indexed document. Please re-index the event and try again.",
}
else:
config_data = dict(connector.config)
from app.config import config as app_config
from app.utils.oauth_security import TokenEncryption
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and app_config.SECRET_KEY:
token_encryption = TokenEncryption(app_config.SECRET_KEY)
for key in ("token", "refresh_token", "client_secret"):
if config_data.get(key):
config_data[key] = token_encryption.decrypt_token(
config_data[key]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
logger.info(
f"Requesting approval for deleting calendar event: '{event_title_or_id}' (event_id={event_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="google_calendar_event_deletion",
tool_name="delete_calendar_event",
params={
"event_id": event_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
service = await asyncio.get_event_loop().run_in_executor(
None, lambda: build("calendar", "v3", credentials=creds)
)
try:
await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
service.events()
.delete(calendarId="primary", eventId=final_event_id)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
if result.rejected:
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Google Calendar account needs additional permissions. Please re-authenticate in connector settings.",
"status": "rejected",
"message": "User declined. The event was not deleted. Do not ask again or suggest alternatives.",
}
raise
logger.info(f"Calendar event deleted: event_id={final_event_id}")
delete_result: dict[str, Any] = {
"status": "success",
"event_id": final_event_id,
"message": f"Successfully deleted the calendar event '{event.get('summary', event_title_or_id)}'.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
delete_result["warning"] = (
f"Event deleted, but failed to remove from knowledge base: {e!s}"
)
delete_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
delete_result["message"] = (
f"{delete_result.get('message', '')} (also removed from knowledge base)"
final_event_id = result.params.get("event_id", event_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
return delete_result
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this event.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_calendar_types = [
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR,
]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_calendar_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Calendar connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
logger.info(
f"Deleting calendar event: event_id='{final_event_id}', connector={actual_connector_id}"
)
is_composio_calendar = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR
)
if is_composio_calendar:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this connector.",
}
else:
config_data = dict(connector.config)
from app.config import config as app_config
from app.utils.oauth_security import TokenEncryption
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and app_config.SECRET_KEY:
token_encryption = TokenEncryption(app_config.SECRET_KEY)
for key in ("token", "refresh_token", "client_secret"):
if config_data.get(key):
config_data[key] = token_encryption.decrypt_token(
config_data[key]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
try:
if is_composio_calendar:
from app.services.composio_service import ComposioService
composio_result = await ComposioService().execute_tool(
connected_account_id=cca_id,
tool_name="GOOGLECALENDAR_DELETE_EVENT",
params={
"calendar_id": "primary",
"event_id": final_event_id,
},
entity_id=f"surfsense_{user_id}",
)
if not composio_result.get("success"):
raise RuntimeError(
composio_result.get(
"error", "Unknown Composio Calendar error"
)
)
else:
service = await asyncio.get_event_loop().run_in_executor(
None, lambda: build("calendar", "v3", credentials=creds)
)
await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
service.events()
.delete(calendarId="primary", eventId=final_event_id)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
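                        # A 403 here is treated as a consent/permission problem: the
                        # connector's JSON config gets a sticky "auth_expired" flag.
                        # The dict is replaced (not mutated) and flag_modified() is
                        # called as well, since SQLAlchemy does not track in-place
                        # changes to JSON columns.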
try:
from sqlalchemy.orm.attributes import flag_modified
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Google Calendar account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
logger.info(f"Calendar event deleted: event_id={final_event_id}")
delete_result: dict[str, Any] = {
"status": "success",
"event_id": final_event_id,
"message": f"Successfully deleted the calendar event '{event.get('summary', event_title_or_id)}'.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
delete_result["warning"] = (
f"Event deleted, but failed to remove from knowledge base: {e!s}"
)
delete_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
delete_result["message"] = (
f"{delete_result.get('message', '')} (also removed from knowledge base)"
)
return delete_result
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@ -6,7 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.agents.new_chat.tools.gmail.search_emails import _build_credentials
from app.db import SearchSourceConnector, SearchSourceConnectorType
from app.db import SearchSourceConnector, SearchSourceConnectorType, async_session_maker
logger = logging.getLogger(__name__)
@ -16,11 +16,57 @@ _CALENDAR_TYPES = [
]
def _to_calendar_boundary(value: str, *, is_end: bool) -> str:
if "T" in value:
return value
time = "23:59:59" if is_end else "00:00:00"
return f"{value}T{time}Z"
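# Worked examples (illustrative, not from the commit): bare dates are padded
# to RFC 3339 day boundaries, while values that already carry a time component
# pass through unchanged:
#   _to_calendar_boundary("2026-05-05", is_end=False) -> "2026-05-05T00:00:00Z"
#   _to_calendar_boundary("2026-05-05", is_end=True)  -> "2026-05-05T23:59:59Z"
#   _to_calendar_boundary("2026-05-05T09:30:00Z", is_end=True) -> input unchanged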
def _format_calendar_events(events_raw: list[dict[str, Any]]) -> list[dict[str, Any]]:
events = []
for ev in events_raw:
start = ev.get("start", {})
end = ev.get("end", {})
attendees_raw = ev.get("attendees", [])
events.append(
{
"event_id": ev.get("id"),
"summary": ev.get("summary", "No Title"),
"start": start.get("dateTime") or start.get("date", ""),
"end": end.get("dateTime") or end.get("date", ""),
"location": ev.get("location", ""),
"description": ev.get("description", ""),
"html_link": ev.get("htmlLink", ""),
"attendees": [a.get("email", "") for a in attendees_raw[:10]],
"status": ev.get("status", ""),
}
)
return events
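# Shape sketch (illustrative values, not from the commit): a raw API event like
#   {"id": "abc123", "summary": "Standup",
#    "start": {"dateTime": "2026-05-05T09:00:00Z"}, "end": {"date": "2026-05-05"},
#    "attendees": [{"email": "a@example.com"}], "status": "confirmed"}
# flattens to
#   {"event_id": "abc123", "summary": "Standup",
#    "start": "2026-05-05T09:00:00Z", "end": "2026-05-05",
#    "location": "", "description": "", "html_link": "",
#    "attendees": ["a@example.com"], "status": "confirmed"}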
def create_search_calendar_events_tool(
db_session: AsyncSession | None = None,
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the search_calendar_events tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured search_calendar_events tool
"""
del db_session # per-call session — see docstring
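    # Minimal sketch of the hazard the docstring describes (assumed names:
    # make_tool, tool_fn, query; only async_session_maker is real here):
    #
    #   def make_tool(db_session):                 # BAD: closure captures the
    #       async def tool_fn():                   # request-scoped session; the
    #           await db_session.execute(query)    # compiled agent is cached, so
    #       return tool_fn                         # a later request reuses a
    #                                              # session that is already closed.
    #
    #   def make_tool(db_session=None):            # GOOD: drop the captured
    #       del db_session                         # session and open a fresh,
    #       async def tool_fn():                   # short-lived one per call.
    #           async with async_session_maker() as session:
    #               await session.execute(query)
    #       return tool_fn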
@tool
async def search_calendar_events(
start_date: str,
@ -38,7 +84,7 @@ def create_search_calendar_events_tool(
Dictionary with status and a list of events including
event_id, summary, start, end, location, attendees.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Calendar tool not properly configured.",
@ -47,76 +93,85 @@ def create_search_calendar_events_tool(
max_results = min(max_results, 50)
try:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_CALENDAR_TYPES),
async with async_session_maker() as db_session:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_CALENDAR_TYPES),
)
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Google Calendar connector found. Please connect Google Calendar in your workspace settings.",
}
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Google Calendar connector found. Please connect Google Calendar in your workspace settings.",
}
creds = _build_credentials(connector)
from app.connectors.google_calendar_connector import GoogleCalendarConnector
cal = GoogleCalendarConnector(
credentials=creds,
session=db_session,
user_id=user_id,
connector_id=connector.id,
)
events_raw, error = await cal.get_all_primary_calendar_events(
start_date=start_date,
end_date=end_date,
max_results=max_results,
)
if error:
if (
"re-authenticate" in error.lower()
or "authentication failed" in error.lower()
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR
):
return {
"status": "auth_error",
"message": error,
"connector_type": "google_calendar",
}
if "no events found" in error.lower():
return {
"status": "success",
"events": [],
"total": 0,
"message": error,
}
return {"status": "error", "message": error}
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this connector.",
}
events = []
for ev in events_raw:
start = ev.get("start", {})
end = ev.get("end", {})
attendees_raw = ev.get("attendees", [])
events.append(
{
"event_id": ev.get("id"),
"summary": ev.get("summary", "No Title"),
"start": start.get("dateTime") or start.get("date", ""),
"end": end.get("dateTime") or end.get("date", ""),
"location": ev.get("location", ""),
"description": ev.get("description", ""),
"html_link": ev.get("htmlLink", ""),
"attendees": [a.get("email", "") for a in attendees_raw[:10]],
"status": ev.get("status", ""),
}
)
from app.services.composio_service import ComposioService
return {"status": "success", "events": events, "total": len(events)}
events_raw, error = await ComposioService().get_calendar_events(
connected_account_id=cca_id,
entity_id=f"surfsense_{user_id}",
time_min=_to_calendar_boundary(start_date, is_end=False),
time_max=_to_calendar_boundary(end_date, is_end=True),
max_results=max_results,
)
if not events_raw and not error:
error = "No events found in the specified date range."
else:
creds = _build_credentials(connector)
from app.connectors.google_calendar_connector import (
GoogleCalendarConnector,
)
cal = GoogleCalendarConnector(
credentials=creds,
session=db_session,
user_id=user_id,
connector_id=connector.id,
)
events_raw, error = await cal.get_all_primary_calendar_events(
start_date=start_date,
end_date=end_date,
max_results=max_results,
)
if error:
if (
"re-authenticate" in error.lower()
or "authentication failed" in error.lower()
):
return {
"status": "auth_error",
"message": error,
"connector_type": "google_calendar",
}
if "no events found" in error.lower():
return {
"status": "success",
"events": [],
"total": 0,
"message": error,
}
return {"status": "error", "message": error}
events = _format_calendar_events(events_raw)
return {"status": "success", "events": events, "total": len(events)}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@ -9,6 +9,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from app.services.google_calendar import GoogleCalendarToolMetadataService
logger = logging.getLogger(__name__)
@ -33,6 +34,23 @@ def create_update_calendar_event_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the update_calendar_event tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured update_calendar_event tool
"""
del db_session # per-call session — see docstring
@tool
async def update_calendar_event(
event_title_or_id: str,
@ -74,272 +92,317 @@ def create_update_calendar_event_tool(
"""
logger.info(f"update_calendar_event called: event_ref='{event_title_or_id}'")
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Google Calendar tool not properly configured. Please contact support.",
}
try:
metadata_service = GoogleCalendarToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, event_title_or_id
)
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"Event not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch update context: {error_msg}")
return {"status": "error", "message": error_msg}
if context.get("auth_expired"):
logger.warning("Google Calendar account has expired authentication")
return {
"status": "auth_error",
"message": "The Google Calendar account for this event needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_calendar",
}
event = context["event"]
event_id = event["event_id"]
document_id = event.get("document_id")
connector_id_from_context = context["account"]["id"]
if not event_id:
return {
"status": "error",
"message": "Event ID is missing from the indexed document. Please re-index the event and try again.",
}
logger.info(
f"Requesting approval for updating calendar event: '{event_title_or_id}' (event_id={event_id})"
)
result = request_approval(
action_type="google_calendar_event_update",
tool_name="update_calendar_event",
params={
"event_id": event_id,
"document_id": document_id,
"connector_id": connector_id_from_context,
"new_summary": new_summary,
"new_start_datetime": new_start_datetime,
"new_end_datetime": new_end_datetime,
"new_description": new_description,
"new_location": new_location,
"new_attendees": new_attendees,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The event was not updated. Do not ask again or suggest alternatives.",
}
final_event_id = result.params.get("event_id", event_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_new_summary = result.params.get("new_summary", new_summary)
final_new_start_datetime = result.params.get(
"new_start_datetime", new_start_datetime
)
final_new_end_datetime = result.params.get(
"new_end_datetime", new_end_datetime
)
final_new_description = result.params.get(
"new_description", new_description
)
final_new_location = result.params.get("new_location", new_location)
final_new_attendees = result.params.get("new_attendees", new_attendees)
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this event.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_calendar_types = [
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR,
]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_calendar_types),
async with async_session_maker() as db_session:
metadata_service = GoogleCalendarToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, event_title_or_id
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Calendar connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"Event not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch update context: {error_msg}")
return {"status": "error", "message": error_msg}
logger.info(
f"Updating calendar event: event_id='{final_event_id}', connector={actual_connector_id}"
)
if context.get("auth_expired"):
logger.warning("Google Calendar account has expired authentication")
return {
"status": "auth_error",
"message": "The Google Calendar account for this event needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_calendar",
}
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
event = context["event"]
event_id = event["event_id"]
document_id = event.get("document_id")
connector_id_from_context = context["account"]["id"]
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
creds = build_composio_credentials(cca_id)
else:
if not event_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this connector.",
"message": "Event ID is missing from the indexed document. Please re-index the event and try again.",
}
else:
config_data = dict(connector.config)
from app.config import config as app_config
from app.utils.oauth_security import TokenEncryption
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and app_config.SECRET_KEY:
token_encryption = TokenEncryption(app_config.SECRET_KEY)
for key in ("token", "refresh_token", "client_secret"):
if config_data.get(key):
config_data[key] = token_encryption.decrypt_token(
config_data[key]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
logger.info(
f"Requesting approval for updating calendar event: '{event_title_or_id}' (event_id={event_id})"
)
result = request_approval(
action_type="google_calendar_event_update",
tool_name="update_calendar_event",
params={
"event_id": event_id,
"document_id": document_id,
"connector_id": connector_id_from_context,
"new_summary": new_summary,
"new_start_datetime": new_start_datetime,
"new_end_datetime": new_end_datetime,
"new_description": new_description,
"new_location": new_location,
"new_attendees": new_attendees,
},
context=context,
)
service = await asyncio.get_event_loop().run_in_executor(
None, lambda: build("calendar", "v3", credentials=creds)
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The event was not updated. Do not ask again or suggest alternatives.",
}
update_body: dict[str, Any] = {}
if final_new_summary is not None:
update_body["summary"] = final_new_summary
if final_new_start_datetime is not None:
update_body["start"] = _build_time_body(
final_new_start_datetime, context
final_event_id = result.params.get("event_id", event_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
if final_new_end_datetime is not None:
update_body["end"] = _build_time_body(final_new_end_datetime, context)
if final_new_description is not None:
update_body["description"] = final_new_description
if final_new_location is not None:
update_body["location"] = final_new_location
if final_new_attendees is not None:
update_body["attendees"] = [
{"email": e.strip()} for e in final_new_attendees if e.strip()
final_new_summary = result.params.get("new_summary", new_summary)
final_new_start_datetime = result.params.get(
"new_start_datetime", new_start_datetime
)
final_new_end_datetime = result.params.get(
"new_end_datetime", new_end_datetime
)
final_new_description = result.params.get(
"new_description", new_description
)
final_new_location = result.params.get("new_location", new_location)
final_new_attendees = result.params.get("new_attendees", new_attendees)
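            # The approval round-trip may hand back user-edited values, so every
            # field is re-read from result.params with the tool's original argument
            # as the fallback. (The tail of this file imports GraphInterrupt, which
            # suggests the approval pause is implemented as a LangGraph interrupt.)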
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this event.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_calendar_types = [
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR,
]
if not update_body:
return {
"status": "error",
"message": "No changes specified. Please provide at least one field to update.",
}
try:
updated = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
service.events()
.patch(
calendarId="primary",
eventId=final_event_id,
body=update_body,
)
.execute()
),
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_calendar_types),
)
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
connector = result.scalars().first()
if not connector:
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Google Calendar account needs additional permissions. Please re-authenticate in connector settings.",
"status": "error",
"message": "Selected Google Calendar connector is invalid or has been disconnected.",
}
raise
logger.info(f"Calendar event updated: event_id={final_event_id}")
actual_connector_id = connector.id
kb_message_suffix = ""
if document_id is not None:
try:
from app.services.google_calendar import GoogleCalendarKBSyncService
logger.info(
f"Updating calendar event: event_id='{final_event_id}', connector={actual_connector_id}"
)
kb_service = GoogleCalendarKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=document_id,
event_id=final_event_id,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
is_composio_calendar = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR
)
if is_composio_calendar:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this connector.",
}
else:
config_data = dict(connector.config)
from app.config import config as app_config
from app.utils.oauth_security import TokenEncryption
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and app_config.SECRET_KEY:
token_encryption = TokenEncryption(app_config.SECRET_KEY)
for key in ("token", "refresh_token", "client_secret"):
if config_data.get(key):
config_data[key] = token_encryption.decrypt_token(
config_data[key]
)
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
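                    # The trailing "Z" is stripped because datetime.fromisoformat()
                    # rejects it on Python < 3.11; the expiry is treated as UTC.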
creds = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp) if exp else None,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " The knowledge base will be updated in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after update failed: {kb_err}")
kb_message_suffix = " The knowledge base will be updated in the next scheduled sync."
return {
"status": "success",
"event_id": final_event_id,
"html_link": updated.get("htmlLink"),
"message": f"Successfully updated the calendar event.{kb_message_suffix}",
}
update_body: dict[str, Any] = {}
if final_new_summary is not None:
update_body["summary"] = final_new_summary
if final_new_start_datetime is not None:
update_body["start"] = _build_time_body(
final_new_start_datetime, context
)
if final_new_end_datetime is not None:
update_body["end"] = _build_time_body(
final_new_end_datetime, context
)
if final_new_description is not None:
update_body["description"] = final_new_description
if final_new_location is not None:
update_body["location"] = final_new_location
if final_new_attendees is not None:
update_body["attendees"] = [
{"email": e.strip()} for e in final_new_attendees if e.strip()
]
if not update_body:
return {
"status": "error",
"message": "No changes specified. Please provide at least one field to update.",
}
try:
if is_composio_calendar:
from app.services.composio_service import ComposioService
composio_params: dict[str, Any] = {
"calendar_id": "primary",
"event_id": final_event_id,
}
if final_new_summary is not None:
composio_params["summary"] = final_new_summary
if final_new_start_datetime is not None:
composio_params["start_time"] = final_new_start_datetime
if final_new_end_datetime is not None:
composio_params["end_time"] = final_new_end_datetime
if final_new_description is not None:
composio_params["description"] = final_new_description
if final_new_location is not None:
composio_params["location"] = final_new_location
if final_new_attendees is not None:
composio_params["attendees"] = [
e.strip() for e in final_new_attendees if e.strip()
]
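                        # _is_date_only (defined elsewhere in this file) appears to
                        # gate the timezone: timed events get an explicit timezone,
                        # all-day (date-only) events do not.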
if not _is_date_only(
final_new_start_datetime or final_new_end_datetime or ""
):
composio_params["timezone"] = context.get("timezone", "UTC")
composio_result = await ComposioService().execute_tool(
connected_account_id=cca_id,
tool_name="GOOGLECALENDAR_PATCH_EVENT",
params=composio_params,
entity_id=f"surfsense_{user_id}",
)
if not composio_result.get("success"):
raise RuntimeError(
composio_result.get(
"error", "Unknown Composio Calendar error"
)
)
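                        # Composio responses can nest the payload under "data" and
                        # again under "data"/"response_data"; each envelope level is
                        # unwrapped defensively below.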
updated = composio_result.get("data", {})
if isinstance(updated, dict):
updated = updated.get("data", updated)
if isinstance(updated, dict):
updated = updated.get("response_data", updated)
else:
service = await asyncio.get_event_loop().run_in_executor(
None, lambda: build("calendar", "v3", credentials=creds)
)
updated = await asyncio.get_event_loop().run_in_executor(
None,
lambda: (
service.events()
.patch(
calendarId="primary",
eventId=final_event_id,
body=update_body,
)
.execute()
),
)
except Exception as api_err:
from googleapiclient.errors import HttpError
if isinstance(api_err, HttpError) and api_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {api_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Google Calendar account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
logger.info(f"Calendar event updated: event_id={final_event_id}")
kb_message_suffix = ""
if document_id is not None:
try:
from app.services.google_calendar import (
GoogleCalendarKBSyncService,
)
kb_service = GoogleCalendarKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=document_id,
event_id=final_event_id,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " The knowledge base will be updated in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after update failed: {kb_err}")
kb_message_suffix = " The knowledge base will be updated in the next scheduled sync."
return {
"status": "success",
"event_id": final_event_id,
"html_link": updated.get("htmlLink"),
"message": f"Successfully updated the calendar event.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@ -8,6 +8,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.google_drive.client import GoogleDriveClient
from app.connectors.google_drive.file_types import GOOGLE_DOC, GOOGLE_SHEET
from app.db import async_session_maker
from app.services.google_drive import GoogleDriveToolMetadataService
logger = logging.getLogger(__name__)
@ -23,6 +24,25 @@ def create_create_google_drive_file_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the create_google_drive_file tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Google Drive connector
user_id: User ID for fetching user-specific context
Returns:
Configured create_google_drive_file tool
"""
del db_session # per-call session — see docstring
@tool
async def create_google_drive_file(
name: str,
@ -65,7 +85,7 @@ def create_create_google_drive_file_tool(
f"create_google_drive_file called: name='{name}', type='{file_type}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Google Drive tool not properly configured. Please contact support.",
@ -78,195 +98,232 @@ def create_create_google_drive_file_tool(
}
try:
metadata_service = GoogleDriveToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
async with async_session_maker() as db_session:
metadata_service = GoogleDriveToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
if "error" in context:
logger.error(f"Failed to fetch creation context: {context['error']}")
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning("All Google Drive accounts have expired authentication")
return {
"status": "auth_error",
"message": "All connected Google Drive accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_drive",
}
logger.info(
f"Requesting approval for creating Google Drive file: name='{name}', type='{file_type}'"
)
result = request_approval(
action_type="google_drive_file_creation",
tool_name="create_google_drive_file",
params={
"name": name,
"file_type": file_type,
"content": content,
"connector_id": None,
"parent_folder_id": None,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The file was not created. Do not ask again or suggest alternatives.",
}
final_name = result.params.get("name", name)
final_file_type = result.params.get("file_type", file_type)
final_content = result.params.get("content", content)
final_connector_id = result.params.get("connector_id")
final_parent_folder_id = result.params.get("parent_folder_id")
if not final_name or not final_name.strip():
return {"status": "error", "message": "File name cannot be empty."}
mime_type = _MIME_MAP.get(final_file_type)
if not mime_type:
return {
"status": "error",
"message": f"Unsupported file type '{final_file_type}'.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_drive_types = [
SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR,
]
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_drive_types),
if "error" in context:
logger.error(
f"Failed to fetch creation context: {context['error']}"
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Drive connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_drive_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Google Drive connector found. Please connect Google Drive in your workspace settings.",
}
actual_connector_id = connector.id
return {"status": "error", "message": context["error"]}
logger.info(
f"Creating Google Drive file: name='{final_name}', type='{final_file_type}', connector={actual_connector_id}"
)
pre_built_creds = None
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
pre_built_creds = build_composio_credentials(cca_id)
client = GoogleDriveClient(
session=db_session,
connector_id=actual_connector_id,
credentials=pre_built_creds,
)
try:
created = await client.create_file(
name=final_name,
mime_type=mime_type,
parent_folder_id=final_parent_folder_id,
content=final_content,
)
except HttpError as http_err:
if http_err.resp.status == 403:
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {http_err}"
"All Google Drive accounts have expired authentication"
)
try:
from sqlalchemy.orm.attributes import flag_modified
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Google Drive account needs additional permissions. Please re-authenticate in connector settings.",
"status": "auth_error",
"message": "All connected Google Drive accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_drive",
}
raise
logger.info(
f"Google Drive file created: id={created.get('id')}, name={created.get('name')}"
)
kb_message_suffix = ""
try:
from app.services.google_drive import GoogleDriveKBSyncService
kb_service = GoogleDriveKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
file_id=created.get("id"),
file_name=created.get("name", final_name),
mime_type=mime_type,
web_view_link=created.get("webViewLink"),
content=final_content,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
logger.info(
f"Requesting approval for creating Google Drive file: name='{name}', type='{file_type}'"
)
result = request_approval(
action_type="google_drive_file_creation",
tool_name="create_google_drive_file",
params={
"name": name,
"file_type": file_type,
"content": content,
"connector_id": None,
"parent_folder_id": None,
},
context=context,
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
else:
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"file_id": created.get("id"),
"name": created.get("name"),
"web_view_link": created.get("webViewLink"),
"message": f"Successfully created '{created.get('name')}' in Google Drive.{kb_message_suffix}",
}
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The file was not created. Do not ask again or suggest alternatives.",
}
final_name = result.params.get("name", name)
final_file_type = result.params.get("file_type", file_type)
final_content = result.params.get("content", content)
final_connector_id = result.params.get("connector_id")
final_parent_folder_id = result.params.get("parent_folder_id")
if not final_name or not final_name.strip():
return {"status": "error", "message": "File name cannot be empty."}
mime_type = _MIME_MAP.get(final_file_type)
if not mime_type:
return {
"status": "error",
"message": f"Unsupported file type '{final_file_type}'.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_drive_types = [
SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR,
]
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_drive_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Drive connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_drive_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Google Drive connector found. Please connect Google Drive in your workspace settings.",
}
actual_connector_id = connector.id
logger.info(
f"Creating Google Drive file: name='{final_name}', type='{final_file_type}', connector={actual_connector_id}"
)
is_composio_drive = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR
)
if is_composio_drive:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this Drive connector.",
}
client = GoogleDriveClient(
session=db_session,
connector_id=actual_connector_id,
)
try:
if is_composio_drive:
from app.services.composio_service import ComposioService
params: dict[str, Any] = {
"name": final_name,
"mimeType": mime_type,
"fields": "id,name,webViewLink,mimeType",
}
if final_parent_folder_id:
params["parents"] = [final_parent_folder_id]
if final_content:
params["description"] = final_content[:4096]
result = await ComposioService().execute_tool(
connected_account_id=cca_id,
tool_name="GOOGLEDRIVE_CREATE_FILE",
params=params,
entity_id=f"surfsense_{user_id}",
)
if not result.get("success"):
raise RuntimeError(
result.get("error", "Unknown Composio Drive error")
)
created = result.get("data", {})
if isinstance(created, dict):
created = created.get("data", created)
if isinstance(created, dict):
created = created.get("response_data", created)
if not isinstance(created, dict):
created = {}
else:
created = await client.create_file(
name=final_name,
mime_type=mime_type,
parent_folder_id=final_parent_folder_id,
content=final_content,
)
except HttpError as http_err:
if http_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {actual_connector_id}: {http_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
_res = await db_session.execute(
select(SearchSourceConnector).where(
SearchSourceConnector.id == actual_connector_id
)
)
_conn = _res.scalar_one_or_none()
if _conn and not _conn.config.get("auth_expired"):
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
actual_connector_id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Google Drive account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
logger.info(
f"Google Drive file created: id={created.get('id')}, name={created.get('name')}"
)
kb_message_suffix = ""
try:
from app.services.google_drive import GoogleDriveKBSyncService
kb_service = GoogleDriveKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
file_id=created.get("id"),
file_name=created.get("name", final_name),
mime_type=mime_type,
web_view_link=created.get("webViewLink"),
content=final_content,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"file_id": created.get("id"),
"name": created.get("name"),
"web_view_link": created.get("webViewLink"),
"message": f"Successfully created '{created.get('name')}' in Google Drive.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@ -7,6 +7,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.google_drive.client import GoogleDriveClient
from app.db import async_session_maker
from app.services.google_drive import GoogleDriveToolMetadataService
logger = logging.getLogger(__name__)
@ -17,6 +18,25 @@ def create_delete_google_drive_file_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the delete_google_drive_file tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Google Drive connector
user_id: User ID for fetching user-specific context
Returns:
Configured delete_google_drive_file tool
"""
del db_session # per-call session — see docstring
@tool
async def delete_google_drive_file(
file_name: str,
@ -55,197 +75,214 @@ def create_delete_google_drive_file_tool(
f"delete_google_drive_file called: file_name='{file_name}', delete_from_kb={delete_from_kb}"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "Google Drive tool not properly configured. Please contact support.",
}
try:
metadata_service = GoogleDriveToolMetadataService(db_session)
context = await metadata_service.get_trash_context(
search_space_id, user_id, file_name
)
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"File not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch trash context: {error_msg}")
return {"status": "error", "message": error_msg}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Google Drive account %s has expired authentication",
account.get("id"),
async with async_session_maker() as db_session:
metadata_service = GoogleDriveToolMetadataService(db_session)
context = await metadata_service.get_trash_context(
search_space_id, user_id, file_name
)
return {
"status": "auth_error",
"message": "The Google Drive account for this file needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_drive",
}
file = context["file"]
file_id = file["file_id"]
document_id = file.get("document_id")
connector_id_from_context = context["account"]["id"]
if "error" in context:
error_msg = context["error"]
if "not found" in error_msg.lower():
logger.warning(f"File not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
logger.error(f"Failed to fetch trash context: {error_msg}")
return {"status": "error", "message": error_msg}
if not file_id:
return {
"status": "error",
"message": "File ID is missing from the indexed document. Please re-index the file and try again.",
}
logger.info(
f"Requesting approval for deleting Google Drive file: '{file_name}' (file_id={file_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="google_drive_file_trash",
tool_name="delete_google_drive_file",
params={
"file_id": file_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The file was not trashed. Do not ask again or suggest alternatives.",
}
final_file_id = result.params.get("file_id", file_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this file.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_drive_types = [
SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR,
]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_drive_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Drive connector is invalid or has been disconnected.",
}
logger.info(
f"Deleting Google Drive file: file_id='{final_file_id}', connector={final_connector_id}"
)
pre_built_creds = None
if (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR
):
from app.utils.google_credentials import build_composio_credentials
cca_id = connector.config.get("composio_connected_account_id")
if cca_id:
pre_built_creds = build_composio_credentials(cca_id)
client = GoogleDriveClient(
session=db_session,
connector_id=connector.id,
credentials=pre_built_creds,
)
try:
await client.trash_file(file_id=final_file_id)
except HttpError as http_err:
if http_err.resp.status == 403:
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
f"Insufficient permissions for connector {connector.id}: {http_err}"
"Google Drive account %s has expired authentication",
account.get("id"),
)
try:
from sqlalchemy.orm.attributes import flag_modified
if not connector.config.get("auth_expired"):
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
connector.id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": connector.id,
"message": "This Google Drive account needs additional permissions. Please re-authenticate in connector settings.",
"status": "auth_error",
"message": "The Google Drive account for this file needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "google_drive",
}
raise
logger.info(
f"Google Drive file deleted (moved to trash): file_id={final_file_id}"
)
file = context["file"]
file_id = file["file_id"]
document_id = file.get("document_id")
connector_id_from_context = context["account"]["id"]
trash_result: dict[str, Any] = {
"status": "success",
"file_id": final_file_id,
"message": f"Successfully moved '{file['name']}' to trash.",
}
if not file_id:
return {
"status": "error",
"message": "File ID is missing from the indexed document. Please re-index the file and try again.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
trash_result["warning"] = (
f"File moved to trash, but failed to remove from knowledge base: {e!s}"
)
trash_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
trash_result["message"] = (
f"{trash_result.get('message', '')} (also removed from knowledge base)"
logger.info(
f"Requesting approval for deleting Google Drive file: '{file_name}' (file_id={file_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="google_drive_file_trash",
tool_name="delete_google_drive_file",
params={
"file_id": file_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
return trash_result
if result.rejected:
return {
"status": "rejected",
"message": "User declined. The file was not trashed. Do not ask again or suggest alternatives.",
}
final_file_id = result.params.get("file_id", file_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this file.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
_drive_types = [
SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR,
]
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type.in_(_drive_types),
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Google Drive connector is invalid or has been disconnected.",
}
logger.info(
f"Deleting Google Drive file: file_id='{final_file_id}', connector={final_connector_id}"
)
is_composio_drive = (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR
)
if is_composio_drive:
cca_id = connector.config.get("composio_connected_account_id")
if not cca_id:
return {
"status": "error",
"message": "Composio connected account ID not found for this Drive connector.",
}
client = GoogleDriveClient(
session=db_session,
connector_id=connector.id,
)
try:
if is_composio_drive:
from app.services.composio_service import ComposioService
result = await ComposioService().execute_tool(
connected_account_id=cca_id,
tool_name="GOOGLEDRIVE_TRASH_FILE",
params={"file_id": final_file_id},
entity_id=f"surfsense_{user_id}",
)
if not result.get("success"):
raise RuntimeError(
result.get("error", "Unknown Composio Drive error")
)
else:
await client.trash_file(file_id=final_file_id)
except HttpError as http_err:
if http_err.resp.status == 403:
logger.warning(
f"Insufficient permissions for connector {connector.id}: {http_err}"
)
try:
from sqlalchemy.orm.attributes import flag_modified
if not connector.config.get("auth_expired"):
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
logger.warning(
"Failed to persist auth_expired for connector %s",
connector.id,
exc_info=True,
)
return {
"status": "insufficient_permissions",
"connector_id": connector.id,
"message": "This Google Drive account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
logger.info(
f"Google Drive file deleted (moved to trash): file_id={final_file_id}"
)
trash_result: dict[str, Any] = {
"status": "success",
"file_id": final_file_id,
"message": f"Successfully moved '{file['name']}' to trash.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
trash_result["warning"] = (
f"File moved to trash, but failed to remove from knowledge base: {e!s}"
)
trash_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
trash_result["message"] = (
f"{trash_result.get('message', '')} (also removed from knowledge base)"
)
return trash_result
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@ -50,6 +50,7 @@ DEFAULT_AUTO_APPROVED_TOOLS: frozenset[str] = frozenset(
{
"create_gmail_draft",
"update_gmail_draft",
"create_calendar_event",
"create_notion_page",
"create_confluence_page",
"create_google_drive_file",

View file

@ -8,6 +8,7 @@ from sqlalchemy.orm.attributes import flag_modified
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.jira_history import JiraHistoryConnector
from app.db import async_session_maker
from app.services.jira import JiraToolMetadataService
logger = logging.getLogger(__name__)
@ -19,6 +20,28 @@ def create_create_jira_issue_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""Factory function to create the create_jira_issue tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker`. This is critical for the compiled-agent
cache: the compiled graph (and therefore this closure) is reused
across HTTP requests, so capturing a per-request session here would
    surface stale/closed sessions on cache hits. Per-call sessions also
    keep long-running Jira API calls from blocking the request's outer
    transaction.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Jira connector
user_id: User ID for fetching user-specific context
connector_id: Optional specific connector ID (if known)
Returns:
Configured create_jira_issue tool
"""
del db_session # per-call session — see docstring
@tool
async def create_jira_issue(
project_key: str,
@ -49,158 +72,167 @@ def create_create_jira_issue_tool(
f"create_jira_issue called: project_key='{project_key}', summary='{summary}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Jira tool not properly configured."}
try:
metadata_service = JiraToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
if "error" in context:
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
return {
"status": "auth_error",
"message": "All connected Jira accounts need re-authentication.",
"connector_type": "jira",
}
result = request_approval(
action_type="jira_issue_creation",
tool_name="create_jira_issue",
params={
"project_key": project_key,
"summary": summary,
"issue_type": issue_type,
"description": description,
"priority": priority,
"connector_id": connector_id,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_project_key = result.params.get("project_key", project_key)
final_summary = result.params.get("summary", summary)
final_issue_type = result.params.get("issue_type", issue_type)
final_description = result.params.get("description", description)
final_priority = result.params.get("priority", priority)
final_connector_id = result.params.get("connector_id", connector_id)
if not final_summary or not final_summary.strip():
return {"status": "error", "message": "Issue summary cannot be empty."}
if not final_project_key:
return {"status": "error", "message": "A project must be selected."}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
actual_connector_id = final_connector_id
if actual_connector_id is None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.JIRA_CONNECTOR,
)
async with async_session_maker() as db_session:
metadata_service = JiraToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
connector = result.scalars().first()
if not connector:
return {"status": "error", "message": "No Jira connector found."}
actual_connector_id = connector.id
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == actual_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.JIRA_CONNECTOR,
)
if "error" in context:
return {"status": "error", "message": context["error"]}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
return {
"status": "auth_error",
"message": "All connected Jira accounts need re-authentication.",
"connector_type": "jira",
}
result = request_approval(
action_type="jira_issue_creation",
tool_name="create_jira_issue",
params={
"project_key": project_key,
"summary": summary,
"issue_type": issue_type,
"description": description,
"priority": priority,
"connector_id": connector_id,
},
context=context,
)
connector = result.scalars().first()
if not connector:
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_project_key = result.params.get("project_key", project_key)
final_summary = result.params.get("summary", summary)
final_issue_type = result.params.get("issue_type", issue_type)
final_description = result.params.get("description", description)
final_priority = result.params.get("priority", priority)
final_connector_id = result.params.get("connector_id", connector_id)
if not final_summary or not final_summary.strip():
return {
"status": "error",
"message": "Selected Jira connector is invalid.",
"message": "Issue summary cannot be empty.",
}
if not final_project_key:
return {"status": "error", "message": "A project must be selected."}
try:
jira_history = JiraHistoryConnector(
session=db_session, connector_id=actual_connector_id
)
jira_client = await jira_history._get_jira_client()
api_result = await asyncio.to_thread(
jira_client.create_issue,
project_key=final_project_key,
summary=final_summary,
issue_type=final_issue_type,
description=final_description,
priority=final_priority,
)
except Exception as api_err:
if "status code 403" in str(api_err).lower():
try:
_conn = connector
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
pass
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Jira account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
from sqlalchemy.future import select
issue_key = api_result.get("key", "")
issue_url = (
f"{jira_history._base_url}/browse/{issue_key}"
if jira_history._base_url and issue_key
else ""
)
from app.db import SearchSourceConnector, SearchSourceConnectorType
kb_message_suffix = ""
try:
from app.services.jira import JiraKBSyncService
kb_service = JiraKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
issue_id=issue_key,
issue_identifier=issue_key,
issue_title=final_summary,
description=final_description,
state="To Do",
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
actual_connector_id = final_connector_id
if actual_connector_id is None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.JIRA_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Jira connector found.",
}
actual_connector_id = connector.id
else:
kb_message_suffix = " This issue will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This issue will be added to your knowledge base in the next scheduled sync."
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == actual_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.JIRA_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Jira connector is invalid.",
}
return {
"status": "success",
"issue_key": issue_key,
"issue_url": issue_url,
"message": f"Jira issue {issue_key} created successfully.{kb_message_suffix}",
}
try:
jira_history = JiraHistoryConnector(
session=db_session, connector_id=actual_connector_id
)
jira_client = await jira_history._get_jira_client()
api_result = await asyncio.to_thread(
jira_client.create_issue,
project_key=final_project_key,
summary=final_summary,
issue_type=final_issue_type,
description=final_description,
priority=final_priority,
)
except Exception as api_err:
if "status code 403" in str(api_err).lower():
try:
_conn = connector
_conn.config = {**_conn.config, "auth_expired": True}
flag_modified(_conn, "config")
await db_session.commit()
except Exception:
pass
return {
"status": "insufficient_permissions",
"connector_id": actual_connector_id,
"message": "This Jira account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
issue_key = api_result.get("key", "")
issue_url = (
f"{jira_history._base_url}/browse/{issue_key}"
if jira_history._base_url and issue_key
else ""
)
kb_message_suffix = ""
try:
from app.services.jira import JiraKBSyncService
kb_service = JiraKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
issue_id=issue_key,
issue_identifier=issue_key,
issue_title=final_summary,
description=final_description,
state="To Do",
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This issue will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This issue will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"issue_key": issue_key,
"issue_url": issue_url,
"message": f"Jira issue {issue_key} created successfully.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
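# The 403 handler above rebuilds connector.config and then calls
# flag_modified before committing. A short sketch of why that call matters,
# assuming a SQLAlchemy 2.0 model with a plain JSON column; Connector and
# mark_auth_expired are illustrative names, not this codebase's model.
from sqlalchemy import JSON, Integer
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from sqlalchemy.orm.attributes import flag_modified

class Base(DeclarativeBase):
    pass

class Connector(Base):
    __tablename__ = "connectors"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    config: Mapped[dict] = mapped_column(JSON, default=dict)

def mark_auth_expired(connector: Connector) -> None:
    # In-place mutation of a plain JSON dict is invisible to the unit of
    # work, so the attribute must be flagged dirty explicitly.
    connector.config["auth_expired"] = True
    flag_modified(connector, "config")
    # Rebuilding the dict, as the tool code does, also triggers change
    # tracking; calling flag_modified as well makes the intent explicit.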

View file

@@ -8,6 +8,7 @@ from sqlalchemy.orm.attributes import flag_modified
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.jira_history import JiraHistoryConnector
from app.db import async_session_maker
from app.services.jira import JiraToolMetadataService
logger = logging.getLogger(__name__)
@@ -19,6 +20,26 @@ def create_delete_jira_issue_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""Factory function to create the delete_jira_issue tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker`. This is critical for the compiled-agent
cache: the compiled graph (and therefore this closure) is reused
across HTTP requests, so capturing a per-request session here would
surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Jira connector
user_id: User ID for fetching user-specific context
connector_id: Optional specific connector ID (if known)
Returns:
Configured delete_jira_issue tool
"""
del db_session # per-call session — see docstring
@tool
async def delete_jira_issue(
issue_title_or_key: str,
@@ -44,130 +65,136 @@ def create_delete_jira_issue_tool(
f"delete_jira_issue called: issue_title_or_key='{issue_title_or_key}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Jira tool not properly configured."}
try:
metadata_service = JiraToolMetadataService(db_session)
context = await metadata_service.get_deletion_context(
search_space_id, user_id, issue_title_or_key
)
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "jira",
}
if "not found" in error_msg.lower():
return {"status": "not_found", "message": error_msg}
return {"status": "error", "message": error_msg}
issue_data = context["issue"]
issue_key = issue_data["issue_id"]
document_id = issue_data["document_id"]
connector_id_from_context = context.get("account", {}).get("id")
result = request_approval(
action_type="jira_issue_deletion",
tool_name="delete_jira_issue",
params={
"issue_key": issue_key,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_issue_key = result.params.get("issue_key", issue_key)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this issue.",
}
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.JIRA_CONNECTOR,
async with async_session_maker() as db_session:
metadata_service = JiraToolMetadataService(db_session)
context = await metadata_service.get_deletion_context(
search_space_id, user_id, issue_title_or_key
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Jira connector is invalid.",
}
try:
jira_history = JiraHistoryConnector(
session=db_session, connector_id=final_connector_id
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "jira",
}
if "not found" in error_msg.lower():
return {"status": "not_found", "message": error_msg}
return {"status": "error", "message": error_msg}
issue_data = context["issue"]
issue_key = issue_data["issue_id"]
document_id = issue_data["document_id"]
connector_id_from_context = context.get("account", {}).get("id")
result = request_approval(
action_type="jira_issue_deletion",
tool_name="delete_jira_issue",
params={
"issue_key": issue_key,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
jira_client = await jira_history._get_jira_client()
await asyncio.to_thread(jira_client.delete_issue, final_issue_key)
except Exception as api_err:
if "status code 403" in str(api_err).lower():
try:
connector.config = {**connector.config, "auth_expired": True}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
pass
if result.rejected:
return {
"status": "insufficient_permissions",
"connector_id": final_connector_id,
"message": "This Jira account needs additional permissions. Please re-authenticate in connector settings.",
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
raise
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
final_issue_key = result.params.get("issue_key", issue_key)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this issue.",
}
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.JIRA_CONNECTOR,
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Jira connector is invalid.",
}
message = f"Jira issue {final_issue_key} deleted successfully."
if deleted_from_kb:
message += " Also removed from the knowledge base."
try:
jira_history = JiraHistoryConnector(
session=db_session, connector_id=final_connector_id
)
jira_client = await jira_history._get_jira_client()
await asyncio.to_thread(jira_client.delete_issue, final_issue_key)
except Exception as api_err:
if "status code 403" in str(api_err).lower():
try:
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
pass
return {
"status": "insufficient_permissions",
"connector_id": final_connector_id,
"message": "This Jira account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
return {
"status": "success",
"issue_key": final_issue_key,
"deleted_from_kb": deleted_from_kb,
"message": message,
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
message = f"Jira issue {final_issue_key} deleted successfully."
if deleted_from_kb:
message += " Also removed from the knowledge base."
return {
"status": "success",
"issue_key": final_issue_key,
"deleted_from_kb": deleted_from_kb,
"message": message,
}
except Exception as e:
from langgraph.errors import GraphInterrupt
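# Both Jira tools wrap the synchronous Jira client in asyncio.to_thread so
# the event loop is not blocked while Atlassian responds. A stand-alone
# sketch of that pattern; slow_delete is a stand-in, not the real client.
import asyncio
import time

def slow_delete(issue_key: str) -> str:
    # Stand-in for a blocking SDK call such as jira_client.delete_issue.
    time.sleep(0.1)
    return f"deleted {issue_key}"

async def main() -> None:
    # to_thread (Python 3.9+) runs the blocking call in a worker thread,
    # leaving the loop free to serve other requests in the meantime.
    result = await asyncio.to_thread(slow_delete, "PROJ-123")
    print(result)

asyncio.run(main())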

View file

@@ -8,6 +8,7 @@ from sqlalchemy.orm.attributes import flag_modified
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.jira_history import JiraHistoryConnector
from app.db import async_session_maker
from app.services.jira import JiraToolMetadataService
logger = logging.getLogger(__name__)
@@ -19,6 +20,26 @@ def create_update_jira_issue_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""Factory function to create the update_jira_issue tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker`. This is critical for the compiled-agent
cache: the compiled graph (and therefore this closure) is reused
across HTTP requests, so capturing a per-request session here would
surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Jira connector
user_id: User ID for fetching user-specific context
connector_id: Optional specific connector ID (if known)
Returns:
Configured update_jira_issue tool
"""
del db_session # per-call session — see docstring
@tool
async def update_jira_issue(
issue_title_or_key: str,
@@ -48,169 +69,177 @@ def create_update_jira_issue_tool(
f"update_jira_issue called: issue_title_or_key='{issue_title_or_key}'"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Jira tool not properly configured."}
try:
metadata_service = JiraToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, issue_title_or_key
)
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "jira",
}
if "not found" in error_msg.lower():
return {"status": "not_found", "message": error_msg}
return {"status": "error", "message": error_msg}
issue_data = context["issue"]
issue_key = issue_data["issue_id"]
document_id = issue_data.get("document_id")
connector_id_from_context = context.get("account", {}).get("id")
result = request_approval(
action_type="jira_issue_update",
tool_name="update_jira_issue",
params={
"issue_key": issue_key,
"document_id": document_id,
"new_summary": new_summary,
"new_description": new_description,
"new_priority": new_priority,
"connector_id": connector_id_from_context,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_issue_key = result.params.get("issue_key", issue_key)
final_summary = result.params.get("new_summary", new_summary)
final_description = result.params.get("new_description", new_description)
final_priority = result.params.get("new_priority", new_priority)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_document_id = result.params.get("document_id", document_id)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this issue.",
}
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.JIRA_CONNECTOR,
async with async_session_maker() as db_session:
metadata_service = JiraToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, issue_title_or_key
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Jira connector is invalid.",
}
fields: dict[str, Any] = {}
if final_summary:
fields["summary"] = final_summary
if final_description is not None:
fields["description"] = {
"type": "doc",
"version": 1,
"content": [
{
"type": "paragraph",
"content": [{"type": "text", "text": final_description}],
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "jira",
}
],
}
if final_priority:
fields["priority"] = {"name": final_priority}
if "not found" in error_msg.lower():
return {"status": "not_found", "message": error_msg}
return {"status": "error", "message": error_msg}
if not fields:
return {"status": "error", "message": "No changes specified."}
issue_data = context["issue"]
issue_key = issue_data["issue_id"]
document_id = issue_data.get("document_id")
connector_id_from_context = context.get("account", {}).get("id")
try:
jira_history = JiraHistoryConnector(
session=db_session, connector_id=final_connector_id
result = request_approval(
action_type="jira_issue_update",
tool_name="update_jira_issue",
params={
"issue_key": issue_key,
"document_id": document_id,
"new_summary": new_summary,
"new_description": new_description,
"new_priority": new_priority,
"connector_id": connector_id_from_context,
},
context=context,
)
jira_client = await jira_history._get_jira_client()
await asyncio.to_thread(
jira_client.update_issue, final_issue_key, fields
)
except Exception as api_err:
if "status code 403" in str(api_err).lower():
try:
connector.config = {**connector.config, "auth_expired": True}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
pass
if result.rejected:
return {
"status": "insufficient_permissions",
"connector_id": final_connector_id,
"message": "This Jira account needs additional permissions. Please re-authenticate in connector settings.",
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
raise
issue_url = (
f"{jira_history._base_url}/browse/{final_issue_key}"
if jira_history._base_url and final_issue_key
else ""
)
final_issue_key = result.params.get("issue_key", issue_key)
final_summary = result.params.get("new_summary", new_summary)
final_description = result.params.get(
"new_description", new_description
)
final_priority = result.params.get("new_priority", new_priority)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_document_id = result.params.get("document_id", document_id)
kb_message_suffix = ""
if final_document_id:
try:
from app.services.jira import JiraKBSyncService
from sqlalchemy.future import select
kb_service = JiraKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=final_document_id,
issue_id=final_issue_key,
user_id=user_id,
search_space_id=search_space_id,
from app.db import SearchSourceConnector, SearchSourceConnectorType
if not final_connector_id:
return {
"status": "error",
"message": "No connector found for this issue.",
}
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.JIRA_CONNECTOR,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Jira connector is invalid.",
}
fields: dict[str, Any] = {}
if final_summary:
fields["summary"] = final_summary
if final_description is not None:
fields["description"] = {
"type": "doc",
"version": 1,
"content": [
{
"type": "paragraph",
"content": [
{"type": "text", "text": final_description}
],
}
],
}
if final_priority:
fields["priority"] = {"name": final_priority}
if not fields:
return {"status": "error", "message": "No changes specified."}
try:
jira_history = JiraHistoryConnector(
session=db_session, connector_id=final_connector_id
)
jira_client = await jira_history._get_jira_client()
await asyncio.to_thread(
jira_client.update_issue, final_issue_key, fields
)
except Exception as api_err:
if "status code 403" in str(api_err).lower():
try:
connector.config = {
**connector.config,
"auth_expired": True,
}
flag_modified(connector, "config")
await db_session.commit()
except Exception:
pass
return {
"status": "insufficient_permissions",
"connector_id": final_connector_id,
"message": "This Jira account needs additional permissions. Please re-authenticate in connector settings.",
}
raise
issue_url = (
f"{jira_history._base_url}/browse/{final_issue_key}"
if jira_history._base_url and final_issue_key
else ""
)
kb_message_suffix = ""
if final_document_id:
try:
from app.services.jira import JiraKBSyncService
kb_service = JiraKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=final_document_id,
issue_id=final_issue_key,
user_id=user_id,
search_space_id=search_space_id,
)
else:
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = (
" The knowledge base will be updated in the next sync."
)
except Exception as kb_err:
logger.warning(f"KB sync after update failed: {kb_err}")
kb_message_suffix = (
" The knowledge base will be updated in the next sync."
)
except Exception as kb_err:
logger.warning(f"KB sync after update failed: {kb_err}")
kb_message_suffix = (
" The knowledge base will be updated in the next sync."
)
return {
"status": "success",
"issue_key": final_issue_key,
"issue_url": issue_url,
"message": f"Jira issue {final_issue_key} updated successfully.{kb_message_suffix}",
}
return {
"status": "success",
"issue_key": final_issue_key,
"issue_url": issue_url,
"message": f"Jira issue {final_issue_key} updated successfully.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
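# The update tool builds the Jira description inline as an Atlassian
# Document Format (ADF) document. A sketch of a helper that factors out
# that wrapper, assuming Jira Cloud's v3 issue API; adf_paragraph_doc is
# an illustrative name.
from typing import Any

def adf_paragraph_doc(plain_text: str) -> dict[str, Any]:
    # Rich-text fields in the v3 API expect an ADF "doc" root (version 1)
    # containing block nodes such as paragraphs.
    return {
        "type": "doc",
        "version": 1,
        "content": [
            {
                "type": "paragraph",
                "content": [{"type": "text", "text": plain_text}],
            }
        ],
    }

fields = {"description": adf_paragraph_doc("Reproduced on staging.")}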

View file

@@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.linear_connector import LinearAPIError, LinearConnector
from app.db import async_session_maker
from app.services.linear import LinearToolMetadataService
logger = logging.getLogger(__name__)
@@ -17,11 +18,17 @@ def create_create_linear_issue_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""
Factory function to create the create_linear_issue tool.
"""Factory function to create the create_linear_issue tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker`. This is critical for the compiled-agent
cache: the compiled graph (and therefore this closure) is reused
across HTTP requests, so capturing a per-request session here would
surface stale/closed sessions on cache hits.
Args:
db_session: Database session for accessing the Linear connector
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Linear connector
user_id: User ID for fetching user-specific context
connector_id: Optional specific connector ID (if known)
@@ -29,6 +36,7 @@ def create_create_linear_issue_tool(
Returns:
Configured create_linear_issue tool
"""
del db_session # per-call session — see docstring
@tool
async def create_linear_issue(
@@ -65,7 +73,7 @@
"""
logger.info(f"create_linear_issue called: title='{title}'")
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
logger.error(
"Linear tool not properly configured - missing required parameters"
)
@@ -75,160 +83,170 @@ def create_create_linear_issue_tool(
}
try:
metadata_service = LinearToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
if "error" in context:
logger.error(f"Failed to fetch creation context: {context['error']}")
return {"status": "error", "message": context["error"]}
workspaces = context.get("workspaces", [])
if workspaces and all(w.get("auth_expired") for w in workspaces):
logger.warning("All Linear accounts have expired authentication")
return {
"status": "auth_error",
"message": "All connected Linear accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "linear",
}
logger.info(f"Requesting approval for creating Linear issue: '{title}'")
result = request_approval(
action_type="linear_issue_creation",
tool_name="create_linear_issue",
params={
"title": title,
"description": description,
"team_id": None,
"state_id": None,
"assignee_id": None,
"priority": None,
"label_ids": [],
"connector_id": connector_id,
},
context=context,
)
if result.rejected:
logger.info("Linear issue creation rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_title = result.params.get("title", title)
final_description = result.params.get("description", description)
final_team_id = result.params.get("team_id")
final_state_id = result.params.get("state_id")
final_assignee_id = result.params.get("assignee_id")
final_priority = result.params.get("priority")
final_label_ids = result.params.get("label_ids") or []
final_connector_id = result.params.get("connector_id", connector_id)
if not final_title or not final_title.strip():
logger.error("Title is empty or contains only whitespace")
return {"status": "error", "message": "Issue title cannot be empty."}
if not final_team_id:
return {
"status": "error",
"message": "A team must be selected to create an issue.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
actual_connector_id = final_connector_id
if actual_connector_id is None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.LINEAR_CONNECTOR,
)
async with async_session_maker() as db_session:
metadata_service = LinearToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
connector = result.scalars().first()
if not connector:
if "error" in context:
logger.error(
f"Failed to fetch creation context: {context['error']}"
)
return {"status": "error", "message": context["error"]}
workspaces = context.get("workspaces", [])
if workspaces and all(w.get("auth_expired") for w in workspaces):
logger.warning("All Linear accounts have expired authentication")
return {
"status": "auth_error",
"message": "All connected Linear accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "linear",
}
logger.info(f"Requesting approval for creating Linear issue: '{title}'")
result = request_approval(
action_type="linear_issue_creation",
tool_name="create_linear_issue",
params={
"title": title,
"description": description,
"team_id": None,
"state_id": None,
"assignee_id": None,
"priority": None,
"label_ids": [],
"connector_id": connector_id,
},
context=context,
)
if result.rejected:
logger.info("Linear issue creation rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_title = result.params.get("title", title)
final_description = result.params.get("description", description)
final_team_id = result.params.get("team_id")
final_state_id = result.params.get("state_id")
final_assignee_id = result.params.get("assignee_id")
final_priority = result.params.get("priority")
final_label_ids = result.params.get("label_ids") or []
final_connector_id = result.params.get("connector_id", connector_id)
if not final_title or not final_title.strip():
logger.error("Title is empty or contains only whitespace")
return {
"status": "error",
"message": "No Linear connector found. Please connect Linear in your workspace settings.",
"message": "Issue title cannot be empty.",
}
actual_connector_id = connector.id
logger.info(f"Found Linear connector: id={actual_connector_id}")
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == actual_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.LINEAR_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
if not final_team_id:
return {
"status": "error",
"message": "Selected Linear connector is invalid or has been disconnected.",
"message": "A team must be selected to create an issue.",
}
logger.info(f"Validated Linear connector: id={actual_connector_id}")
logger.info(
f"Creating Linear issue with final params: title='{final_title}'"
)
linear_client = LinearConnector(
session=db_session, connector_id=actual_connector_id
)
result = await linear_client.create_issue(
team_id=final_team_id,
title=final_title,
description=final_description,
state_id=final_state_id,
assignee_id=final_assignee_id,
priority=final_priority,
label_ids=final_label_ids if final_label_ids else None,
)
from sqlalchemy.future import select
if result.get("status") == "error":
logger.error(f"Failed to create Linear issue: {result.get('message')}")
return {"status": "error", "message": result.get("message")}
from app.db import SearchSourceConnector, SearchSourceConnectorType
logger.info(
f"Linear issue created: {result.get('identifier')} - {result.get('title')}"
)
kb_message_suffix = ""
try:
from app.services.linear import LinearKBSyncService
kb_service = LinearKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
issue_id=result.get("id"),
issue_identifier=result.get("identifier", ""),
issue_title=result.get("title", final_title),
issue_url=result.get("url"),
description=final_description,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
actual_connector_id = final_connector_id
if actual_connector_id is None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.LINEAR_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "No Linear connector found. Please connect Linear in your workspace settings.",
}
actual_connector_id = connector.id
logger.info(f"Found Linear connector: id={actual_connector_id}")
else:
kb_message_suffix = " This issue will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This issue will be added to your knowledge base in the next scheduled sync."
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == actual_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.LINEAR_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected Linear connector is invalid or has been disconnected.",
}
logger.info(f"Validated Linear connector: id={actual_connector_id}")
return {
"status": "success",
"issue_id": result.get("id"),
"identifier": result.get("identifier"),
"url": result.get("url"),
"message": (result.get("message", "") + kb_message_suffix),
}
logger.info(
f"Creating Linear issue with final params: title='{final_title}'"
)
linear_client = LinearConnector(
session=db_session, connector_id=actual_connector_id
)
result = await linear_client.create_issue(
team_id=final_team_id,
title=final_title,
description=final_description,
state_id=final_state_id,
assignee_id=final_assignee_id,
priority=final_priority,
label_ids=final_label_ids if final_label_ids else None,
)
if result.get("status") == "error":
logger.error(
f"Failed to create Linear issue: {result.get('message')}"
)
return {"status": "error", "message": result.get("message")}
logger.info(
f"Linear issue created: {result.get('identifier')} - {result.get('title')}"
)
kb_message_suffix = ""
try:
from app.services.linear import LinearKBSyncService
kb_service = LinearKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
issue_id=result.get("id"),
issue_identifier=result.get("identifier", ""),
issue_title=result.get("title", final_title),
issue_url=result.get("url"),
description=final_description,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This issue will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This issue will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"issue_id": result.get("id"),
"identifier": result.get("identifier"),
"url": result.get("url"),
"message": (result.get("message", "") + kb_message_suffix),
}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.linear_connector import LinearAPIError, LinearConnector
from app.db import async_session_maker
from app.services.linear import LinearToolMetadataService
logger = logging.getLogger(__name__)
@@ -17,11 +18,17 @@ def create_delete_linear_issue_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""
Factory function to create the delete_linear_issue tool.
"""Factory function to create the delete_linear_issue tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker`. This is critical for the compiled-agent
cache: the compiled graph (and therefore this closure) is reused
across HTTP requests, so capturing a per-request session here would
surface stale/closed sessions on cache hits.
Args:
db_session: Database session for accessing the Linear connector
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Linear connector
user_id: User ID for finding the correct Linear connector
connector_id: Optional specific connector ID (if known)
@@ -29,6 +36,7 @@ def create_delete_linear_issue_tool(
Returns:
Configured delete_linear_issue tool
"""
del db_session # per-call session — see docstring
@tool
async def delete_linear_issue(
@@ -73,7 +81,7 @@ def create_delete_linear_issue_tool(
f"delete_linear_issue called: issue_ref='{issue_ref}', delete_from_kb={delete_from_kb}"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
logger.error(
"Linear tool not properly configured - missing required parameters"
)
@@ -83,149 +91,152 @@ def create_delete_linear_issue_tool(
}
try:
metadata_service = LinearToolMetadataService(db_session)
context = await metadata_service.get_delete_context(
search_space_id, user_id, issue_ref
)
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
logger.warning(f"Auth expired for delete context: {error_msg}")
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "linear",
}
if "not found" in error_msg.lower():
logger.warning(f"Issue not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
else:
logger.error(f"Failed to fetch delete context: {error_msg}")
return {"status": "error", "message": error_msg}
issue_id = context["issue"]["id"]
issue_identifier = context["issue"].get("identifier", "")
document_id = context["issue"]["document_id"]
connector_id_from_context = context.get("workspace", {}).get("id")
logger.info(
f"Requesting approval for deleting Linear issue: '{issue_ref}' "
f"(id={issue_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="linear_issue_deletion",
tool_name="delete_linear_issue",
params={
"issue_id": issue_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
if result.rejected:
logger.info("Linear issue deletion rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_issue_id = result.params.get("issue_id", issue_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
logger.info(
f"Deleting Linear issue with final params: issue_id={final_issue_id}, "
f"connector_id={final_connector_id}, delete_from_kb={final_delete_from_kb}"
)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if final_connector_id:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.LINEAR_CONNECTOR,
)
async with async_session_maker() as db_session:
metadata_service = LinearToolMetadataService(db_session)
context = await metadata_service.get_delete_context(
search_space_id, user_id, issue_ref
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={final_connector_id} for search_space_id={search_space_id}"
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
logger.warning(f"Auth expired for delete context: {error_msg}")
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "linear",
}
if "not found" in error_msg.lower():
logger.warning(f"Issue not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
else:
logger.error(f"Failed to fetch delete context: {error_msg}")
return {"status": "error", "message": error_msg}
issue_id = context["issue"]["id"]
issue_identifier = context["issue"].get("identifier", "")
document_id = context["issue"]["document_id"]
connector_id_from_context = context.get("workspace", {}).get("id")
logger.info(
f"Requesting approval for deleting Linear issue: '{issue_ref}' "
f"(id={issue_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="linear_issue_deletion",
tool_name="delete_linear_issue",
params={
"issue_id": issue_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
if result.rejected:
logger.info("Linear issue deletion rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_issue_id = result.params.get("issue_id", issue_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
logger.info(
f"Deleting Linear issue with final params: issue_id={final_issue_id}, "
f"connector_id={final_connector_id}, delete_from_kb={final_delete_from_kb}"
)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if final_connector_id:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.LINEAR_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={final_connector_id} for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "Selected Linear connector is invalid or has been disconnected.",
}
actual_connector_id = connector.id
logger.info(f"Validated Linear connector: id={actual_connector_id}")
else:
logger.error("No connector found for this issue")
return {
"status": "error",
"message": "Selected Linear connector is invalid or has been disconnected.",
"message": "No connector found for this issue.",
}
actual_connector_id = connector.id
logger.info(f"Validated Linear connector: id={actual_connector_id}")
else:
logger.error("No connector found for this issue")
return {
"status": "error",
"message": "No connector found for this issue.",
}
linear_client = LinearConnector(
session=db_session, connector_id=actual_connector_id
)
linear_client = LinearConnector(
session=db_session, connector_id=actual_connector_id
)
result = await linear_client.archive_issue(issue_id=final_issue_id)
result = await linear_client.archive_issue(issue_id=final_issue_id)
logger.info(
f"archive_issue result: {result.get('status')} - {result.get('message', '')}"
)
logger.info(
f"archive_issue result: {result.get('status')} - {result.get('message', '')}"
)
deleted_from_kb = False
if (
result.get("status") == "success"
and final_delete_from_kb
and document_id
):
try:
from app.db import Document
deleted_from_kb = False
if (
result.get("status") == "success"
and final_delete_from_kb
and document_id
):
try:
from app.db import Document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
result["warning"] = (
f"Issue archived in Linear, but failed to remove from knowledge base: {e!s}"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
result["warning"] = (
f"Issue archived in Linear, but failed to remove from knowledge base: {e!s}"
)
if result.get("status") == "success":
result["deleted_from_kb"] = deleted_from_kb
if issue_identifier:
result["message"] = (
f"Issue {issue_identifier} archived successfully."
)
if deleted_from_kb:
result["message"] = (
f"{result.get('message', '')} Also removed from the knowledge base."
)
if result.get("status") == "success":
result["deleted_from_kb"] = deleted_from_kb
if issue_identifier:
result["message"] = (
f"Issue {issue_identifier} archived successfully."
)
if deleted_from_kb:
result["message"] = (
f"{result.get('message', '')} Also removed from the knowledge base."
)
return result
return result
except Exception as e:
from langgraph.errors import GraphInterrupt
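# The delete tools treat knowledge-base cleanup as best-effort: once the
# Linear archive succeeds, a KB failure is downgraded to a warning instead
# of failing the whole call. A compact sketch of that shape; archive,
# remove_document, and delete_issue are illustrative stand-ins.
import asyncio

async def archive(issue_id: str) -> dict:
    return {"status": "success", "message": f"Issue {issue_id} archived."}

async def remove_document(document_id: int) -> None:
    raise RuntimeError("KB row is locked")  # simulate a cleanup failure

async def delete_issue(issue_id: str, document_id: int | None) -> dict:
    result = await archive(issue_id)
    if result["status"] == "success" and document_id is not None:
        try:
            await remove_document(document_id)
            result["deleted_from_kb"] = True
        except Exception as exc:
            # The primary action already happened, so report the cleanup
            # failure as a warning rather than flipping the status.
            result["deleted_from_kb"] = False
            result["warning"] = f"Archived, but KB cleanup failed: {exc!s}"
    return result

print(asyncio.run(delete_issue("LIN-42", 7)))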

View file

@@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.linear_connector import LinearAPIError, LinearConnector
from app.db import async_session_maker
from app.services.linear import LinearKBSyncService, LinearToolMetadataService
logger = logging.getLogger(__name__)
@@ -17,11 +18,17 @@ def create_update_linear_issue_tool(
user_id: str | None = None,
connector_id: int | None = None,
):
"""
Factory function to create the update_linear_issue tool.
"""Factory function to create the update_linear_issue tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker`. This is critical for the compiled-agent
cache: the compiled graph (and therefore this closure) is reused
across HTTP requests, so capturing a per-request session here would
surface stale/closed sessions on cache hits.
Args:
db_session: Database session for accessing the Linear connector
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Linear connector
user_id: User ID for fetching user-specific context
connector_id: Optional specific connector ID (if known)
@@ -29,6 +36,7 @@ def create_update_linear_issue_tool(
Returns:
Configured update_linear_issue tool
"""
del db_session # per-call session — see docstring
@tool
async def update_linear_issue(
@@ -86,7 +94,7 @@ def create_update_linear_issue_tool(
"""
logger.info(f"update_linear_issue called: issue_ref='{issue_ref}'")
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
logger.error(
"Linear tool not properly configured - missing required parameters"
)
@@ -96,176 +104,177 @@ def create_update_linear_issue_tool(
}
try:
metadata_service = LinearToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, issue_ref
)
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
logger.warning(f"Auth expired for update context: {error_msg}")
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "linear",
}
if "not found" in error_msg.lower():
logger.warning(f"Issue not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
else:
logger.error(f"Failed to fetch update context: {error_msg}")
return {"status": "error", "message": error_msg}
issue_id = context["issue"]["id"]
document_id = context["issue"]["document_id"]
connector_id_from_context = context.get("workspace", {}).get("id")
team = context.get("team", {})
new_state_id = _resolve_state(team, new_state_name)
new_assignee_id = _resolve_assignee(team, new_assignee_email)
new_label_ids = _resolve_labels(team, new_label_names)
logger.info(
f"Requesting approval for updating Linear issue: '{issue_ref}' (id={issue_id})"
)
result = request_approval(
action_type="linear_issue_update",
tool_name="update_linear_issue",
params={
"issue_id": issue_id,
"document_id": document_id,
"new_title": new_title,
"new_description": new_description,
"new_state_id": new_state_id,
"new_assignee_id": new_assignee_id,
"new_priority": new_priority,
"new_label_ids": new_label_ids,
"connector_id": connector_id_from_context,
},
context=context,
)
if result.rejected:
logger.info("Linear issue update rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_issue_id = result.params.get("issue_id", issue_id)
final_document_id = result.params.get("document_id", document_id)
final_new_title = result.params.get("new_title", new_title)
final_new_description = result.params.get(
"new_description", new_description
)
final_new_state_id = result.params.get("new_state_id", new_state_id)
final_new_assignee_id = result.params.get(
"new_assignee_id", new_assignee_id
)
final_new_priority = result.params.get("new_priority", new_priority)
final_new_label_ids: list[str] | None = result.params.get(
"new_label_ids", new_label_ids
)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
if not final_connector_id:
logger.error("No connector found for this issue")
return {
"status": "error",
"message": "No connector found for this issue.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.LINEAR_CONNECTOR,
async with async_session_maker() as db_session:
metadata_service = LinearToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, issue_ref
)
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={final_connector_id} for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "Selected Linear connector is invalid or has been disconnected.",
}
logger.info(f"Validated Linear connector: id={final_connector_id}")
logger.info(
f"Updating Linear issue with final params: issue_id={final_issue_id}"
)
linear_client = LinearConnector(
session=db_session, connector_id=final_connector_id
)
updated_issue = await linear_client.update_issue(
issue_id=final_issue_id,
title=final_new_title,
description=final_new_description,
state_id=final_new_state_id,
assignee_id=final_new_assignee_id,
priority=final_new_priority,
label_ids=final_new_label_ids,
)
if "error" in context:
error_msg = context["error"]
if context.get("auth_expired"):
logger.warning(f"Auth expired for update context: {error_msg}")
return {
"status": "auth_error",
"message": error_msg,
"connector_id": context.get("connector_id"),
"connector_type": "linear",
}
if "not found" in error_msg.lower():
logger.warning(f"Issue not found: {error_msg}")
return {"status": "not_found", "message": error_msg}
else:
logger.error(f"Failed to fetch update context: {error_msg}")
return {"status": "error", "message": error_msg}
if updated_issue.get("status") == "error":
logger.error(
f"Failed to update Linear issue: {updated_issue.get('message')}"
)
return {
"status": "error",
"message": updated_issue.get("message"),
}
issue_id = context["issue"]["id"]
document_id = context["issue"]["document_id"]
connector_id_from_context = context.get("workspace", {}).get("id")
logger.info(
f"update_issue result: {updated_issue.get('identifier')} - {updated_issue.get('title')}"
)
team = context.get("team", {})
new_state_id = _resolve_state(team, new_state_name)
new_assignee_id = _resolve_assignee(team, new_assignee_email)
new_label_ids = _resolve_labels(team, new_label_names)
if final_document_id is not None:
logger.info(
f"Updating knowledge base for document {final_document_id}..."
f"Requesting approval for updating Linear issue: '{issue_ref}' (id={issue_id})"
)
kb_service = LinearKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=final_document_id,
issue_id=final_issue_id,
user_id=user_id,
search_space_id=search_space_id,
result = request_approval(
action_type="linear_issue_update",
tool_name="update_linear_issue",
params={
"issue_id": issue_id,
"document_id": document_id,
"new_title": new_title,
"new_description": new_description,
"new_state_id": new_state_id,
"new_assignee_id": new_assignee_id,
"new_priority": new_priority,
"new_label_ids": new_label_ids,
"connector_id": connector_id_from_context,
},
context=context,
)
if kb_result["status"] == "success":
logger.info(
f"Knowledge base successfully updated for issue {final_issue_id}"
)
kb_message = " Your knowledge base has also been updated."
elif kb_result["status"] == "not_indexed":
kb_message = " This issue will be added to your knowledge base in the next scheduled sync."
else:
logger.warning(
f"KB update failed for issue {final_issue_id}: {kb_result.get('message')}"
)
kb_message = " Your knowledge base will be updated in the next scheduled sync."
else:
kb_message = ""
identifier = updated_issue.get("identifier")
default_msg = f"Issue {identifier} updated successfully."
return {
"status": "success",
"identifier": identifier,
"url": updated_issue.get("url"),
"message": f"{updated_issue.get('message', default_msg)}{kb_message}",
}
if result.rejected:
logger.info("Linear issue update rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_issue_id = result.params.get("issue_id", issue_id)
final_document_id = result.params.get("document_id", document_id)
final_new_title = result.params.get("new_title", new_title)
final_new_description = result.params.get(
"new_description", new_description
)
final_new_state_id = result.params.get("new_state_id", new_state_id)
final_new_assignee_id = result.params.get(
"new_assignee_id", new_assignee_id
)
final_new_priority = result.params.get("new_priority", new_priority)
final_new_label_ids: list[str] | None = result.params.get(
"new_label_ids", new_label_ids
)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
if not final_connector_id:
logger.error("No connector found for this issue")
return {
"status": "error",
"message": "No connector found for this issue.",
}
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.LINEAR_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={final_connector_id} for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "Selected Linear connector is invalid or has been disconnected.",
}
logger.info(f"Validated Linear connector: id={final_connector_id}")
logger.info(
f"Updating Linear issue with final params: issue_id={final_issue_id}"
)
linear_client = LinearConnector(
session=db_session, connector_id=final_connector_id
)
updated_issue = await linear_client.update_issue(
issue_id=final_issue_id,
title=final_new_title,
description=final_new_description,
state_id=final_new_state_id,
assignee_id=final_new_assignee_id,
priority=final_new_priority,
label_ids=final_new_label_ids,
)
if updated_issue.get("status") == "error":
logger.error(
f"Failed to update Linear issue: {updated_issue.get('message')}"
)
return {
"status": "error",
"message": updated_issue.get("message"),
}
logger.info(
f"update_issue result: {updated_issue.get('identifier')} - {updated_issue.get('title')}"
)
if final_document_id is not None:
logger.info(
f"Updating knowledge base for document {final_document_id}..."
)
kb_service = LinearKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=final_document_id,
issue_id=final_issue_id,
user_id=user_id,
search_space_id=search_space_id,
)
if kb_result["status"] == "success":
logger.info(
f"Knowledge base successfully updated for issue {final_issue_id}"
)
kb_message = " Your knowledge base has also been updated."
elif kb_result["status"] == "not_indexed":
kb_message = " This issue will be added to your knowledge base in the next scheduled sync."
else:
logger.warning(
f"KB update failed for issue {final_issue_id}: {kb_result.get('message')}"
)
kb_message = " Your knowledge base will be updated in the next scheduled sync."
else:
kb_message = ""
identifier = updated_issue.get("identifier")
default_msg = f"Issue {identifier} updated successfully."
return {
"status": "success",
"identifier": identifier,
"url": updated_issue.get("url"),
"message": f"{updated_issue.get('message', default_msg)}{kb_message}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
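# --- Reviewer sketch (not part of this diff) ---
# Every tool in this commit ends its try-block with `except Exception as e`
# and imports GraphInterrupt inside the handler; the hunks are truncated
# before the handler body, so the shape below is an assumption, not a copy.
# The point: HITL approval pauses propagate as exceptions and must be
# re-raised, never swallowed into an error dict.
class FakeGraphInterrupt(Exception):
    """Stand-in for langgraph.errors.GraphInterrupt."""

def run_tool(body):
    try:
        return body()
    except Exception as e:
        if isinstance(e, FakeGraphInterrupt):
            raise  # approval pause for the graph runtime, not a tool failure
        return {"status": "error", "message": str(e)}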

View file

@@ -6,6 +6,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from ._auth import LUMA_API, get_api_key, get_luma_connector, luma_headers
@@ -17,6 +18,23 @@ def create_create_luma_event_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the create_luma_event tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured create_luma_event tool
"""
del db_session # per-call session — see docstring
@tool
async def create_luma_event(
name: str,
@@ -40,83 +58,86 @@ def create_create_luma_event_tool(
IMPORTANT:
- If status is "rejected", the user explicitly declined. Do NOT retry.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Luma tool not properly configured."}
try:
connector = await get_luma_connector(db_session, search_space_id, user_id)
if not connector:
return {"status": "error", "message": "No Luma connector found."}
async with async_session_maker() as db_session:
connector = await get_luma_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Luma connector found."}
result = request_approval(
action_type="luma_create_event",
tool_name="create_luma_event",
params={
"name": name,
"start_at": start_at,
"end_at": end_at,
"description": description,
"timezone": timezone,
},
context={"connector_id": connector.id},
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Event was not created.",
}
final_name = result.params.get("name", name)
final_start = result.params.get("start_at", start_at)
final_end = result.params.get("end_at", end_at)
final_desc = result.params.get("description", description)
final_tz = result.params.get("timezone", timezone)
api_key = get_api_key(connector)
headers = luma_headers(api_key)
body: dict[str, Any] = {
"name": final_name,
"start_at": final_start,
"end_at": final_end,
"timezone": final_tz,
}
if final_desc:
body["description_md"] = final_desc
async with httpx.AsyncClient(timeout=20.0) as client:
resp = await client.post(
f"{LUMA_API}/event/create",
headers=headers,
json=body,
result = request_approval(
action_type="luma_create_event",
tool_name="create_luma_event",
params={
"name": name,
"start_at": start_at,
"end_at": end_at,
"description": description,
"timezone": timezone,
},
context={"connector_id": connector.id},
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Luma API key is invalid.",
"connector_type": "luma",
}
if resp.status_code == 403:
return {
"status": "error",
"message": "Luma Plus subscription required to create events via API.",
}
if resp.status_code not in (200, 201):
return {
"status": "error",
"message": f"Luma API error: {resp.status_code}{resp.text[:200]}",
}
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Event was not created.",
}
data = resp.json()
event_id = data.get("api_id") or data.get("event", {}).get("api_id")
final_name = result.params.get("name", name)
final_start = result.params.get("start_at", start_at)
final_end = result.params.get("end_at", end_at)
final_desc = result.params.get("description", description)
final_tz = result.params.get("timezone", timezone)
return {
"status": "success",
"event_id": event_id,
"message": f"Event '{final_name}' created on Luma.",
}
api_key = get_api_key(connector)
headers = luma_headers(api_key)
body: dict[str, Any] = {
"name": final_name,
"start_at": final_start,
"end_at": final_end,
"timezone": final_tz,
}
if final_desc:
body["description_md"] = final_desc
async with httpx.AsyncClient(timeout=20.0) as client:
resp = await client.post(
f"{LUMA_API}/event/create",
headers=headers,
json=body,
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Luma API key is invalid.",
"connector_type": "luma",
}
if resp.status_code == 403:
return {
"status": "error",
"message": "Luma Plus subscription required to create events via API.",
}
if resp.status_code not in (200, 201):
return {
"status": "error",
"message": f"Luma API error: {resp.status_code}{resp.text[:200]}",
}
data = resp.json()
event_id = data.get("api_id") or data.get("event", {}).get("api_id")
return {
"status": "success",
"event_id": event_id,
"message": f"Event '{final_name}' created on Luma.",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
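# --- Reviewer sketch (not part of this diff) ---
# Why the factories switched from a captured `db_session` to a session per
# call: the compiled agent caches the closure across requests. Toy
# illustration only; `toy_session_maker` stands in for SQLAlchemy's
# async_sessionmaker and nothing here touches the real models.
import asyncio
from contextlib import asynccontextmanager

@asynccontextmanager
async def toy_session_maker():
    session = {"closed": False}
    try:
        yield session
    finally:
        session["closed"] = True  # teardown runs per call, not per process

def make_tool():
    async def tool():
        async with toy_session_maker() as db:  # fresh session on every call
            return db["closed"]
    return tool

async def main():
    tool = make_tool()  # built once, reused like a cache hit
    assert await tool() is False and await tool() is False  # never stale

asyncio.run(main())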

View file

@@ -5,6 +5,8 @@ import httpx
from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import async_session_maker
from ._auth import LUMA_API, get_api_key, get_luma_connector, luma_headers
logger = logging.getLogger(__name__)
@@ -15,6 +17,23 @@ def create_list_luma_events_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the list_luma_events tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured list_luma_events tool
"""
del db_session # per-call session — see docstring
@tool
async def list_luma_events(
max_results: int = 25,
@@ -28,77 +47,80 @@ def create_list_luma_events_tool(
Dictionary with status and a list of events including
event_id, name, start_at, end_at, location, url.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Luma tool not properly configured."}
max_results = min(max_results, 50)
try:
connector = await get_luma_connector(db_session, search_space_id, user_id)
if not connector:
return {"status": "error", "message": "No Luma connector found."}
async with async_session_maker() as db_session:
connector = await get_luma_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Luma connector found."}
api_key = get_api_key(connector)
headers = luma_headers(api_key)
api_key = get_api_key(connector)
headers = luma_headers(api_key)
all_entries: list[dict] = []
cursor = None
all_entries: list[dict] = []
cursor = None
async with httpx.AsyncClient(timeout=20.0) as client:
while len(all_entries) < max_results:
params: dict[str, Any] = {
"limit": min(100, max_results - len(all_entries))
}
if cursor:
params["cursor"] = cursor
async with httpx.AsyncClient(timeout=20.0) as client:
while len(all_entries) < max_results:
params: dict[str, Any] = {
"limit": min(100, max_results - len(all_entries))
}
if cursor:
params["cursor"] = cursor
resp = await client.get(
f"{LUMA_API}/calendar/list-events",
headers=headers,
params=params,
resp = await client.get(
f"{LUMA_API}/calendar/list-events",
headers=headers,
params=params,
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Luma API key is invalid.",
"connector_type": "luma",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Luma API error: {resp.status_code}",
}
data = resp.json()
entries = data.get("entries", [])
if not entries:
break
all_entries.extend(entries)
next_cursor = data.get("next_cursor")
if not next_cursor:
break
cursor = next_cursor
events = []
for entry in all_entries[:max_results]:
ev = entry.get("event", {})
geo = ev.get("geo_info", {})
events.append(
{
"event_id": entry.get("api_id"),
"name": ev.get("name", "Untitled"),
"start_at": ev.get("start_at", ""),
"end_at": ev.get("end_at", ""),
"timezone": ev.get("timezone", ""),
"location": geo.get("name", ""),
"url": ev.get("url", ""),
"visibility": ev.get("visibility", ""),
}
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Luma API key is invalid.",
"connector_type": "luma",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Luma API error: {resp.status_code}",
}
data = resp.json()
entries = data.get("entries", [])
if not entries:
break
all_entries.extend(entries)
next_cursor = data.get("next_cursor")
if not next_cursor:
break
cursor = next_cursor
events = []
for entry in all_entries[:max_results]:
ev = entry.get("event", {})
geo = ev.get("geo_info", {})
events.append(
{
"event_id": entry.get("api_id"),
"name": ev.get("name", "Untitled"),
"start_at": ev.get("start_at", ""),
"end_at": ev.get("end_at", ""),
"timezone": ev.get("timezone", ""),
"location": geo.get("name", ""),
"url": ev.get("url", ""),
"visibility": ev.get("visibility", ""),
}
)
return {"status": "success", "events": events, "total": len(events)}
return {"status": "success", "events": events, "total": len(events)}
except Exception as e:
from langgraph.errors import GraphInterrupt
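# --- Reviewer sketch (not part of this diff) ---
# The list_luma_events pagination loop in miniature. `fetch_page` is a
# hypothetical stand-in for the GET /calendar/list-events call; the
# limit/cursor/next_cursor contract mirrors the tool code above.
def collect_entries(fetch_page, max_results=25):
    entries, cursor = [], None
    while len(entries) < max_results:
        page = fetch_page(limit=min(100, max_results - len(entries)), cursor=cursor)
        batch = page.get("entries", [])
        if not batch:
            break  # server exhausted before the cap was reached
        entries.extend(batch)
        cursor = page.get("next_cursor")
        if not cursor:
            break  # no further pages advertised
    return entries[:max_results]  # trim any over-fetch from the last page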

View file

@@ -5,6 +5,8 @@ import httpx
from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import async_session_maker
from ._auth import LUMA_API, get_api_key, get_luma_connector, luma_headers
logger = logging.getLogger(__name__)
@@ -15,6 +17,23 @@ def create_read_luma_event_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the read_luma_event tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured read_luma_event tool
"""
del db_session # per-call session — see docstring
@tool
async def read_luma_event(event_id: str) -> dict[str, Any]:
"""Read detailed information about a specific Luma event.
@@ -26,60 +45,63 @@ def create_read_luma_event_tool(
Dictionary with status and full event details including
description, attendees count, meeting URL.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Luma tool not properly configured."}
try:
connector = await get_luma_connector(db_session, search_space_id, user_id)
if not connector:
return {"status": "error", "message": "No Luma connector found."}
api_key = get_api_key(connector)
headers = luma_headers(api_key)
async with httpx.AsyncClient(timeout=15.0) as client:
resp = await client.get(
f"{LUMA_API}/events/{event_id}",
headers=headers,
async with async_session_maker() as db_session:
connector = await get_luma_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Luma connector found."}
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Luma API key is invalid.",
"connector_type": "luma",
}
if resp.status_code == 404:
return {
"status": "not_found",
"message": f"Event '{event_id}' not found.",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Luma API error: {resp.status_code}",
api_key = get_api_key(connector)
headers = luma_headers(api_key)
async with httpx.AsyncClient(timeout=15.0) as client:
resp = await client.get(
f"{LUMA_API}/events/{event_id}",
headers=headers,
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Luma API key is invalid.",
"connector_type": "luma",
}
if resp.status_code == 404:
return {
"status": "not_found",
"message": f"Event '{event_id}' not found.",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Luma API error: {resp.status_code}",
}
data = resp.json()
ev = data.get("event", data)
geo = ev.get("geo_info", {})
event_detail = {
"event_id": event_id,
"name": ev.get("name", ""),
"description": ev.get("description", ""),
"start_at": ev.get("start_at", ""),
"end_at": ev.get("end_at", ""),
"timezone": ev.get("timezone", ""),
"location_name": geo.get("name", ""),
"address": geo.get("address", ""),
"url": ev.get("url", ""),
"meeting_url": ev.get("meeting_url", ""),
"visibility": ev.get("visibility", ""),
"cover_url": ev.get("cover_url", ""),
}
data = resp.json()
ev = data.get("event", data)
geo = ev.get("geo_info", {})
event_detail = {
"event_id": event_id,
"name": ev.get("name", ""),
"description": ev.get("description", ""),
"start_at": ev.get("start_at", ""),
"end_at": ev.get("end_at", ""),
"timezone": ev.get("timezone", ""),
"location_name": geo.get("name", ""),
"address": geo.get("address", ""),
"url": ev.get("url", ""),
"meeting_url": ev.get("meeting_url", ""),
"visibility": ev.get("visibility", ""),
"cover_url": ev.get("cover_url", ""),
}
return {"status": "success", "event": event_detail}
return {"status": "success", "event": event_detail}
except Exception as e:
from langgraph.errors import GraphInterrupt
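# --- Reviewer sketch (not part of this diff) ---
# The three Luma tools above repeat the same status-code-to-result mapping
# inline; factored here purely for review discussion. No such helper exists
# in the diff, and the 404 message is generalized.
def map_luma_error(status_code: int) -> dict | None:
    if status_code == 401:
        return {
            "status": "auth_error",
            "message": "Luma API key is invalid.",
            "connector_type": "luma",
        }
    if status_code == 404:
        return {"status": "not_found", "message": "Requested Luma object not found."}
    if status_code not in (200, 201):
        return {"status": "error", "message": f"Luma API error: {status_code}"}
    return None  # success: caller parses resp.json()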

View file

@@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.notion_history import NotionAPIError, NotionHistoryConnector
from app.db import async_session_maker
from app.services.notion import NotionToolMetadataService
logger = logging.getLogger(__name__)
@@ -20,8 +21,17 @@ def create_create_notion_page_tool(
"""
Factory function to create the create_notion_page tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker`. This is critical for the compiled-agent
cache: the compiled graph (and therefore this closure) is reused
across HTTP requests, so capturing a per-request session here would
surface stale/closed sessions on cache hits. Per-call sessions also
keep the request's outer transaction from blocking on long-running
Notion API calls.
Args:
db_session: Database session for accessing Notion connector
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Notion connector
user_id: User ID for fetching user-specific context
connector_id: Optional specific connector ID (if known)
@@ -29,6 +39,7 @@ def create_create_notion_page_tool(
Returns:
Configured create_notion_page tool
"""
del db_session # per-call session — see docstring
@tool
async def create_notion_page(
@@ -67,7 +78,7 @@ def create_create_notion_page_tool(
"""
logger.info(f"create_notion_page called: title='{title}'")
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
logger.error(
"Notion tool not properly configured - missing required parameters"
)
@@ -77,154 +88,157 @@ def create_create_notion_page_tool(
}
try:
metadata_service = NotionToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
if "error" in context:
logger.error(f"Failed to fetch creation context: {context['error']}")
return {
"status": "error",
"message": context["error"],
}
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning("All Notion accounts have expired authentication")
return {
"status": "auth_error",
"message": "All connected Notion accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "notion",
}
logger.info(f"Requesting approval for creating Notion page: '{title}'")
result = request_approval(
action_type="notion_page_creation",
tool_name="create_notion_page",
params={
"title": title,
"content": content,
"parent_page_id": None,
"connector_id": connector_id,
},
context=context,
)
if result.rejected:
logger.info("Notion page creation rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_title = result.params.get("title", title)
final_content = result.params.get("content", content)
final_parent_page_id = result.params.get("parent_page_id")
final_connector_id = result.params.get("connector_id", connector_id)
if not final_title or not final_title.strip():
logger.error("Title is empty or contains only whitespace")
return {
"status": "error",
"message": "Page title cannot be empty. Please provide a valid title.",
}
logger.info(
f"Creating Notion page with final params: title='{final_title}'"
)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
actual_connector_id = final_connector_id
if actual_connector_id is None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.NOTION_CONNECTOR,
)
async with async_session_maker() as db_session:
metadata_service = NotionToolMetadataService(db_session)
context = await metadata_service.get_creation_context(
search_space_id, user_id
)
connector = result.scalars().first()
if not connector:
logger.warning(
f"No Notion connector found for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "No Notion connector found. Please connect Notion in your workspace settings.",
}
actual_connector_id = connector.id
logger.info(f"Found Notion connector: id={actual_connector_id}")
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == actual_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.NOTION_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
if "error" in context:
logger.error(
f"Invalid connector_id={actual_connector_id} for search_space_id={search_space_id}"
f"Failed to fetch creation context: {context['error']}"
)
return {
"status": "error",
"message": "Selected Notion account is invalid or has been disconnected. Please select a valid account.",
"message": context["error"],
}
logger.info(f"Validated Notion connector: id={actual_connector_id}")
notion_connector = NotionHistoryConnector(
session=db_session,
connector_id=actual_connector_id,
)
accounts = context.get("accounts", [])
if accounts and all(a.get("auth_expired") for a in accounts):
logger.warning("All Notion accounts have expired authentication")
return {
"status": "auth_error",
"message": "All connected Notion accounts need re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "notion",
}
result = await notion_connector.create_page(
title=final_title,
content=final_content,
parent_page_id=final_parent_page_id,
)
logger.info(
f"create_page result: {result.get('status')} - {result.get('message', '')}"
)
logger.info(f"Requesting approval for creating Notion page: '{title}'")
result = request_approval(
action_type="notion_page_creation",
tool_name="create_notion_page",
params={
"title": title,
"content": content,
"parent_page_id": None,
"connector_id": connector_id,
},
context=context,
)
if result.get("status") == "success":
kb_message_suffix = ""
try:
from app.services.notion import NotionKBSyncService
if result.rejected:
logger.info("Notion page creation rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
kb_service = NotionKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
page_id=result.get("page_id"),
page_title=result.get("title", final_title),
page_url=result.get("url"),
content=final_content,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
final_title = result.params.get("title", title)
final_content = result.params.get("content", content)
final_parent_page_id = result.params.get("parent_page_id")
final_connector_id = result.params.get("connector_id", connector_id)
if not final_title or not final_title.strip():
logger.error("Title is empty or contains only whitespace")
return {
"status": "error",
"message": "Page title cannot be empty. Please provide a valid title.",
}
logger.info(
f"Creating Notion page with final params: title='{final_title}'"
)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
actual_connector_id = final_connector_id
if actual_connector_id is None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.NOTION_CONNECTOR,
)
else:
)
connector = result.scalars().first()
if not connector:
logger.warning(
f"No Notion connector found for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "No Notion connector found. Please connect Notion in your workspace settings.",
}
actual_connector_id = connector.id
logger.info(f"Found Notion connector: id={actual_connector_id}")
else:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == actual_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.NOTION_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={actual_connector_id} for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "Selected Notion account is invalid or has been disconnected. Please select a valid account.",
}
logger.info(f"Validated Notion connector: id={actual_connector_id}")
notion_connector = NotionHistoryConnector(
session=db_session,
connector_id=actual_connector_id,
)
result = await notion_connector.create_page(
title=final_title,
content=final_content,
parent_page_id=final_parent_page_id,
)
logger.info(
f"create_page result: {result.get('status')} - {result.get('message', '')}"
)
if result.get("status") == "success":
kb_message_suffix = ""
try:
from app.services.notion import NotionKBSyncService
kb_service = NotionKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
page_id=result.get("page_id"),
page_title=result.get("title", final_title),
page_url=result.get("url"),
content=final_content,
connector_id=actual_connector_id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync."
result["message"] = result.get("message", "") + kb_message_suffix
result["message"] = result.get("message", "") + kb_message_suffix
return result
return result
except Exception as e:
from langgraph.errors import GraphInterrupt
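# --- Reviewer sketch (not part of this diff) ---
# create_notion_page resolves a connector two ways: explicit id (validated
# for ownership and type) or fall back to the first Notion connector in the
# search space. `find_by_id` / `find_first` are hypothetical stand-ins for
# the two SQLAlchemy queries above.
def resolve_connector(connector_id, find_by_id, find_first):
    if connector_id is None:
        connector = find_first()  # first Notion connector in the search space
        if connector is None:
            return None, "No Notion connector found."
        return connector, None
    connector = find_by_id(connector_id)  # scoped to search space + user + type
    if connector is None:
        return None, "Selected Notion account is invalid or has been disconnected."
    return connector, None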

View file

@@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.notion_history import NotionAPIError, NotionHistoryConnector
from app.db import async_session_maker
from app.services.notion.tool_metadata_service import NotionToolMetadataService
logger = logging.getLogger(__name__)
@@ -20,8 +21,14 @@ def create_delete_notion_page_tool(
"""
Factory function to create the delete_notion_page tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Database session for accessing Notion connector
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Notion connector
user_id: User ID for finding the correct Notion connector
connector_id: Optional specific connector ID (if known)
@@ -29,6 +36,7 @@ def create_delete_notion_page_tool(
Returns:
Configured delete_notion_page tool
"""
del db_session # per-call session — see docstring
@tool
async def delete_notion_page(
@@ -63,7 +71,7 @@ def create_delete_notion_page_tool(
f"delete_notion_page called: page_title='{page_title}', delete_from_kb={delete_from_kb}"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
logger.error(
"Notion tool not properly configured - missing required parameters"
)
@@ -73,164 +81,167 @@ def create_delete_notion_page_tool(
}
try:
# Get page context (page_id, account, title) from indexed data
metadata_service = NotionToolMetadataService(db_session)
context = await metadata_service.get_delete_context(
search_space_id, user_id, page_title
)
if "error" in context:
error_msg = context["error"]
# Check if it's a "not found" error (softer handling for LLM)
if "not found" in error_msg.lower():
logger.warning(f"Page not found: {error_msg}")
return {
"status": "not_found",
"message": error_msg,
}
else:
logger.error(f"Failed to fetch delete context: {error_msg}")
return {
"status": "error",
"message": error_msg,
}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Notion account %s has expired authentication",
account.get("id"),
async with async_session_maker() as db_session:
# Get page context (page_id, account, title) from indexed data
metadata_service = NotionToolMetadataService(db_session)
context = await metadata_service.get_delete_context(
search_space_id, user_id, page_title
)
return {
"status": "auth_error",
"message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.",
}
page_id = context.get("page_id")
connector_id_from_context = account.get("id")
document_id = context.get("document_id")
logger.info(
f"Requesting approval for deleting Notion page: '{page_title}' (page_id={page_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="notion_page_deletion",
tool_name="delete_notion_page",
params={
"page_id": page_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
if result.rejected:
logger.info("Notion page deletion rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_page_id = result.params.get("page_id", page_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
logger.info(
f"Deleting Notion page with final params: page_id={final_page_id}, connector_id={final_connector_id}, delete_from_kb={final_delete_from_kb}"
)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
# Validate the connector
if final_connector_id:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.NOTION_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={final_connector_id} for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "Selected Notion account is invalid or has been disconnected. Please select a valid account.",
}
actual_connector_id = connector.id
logger.info(f"Validated Notion connector: id={actual_connector_id}")
else:
logger.error("No connector found for this page")
return {
"status": "error",
"message": "No connector found for this page.",
}
# Create connector instance
notion_connector = NotionHistoryConnector(
session=db_session,
connector_id=actual_connector_id,
)
# Delete the page from Notion
result = await notion_connector.delete_page(page_id=final_page_id)
logger.info(
f"delete_page result: {result.get('status')} - {result.get('message', '')}"
)
# If deletion was successful and user wants to delete from KB
deleted_from_kb = False
if (
result.get("status") == "success"
and final_delete_from_kb
and document_id
):
try:
from sqlalchemy.future import select
from app.db import Document
# Get the document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
if "error" in context:
error_msg = context["error"]
# Check if it's a "not found" error (softer handling for LLM)
if "not found" in error_msg.lower():
logger.warning(f"Page not found: {error_msg}")
return {
"status": "not_found",
"message": error_msg,
}
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
result["warning"] = (
f"Page deleted from Notion, but failed to remove from knowledge base: {e!s}"
)
logger.error(f"Failed to fetch delete context: {error_msg}")
return {
"status": "error",
"message": error_msg,
}
# Update result with KB deletion status
if result.get("status") == "success":
result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
result["message"] = (
f"{result.get('message', '')} (also removed from knowledge base)"
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Notion account %s has expired authentication",
account.get("id"),
)
return {
"status": "auth_error",
"message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.",
}
return result
page_id = context.get("page_id")
connector_id_from_context = account.get("id")
document_id = context.get("document_id")
logger.info(
f"Requesting approval for deleting Notion page: '{page_title}' (page_id={page_id}, delete_from_kb={delete_from_kb})"
)
result = request_approval(
action_type="notion_page_deletion",
tool_name="delete_notion_page",
params={
"page_id": page_id,
"connector_id": connector_id_from_context,
"delete_from_kb": delete_from_kb,
},
context=context,
)
if result.rejected:
logger.info("Notion page deletion rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_page_id = result.params.get("page_id", page_id)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
logger.info(
f"Deleting Notion page with final params: page_id={final_page_id}, connector_id={final_connector_id}, delete_from_kb={final_delete_from_kb}"
)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
# Validate the connector
if final_connector_id:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.NOTION_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={final_connector_id} for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "Selected Notion account is invalid or has been disconnected. Please select a valid account.",
}
actual_connector_id = connector.id
logger.info(f"Validated Notion connector: id={actual_connector_id}")
else:
logger.error("No connector found for this page")
return {
"status": "error",
"message": "No connector found for this page.",
}
# Create connector instance
notion_connector = NotionHistoryConnector(
session=db_session,
connector_id=actual_connector_id,
)
# Delete the page from Notion
result = await notion_connector.delete_page(page_id=final_page_id)
logger.info(
f"delete_page result: {result.get('status')} - {result.get('message', '')}"
)
# If deletion was successful and user wants to delete from KB
deleted_from_kb = False
if (
result.get("status") == "success"
and final_delete_from_kb
and document_id
):
try:
from sqlalchemy.future import select
from app.db import Document
# Get the document
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
document = doc_result.scalars().first()
if document:
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
result["warning"] = (
f"Page deleted from Notion, but failed to remove from knowledge base: {e!s}"
)
# Update result with KB deletion status
if result.get("status") == "success":
result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
result["message"] = (
f"{result.get('message', '')} (also removed from knowledge base)"
)
return result
except Exception as e:
from langgraph.errors import GraphInterrupt
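# --- Reviewer sketch (not part of this diff) ---
# The delete-from-KB step above reduced to its transaction shape: the Notion
# deletion has already succeeded, so a KB failure downgrades to a warning
# instead of failing the tool. `session` stands in for the per-call
# AsyncSession; `document` for the ORM row looked up above.
async def remove_from_kb(session, document) -> bool:
    try:
        await session.delete(document)
        await session.commit()
        return True
    except Exception:
        await session.rollback()  # keep the per-call session usable
        return False  # caller attaches a warning; the next sync reconciles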

View file

@@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.notion_history import NotionAPIError, NotionHistoryConnector
from app.db import async_session_maker
from app.services.notion import NotionToolMetadataService
logger = logging.getLogger(__name__)
@@ -20,8 +21,14 @@ def create_update_notion_page_tool(
"""
Factory function to create the update_notion_page tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache (see
``create_create_notion_page_tool`` for the full rationale).
Args:
db_session: Database session for accessing Notion connector
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
search_space_id: Search space ID to find the Notion connector
user_id: User ID for fetching user-specific context
connector_id: Optional specific connector ID (if known)
@@ -29,6 +36,7 @@ def create_update_notion_page_tool(
Returns:
Configured update_notion_page tool
"""
del db_session # per-call session — see docstring
@tool
async def update_notion_page(
@@ -71,7 +79,7 @@ def create_update_notion_page_tool(
f"update_notion_page called: page_title='{page_title}', content_length={len(content) if content else 0}"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
logger.error(
"Notion tool not properly configured - missing required parameters"
)
@@ -88,152 +96,155 @@ def create_update_notion_page_tool(
}
try:
metadata_service = NotionToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, page_title
)
if "error" in context:
error_msg = context["error"]
# Check if it's a "not found" error (softer handling for LLM)
if "not found" in error_msg.lower():
logger.warning(f"Page not found: {error_msg}")
return {
"status": "not_found",
"message": error_msg,
}
else:
logger.error(f"Failed to fetch update context: {error_msg}")
return {
"status": "error",
"message": error_msg,
}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
"Notion account %s has expired authentication",
account.get("id"),
)
return {
"status": "auth_error",
"message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.",
}
page_id = context.get("page_id")
document_id = context.get("document_id")
connector_id_from_context = context.get("account", {}).get("id")
logger.info(
f"Requesting approval for updating Notion page: '{page_title}' (page_id={page_id})"
)
result = request_approval(
action_type="notion_page_update",
tool_name="update_notion_page",
params={
"page_id": page_id,
"content": content,
"connector_id": connector_id_from_context,
},
context=context,
)
if result.rejected:
logger.info("Notion page update rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_page_id = result.params.get("page_id", page_id)
final_content = result.params.get("content", content)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
logger.info(
f"Updating Notion page with final params: page_id={final_page_id}, has_content={final_content is not None}"
)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if final_connector_id:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.NOTION_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={final_connector_id} for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "Selected Notion account is invalid or has been disconnected. Please select a valid account.",
}
actual_connector_id = connector.id
logger.info(f"Validated Notion connector: id={actual_connector_id}")
else:
logger.error("No connector found for this page")
return {
"status": "error",
"message": "No connector found for this page.",
}
notion_connector = NotionHistoryConnector(
session=db_session,
connector_id=actual_connector_id,
)
result = await notion_connector.update_page(
page_id=final_page_id,
content=final_content,
)
logger.info(
f"update_page result: {result.get('status')} - {result.get('message', '')}"
)
if result.get("status") == "success" and document_id is not None:
from app.services.notion import NotionKBSyncService
logger.info(f"Updating knowledge base for document {document_id}...")
kb_service = NotionKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=document_id,
appended_content=final_content,
user_id=user_id,
search_space_id=search_space_id,
appended_block_ids=result.get("appended_block_ids"),
async with async_session_maker() as db_session:
metadata_service = NotionToolMetadataService(db_session)
context = await metadata_service.get_update_context(
search_space_id, user_id, page_title
)
if kb_result["status"] == "success":
result["message"] = (
f"{result['message']}. Your knowledge base has also been updated."
)
logger.info(
f"Knowledge base successfully updated for page {final_page_id}"
)
elif kb_result["status"] == "not_indexed":
result["message"] = (
f"{result['message']}. This page will be added to your knowledge base in the next scheduled sync."
)
else:
result["message"] = (
f"{result['message']}. Your knowledge base will be updated in the next scheduled sync."
)
if "error" in context:
error_msg = context["error"]
# Check if it's a "not found" error (softer handling for LLM)
if "not found" in error_msg.lower():
logger.warning(f"Page not found: {error_msg}")
return {
"status": "not_found",
"message": error_msg,
}
else:
logger.error(f"Failed to fetch update context: {error_msg}")
return {
"status": "error",
"message": error_msg,
}
account = context.get("account", {})
if account.get("auth_expired"):
logger.warning(
f"KB update failed for page {final_page_id}: {kb_result['message']}"
"Notion account %s has expired authentication",
account.get("id"),
)
return {
"status": "auth_error",
"message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.",
}
page_id = context.get("page_id")
document_id = context.get("document_id")
connector_id_from_context = context.get("account", {}).get("id")
logger.info(
f"Requesting approval for updating Notion page: '{page_title}' (page_id={page_id})"
)
result = request_approval(
action_type="notion_page_update",
tool_name="update_notion_page",
params={
"page_id": page_id,
"content": content,
"connector_id": connector_id_from_context,
},
context=context,
)
if result.rejected:
logger.info("Notion page update rejected by user")
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_page_id = result.params.get("page_id", page_id)
final_content = result.params.get("content", content)
final_connector_id = result.params.get(
"connector_id", connector_id_from_context
)
logger.info(
f"Updating Notion page with final params: page_id={final_page_id}, has_content={final_content is not None}"
)
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
if final_connector_id:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.NOTION_CONNECTOR,
)
)
connector = result.scalars().first()
if not connector:
logger.error(
f"Invalid connector_id={final_connector_id} for search_space_id={search_space_id}"
)
return {
"status": "error",
"message": "Selected Notion account is invalid or has been disconnected. Please select a valid account.",
}
actual_connector_id = connector.id
logger.info(f"Validated Notion connector: id={actual_connector_id}")
else:
logger.error("No connector found for this page")
return {
"status": "error",
"message": "No connector found for this page.",
}
notion_connector = NotionHistoryConnector(
session=db_session,
connector_id=actual_connector_id,
)
result = await notion_connector.update_page(
page_id=final_page_id,
content=final_content,
)
logger.info(
f"update_page result: {result.get('status')} - {result.get('message', '')}"
)
if result.get("status") == "success" and document_id is not None:
from app.services.notion import NotionKBSyncService
logger.info(
f"Updating knowledge base for document {document_id}..."
)
kb_service = NotionKBSyncService(db_session)
kb_result = await kb_service.sync_after_update(
document_id=document_id,
appended_content=final_content,
user_id=user_id,
search_space_id=search_space_id,
appended_block_ids=result.get("appended_block_ids"),
)
return result
if kb_result["status"] == "success":
result["message"] = (
f"{result['message']}. Your knowledge base has also been updated."
)
logger.info(
f"Knowledge base successfully updated for page {final_page_id}"
)
elif kb_result["status"] == "not_indexed":
result["message"] = (
f"{result['message']}. This page will be added to your knowledge base in the next scheduled sync."
)
else:
result["message"] = (
f"{result['message']}. Your knowledge base will be updated in the next scheduled sync."
)
logger.warning(
f"KB update failed for page {final_page_id}: {kb_result['message']}"
)
return result
except Exception as e:
from langgraph.errors import GraphInterrupt
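# --- Reviewer sketch (not part of this diff) ---
# The three KB-sync outcomes the update tool maps onto user-facing message
# suffixes, collected in one place for review. Table form is illustrative;
# the diff keeps these branches inline.
_KB_SUFFIX = {
    "success": " Your knowledge base has also been updated.",
    "not_indexed": " This page will be added to your knowledge base in the next scheduled sync.",
}

def kb_suffix(kb_status: str) -> str:
    # Any other status (sync error, etc.) defers to the scheduled sync.
    return _KB_SUFFIX.get(
        kb_status, " Your knowledge base will be updated in the next scheduled sync."
    )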

View file

@@ -10,7 +10,7 @@ from sqlalchemy.future import select
from app.agents.new_chat.tools.hitl import request_approval
from app.connectors.onedrive.client import OneDriveClient
from app.db import SearchSourceConnector, SearchSourceConnectorType
from app.db import SearchSourceConnector, SearchSourceConnectorType, async_session_maker
logger = logging.getLogger(__name__)
@@ -48,6 +48,23 @@ def create_create_onedrive_file_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the create_onedrive_file tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured create_onedrive_file tool
"""
del db_session # per-call session — see docstring
@tool
async def create_onedrive_file(
name: str,
@@ -70,173 +87,178 @@ def create_create_onedrive_file_tool(
"""
logger.info(f"create_onedrive_file called: name='{name}'")
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "OneDrive tool not properly configured.",
}
try:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
)
)
connectors = result.scalars().all()
if not connectors:
return {
"status": "error",
"message": "No OneDrive connector found. Please connect OneDrive in your workspace settings.",
}
accounts = []
for c in connectors:
cfg = c.config or {}
accounts.append(
{
"id": c.id,
"name": c.name,
"user_email": cfg.get("user_email"),
"auth_expired": cfg.get("auth_expired", False),
}
)
if all(a.get("auth_expired") for a in accounts):
return {
"status": "auth_error",
"message": "All connected OneDrive accounts need re-authentication.",
"connector_type": "onedrive",
}
parent_folders: dict[int, list[dict[str, str]]] = {}
for acc in accounts:
cid = acc["id"]
if acc.get("auth_expired"):
parent_folders[cid] = []
continue
try:
client = OneDriveClient(session=db_session, connector_id=cid)
items, err = await client.list_children("root")
if err:
logger.warning(
"Failed to list folders for connector %s: %s", cid, err
)
parent_folders[cid] = []
else:
parent_folders[cid] = [
{"folder_id": item["id"], "name": item["name"]}
for item in items
if item.get("folder") is not None
and item.get("id")
and item.get("name")
]
except Exception:
logger.warning(
"Error fetching folders for connector %s", cid, exc_info=True
)
parent_folders[cid] = []
context: dict[str, Any] = {
"accounts": accounts,
"parent_folders": parent_folders,
}
result = request_approval(
action_type="onedrive_file_creation",
tool_name="create_onedrive_file",
params={
"name": name,
"content": content,
"connector_id": None,
"parent_folder_id": None,
},
context=context,
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_name = result.params.get("name", name)
final_content = result.params.get("content", content)
final_connector_id = result.params.get("connector_id")
final_parent_folder_id = result.params.get("parent_folder_id")
if not final_name or not final_name.strip():
return {"status": "error", "message": "File name cannot be empty."}
final_name = _ensure_docx_extension(final_name)
if final_connector_id is not None:
async with async_session_maker() as db_session:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
)
)
connector = result.scalars().first()
else:
connector = connectors[0]
connectors = result.scalars().all()
if not connector:
return {
"status": "error",
"message": "Selected OneDrive connector is invalid.",
if not connectors:
return {
"status": "error",
"message": "No OneDrive connector found. Please connect OneDrive in your workspace settings.",
}
accounts = []
for c in connectors:
cfg = c.config or {}
accounts.append(
{
"id": c.id,
"name": c.name,
"user_email": cfg.get("user_email"),
"auth_expired": cfg.get("auth_expired", False),
}
)
if all(a.get("auth_expired") for a in accounts):
return {
"status": "auth_error",
"message": "All connected OneDrive accounts need re-authentication.",
"connector_type": "onedrive",
}
parent_folders: dict[int, list[dict[str, str]]] = {}
for acc in accounts:
cid = acc["id"]
if acc.get("auth_expired"):
parent_folders[cid] = []
continue
try:
client = OneDriveClient(session=db_session, connector_id=cid)
items, err = await client.list_children("root")
if err:
logger.warning(
"Failed to list folders for connector %s: %s", cid, err
)
parent_folders[cid] = []
else:
parent_folders[cid] = [
{"folder_id": item["id"], "name": item["name"]}
for item in items
if item.get("folder") is not None
and item.get("id")
and item.get("name")
]
except Exception:
logger.warning(
"Error fetching folders for connector %s",
cid,
exc_info=True,
)
parent_folders[cid] = []
context: dict[str, Any] = {
"accounts": accounts,
"parent_folders": parent_folders,
}
docx_bytes = _markdown_to_docx(final_content or "")
client = OneDriveClient(session=db_session, connector_id=connector.id)
created = await client.create_file(
name=final_name,
parent_id=final_parent_folder_id,
content=docx_bytes,
mime_type=DOCX_MIME,
)
logger.info(
f"OneDrive file created: id={created.get('id')}, name={created.get('name')}"
)
kb_message_suffix = ""
try:
from app.services.onedrive import OneDriveKBSyncService
kb_service = OneDriveKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
file_id=created.get("id"),
file_name=created.get("name", final_name),
mime_type=DOCX_MIME,
web_url=created.get("webUrl"),
content=final_content,
connector_id=connector.id,
search_space_id=search_space_id,
user_id=user_id,
result = request_approval(
action_type="onedrive_file_creation",
tool_name="create_onedrive_file",
params={
"name": name,
"content": content,
"connector_id": None,
"parent_folder_id": None,
},
context=context,
)
if kb_result["status"] == "success":
kb_message_suffix = " Your knowledge base has also been updated."
else:
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"file_id": created.get("id"),
"name": created.get("name"),
"web_url": created.get("webUrl"),
"message": f"Successfully created '{created.get('name')}' in OneDrive.{kb_message_suffix}",
}
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_name = result.params.get("name", name)
final_content = result.params.get("content", content)
final_connector_id = result.params.get("connector_id")
final_parent_folder_id = result.params.get("parent_folder_id")
if not final_name or not final_name.strip():
return {"status": "error", "message": "File name cannot be empty."}
final_name = _ensure_docx_extension(final_name)
if final_connector_id is not None:
result = await db_session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
)
)
connector = result.scalars().first()
else:
connector = connectors[0]
if not connector:
return {
"status": "error",
"message": "Selected OneDrive connector is invalid.",
}
docx_bytes = _markdown_to_docx(final_content or "")
client = OneDriveClient(session=db_session, connector_id=connector.id)
created = await client.create_file(
name=final_name,
parent_id=final_parent_folder_id,
content=docx_bytes,
mime_type=DOCX_MIME,
)
logger.info(
f"OneDrive file created: id={created.get('id')}, name={created.get('name')}"
)
kb_message_suffix = ""
try:
from app.services.onedrive import OneDriveKBSyncService
kb_service = OneDriveKBSyncService(db_session)
kb_result = await kb_service.sync_after_create(
file_id=created.get("id"),
file_name=created.get("name", final_name),
mime_type=DOCX_MIME,
web_url=created.get("webUrl"),
content=final_content,
connector_id=connector.id,
search_space_id=search_space_id,
user_id=user_id,
)
if kb_result["status"] == "success":
kb_message_suffix = (
" Your knowledge base has also been updated."
)
else:
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
except Exception as kb_err:
logger.warning(f"KB sync after create failed: {kb_err}")
kb_message_suffix = " This file will be added to your knowledge base in the next scheduled sync."
return {
"status": "success",
"file_id": created.get("id"),
"name": created.get("name"),
"web_url": created.get("webUrl"),
"message": f"Successfully created '{created.get('name')}' in OneDrive.{kb_message_suffix}",
}
except Exception as e:
from langgraph.errors import GraphInterrupt
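# --- Reviewer sketch (not part of this diff) ---
# `_ensure_docx_extension` is called above but its body sits outside the
# hunk; this one-liner is a plausible guess at the contract, offered for
# review only.
def ensure_docx_extension(name: str) -> str:
    return name if name.lower().endswith(".docx") else f"{name}.docx"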

View file

@@ -13,6 +13,7 @@ from app.db import (
DocumentType,
SearchSourceConnector,
SearchSourceConnectorType,
async_session_maker,
)
logger = logging.getLogger(__name__)
@@ -23,6 +24,23 @@ def create_delete_onedrive_file_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the delete_onedrive_file tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured delete_onedrive_file tool
"""
del db_session # per-call session — see docstring
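A condensed sketch of the per-call session pattern this docstring describes, with the factory and query reduced to a runnable toy (the engine URL, `create_example_tool`, and its SELECT are illustrative; `async_session_maker` stands in for the one in `app.db`):

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine

# In-memory engine for the sketch; requires the aiosqlite driver.
engine = create_async_engine("sqlite+aiosqlite://")
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)


def create_example_tool(db_session: AsyncSession | None = None):
    # The factory-time session may belong to a request that has already
    # finished; a cached closure that captured it would hit a stale or
    # closed session on the next cache hit.
    del db_session  # per-call session, as in the factories above

    async def example_tool() -> int:
        # Open a fresh short-lived session scoped to this single call.
        async with async_session_maker() as db_session:
            result = await db_session.execute(text("SELECT 1"))
            return result.scalar_one()

    return example_tool


async def main() -> None:
    tool = create_example_tool()
    print(await tool())  # -> 1


asyncio.run(main())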
@tool
async def delete_onedrive_file(
file_name: str,
@ -56,33 +74,14 @@ def create_delete_onedrive_file_tool(
f"delete_onedrive_file called: file_name='{file_name}', delete_from_kb={delete_from_kb}"
)
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {
"status": "error",
"message": "OneDrive tool not properly configured.",
}
try:
doc_result = await db_session.execute(
select(Document)
.join(
SearchSourceConnector,
Document.connector_id == SearchSourceConnector.id,
)
.filter(
and_(
Document.search_space_id == search_space_id,
Document.document_type == DocumentType.ONEDRIVE_FILE,
func.lower(Document.title) == func.lower(file_name),
SearchSourceConnector.user_id == user_id,
)
)
.order_by(Document.updated_at.desc().nullslast())
.limit(1)
)
document = doc_result.scalars().first()
if not document:
async with async_session_maker() as db_session:
doc_result = await db_session.execute(
select(Document)
.join(
@ -93,13 +92,7 @@ def create_delete_onedrive_file_tool(
and_(
Document.search_space_id == search_space_id,
Document.document_type == DocumentType.ONEDRIVE_FILE,
func.lower(
cast(
Document.document_metadata["onedrive_file_name"],
String,
)
)
== func.lower(file_name),
func.lower(Document.title) == func.lower(file_name),
SearchSourceConnector.user_id == user_id,
)
)
@ -108,98 +101,64 @@ def create_delete_onedrive_file_tool(
)
document = doc_result.scalars().first()
if not document:
return {
"status": "not_found",
"message": (
f"File '{file_name}' not found in your indexed OneDrive files. "
"This could mean: (1) the file doesn't exist, (2) it hasn't been indexed yet, "
"or (3) the file name is different."
),
}
if not document.connector_id:
return {
"status": "error",
"message": "Document has no associated connector.",
}
meta = document.document_metadata or {}
file_id = meta.get("onedrive_file_id")
document_id = document.id
if not file_id:
return {
"status": "error",
"message": "File ID is missing. Please re-index the file.",
}
conn_result = await db_session.execute(
select(SearchSourceConnector).filter(
and_(
SearchSourceConnector.id == document.connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
if not document:
doc_result = await db_session.execute(
select(Document)
.join(
SearchSourceConnector,
Document.connector_id == SearchSourceConnector.id,
)
.filter(
and_(
Document.search_space_id == search_space_id,
Document.document_type == DocumentType.ONEDRIVE_FILE,
func.lower(
cast(
Document.document_metadata[
"onedrive_file_name"
],
String,
)
)
== func.lower(file_name),
SearchSourceConnector.user_id == user_id,
)
)
.order_by(Document.updated_at.desc().nullslast())
.limit(1)
)
)
)
connector = conn_result.scalars().first()
if not connector:
return {
"status": "error",
"message": "OneDrive connector not found or access denied.",
}
document = doc_result.scalars().first()
cfg = connector.config or {}
if cfg.get("auth_expired"):
return {
"status": "auth_error",
"message": "OneDrive account needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "onedrive",
}
if not document:
return {
"status": "not_found",
"message": (
f"File '{file_name}' not found in your indexed OneDrive files. "
"This could mean: (1) the file doesn't exist, (2) it hasn't been indexed yet, "
"or (3) the file name is different."
),
}
context = {
"file": {
"file_id": file_id,
"name": file_name,
"document_id": document_id,
"web_url": meta.get("web_url"),
},
"account": {
"id": connector.id,
"name": connector.name,
"user_email": cfg.get("user_email"),
},
}
if not document.connector_id:
return {
"status": "error",
"message": "Document has no associated connector.",
}
result = request_approval(
action_type="onedrive_file_trash",
tool_name="delete_onedrive_file",
params={
"file_id": file_id,
"connector_id": connector.id,
"delete_from_kb": delete_from_kb,
},
context=context,
)
meta = document.document_metadata or {}
file_id = meta.get("onedrive_file_id")
document_id = document.id
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
if not file_id:
return {
"status": "error",
"message": "File ID is missing. Please re-index the file.",
}
final_file_id = result.params.get("file_id", file_id)
final_connector_id = result.params.get("connector_id", connector.id)
final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb)
if final_connector_id != connector.id:
result = await db_session.execute(
conn_result = await db_session.execute(
select(SearchSourceConnector).filter(
and_(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.id == document.connector_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
@ -207,65 +166,130 @@ def create_delete_onedrive_file_tool(
)
)
)
validated_connector = result.scalars().first()
if not validated_connector:
connector = conn_result.scalars().first()
if not connector:
return {
"status": "error",
"message": "Selected OneDrive connector is invalid or has been disconnected.",
"message": "OneDrive connector not found or access denied.",
}
actual_connector_id = validated_connector.id
else:
actual_connector_id = connector.id
logger.info(
f"Deleting OneDrive file: file_id='{final_file_id}', connector={actual_connector_id}"
)
cfg = connector.config or {}
if cfg.get("auth_expired"):
return {
"status": "auth_error",
"message": "OneDrive account needs re-authentication. Please re-authenticate in your connector settings.",
"connector_type": "onedrive",
}
client = OneDriveClient(
session=db_session, connector_id=actual_connector_id
)
await client.trash_file(final_file_id)
context = {
"file": {
"file_id": file_id,
"name": file_name,
"document_id": document_id,
"web_url": meta.get("web_url"),
},
"account": {
"id": connector.id,
"name": connector.name,
"user_email": cfg.get("user_email"),
},
}
logger.info(
f"OneDrive file deleted (moved to recycle bin): file_id={final_file_id}"
)
trash_result: dict[str, Any] = {
"status": "success",
"file_id": final_file_id,
"message": f"Successfully moved '{file_name}' to the recycle bin.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
doc = doc_result.scalars().first()
if doc:
await db_session.delete(doc)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
trash_result["warning"] = (
f"File moved to recycle bin, but failed to remove from knowledge base: {e!s}"
)
trash_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
trash_result["message"] = (
f"{trash_result.get('message', '')} (also removed from knowledge base)"
result = request_approval(
action_type="onedrive_file_trash",
tool_name="delete_onedrive_file",
params={
"file_id": file_id,
"connector_id": connector.id,
"delete_from_kb": delete_from_kb,
},
context=context,
)
return trash_result
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Do not retry or suggest alternatives.",
}
final_file_id = result.params.get("file_id", file_id)
final_connector_id = result.params.get("connector_id", connector.id)
final_delete_from_kb = result.params.get(
"delete_from_kb", delete_from_kb
)
if final_connector_id != connector.id:
result = await db_session.execute(
select(SearchSourceConnector).filter(
and_(
SearchSourceConnector.id == final_connector_id,
SearchSourceConnector.search_space_id
== search_space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.ONEDRIVE_CONNECTOR,
)
)
)
validated_connector = result.scalars().first()
if not validated_connector:
return {
"status": "error",
"message": "Selected OneDrive connector is invalid or has been disconnected.",
}
actual_connector_id = validated_connector.id
else:
actual_connector_id = connector.id
logger.info(
f"Deleting OneDrive file: file_id='{final_file_id}', connector={actual_connector_id}"
)
client = OneDriveClient(
session=db_session, connector_id=actual_connector_id
)
await client.trash_file(final_file_id)
logger.info(
f"OneDrive file deleted (moved to recycle bin): file_id={final_file_id}"
)
trash_result: dict[str, Any] = {
"status": "success",
"file_id": final_file_id,
"message": f"Successfully moved '{file_name}' to the recycle bin.",
}
deleted_from_kb = False
if final_delete_from_kb and document_id:
try:
doc_result = await db_session.execute(
select(Document).filter(Document.id == document_id)
)
doc = doc_result.scalars().first()
if doc:
await db_session.delete(doc)
await db_session.commit()
deleted_from_kb = True
logger.info(
f"Deleted document {document_id} from knowledge base"
)
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
logger.error(f"Failed to delete document from KB: {e}")
await db_session.rollback()
trash_result["warning"] = (
f"File moved to recycle bin, but failed to remove from knowledge base: {e!s}"
)
trash_result["deleted_from_kb"] = deleted_from_kb
if deleted_from_kb:
trash_result["message"] = (
f"{trash_result.get('message', '')} (also removed from knowledge base)"
)
return trash_result
except Exception as e:
from langgraph.errors import GraphInterrupt

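The two-pass lookup above falls back from an exact title match to the file name recorded in JSON metadata, compared case-insensitively via a cast. A reduced, runnable sketch of that second pass (the `Doc` table and its values are hypothetical):

from sqlalchemy import JSON, String, cast, create_engine, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Doc(Base):
    __tablename__ = "docs"
    id: Mapped[int] = mapped_column(primary_key=True)
    title: Mapped[str]
    document_metadata: Mapped[dict] = mapped_column(JSON)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as s:
    s.add(Doc(title="Q3 Report.docx",
              document_metadata={"onedrive_file_name": "q3 report.docx"}))
    s.commit()
    # Fallback pass: compare the JSON-stored file name, not the title,
    # lower-casing both sides so the match is case-insensitive.
    stmt = select(Doc).where(
        func.lower(cast(Doc.document_metadata["onedrive_file_name"], String))
        == func.lower("Q3 REPORT.docx")
    )
    print(s.scalars(stmt).first().title)  # -> Q3 Report.docx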
View file

@ -824,13 +824,22 @@ async def build_tools_async(
"""Async version of build_tools that also loads MCP tools from database.
Design Note:
This function exists because MCP tools require database queries to load user configs,
while built-in tools are created synchronously from static code.
This function exists because MCP tools require database queries to load
user configs, while built-in tools are created synchronously from static
code.
Alternative: We could make build_tools() itself async and always query the database,
but that would force async everywhere even when only using built-in tools. The current
design keeps the simple case (static tools only) synchronous while supporting dynamic
database-loaded tools through this async wrapper.
Alternative: We could make build_tools() itself async and always query
the database, but that would force async everywhere even when only using
built-in tools. The current design keeps the simple case (static tools
only) synchronous while supporting dynamic database-loaded tools through
this async wrapper.
Phase 1.3: built-in tool construction (CPU; runs in a thread pool to
avoid event-loop stalls) and MCP tool loading (HTTP/DB I/O; runs on
the event loop) are kicked off concurrently. Cold-path savings are
bounded by the slower of the two (typically MCP at ~200ms-1.7s),
so the parallelization recovers the ~50-200ms previously spent
serially on built-in construction.
Args:
dependencies: Dict containing all possible dependencies
@ -843,33 +852,70 @@ async def build_tools_async(
List of configured tool instances ready for the agent, including MCP tools.
"""
import asyncio
import time
_perf_log = logging.getLogger("surfsense.perf")
_perf_log.setLevel(logging.DEBUG)
can_load_mcp = (
include_mcp_tools
and "db_session" in dependencies
and "search_space_id" in dependencies
)
# Built-in tool construction is synchronous + CPU-only. Off-loop it so
# MCP's HTTP/DB I/O can fire concurrently. ``build_tools`` is a pure
# function of its inputs, so it is safe to thread-shift.
_t0 = time.perf_counter()
tools = build_tools(dependencies, enabled_tools, disabled_tools, additional_tools)
builtin_task = asyncio.create_task(
asyncio.to_thread(
build_tools, dependencies, enabled_tools, disabled_tools, additional_tools
)
)
mcp_task: asyncio.Task | None = None
if can_load_mcp:
mcp_task = asyncio.create_task(
load_mcp_tools(
dependencies["db_session"],
dependencies["search_space_id"],
)
)
# Surface failures from each task independently so a flaky MCP
# endpoint never poisons built-in tool registration. ``return_exceptions``
# gives us per-task exceptions instead of dropping the second result
# when the first raises.
if mcp_task is not None:
builtin_result, mcp_result = await asyncio.gather(
builtin_task, mcp_task, return_exceptions=True
)
else:
builtin_result = await builtin_task
mcp_result = None
if isinstance(builtin_result, BaseException):
raise builtin_result # built-in registration failure is non-recoverable
tools: list[BaseTool] = builtin_result
_perf_log.info(
"[build_tools_async] Built-in tools in %.3fs (%d tools)",
"[build_tools_async] Built-in tools in %.3fs (%d tools, parallel)",
time.perf_counter() - _t0,
len(tools),
)
# Load MCP tools if requested and dependencies are available
if (
include_mcp_tools
and "db_session" in dependencies
and "search_space_id" in dependencies
):
try:
_t0 = time.perf_counter()
mcp_tools = await load_mcp_tools(
dependencies["db_session"],
dependencies["search_space_id"],
if mcp_task is not None:
if isinstance(mcp_result, BaseException):
# ``return_exceptions=True`` captures the exception out-of-band,
# so ``sys.exc_info()`` is empty here. Pass the captured
# exception via ``exc_info=`` to get a real traceback.
logging.error(
"Failed to load MCP tools: %s", mcp_result, exc_info=mcp_result
)
else:
mcp_tools = mcp_result or []
_perf_log.info(
"[build_tools_async] MCP tools loaded in %.3fs (%d tools)",
"[build_tools_async] MCP tools loaded in %.3fs (%d tools, parallel)",
time.perf_counter() - _t0,
len(mcp_tools),
)
@ -879,8 +925,6 @@ async def build_tools_async(
len(mcp_tools),
[t.name for t in mcp_tools],
)
except Exception as e:
logging.exception("Failed to load MCP tools: %s", e)
logging.info(
"Total tools for agent: %d%s",

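A self-contained sketch of the concurrency pattern above, with stand-ins for `build_tools` and `load_mcp_tools` (both stubs here are hypothetical; only the task/gather/exc_info shape mirrors the code):

import asyncio
import logging
import time


def build_builtin_tools() -> list[str]:
    time.sleep(0.05)  # stand-in for synchronous, CPU-only construction
    return ["builtin_a", "builtin_b"]


async def load_remote_tools() -> list[str]:
    await asyncio.sleep(0.2)  # stand-in for MCP HTTP/DB I/O
    return ["mcp_a"]


async def build_all() -> list[str]:
    builtin_task = asyncio.create_task(asyncio.to_thread(build_builtin_tools))
    mcp_task = asyncio.create_task(load_remote_tools())
    builtin_result, mcp_result = await asyncio.gather(
        builtin_task, mcp_task, return_exceptions=True
    )
    if isinstance(builtin_result, BaseException):
        raise builtin_result  # built-in failure stays fatal
    tools = list(builtin_result)
    if isinstance(mcp_result, BaseException):
        # The exception was captured out-of-band by gather(), so pass it
        # through exc_info= to get a real traceback in the log record.
        logging.error("Failed to load MCP tools: %s", mcp_result, exc_info=mcp_result)
    else:
        tools.extend(mcp_result)
    return tools


print(asyncio.run(build_all()))  # -> ['builtin_a', 'builtin_b', 'mcp_a']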
View file

@ -15,7 +15,7 @@ from langchain_core.tools import tool
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import SurfsenseDocsChunk, SurfsenseDocsDocument
from app.db import SurfsenseDocsChunk, SurfsenseDocsDocument, async_session_maker
from app.utils.document_converters import embed_text
@ -124,12 +124,19 @@ def create_search_surfsense_docs_tool(db_session: AsyncSession):
"""
Factory function to create the search_surfsense_docs tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Database session for executing queries
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
A configured tool function for searching Surfsense documentation
"""
del db_session # per-call session — see docstring
@tool
async def search_surfsense_docs(query: str, top_k: int = 10) -> str:
@ -155,10 +162,11 @@ def create_search_surfsense_docs_tool(db_session: AsyncSession):
Returns:
Relevant documentation content formatted with chunk IDs for citations
"""
return await search_surfsense_docs_async(
query=query,
db_session=db_session,
top_k=top_k,
)
async with async_session_maker() as db_session:
return await search_surfsense_docs_async(
query=query,
db_session=db_session,
top_k=top_k,
)
return search_surfsense_docs

View file

@ -5,6 +5,8 @@ import httpx
from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import async_session_maker
from ._auth import GRAPH_API, get_access_token, get_teams_connector
logger = logging.getLogger(__name__)
@ -15,6 +17,23 @@ def create_list_teams_channels_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the list_teams_channels tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured list_teams_channels tool
"""
del db_session # per-call session — see docstring
@tool
async def list_teams_channels() -> dict[str, Any]:
"""List all Microsoft Teams and their channels the user has access to.
@ -23,63 +42,66 @@ def create_list_teams_channels_tool(
Dictionary with status and a list of teams, each containing
team_id, team_name, and a list of channels (id, name).
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Teams tool not properly configured."}
try:
connector = await get_teams_connector(db_session, search_space_id, user_id)
if not connector:
return {"status": "error", "message": "No Teams connector found."}
token = await get_access_token(db_session, connector)
headers = {"Authorization": f"Bearer {token}"}
async with httpx.AsyncClient(timeout=20.0) as client:
teams_resp = await client.get(
f"{GRAPH_API}/me/joinedTeams", headers=headers
async with async_session_maker() as db_session:
connector = await get_teams_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Teams connector found."}
if teams_resp.status_code == 401:
return {
"status": "auth_error",
"message": "Teams token expired. Please re-authenticate.",
"connector_type": "teams",
}
if teams_resp.status_code != 200:
return {
"status": "error",
"message": f"Graph API error: {teams_resp.status_code}",
}
token = await get_access_token(db_session, connector)
headers = {"Authorization": f"Bearer {token}"}
teams_data = teams_resp.json().get("value", [])
result_teams = []
async with httpx.AsyncClient(timeout=20.0) as client:
for team in teams_data:
team_id = team["id"]
ch_resp = await client.get(
f"{GRAPH_API}/teams/{team_id}/channels",
headers=headers,
)
channels = []
if ch_resp.status_code == 200:
channels = [
{"id": ch["id"], "name": ch.get("displayName", "")}
for ch in ch_resp.json().get("value", [])
]
result_teams.append(
{
"team_id": team_id,
"team_name": team.get("displayName", ""),
"channels": channels,
}
async with httpx.AsyncClient(timeout=20.0) as client:
teams_resp = await client.get(
f"{GRAPH_API}/me/joinedTeams", headers=headers
)
return {
"status": "success",
"teams": result_teams,
"total_teams": len(result_teams),
}
if teams_resp.status_code == 401:
return {
"status": "auth_error",
"message": "Teams token expired. Please re-authenticate.",
"connector_type": "teams",
}
if teams_resp.status_code != 200:
return {
"status": "error",
"message": f"Graph API error: {teams_resp.status_code}",
}
teams_data = teams_resp.json().get("value", [])
result_teams = []
async with httpx.AsyncClient(timeout=20.0) as client:
for team in teams_data:
team_id = team["id"]
ch_resp = await client.get(
f"{GRAPH_API}/teams/{team_id}/channels",
headers=headers,
)
channels = []
if ch_resp.status_code == 200:
channels = [
{"id": ch["id"], "name": ch.get("displayName", "")}
for ch in ch_resp.json().get("value", [])
]
result_teams.append(
{
"team_id": team_id,
"team_name": team.get("displayName", ""),
"channels": channels,
}
)
return {
"status": "success",
"teams": result_teams,
"total_teams": len(result_teams),
}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@ -5,6 +5,8 @@ import httpx
from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import async_session_maker
from ._auth import GRAPH_API, get_access_token, get_teams_connector
logger = logging.getLogger(__name__)
@ -15,6 +17,23 @@ def create_read_teams_messages_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the read_teams_messages tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured read_teams_messages tool
"""
del db_session # per-call session — see docstring
@tool
async def read_teams_messages(
team_id: str,
@ -32,65 +51,68 @@ def create_read_teams_messages_tool(
Dictionary with status and a list of messages including
id, sender, content, timestamp.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Teams tool not properly configured."}
limit = min(limit, 50)
try:
connector = await get_teams_connector(db_session, search_space_id, user_id)
if not connector:
return {"status": "error", "message": "No Teams connector found."}
token = await get_access_token(db_session, connector)
async with httpx.AsyncClient(timeout=20.0) as client:
resp = await client.get(
f"{GRAPH_API}/teams/{team_id}/channels/{channel_id}/messages",
headers={"Authorization": f"Bearer {token}"},
params={"$top": limit},
async with async_session_maker() as db_session:
connector = await get_teams_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Teams connector found."}
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Teams token expired. Please re-authenticate.",
"connector_type": "teams",
}
if resp.status_code == 403:
return {
"status": "error",
"message": "Insufficient permissions to read this channel.",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Graph API error: {resp.status_code}",
}
token = await get_access_token(db_session, connector)
raw_msgs = resp.json().get("value", [])
messages = []
for m in raw_msgs:
sender = m.get("from", {})
user_info = sender.get("user", {}) if sender else {}
body = m.get("body", {})
messages.append(
{
"id": m.get("id"),
"sender": user_info.get("displayName", "Unknown"),
"content": body.get("content", ""),
"content_type": body.get("contentType", "text"),
"timestamp": m.get("createdDateTime", ""),
async with httpx.AsyncClient(timeout=20.0) as client:
resp = await client.get(
f"{GRAPH_API}/teams/{team_id}/channels/{channel_id}/messages",
headers={"Authorization": f"Bearer {token}"},
params={"$top": limit},
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Teams token expired. Please re-authenticate.",
"connector_type": "teams",
}
if resp.status_code == 403:
return {
"status": "error",
"message": "Insufficient permissions to read this channel.",
}
if resp.status_code != 200:
return {
"status": "error",
"message": f"Graph API error: {resp.status_code}",
}
)
return {
"status": "success",
"team_id": team_id,
"channel_id": channel_id,
"messages": messages,
"total": len(messages),
}
raw_msgs = resp.json().get("value", [])
messages = []
for m in raw_msgs:
sender = m.get("from", {})
user_info = sender.get("user", {}) if sender else {}
body = m.get("body", {})
messages.append(
{
"id": m.get("id"),
"sender": user_info.get("displayName", "Unknown"),
"content": body.get("content", ""),
"content_type": body.get("contentType", "text"),
"timestamp": m.get("createdDateTime", ""),
}
)
return {
"status": "success",
"team_id": team_id,
"channel_id": channel_id,
"messages": messages,
"total": len(messages),
}
except Exception as e:
from langgraph.errors import GraphInterrupt

View file

@ -6,6 +6,7 @@ from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.tools.hitl import request_approval
from app.db import async_session_maker
from ._auth import GRAPH_API, get_access_token, get_teams_connector
@ -17,6 +18,23 @@ def create_send_teams_message_tool(
search_space_id: int | None = None,
user_id: str | None = None,
):
"""
Factory function to create the send_teams_message tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
Args:
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
Returns:
Configured send_teams_message tool
"""
del db_session # per-call session — see docstring
@tool
async def send_teams_message(
team_id: str,
@ -39,70 +57,73 @@ def create_send_teams_message_tool(
IMPORTANT:
- If status is "rejected", the user explicitly declined. Do NOT retry.
"""
if db_session is None or search_space_id is None or user_id is None:
if search_space_id is None or user_id is None:
return {"status": "error", "message": "Teams tool not properly configured."}
try:
connector = await get_teams_connector(db_session, search_space_id, user_id)
if not connector:
return {"status": "error", "message": "No Teams connector found."}
async with async_session_maker() as db_session:
connector = await get_teams_connector(
db_session, search_space_id, user_id
)
if not connector:
return {"status": "error", "message": "No Teams connector found."}
result = request_approval(
action_type="teams_send_message",
tool_name="send_teams_message",
params={
"team_id": team_id,
"channel_id": channel_id,
"content": content,
},
context={"connector_id": connector.id},
)
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Message was not sent.",
}
final_content = result.params.get("content", content)
final_team = result.params.get("team_id", team_id)
final_channel = result.params.get("channel_id", channel_id)
token = await get_access_token(db_session, connector)
async with httpx.AsyncClient(timeout=20.0) as client:
resp = await client.post(
f"{GRAPH_API}/teams/{final_team}/channels/{final_channel}/messages",
headers={
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
result = request_approval(
action_type="teams_send_message",
tool_name="send_teams_message",
params={
"team_id": team_id,
"channel_id": channel_id,
"content": content,
},
json={"body": {"content": final_content}},
context={"connector_id": connector.id},
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Teams token expired. Please re-authenticate.",
"connector_type": "teams",
}
if resp.status_code == 403:
return {
"status": "insufficient_permissions",
"message": "Missing ChannelMessage.Send permission. Please re-authenticate with updated scopes.",
}
if resp.status_code not in (200, 201):
return {
"status": "error",
"message": f"Graph API error: {resp.status_code}{resp.text[:200]}",
}
if result.rejected:
return {
"status": "rejected",
"message": "User declined. Message was not sent.",
}
msg_data = resp.json()
return {
"status": "success",
"message_id": msg_data.get("id"),
"message": "Message sent to Teams channel.",
}
final_content = result.params.get("content", content)
final_team = result.params.get("team_id", team_id)
final_channel = result.params.get("channel_id", channel_id)
token = await get_access_token(db_session, connector)
async with httpx.AsyncClient(timeout=20.0) as client:
resp = await client.post(
f"{GRAPH_API}/teams/{final_team}/channels/{final_channel}/messages",
headers={
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
},
json={"body": {"content": final_content}},
)
if resp.status_code == 401:
return {
"status": "auth_error",
"message": "Teams token expired. Please re-authenticate.",
"connector_type": "teams",
}
if resp.status_code == 403:
return {
"status": "insufficient_permissions",
"message": "Missing ChannelMessage.Send permission. Please re-authenticate with updated scopes.",
}
if resp.status_code not in (200, 201):
return {
"status": "error",
"message": f"Graph API error: {resp.status_code}{resp.text[:200]}",
}
msg_data = resp.json()
return {
"status": "success",
"message_id": msg_data.get("id"),
"message": "Message sent to Teams channel.",
}
except Exception as e:
from langgraph.errors import GraphInterrupt

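Every write tool in this diff funnels through the same approval round-trip: rejection is terminal, and approved params may carry user edits that override the originals. A schematic sketch (the `ApprovalResult` shape is inferred from how `request_approval` is used here, not taken from the hitl module itself):

from dataclasses import dataclass, field
from typing import Any


@dataclass
class ApprovalResult:
    rejected: bool
    params: dict[str, Any] = field(default_factory=dict)  # user edits, if any


def apply_approval(content: str, result: ApprovalResult) -> dict[str, Any]:
    if result.rejected:
        # Terminal: the user explicitly declined; callers must not retry.
        return {"status": "rejected", "message": "User declined."}
    # Prefer the user's edited value, falling back to the original.
    final_content = result.params.get("content", content)
    return {"status": "success", "content": final_content}


print(apply_approval("hi team", ApprovalResult(False, {"content": "hello team"})))
# -> {'status': 'success', 'content': 'hello team'}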
View file

@ -26,7 +26,7 @@ from langchain_core.tools import tool
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import SearchSpace, User
from app.db import SearchSpace, User, async_session_maker
logger = logging.getLogger(__name__)
@ -295,6 +295,25 @@ def create_update_memory_tool(
db_session: AsyncSession,
llm: Any | None = None,
):
"""Factory function to create the user-memory update tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
The session's bound ``commit``/``rollback`` methods are captured at
call time, after ``async with`` has bound ``db_session`` locally.
Args:
user_id: ID of the user whose memory document is being updated.
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
llm: Optional LLM for the forced-rewrite path.
Returns:
Configured update_memory tool for the user-memory scope.
"""
del db_session # per-call session — see docstring
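A toy illustration of the call-time capture described in this docstring; `StubSession` and `save` are stand-ins (for `AsyncSession` and `_save_memory`) that exist only to make the binding order concrete:

import asyncio
from contextlib import asynccontextmanager


class StubSession:
    async def commit(self) -> None:
        print(f"commit on session {id(self)}")

    async def rollback(self) -> None:
        print(f"rollback on session {id(self)}")


@asynccontextmanager
async def session_maker():
    yield StubSession()


async def save(commit_fn, rollback_fn) -> None:
    try:
        await commit_fn()
    except Exception:
        await rollback_fn()
        raise


async def update_memory(updated_memory: str) -> None:
    async with session_maker() as db_session:
        # commit/rollback are bound methods of *this* call's session;
        # capturing them at factory time would pin a session that is
        # already closed when the cached tool closure runs again.
        await save(commit_fn=db_session.commit, rollback_fn=db_session.rollback)


asyncio.run(update_memory("# notes"))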
uid = UUID(user_id) if isinstance(user_id, str) else user_id
@tool
@ -311,26 +330,26 @@ def create_update_memory_tool(
updated_memory: The FULL updated markdown document (not a diff).
"""
try:
result = await db_session.execute(select(User).where(User.id == uid))
user = result.scalars().first()
if not user:
return {"status": "error", "message": "User not found."}
async with async_session_maker() as db_session:
result = await db_session.execute(select(User).where(User.id == uid))
user = result.scalars().first()
if not user:
return {"status": "error", "message": "User not found."}
old_memory = user.memory_md
old_memory = user.memory_md
return await _save_memory(
updated_memory=updated_memory,
old_memory=old_memory,
llm=llm,
apply_fn=lambda content: setattr(user, "memory_md", content),
commit_fn=db_session.commit,
rollback_fn=db_session.rollback,
label="memory",
scope="user",
)
return await _save_memory(
updated_memory=updated_memory,
old_memory=old_memory,
llm=llm,
apply_fn=lambda content: setattr(user, "memory_md", content),
commit_fn=db_session.commit,
rollback_fn=db_session.rollback,
label="memory",
scope="user",
)
except Exception as e:
logger.exception("Failed to update user memory: %s", e)
await db_session.rollback()
return {
"status": "error",
"message": f"Failed to update memory: {e}",
@ -344,6 +363,27 @@ def create_update_team_memory_tool(
db_session: AsyncSession,
llm: Any | None = None,
):
"""Factory function to create the team-memory update tool.
The tool acquires its own short-lived ``AsyncSession`` per call via
:data:`async_session_maker` so the closure is safe to share across
HTTP requests by the compiled-agent cache. Capturing a per-request
session here would surface stale/closed sessions on cache hits.
The session's bound ``commit``/``rollback`` methods are captured at
call time, after ``async with`` has bound ``db_session`` locally.
Args:
search_space_id: ID of the search space whose team memory is being
updated.
db_session: Reserved for registry compatibility. Per-call sessions
are opened via :data:`async_session_maker` inside the tool body.
llm: Optional LLM for the forced-rewrite path.
Returns:
Configured update_memory tool for the team-memory scope.
"""
del db_session # per-call session — see docstring
@tool
async def update_memory(updated_memory: str) -> dict[str, Any]:
"""Update the team's shared memory document for this search space.
@ -359,28 +399,30 @@ def create_update_team_memory_tool(
updated_memory: The FULL updated markdown document (not a diff).
"""
try:
result = await db_session.execute(
select(SearchSpace).where(SearchSpace.id == search_space_id)
)
space = result.scalars().first()
if not space:
return {"status": "error", "message": "Search space not found."}
async with async_session_maker() as db_session:
result = await db_session.execute(
select(SearchSpace).where(SearchSpace.id == search_space_id)
)
space = result.scalars().first()
if not space:
return {"status": "error", "message": "Search space not found."}
old_memory = space.shared_memory_md
old_memory = space.shared_memory_md
return await _save_memory(
updated_memory=updated_memory,
old_memory=old_memory,
llm=llm,
apply_fn=lambda content: setattr(space, "shared_memory_md", content),
commit_fn=db_session.commit,
rollback_fn=db_session.rollback,
label="team memory",
scope="team",
)
return await _save_memory(
updated_memory=updated_memory,
old_memory=old_memory,
llm=llm,
apply_fn=lambda content: setattr(
space, "shared_memory_md", content
),
commit_fn=db_session.commit,
rollback_fn=db_session.rollback,
label="team memory",
scope="team",
)
except Exception as e:
logger.exception("Failed to update team memory: %s", e)
await db_session.rollback()
return {
"status": "error",
"message": f"Failed to update team memory: {e}",