chore: merge upstream with local feature additions

- Merged dexscreener connector, composio connectors, crypto realtime tools from upstream
- Kept local additions: dropbox/onedrive connectors, memory routes, model_list routes, RefreshToken model
- Resolved frontend conflicts: kept tool UIs from both sides
- Accepted upstream lock files (uv.lock, pnpm-lock.yaml)
This commit is contained in:
Vonic 2026-04-13 23:31:52 +07:00
commit 6e86cd7e8a
803 changed files with 152168 additions and 14005 deletions

View file

@ -12,7 +12,7 @@ from sqlalchemy.future import select
from app.config import config
from app.connectors.airtable_connector import AirtableConnector
from app.db import SearchSourceConnector
from app.routes.airtable_add_connector_route import refresh_airtable_token
from app.utils.airtable_token_utils import refresh_airtable_token
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption

View file

@ -14,10 +14,10 @@ from sqlalchemy.future import select
from app.config import config
from app.connectors.clickup_connector import ClickUpConnector
from app.db import SearchSourceConnector
from app.routes.clickup_add_connector_route import refresh_clickup_token
from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
@ -184,6 +184,8 @@ class ClickUpHistoryConnector:
)
# Refresh token
# Lazy import to avoid circular dependency
from app.routes.clickup_add_connector_route import refresh_clickup_token
connector = await refresh_clickup_token(self._session, connector)
# Reload credentials after refresh

View file

@ -86,14 +86,6 @@ class ConfluenceHistoryConnector:
if is_oauth:
# OAuth 2.0 authentication
# Check if access_token exists before processing
raw_access_token = config_data.get("access_token")
if not raw_access_token:
raise ValueError(
"Confluence access token not found. "
"Please reconnect your Confluence account."
)
# Decrypt credentials if they are encrypted
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
@ -125,16 +117,6 @@ class ConfluenceHistoryConnector:
f"Failed to decrypt Confluence credentials: {e!s}"
) from e
# Final validation after decryption
final_token = config_data.get("access_token")
if not final_token or (
isinstance(final_token, str) and not final_token.strip()
):
raise ValueError(
"Confluence access token is invalid or empty. "
"Please reconnect your Confluence account."
)
try:
self._credentials = AtlassianAuthCredentialsBase.from_dict(
config_data
@ -189,11 +171,9 @@ class ConfluenceHistoryConnector:
f"Connector {self._connector_id} not found; cannot refresh token."
)
# Refresh token
# Lazy import to avoid circular dependency
from app.routes.confluence_add_connector_route import (
refresh_confluence_token,
)
from app.routes.confluence_add_connector_route import refresh_confluence_token
connector = await refresh_confluence_token(self._session, connector)
# Reload credentials after refresh
@ -344,61 +324,6 @@ class ConfluenceHistoryConnector:
logger.error(f"Confluence API request error: {e!s}", exc_info=True)
raise Exception(f"Confluence API request failed: {e!s}") from e
async def _make_api_request_with_method(
    self,
    endpoint: str,
    method: str = "GET",
    json_payload: dict[str, Any] | None = None,
    params: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """Send a request to the Confluence v2 API with an arbitrary HTTP verb.

    Args:
        endpoint: Path below ``/wiki/api/v2/``.
        method: HTTP verb ("GET", "POST", "PUT", "DELETE"); any other value
            falls back to a plain GET.
        json_payload: Optional JSON body (POST/PUT only).
        params: Optional query parameters.

    Returns:
        Parsed JSON response, or ``{"status": "success"}`` for empty/204
        responses.

    Raises:
        ValueError: If the connector is not using OAuth (write operations
            require OAuth authentication).
        Exception: If the HTTP request fails.
    """
    if not self._use_oauth:
        raise ValueError("Write operations require OAuth authentication")

    access_token = await self._get_valid_token()
    api_root = await self._get_base_url()
    client = await self._get_client()

    request_url = f"{api_root}/wiki/api/v2/{endpoint}"
    request_headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {access_token}",
        "Accept": "application/json",
    }

    verb = method.upper()
    try:
        if verb == "POST":
            response = await client.post(
                request_url, headers=request_headers, json=json_payload, params=params
            )
        elif verb == "PUT":
            response = await client.put(
                request_url, headers=request_headers, json=json_payload, params=params
            )
        elif verb == "DELETE":
            response = await client.delete(
                request_url, headers=request_headers, params=params
            )
        else:
            # Unrecognized verbs are deliberately treated as GET.
            response = await client.get(
                request_url, headers=request_headers, params=params
            )
        response.raise_for_status()
        # 204 No Content (and empty bodies) carry no JSON to parse.
        if response.status_code == 204 or not response.text:
            return {"status": "success"}
        return response.json()
    except httpx.HTTPStatusError as e:
        error_detail = {
            "status_code": e.response.status_code,
            "url": str(e.request.url),
            "response_text": e.response.text,
        }
        logger.error(f"Confluence API HTTP error: {error_detail}")
        raise Exception(
            f"Confluence API request failed (HTTP {e.response.status_code}): {e.response.text}"
        ) from e
    except httpx.RequestError as e:
        logger.error(f"Confluence API request error: {e!s}", exc_info=True)
        raise Exception(f"Confluence API request failed: {e!s}") from e
async def get_all_spaces(self) -> list[dict[str, Any]]:
"""
Fetch all spaces from Confluence.
@ -651,65 +576,6 @@ class ConfluenceHistoryConnector:
except Exception as e:
return [], f"Error fetching pages: {e!s}"
async def get_page(self, page_id: str) -> dict[str, Any]:
    """Retrieve a single Confluence page, including its storage-format body.

    Args:
        page_id: Identifier of the page to fetch.

    Returns:
        The raw page payload from the Confluence v2 API.
    """
    endpoint = f"pages/{page_id}"
    query = {"body-format": "storage"}
    return await self._make_api_request(endpoint, params=query)
async def create_page(
    self,
    space_id: str,
    title: str,
    body: str,
    parent_page_id: str | None = None,
) -> dict[str, Any]:
    """Create a new Confluence page in the given space.

    Args:
        space_id: Target space identifier.
        title: Title for the new page.
        body: Page content in Confluence storage format.
        parent_page_id: Optional parent page; when provided the page is
            created as a child of that page.

    Returns:
        The API response describing the created page.
    """
    new_page: dict[str, Any] = {
        "spaceId": space_id,
        "title": title,
        "body": {
            "representation": "storage",
            "value": body,
        },
        "status": "current",
    }
    # Only attach a parent when one was requested; omitting it creates a
    # top-level page in the space.
    if parent_page_id:
        new_page["parentId"] = parent_page_id
    return await self._make_api_request_with_method(
        "pages", method="POST", json_payload=new_page
    )
async def update_page(
    self,
    page_id: str,
    title: str,
    body: str,
    version_number: int,
) -> dict[str, Any]:
    """Update an existing Confluence page.

    Confluence requires an explicit version number on updates (optimistic
    locking), so callers must know the page's version beforehand.

    Args:
        page_id: Identifier of the page to update.
        title: New page title.
        body: New content in Confluence storage format.
        version_number: Version number to write with the update.

    Returns:
        The API response describing the updated page.
    """
    update_payload: dict[str, Any] = {
        "id": page_id,
        "title": title,
        "body": {
            "representation": "storage",
            "value": body,
        },
        "version": {
            "number": version_number,
        },
        "status": "current",
    }
    return await self._make_api_request_with_method(
        f"pages/{page_id}", method="PUT", json_payload=update_payload
    )
async def delete_page(self, page_id: str) -> dict[str, Any]:
    """Delete a Confluence page by ID.

    Args:
        page_id: Identifier of the page to delete.

    Returns:
        The API response (``{"status": "success"}`` for empty responses).
    """
    endpoint = f"pages/{page_id}"
    return await self._make_api_request_with_method(endpoint, method="DELETE")
async def close(self):
"""Close the HTTP client connection."""
if self._http_client:

View file

@ -0,0 +1,258 @@
"""
DexScreener Connector Module
A module for retrieving cryptocurrency trading pair data from DexScreener API.
Allows fetching pair information for tracked tokens across multiple blockchain networks.
"""
import asyncio
import logging
from typing import Any
import httpx
logger = logging.getLogger(__name__)
class DexScreenerConnector:
"""Class for retrieving trading pair data from DexScreener API."""
def __init__(self):
"""
Initialize the DexScreenerConnector class.
Note: DexScreener API is public and doesn't require authentication.
"""
self.base_url = "https://api.dexscreener.com"
self.rate_limit_delay = 0.2 # 200ms delay between requests to respect rate limits
async def make_request(
self,
endpoint: str,
max_retries: int = 3
) -> dict[str, Any] | None:
"""
Make an async request to the DexScreener API with retry logic.
Args:
endpoint: API endpoint path (without base URL)
max_retries: Maximum number of retry attempts for failed requests
Returns:
Response data from the API, or None if request fails
Raises:
Exception: If the API request fails after all retries
"""
url = f"{self.base_url}/{endpoint.lstrip('/')}"
for attempt in range(max_retries):
try:
async with httpx.AsyncClient(timeout=30.0) as client:
response = await client.get(url)
if response.status_code == 200:
# Add delay to respect rate limits
await self._rate_limit_delay()
return response.json()
elif response.status_code == 429:
# Rate limit exceeded - exponential backoff
wait_time = (2 ** attempt) * 1.0 # 1s, 2s, 4s
logger.warning(f"Rate limit exceeded. Waiting {wait_time}s before retry...")
await asyncio.sleep(wait_time)
continue
elif response.status_code == 404:
# Token/pair not found - return None instead of raising
logger.info(f"Token not found: {endpoint}")
return None
else:
raise Exception(
f"API request failed with status code {response.status_code}: {response.text}"
)
except httpx.TimeoutException:
if attempt < max_retries - 1:
logger.warning(f"Request timeout. Retrying... (attempt {attempt + 1}/{max_retries})")
continue
else:
raise Exception(f"Request timeout after {max_retries} attempts")
except httpx.RequestError as e:
if attempt < max_retries - 1:
logger.warning(f"Network error: {e}. Retrying... (attempt {attempt + 1}/{max_retries})")
continue
else:
raise Exception(f"Network error after {max_retries} attempts: {e}") from e
return None
async def _rate_limit_delay(self):
"""Add delay to respect API rate limits (300 req/min = ~200ms between requests)."""
import asyncio
await asyncio.sleep(self.rate_limit_delay)
async def get_token_pairs(
self,
chain_id: str,
token_address: str
) -> tuple[list[dict[str, Any]], str | None]:
"""
Fetch all trading pairs for a specific token on a blockchain.
Args:
chain_id: Blockchain identifier (e.g., 'ethereum', 'bsc', 'polygon')
token_address: Token contract address (0x format)
Returns:
Tuple containing (list of pairs, error message or None)
"""
try:
endpoint = f"token-pairs/v1/{chain_id}/{token_address}"
response = await self.make_request(endpoint)
if response is None:
return [], f"Token not found: {chain_id}/{token_address}"
# DexScreener API returns {"pairs": [...]} or {"pairs": null}
if isinstance(response, dict):
pairs = response.get("pairs", [])
else:
# Fallback if API returns list directly (shouldn't happen)
pairs = response if isinstance(response, list) else []
if not pairs:
return [], f"No trading pairs found for {chain_id}/{token_address}"
return pairs, None
except Exception as e:
return [], f"Error fetching pairs for {chain_id}/{token_address}: {e!s}"
def format_pair_to_markdown(
self,
pair: dict[str, Any],
token_name: str | None = None
) -> str:
"""
Convert a trading pair to markdown format.
Args:
pair: The pair object from DexScreener API
token_name: Optional custom name for the token
Returns:
Markdown string representation of the trading pair
"""
# Extract pair details
pair_address = pair.get("pairAddress", "Unknown")
chain_id = pair.get("chainId", "Unknown")
dex_id = pair.get("dexId", "Unknown")
url = pair.get("url", "")
# Extract token information
base_token = pair.get("baseToken", {})
quote_token = pair.get("quoteToken", {})
base_symbol = base_token.get("symbol", "Unknown")
base_name = token_name or base_token.get("name", "Unknown")
quote_symbol = quote_token.get("symbol", "Unknown")
# Extract price and volume data
price_native = pair.get("priceNative", "N/A")
price_usd = pair.get("priceUsd", "N/A")
# Extract liquidity data
liquidity = pair.get("liquidity", {})
liquidity_usd = liquidity.get("usd", 0)
# Extract volume data
volume = pair.get("volume", {})
volume_24h = volume.get("h24", 0)
volume_6h = volume.get("h6", 0)
volume_1h = volume.get("h1", 0)
# Extract price change data
price_change = pair.get("priceChange", {})
price_change_24h = price_change.get("h24", 0)
# Extract market cap and FDV
market_cap = pair.get("marketCap", 0)
fdv = pair.get("fdv", 0)
# Extract transaction counts
txns = pair.get("txns", {})
txns_24h = txns.get("h24", {})
buys_24h = txns_24h.get("buys", 0)
sells_24h = txns_24h.get("sells", 0)
# Build markdown content
markdown_content = f"# {base_symbol}/{quote_symbol} Trading Pair\n\n"
if token_name:
markdown_content += f"**Token:** {base_name} ({base_symbol})\n"
markdown_content += f"**Chain:** {chain_id}\n"
markdown_content += f"**DEX:** {dex_id}\n"
markdown_content += f"**Pair Address:** `{pair_address}`\n\n"
# Add price information
markdown_content += "## Price Information\n\n"
markdown_content += f"- **Price (USD):** ${price_usd}\n"
markdown_content += f"- **Price (Native):** {price_native} {quote_symbol}\n"
markdown_content += f"- **24h Change:** {price_change_24h:+.2f}%\n\n"
# Add liquidity information
markdown_content += "## Liquidity\n\n"
markdown_content += f"- **Total Liquidity:** ${liquidity_usd:,.2f}\n\n"
# Add volume information
markdown_content += "## Trading Volume\n\n"
markdown_content += f"- **24h Volume:** ${volume_24h:,.2f}\n"
markdown_content += f"- **6h Volume:** ${volume_6h:,.2f}\n"
markdown_content += f"- **1h Volume:** ${volume_1h:,.2f}\n\n"
# Add market metrics
markdown_content += "## Market Metrics\n\n"
markdown_content += f"- **Market Cap:** ${market_cap:,.2f}\n"
markdown_content += f"- **FDV (Fully Diluted Valuation):** ${fdv:,.2f}\n\n"
# Add transaction information
markdown_content += "## Transactions (24h)\n\n"
markdown_content += f"- **Buys:** {buys_24h}\n"
markdown_content += f"- **Sells:** {sells_24h}\n"
markdown_content += f"- **Total:** {buys_24h + sells_24h}\n\n"
# Add link to DexScreener
if url:
markdown_content += f"**View on DexScreener:** {url}\n\n"
return markdown_content
# Example usage (uncomment to use):
"""
import asyncio
async def main():
connector = DexScreenerConnector()
# Example: Fetch WETH pairs on Ethereum
chain = "ethereum"
address = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
pairs, error = await connector.get_token_pairs(chain, address)
if error:
print(f"Error: {error}")
else:
print(f"Found {len(pairs)} pairs for WETH")
# Format first pair to markdown
if pairs:
markdown = connector.format_pair_to_markdown(pairs[0], "Wrapped Ether")
print("\nSample Pair in Markdown:\n")
print(markdown)
if __name__ == "__main__":
asyncio.run(main())
"""

View file

@ -17,7 +17,6 @@ from sqlalchemy.future import select
from app.config import config
from app.db import SearchSourceConnector
from app.routes.discord_add_connector_route import refresh_discord_token
from app.schemas.discord_auth_credentials import DiscordAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
@ -177,6 +176,8 @@ class DiscordConnector(commands.Bot):
)
# Refresh token
# Lazy import to avoid circular dependency
from app.routes.discord_add_connector_route import refresh_discord_token
connector = await refresh_discord_token(self._session, connector)
# Reload credentials after refresh

View file

@ -85,14 +85,6 @@ class JiraHistoryConnector:
if is_oauth:
# OAuth 2.0 authentication
# Check if access_token exists before processing
raw_access_token = config_data.get("access_token")
if not raw_access_token:
raise ValueError(
"Jira access token not found. "
"Please reconnect your Jira account."
)
if not config.SECRET_KEY:
raise ValueError(
"SECRET_KEY not configured but tokens are marked as encrypted"
@ -126,16 +118,6 @@ class JiraHistoryConnector:
f"Failed to decrypt Jira credentials: {e!s}"
) from e
# Final validation after decryption
final_token = config_data.get("access_token")
if not final_token or (
isinstance(final_token, str) and not final_token.strip()
):
raise ValueError(
"Jira access token is invalid or empty. "
"Please reconnect your Jira account."
)
try:
self._credentials = AtlassianAuthCredentialsBase.from_dict(
config_data
@ -183,9 +165,9 @@ class JiraHistoryConnector:
f"Connector {self._connector_id} not found; cannot refresh token."
)
# Refresh token
# Lazy import to avoid circular dependency
from app.routes.jira_add_connector_route import refresh_jira_token
connector = await refresh_jira_token(self._session, connector)
# Reload credentials after refresh

View file

@ -1,12 +1,10 @@
import asyncio
import contextlib
import logging
from collections.abc import Awaitable, Callable
from typing import Any, TypeVar
from notion_client import AsyncClient
from notion_client.errors import APIResponseError
from notion_markdown import to_notion
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
@ -17,15 +15,6 @@ from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
class NotionAPIError(Exception):
"""Raised when the Notion API returns a non-200 response.
The message is always user-presentable; callers should surface it directly
without any additional prefix or wrapping.
"""
# Type variable for generic return type
T = TypeVar("T")
@ -37,12 +26,6 @@ T = TypeVar("T")
MAX_RETRIES = 5
BASE_RETRY_DELAY = 1.0 # seconds
MAX_RETRY_DELAY = 60.0 # seconds (Notion's max request timeout)
MAX_RATE_LIMIT_WAIT_SECONDS = float(
getattr(config, "NOTION_MAX_RETRY_AFTER_SECONDS", 30.0)
)
MAX_TOTAL_RETRY_WAIT_SECONDS = float(
getattr(config, "NOTION_MAX_TOTAL_RETRY_WAIT_SECONDS", 120.0)
)
# Type alias for retry callback function
# Signature: async callback(retry_reason, attempt, max_attempts, wait_seconds) -> None
@ -229,8 +212,8 @@ class NotionHistoryConnector:
)
# Refresh token
# Lazy import to avoid circular dependency
from app.routes.notion_add_connector_route import refresh_notion_token
connector = await refresh_notion_token(self._session, connector)
# Reload credentials after refresh
@ -259,9 +242,8 @@ class NotionHistoryConnector:
logger.error(
f"Failed to refresh Notion token for connector {self._connector_id}: {e!s}"
)
raise NotionAPIError(
"Failed to refresh your Notion connection. "
"Please try again or reconnect your Notion account."
raise Exception(
f"Failed to refresh Notion OAuth credentials: {e!s}"
) from e
return self._credentials.access_token
@ -311,7 +293,6 @@ class NotionHistoryConnector:
"""
last_exception: APIResponseError | None = None
retry_delay = BASE_RETRY_DELAY
total_wait_time = 0.0
for attempt in range(MAX_RETRIES):
try:
@ -345,15 +326,6 @@ class NotionHistoryConnector:
wait_time = retry_delay
else:
wait_time = retry_delay
# Avoid very long worker sleeps from external Retry-After values.
if wait_time > MAX_RATE_LIMIT_WAIT_SECONDS:
logger.warning(
f"Notion Retry-After ({wait_time}s) exceeds cap "
f"({MAX_RATE_LIMIT_WAIT_SECONDS}s). Clamping wait time."
)
wait_time = MAX_RATE_LIMIT_WAIT_SECONDS
logger.warning(
f"Notion API rate limited (429). "
f"Waiting {wait_time}s. Attempt {attempt + 1}/{MAX_RETRIES}"
@ -377,14 +349,6 @@ class NotionHistoryConnector:
# Notify about retry via callback (for user notifications)
# Call before sleeping so user sees the message while we wait
if total_wait_time + wait_time > MAX_TOTAL_RETRY_WAIT_SECONDS:
logger.error(
"Notion API retry budget exceeded "
f"({total_wait_time + wait_time:.1f}s > "
f"{MAX_TOTAL_RETRY_WAIT_SECONDS:.1f}s). Failing fast."
)
raise
if on_retry:
try:
await on_retry(
@ -399,7 +363,6 @@ class NotionHistoryConnector:
# Wait before retrying
await asyncio.sleep(wait_time)
total_wait_time += wait_time
# Exponential backoff for next attempt
retry_delay = min(retry_delay * 2, MAX_RETRY_DELAY)
@ -452,16 +415,6 @@ class NotionHistoryConnector:
if page_title not in self._pages_with_skipped_content:
self._pages_with_skipped_content.append(page_title)
@staticmethod
def _api_error_message(error: APIResponseError) -> str:
    """Extract a stable, human-readable message from Notion API errors."""
    body = getattr(error, "body", None)
    # Structured error bodies carry the message under the "message" key.
    if isinstance(body, dict):
        return str(body.get("message", str(error)))
    # Non-dict but truthy bodies are stringified as-is; otherwise fall
    # back to the exception's own string form.
    return str(body) if body else str(error)
async def __aenter__(self):
"""Async context manager entry."""
return self
@ -800,282 +753,3 @@ class NotionHistoryConnector:
# Return empty string for unsupported block types
return ""
# =========================================================================
# WRITE OPERATIONS (create, update, delete pages)
# =========================================================================
async def _get_first_accessible_parent(self) -> str | None:
    """Find a page ID that can serve as a default parent for new pages.

    Searches for the most recently edited page the integration can access.

    Returns:
        The page ID, or ``None`` when no accessible page exists or the
        search itself fails.
    """
    try:
        client = await self._get_client()
        # Most recently edited page first; a single hit is all we need.
        search_result = await self._api_call_with_retry(
            client.search,
            filter={"property": "object", "value": "page"},
            sort={"direction": "descending", "timestamp": "last_edited_time"},
            page_size=1,
        )
        hits = search_result.get("results", [])
        return hits[0]["id"] if hits else None
    except Exception as e:
        logger.error(f"Error finding accessible parent page: {e}")
        return None
def _markdown_to_blocks(self, markdown: str) -> list[dict[str, Any]]:
    """Convert markdown content to Notion blocks using notion-markdown.

    Args:
        markdown: Markdown-formatted text.

    Returns:
        A list of Notion block objects suitable for the blocks API.
    """
    return to_notion(markdown)
async def create_page(
    self, title: str, content: str, parent_page_id: str | None = None
) -> dict[str, Any]:
    """
    Create a new Notion page.

    Args:
        title: Page title
        content: Page content (markdown format)
        parent_page_id: Optional parent page ID (creates as subpage if
            provided). When omitted, the most recently edited accessible
            page is used as the parent.

    Returns:
        Dictionary with page details:
            - page_id: Created page ID
            - url: Page URL
            - title: Page title
            - status: "success" or "error"
            - message: Success/error message

    Note:
        API failures are not raised to the caller; ``APIResponseError`` and
        other exceptions are caught and reported via the returned
        ``status``/``message`` fields.
    """
    try:
        logger.info(
            f"Creating Notion page: title='{title}', parent_page_id={parent_page_id}"
        )
        # Get Notion client
        notion = await self._get_client()
        # Convert markdown content to Notion blocks
        children = self._markdown_to_blocks(content)
        # Prepare parent - find first available page if not provided
        # (Notion requires every page to have a parent).
        if not parent_page_id:
            logger.info(
                "No parent_page_id provided, searching for first accessible page..."
            )
            parent_page_id = await self._get_first_accessible_parent()
            if not parent_page_id:
                logger.warning("No accessible parent pages found")
                return {
                    "status": "error",
                    "message": "Could not find any accessible Notion pages to use as parent. "
                    "Please make sure your Notion integration has access to at least one page.",
                }
            logger.info(f"Using parent_page_id: {parent_page_id}")
        parent = {"type": "page_id", "page_id": parent_page_id}
        # Create the page with standard title property
        properties = {
            "title": {"title": [{"type": "text", "text": {"content": title}}]}
        }
        response = await self._api_call_with_retry(
            notion.pages.create,
            parent=parent,
            properties=properties,
            children=children[:100],  # Notion API limit: 100 blocks per request
        )
        page_id = response["id"]
        page_url = response["url"]
        # If content has more than 100 blocks, append them in further
        # batches of 100 (same API limit applies to each append call).
        if len(children) > 100:
            for i in range(100, len(children), 100):
                batch = children[i : i + 100]
                await self._api_call_with_retry(
                    notion.blocks.children.append, block_id=page_id, children=batch
                )
        return {
            "status": "success",
            "page_id": page_id,
            "url": page_url,
            "title": title,
            "message": f"Created Notion page '{title}'",
        }
    except APIResponseError as e:
        logger.error(f"Notion API error creating page: {e}")
        error_msg = self._api_error_message(e)
        return {
            "status": "error",
            "message": f"Failed to create Notion page: {error_msg}",
        }
    except Exception as e:
        logger.error(f"Unexpected error creating Notion page: {e}")
        return {
            "status": "error",
            "message": f"Failed to create Notion page: {e!s}",
        }
async def update_page(
    self, page_id: str, content: str | None = None
) -> dict[str, Any]:
    """
    Update an existing Notion page by appending new content.

    Note: Content is appended to the page, not replaced.

    Args:
        page_id: Page ID to update
        content: New markdown content to append to the page (optional).
            When falsy, no blocks are appended and only the page info
            is fetched.

    Returns:
        Dictionary with update result (status, page_id, url, title,
        appended_block_ids, message).

    Note:
        API failures are not raised to the caller; ``APIResponseError`` and
        other exceptions are caught and reported via the returned
        ``status``/``message`` fields.
    """
    try:
        notion = await self._get_client()
        appended_block_ids = []
        if content:
            # Convert new content to blocks
            try:
                children = self._markdown_to_blocks(content)
                if not children:
                    logger.warning(
                        "No blocks generated from content, skipping append"
                    )
                    return {
                        "status": "error",
                        "message": "Content conversion failed: no valid blocks generated",
                    }
            except Exception as e:
                logger.error(f"Failed to convert markdown to blocks: {e}")
                return {
                    "status": "error",
                    "message": f"Failed to parse content: {e!s}",
                }
            # Append new content blocks in batches of 100
            # (Notion API limit per append call).
            try:
                for i in range(0, len(children), 100):
                    batch = children[i : i + 100]
                    response = await self._api_call_with_retry(
                        notion.blocks.children.append,
                        block_id=page_id,
                        children=batch,
                    )
                    batch_block_ids = [
                        block["id"] for block in response.get("results", [])
                    ]
                    appended_block_ids.extend(batch_block_ids)
                logger.info(
                    f"Successfully appended {len(children)} new blocks to page {page_id}"
                )
                # Truncate debug output when many blocks were appended.
                logger.debug(
                    f"Appended block IDs: {appended_block_ids[:5]}..."
                    if len(appended_block_ids) > 5
                    else f"Appended block IDs: {appended_block_ids}"
                )
            except Exception as e:
                logger.error(f"Failed to append content blocks: {e}")
                return {
                    "status": "error",
                    "message": f"Failed to append content: {e!s}",
                }
        # Get updated page info for the response payload.
        response = await self._api_call_with_retry(
            notion.pages.retrieve, page_id=page_id
        )
        page_url = response["url"]
        page_title = response["properties"]["title"]["title"][0]["text"]["content"]
        return {
            "status": "success",
            "page_id": page_id,
            "url": page_url,
            "title": page_title,
            "appended_block_ids": appended_block_ids,
            "message": f"Updated Notion page '{page_title}' (content appended)",
        }
    except APIResponseError as e:
        logger.error(f"Notion API error updating page: {e}")
        error_msg = self._api_error_message(e)
        return {
            "status": "error",
            "message": f"Failed to update Notion page: {error_msg}",
        }
    except Exception as e:
        logger.error(f"Unexpected error updating Notion page: {e}")
        return {
            "status": "error",
            "message": f"Failed to update Notion page: {e!s}",
        }
async def delete_page(self, page_id: str) -> dict[str, Any]:
    """
    Delete (archive) a Notion page.

    Note: Notion doesn't truly delete pages, it archives them.

    Args:
        page_id: Page ID to delete

    Returns:
        Dictionary with deletion result (status, page_id, message); API
        failures are reported via the returned status/message fields.
    """
    try:
        client = await self._get_client()
        # Archiving is Notion's equivalent of deletion.
        archived = await self._api_call_with_retry(
            client.pages.update, page_id=page_id, archived=True
        )
        # Best effort: pull the title for the message; pages without a
        # readable title property fall back to "Unknown".
        title = "Unknown"
        with contextlib.suppress(KeyError, IndexError):
            title = archived["properties"]["title"]["title"][0]["text"]["content"]
        return {
            "status": "success",
            "page_id": page_id,
            "message": f"Deleted Notion page '{title}'",
        }
    except APIResponseError as e:
        logger.error(f"Notion API error deleting page: {e}")
        return {
            "status": "error",
            "message": f"Failed to delete Notion page: {self._api_error_message(e)}",
        }
    except Exception as e:
        logger.error(f"Unexpected error deleting Notion page: {e}")
        return {
            "status": "error",
            "message": f"Failed to delete Notion page: {e!s}",
        }

View file

@ -17,7 +17,6 @@ from sqlalchemy.future import select
from app.config import config
from app.db import SearchSourceConnector
from app.routes.slack_add_connector_route import refresh_slack_token
from app.schemas.slack_auth_credentials import SlackAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
@ -155,6 +154,8 @@ class SlackHistory:
)
# Refresh token
# Lazy import to avoid circular dependency
from app.routes.slack_add_connector_route import refresh_slack_token
connector = await refresh_slack_token(self._session, connector)
# Reload credentials after refresh

View file

@ -16,7 +16,6 @@ from sqlalchemy.future import select
from app.config import config
from app.db import SearchSourceConnector
from app.routes.teams_add_connector_route import refresh_teams_token
from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
@ -146,6 +145,8 @@ class TeamsConnector:
)
# Refresh token
# Lazy import to avoid circular dependency
from app.routes.teams_add_connector_route import refresh_teams_token
connector = await refresh_teams_token(self._session, connector)
# Reload credentials after refresh