Resolve Vercel error

This commit is contained in:
Manoj Aggarwal 2026-01-09 13:33:28 -08:00
parent 4532cc3569
commit 8b650f4cf4
2 changed files with 54 additions and 28 deletions

View file

@@ -278,16 +278,41 @@ async def get_thread_messages(
)
# Return messages in the format expected by assistant-ui
messages = [
NewChatMessageRead(
id=msg.id,
thread_id=msg.thread_id,
role=msg.role,
content=msg.content,
created_at=msg.created_at,
messages = []
for msg in thread.messages:
# Eagerly extract all data while in session context
msg_id = msg.id
msg_thread_id = msg.thread_id
msg_role = msg.role
msg_content = msg.content
msg_created_at = msg.created_at
msg_updated_at = msg.updated_at
msg_user_id = msg.user_id
msg_metadata = msg.message_metadata
# Manually construct user info to avoid lazy loading
user_info = None
if msg_user_id and msg.user:
user_info = MessageUserInfo(
id=msg.user.id,
email=msg.user.email,
is_active=msg.user.is_active,
is_superuser=msg.user.is_superuser,
is_verified=msg.user.is_verified,
)
messages.append(
NewChatMessageRead(
id=msg_id,
thread_id=msg_thread_id,
role=msg_role,
content=msg_content,
created_at=msg_created_at,
updated_at=msg_updated_at,
user=user_info,
message_metadata=msg_metadata,
)
)
for msg in thread.messages
]
return ThreadHistoryLoadResponse(messages=messages)

View file

@@ -871,9 +871,10 @@ async def index_connector_content(
) from e
async def update_connector_last_indexed(session: AsyncSession, connector_id: int):
async def _update_connector_timestamp_by_id(session: AsyncSession, connector_id: int):
"""
Update the last_indexed_at timestamp for a connector.
Update the last_indexed_at timestamp for a connector by its ID.
Internal helper function for routes.
Args:
session: Database session
@@ -948,7 +949,7 @@ async def run_slack_indexing(
# Only update last_indexed_at if indexing was successful (either new docs or updated docs)
if documents_processed > 0:
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
logger.info(
f"Slack indexing completed successfully: {documents_processed} documents processed"
)
@@ -1010,7 +1011,7 @@ async def run_notion_indexing(
# Only update last_indexed_at if indexing was successful (either new docs or updated docs)
if documents_processed > 0:
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
logger.info(
f"Notion indexing completed successfully: {documents_processed} documents processed"
)
@@ -1070,7 +1071,7 @@ async def run_github_indexing(
f"GitHub indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
await session.rollback()
@@ -1129,7 +1130,7 @@ async def run_linear_indexing(
f"Linear indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
await session.rollback()
@@ -1190,7 +1191,7 @@ async def run_discord_indexing(
# Only update last_indexed_at if indexing was successful (either new docs or updated docs)
if documents_processed > 0:
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
logger.info(
f"Discord indexing completed successfully: {documents_processed} documents processed"
)
@@ -1252,7 +1253,7 @@ async def run_teams_indexing(
)
# Update last_indexed_at after successful indexing (even if 0 new docs - they were checked)
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
logger.info(
f"Teams indexing completed successfully: {documents_processed} documents processed. {error_or_warning or ''}"
)
@@ -1308,7 +1309,7 @@ async def run_jira_indexing(
f"Jira indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
logger.error(
@@ -1368,7 +1369,7 @@ async def run_confluence_indexing(
f"Confluence indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
logger.error(
@@ -1426,7 +1427,7 @@ async def run_clickup_indexing(
f"ClickUp indexing successful for connector {connector_id}. Indexed {indexed_count} tasks."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
logger.error(
@@ -1484,7 +1485,7 @@ async def run_airtable_indexing(
f"Airtable indexing successful for connector {connector_id}. Indexed {indexed_count} records."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
logger.error(
@@ -1544,7 +1545,7 @@ async def run_google_calendar_indexing(
f"Google Calendar indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
logger.error(
@@ -1611,7 +1612,7 @@ async def run_google_gmail_indexing(
f"Google Gmail indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
logger.error(
@@ -1695,7 +1696,7 @@ async def run_google_drive_indexing(
f"Google Drive indexing successful for connector {connector_id}. Indexed {total_indexed} documents from {len(items.folders)} folder(s) and {len(items.files)} file(s)."
)
# Update the last indexed timestamp only on full success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
logger.error(
@@ -1755,7 +1756,7 @@ async def run_luma_indexing(
# Only update last_indexed_at if indexing was successful (either new docs or updated docs)
if documents_processed > 0:
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
logger.info(
f"Luma indexing completed successfully: {documents_processed} documents processed"
)
@@ -1815,7 +1816,7 @@ async def run_elasticsearch_indexing(
f"Elasticsearch indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit()
except Exception as e:
await session.rollback()
@@ -1874,7 +1875,7 @@ async def run_web_page_indexing(
# Only update last_indexed_at if indexing was successful (either new docs or updated docs)
if documents_processed > 0:
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
logger.info(
f"Web page indexing completed successfully: {documents_processed} documents processed"
)
@@ -1947,7 +1948,7 @@ async def run_bookstack_indexing(
f"BookStack indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
)
# Update the last indexed timestamp only on success
await update_connector_last_indexed(session, connector_id)
await _update_connector_timestamp_by_id(session, connector_id)
await session.commit() # Commit timestamp update
except Exception as e:
logger.error(