mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-02 04:12:47 +02:00
fix: critical timestamp parsing and audit issues
- Fix timestamp conversion: String(epochMs) → new Date(epochMs).toISOString() in use-messages-sync, use-comments-sync, use-documents, use-inbox. Without this, date comparisons (isEdited, cutoff filters) would fail. - Fix updated_at: undefined → null in use-inbox to match InboxItem type - Fix ZeroProvider: skip Zero connection for unauthenticated users - Clean 30+ stale "Electric SQL" comments in backend Python code
This commit is contained in:
parent
f04ab89418
commit
cf21eaacfc
33 changed files with 62 additions and 57 deletions
|
|
@ -887,7 +887,7 @@ async def _process_file_with_document(
|
|||
)
|
||||
|
||||
try:
|
||||
# Set status to PROCESSING (shows spinner in UI via ElectricSQL)
|
||||
# Set status to PROCESSING (shows spinner in UI via Zero)
|
||||
document.status = DocumentStatus.processing()
|
||||
await session.commit()
|
||||
logger.info(
|
||||
|
|
@ -951,7 +951,7 @@ async def _process_file_with_document(
|
|||
):
|
||||
page_limit_error = e.__cause__
|
||||
|
||||
# Mark document as failed (shows error in UI via ElectricSQL)
|
||||
# Mark document as failed (shows error in UI via Zero)
|
||||
error_message = str(e)[:500]
|
||||
document.status = DocumentStatus.failed(error_message)
|
||||
document.updated_at = get_current_timestamp()
|
||||
|
|
|
|||
|
|
@ -139,7 +139,7 @@ async def index_airtable_records(
|
|||
await task_logger.log_task_success(
|
||||
log_entry, success_msg, {"bases_count": 0}
|
||||
)
|
||||
# CRITICAL: Update timestamp even when no bases found so Electric SQL syncs
|
||||
# CRITICAL: Update timestamp even when no bases found so Zero syncs
|
||||
await update_connector_last_indexed(
|
||||
session, connector, update_last_indexed
|
||||
)
|
||||
|
|
@ -460,7 +460,7 @@ async def index_airtable_records(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
total_processed = documents_indexed
|
||||
|
|
|
|||
|
|
@ -462,7 +462,7 @@ async def index_bookstack_pages(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
# This ensures the UI shows "Last indexed" instead of "Never indexed"
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
|
|
|
|||
|
|
@ -470,7 +470,7 @@ async def index_clickup_tasks(
|
|||
|
||||
total_processed = documents_indexed
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
# This ensures the UI shows "Last indexed" instead of "Never indexed"
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
|
|
|
|||
|
|
@ -442,7 +442,7 @@ async def index_confluence_pages(
|
|||
documents_failed += 1
|
||||
continue # Skip this page and continue with others
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
# This ensures the UI shows "Last indexed" instead of "Never indexed"
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
|
|
|
|||
|
|
@ -718,7 +718,7 @@ async def index_discord_messages(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
|
|
|
|||
|
|
@ -413,7 +413,7 @@ async def index_elasticsearch_documents(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
# This ensures the UI shows "Last indexed" instead of "Never indexed"
|
||||
if update_last_indexed:
|
||||
connector.last_indexed_at = (
|
||||
|
|
|
|||
|
|
@ -451,7 +451,7 @@ async def index_github_repos(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit
|
||||
|
|
|
|||
|
|
@ -554,7 +554,7 @@ async def index_google_calendar_events(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
|
|
|
|||
|
|
@ -477,7 +477,7 @@ async def index_google_gmail_messages(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
|
|
|
|||
|
|
@ -422,7 +422,7 @@ async def index_jira_issues(
|
|||
documents_failed += 1
|
||||
continue # Skip this issue and continue with others
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
# This ensures the UI shows "Last indexed" instead of "Never indexed"
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
|
|
|
|||
|
|
@ -463,7 +463,7 @@ async def index_linear_issues(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
|
|
|
|||
|
|
@ -520,7 +520,7 @@ async def index_luma_events(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
# This ensures the UI shows "Last indexed" instead of "Never indexed"
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
|
|
|
|||
|
|
@ -252,7 +252,7 @@ async def index_notion_pages(
|
|||
{"pages_found": 0},
|
||||
)
|
||||
logger.info("No Notion pages found to index")
|
||||
# CRITICAL: Update timestamp even when no pages found so Electric SQL syncs
|
||||
# CRITICAL: Update timestamp even when no pages found so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
await session.commit()
|
||||
await notion_client.close()
|
||||
|
|
@ -506,7 +506,7 @@ async def index_notion_pages(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
total_processed = documents_indexed
|
||||
|
|
|
|||
|
|
@ -599,7 +599,7 @@ async def index_obsidian_vault(
|
|||
failed_count += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
|
|
|
|||
|
|
@ -256,7 +256,7 @@ async def index_slack_messages(
|
|||
f"No Slack channels found for connector {connector_id}",
|
||||
{"channels_found": 0},
|
||||
)
|
||||
# CRITICAL: Update timestamp even when no channels found so Electric SQL syncs
|
||||
# CRITICAL: Update timestamp even when no channels found so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
await session.commit()
|
||||
return 0, None # Return None (not error) when no channels found
|
||||
|
|
@ -593,7 +593,7 @@ async def index_slack_messages(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
|
|
|
|||
|
|
@ -249,7 +249,7 @@ async def index_teams_messages(
|
|||
f"No Teams found for connector {connector_id}",
|
||||
{"teams_found": 0},
|
||||
)
|
||||
# CRITICAL: Update timestamp even when no teams found so Electric SQL syncs
|
||||
# CRITICAL: Update timestamp even when no teams found so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
await session.commit()
|
||||
return 0, None # Return None (not error) when no items found
|
||||
|
|
@ -635,7 +635,7 @@ async def index_teams_messages(
|
|||
documents_failed += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
|
|
|
|||
|
|
@ -444,7 +444,7 @@ async def index_crawled_urls(
|
|||
|
||||
total_processed = documents_indexed + documents_updated
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Electric SQL syncs
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue