chore: removed content truncation for better UI

This commit is contained in:
DESKTOP-RTLN3BA\$punk 2025-10-14 14:19:48 -07:00
parent d86aaea125
commit 31982cea9a
9 changed files with 42 additions and 145 deletions

View file

@ -71,9 +71,7 @@ def extract_sources_from_documents(
source = {
"id": doc.get("chunk_id", source_id_counter),
"title": document_info.get("title", "Untitled Document"),
"description": doc.get("content", "")[:100] + "..."
if len(doc.get("content", "")) > 100
else doc.get("content", ""),
"description": doc.get("content", "").strip(),
"url": metadata.get("url", metadata.get("page_url", "")),
}
@ -204,11 +202,7 @@ async def fetch_documents_by_ids(
title += f" ({issue_state})"
# Create description
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
if comment_count:
description += f" | Comments: {comment_count}"
@ -229,11 +223,7 @@ async def fetch_documents_by_ids(
if message_date:
title += f" ({message_date})"
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
url = (
f"https://slack.com/app_redirect?channel={channel_id}"
if channel_id
@ -246,11 +236,7 @@ async def fetch_documents_by_ids(
page_id = metadata.get("page_id", "")
title = f"Notion: {page_title}"
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
url = (
f"https://notion.so/{page_id.replace('-', '')}"
if page_id
@ -261,11 +247,7 @@ async def fetch_documents_by_ids(
title = f"GitHub: {doc.title}"
description = metadata.get(
"description",
(
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
),
(doc.content),
)
url = metadata.get("url", "")
@ -281,11 +263,7 @@ async def fetch_documents_by_ids(
description = metadata.get(
"description",
(
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
),
(doc.content),
)
url = (
f"https://www.youtube.com/watch?v={video_id}"
@ -304,11 +282,7 @@ async def fetch_documents_by_ids(
if message_date:
title += f" ({message_date})"
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
if guild_id and channel_id:
url = f"https://discord.com/channels/{guild_id}/{channel_id}"
@ -329,11 +303,7 @@ async def fetch_documents_by_ids(
if status:
title += f" ({status})"
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
if priority:
description += f" | Priority: {priority}"
if issue_type:
@ -395,11 +365,7 @@ async def fetch_documents_by_ids(
except Exception:
pass
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
if location:
description += f" | Location: {location}"
if calendar_id and calendar_id != "primary":
@ -437,11 +403,8 @@ async def fetch_documents_by_ids(
except Exception:
pass
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
if location_name:
description += f" | Venue: {location_name}"
elif meeting_url:
@ -466,11 +429,7 @@ async def fetch_documents_by_ids(
)
title += f" (visited: {formatted_date})"
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
url = webpage_url
elif doc_type == "CRAWLED_URL":
@ -479,22 +438,15 @@ async def fetch_documents_by_ids(
"og:description",
metadata.get(
"ogDescription",
(
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
),
(doc.content),
),
)
url = metadata.get("url", "")
else: # FILE and other types
title = doc.title
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
description = doc.content
url = metadata.get("url", "")
# Create source entry