Merge pull request #410 from MODSetter/dev

feat: bumped version to v0.0.8
This commit is contained in:
Rohan Verma 2025-10-16 22:45:41 -07:00 committed by GitHub
commit f9d5a9fbc0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 2552 additions and 2548 deletions

View file

@@ -60,14 +60,13 @@ def upgrade() -> None:
def downgrade() -> None:
    """No-op downgrade: enum value removal is unsafe in PostgreSQL.

    PostgreSQL cannot simply drop a value from an existing enum type;
    rows that still reference BAIDU_SEARCH_API would be broken. Undoing
    this migration would require a manual procedure instead:

    1. Remove every reference to BAIDU_SEARCH_API from the database.
    2. Recreate the enum type without BAIDU_SEARCH_API.
    3. Re-add all the remaining enum values.

    For safety this function therefore does nothing on purpose.
    """

View file

@@ -569,16 +569,16 @@ class ConnectorService:
) -> tuple:
"""
Search using Baidu AI Search API and return both sources and documents.
Baidu AI Search provides intelligent search with automatic summarization.
We extract the raw search results (references) from the API response.
Args:
user_query: User's search query
user_id: User ID
search_space_id: Search space ID
top_k: Maximum number of results to return
Returns:
tuple: (sources_info_dict, documents_list)
"""
@@ -680,7 +680,9 @@ class ConnectorService:
"sources": [],
}, []
except Exception as exc:
print(f"ERROR: Unexpected error calling Baidu API: {type(exc).__name__}: {exc!r}")
print(
f"ERROR: Unexpected error calling Baidu API: {type(exc).__name__}: {exc!r}"
)
print(f"Endpoint: {baidu_endpoint}")
print(f"Payload: {payload}")
return {
@@ -705,9 +707,11 @@ class ConnectorService:
# Extract references (search results) from the response
baidu_references = data.get("references", [])
if "code" in data or "message" in data:
print(f"WARNING: Baidu API returned error - Code: {data.get('code')}, Message: {data.get('message')}")
print(
f"WARNING: Baidu API returned error - Code: {data.get('code')}, Message: {data.get('message')}"
)
if not baidu_references:
print("WARNING: No references found in Baidu API response")
@@ -735,7 +739,9 @@ class ConnectorService:
source = {
"id": self.source_id_counter,
"title": title,
"description": content[:300] if content else "", # Limit description length
"description": content[:300]
if content
else "", # Limit description length
"url": url,
}
sources_list.append(source)

View file

@@ -2,7 +2,6 @@
Elasticsearch indexer for SurfSense
"""
import hashlib
import json
import logging
from datetime import UTC, datetime

View file

@@ -1,6 +1,6 @@
[project]
name = "surf-new-backend"
version = "0.0.7"
version = "0.0.8"
description = "SurfSense Backend"
readme = "README.md"
requires-python = ">=3.12"

5066
surfsense_backend/uv.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -1,7 +1,7 @@
{
"name": "surfsense_browser_extension",
"displayName": "Surfsense Browser Extension",
"version": "0.0.7",
"version": "0.0.8",
"description": "Extension to collect Browsing History for SurfSense.",
"author": "https://github.com/MODSetter",
"scripts": {

View file

@@ -220,7 +220,7 @@ export const AppSidebar = memo(function AppSidebar({
</div>
<div className="grid flex-1 text-left text-sm leading-tight">
<span className="truncate font-medium">SurfSense</span>
<span className="truncate text-xs">beta v0.0.7</span>
<span className="truncate text-xs">beta v0.0.8</span>
</div>
</div>
</SidebarMenuButton>

View file

@@ -1,6 +1,6 @@
{
"name": "surfsense_web",
"version": "0.0.7",
"version": "0.0.8",
"private": true,
"description": "SurfSense Frontend",
"scripts": {