mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-04-27 09:46:25 +02:00
Add SurfSense docs to documents table
This commit is contained in:
parent
4ace7d09a0
commit
738e23b51a
9 changed files with 338 additions and 59 deletions
|
|
@ -7,7 +7,7 @@ on a [citation:doc-XXX] link.
|
|||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
|
|
@ -17,8 +17,10 @@ from app.db import (
|
|||
User,
|
||||
get_async_session,
|
||||
)
|
||||
from app.schemas import PaginatedResponse
|
||||
from app.schemas.surfsense_docs import (
|
||||
SurfsenseDocsChunkRead,
|
||||
SurfsenseDocsDocumentRead,
|
||||
SurfsenseDocsDocumentWithChunksRead,
|
||||
)
|
||||
from app.users import current_active_user
|
||||
|
|
@ -87,3 +89,81 @@ async def get_surfsense_doc_by_chunk_id(
|
|||
status_code=500,
|
||||
detail=f"Failed to retrieve Surfsense documentation: {e!s}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get(
    "/surfsense-docs",
    response_model=PaginatedResponse[SurfsenseDocsDocumentRead],
)
async def list_surfsense_docs(
    page: int = 0,
    page_size: int = 50,
    title: str | None = None,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    List all Surfsense documentation documents.

    Args:
        page: Zero-based page index.
        page_size: Number of items per page (default: 50).
        title: Optional title filter (case-insensitive substring match).
        session: Database session (injected).
        user: Current authenticated user (injected).

    Returns:
        PaginatedResponse[SurfsenseDocsDocumentRead]: Paginated list of Surfsense docs.

    Raises:
        HTTPException: 500 when the underlying database query fails.
    """
    try:
        # Base queries: one for the page of rows, one for the total count.
        query = select(SurfsenseDocsDocument)
        count_query = select(func.count()).select_from(SurfsenseDocsDocument)

        # Build the title filter once and reuse it for both queries so the
        # count and the page contents can never disagree on the predicate.
        # NOTE(review): '%' / '_' in user input act as ILIKE wildcards here —
        # presumably acceptable for a docs search; confirm if exact substring
        # matching is required.
        if title and title.strip():
            title_filter = SurfsenseDocsDocument.title.ilike(f"%{title}%")
            query = query.filter(title_filter)
            count_query = count_query.filter(title_filter)

        # Total matching rows; scalar() may be None on an empty result.
        total_result = await session.execute(count_query)
        total = total_result.scalar() or 0

        # Zero-based page index -> row offset.
        offset = page * page_size

        # Fetch the requested page, ordered by title for stable pagination.
        result = await session.execute(
            query.order_by(SurfsenseDocsDocument.title).offset(offset).limit(page_size)
        )
        docs = result.scalars().all()

        # Convert ORM rows to the response schema.
        items = [
            SurfsenseDocsDocumentRead(
                id=doc.id,
                title=doc.title,
                source=doc.source,
                content=doc.content,
                created_at=doc.created_at,
                updated_at=doc.updated_at,
            )
            for doc in docs
        ]

        # More pages exist when rows beyond this page remain.
        has_more = (offset + len(items)) < total

        return PaginatedResponse(
            items=items,
            total=total,
            page=page,
            page_size=page_size,
            has_more=has_more,
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to list Surfsense documentation: {e!s}",
        ) from e
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue