style(backend): run ruff format on 10 files

This commit is contained in:
CREDO23 2026-01-28 22:20:02 +02:00
parent 20b8a17254
commit 949ec949f6
10 changed files with 48 additions and 32 deletions

View file

@@ -670,7 +670,9 @@ async def delete_thread(
) from None
@router.post("/threads/{thread_id}/complete-clone", response_model=CompleteCloneResponse)
@router.post(
"/threads/{thread_id}/complete-clone", response_model=CompleteCloneResponse
)
async def complete_clone(
thread_id: int,
session: AsyncSession = Depends(get_async_session),
@@ -702,7 +704,9 @@ async def complete_clone(
raise HTTPException(status_code=400, detail="Clone already completed")
if not thread.cloned_from_thread_id:
raise HTTPException(status_code=400, detail="No source thread to clone from")
raise HTTPException(
status_code=400, detail="No source thread to clone from"
)
message_count = await complete_clone_content(
session=session,

View file

@@ -53,7 +53,9 @@ async def clone_public_chat_endpoint(
source_thread = await get_thread_by_share_token(session, share_token)
if not source_thread:
raise HTTPException(status_code=404, detail="Chat not found or no longer public")
raise HTTPException(
status_code=404, detail="Chat not found or no longer public"
)
target_search_space_id = await get_user_default_search_space(session, user.id)

View file

@@ -941,7 +941,11 @@ async def index_connector_content(
f"Triggering web pages indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
)
index_crawled_urls_task.delay(
connector_id, search_space_id, str(user.id), indexing_from, indexing_to
connector_id,
search_space_id,
str(user.id),
indexing_from,
indexing_to,
)
response_message = "Web page indexing started in the background."