mirror of
https://github.com/trustgraph-ai/trustgraph.git
synced 2026-04-27 01:16:22 +02:00
Fix/doc streaming proto (#673)
* Librarian streaming doc download * Document stream download endpoint
This commit is contained in:
parent
b2ef7bbb8c
commit
df1808768d
7 changed files with 128 additions and 33 deletions
|
|
@ -174,4 +174,6 @@ class LibraryResponseTranslator(MessageTranslator):
|
|||
|
||||
def from_response_with_completion(self, obj: LibrarianResponse) -> Tuple[Dict[str, Any], bool]:
    """Translate *obj* to a dict and report stream completion.

    Returns:
        Tuple of (response_dict, is_final). ``response_dict`` is the
        Pulsar-translated payload from ``self.from_pulsar``; ``is_final``
        is True when this message ends the response stream.
    """
    # For streaming responses, check end_of_stream to determine if this is
    # the final message. Non-streaming responses lack the attribute, so the
    # default of True treats them as single, final messages.
    is_final = getattr(obj, 'end_of_stream', True)
    return self.from_pulsar(obj), is_final
|
||||
|
|
|
|||
|
|
@ -212,6 +212,9 @@ class LibrarianResponse:
|
|||
# list-uploads response
upload_sessions: list[UploadSession] = field(default_factory=list)

# stream-document response - indicates final chunk in stream;
# False while more chunks follow, True (or absent) on the last one
end_of_stream: bool = False

# FIXME: Is this right? Using persistence on librarian so that
# message chunking works
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue