#!/usr/bin/env python3
2024-07-12 15:06:51 +01:00
"""
Loads a PDF document into TrustGraph processing.
2024-07-12 15:06:51 +01:00
"""
2024-07-10 23:20:06 +01:00
import pulsar
2024-07-25 22:24:14 +01:00
from pulsar.schema import JsonSchema
2024-07-10 23:20:06 +01:00
import base64
import hashlib
2024-07-12 15:06:51 +01:00
import argparse
import os
import time
import uuid
2024-07-12 15:06:51 +01:00
from trustgraph.schema import Document, document_ingest_queue
from trustgraph.schema import Metadata
from trustgraph.log_level import LogLevel
from trustgraph.knowledge import hash, to_uri
from trustgraph.knowledge import PREF_PUBEV, PREF_DOC, PREF_ORG
from trustgraph.knowledge import Organization, PublicationEvent
from trustgraph.knowledge import DigitalDocument
2024-07-12 15:06:51 +01:00
# Fallback identity/scoping values applied when the caller does not
# override --user / --collection on the command line.
default_user = 'trustgraph'
default_collection = 'default'
2024-07-12 15:06:51 +01:00
class Loader:
    """Publishes PDF documents to a Pulsar queue for TrustGraph ingestion.

    Connects to Pulsar on construction and sends one Document message per
    input file.  The document ID is derived from a hash of the file's raw
    content, so identical files map to the same ID.
    """

    def __init__(
            self,
            pulsar_host,
            output_queue,
            user,
            collection,
            log_level,
            metadata,
    ):
        """Connect to Pulsar and create the document producer.

        :param pulsar_host: Pulsar service URL, e.g. pulsar://host:6650
        :param output_queue: topic to publish Document messages on
        :param user: user ID stamped into each message's metadata
        :param collection: collection ID stamped into each message's metadata
        :param log_level: LogLevel controlling Pulsar client verbosity
        :param metadata: DigitalDocument whose triples are attached per file
        """
        self.client = pulsar.Client(
            pulsar_host,
            logger=pulsar.ConsoleLogger(log_level.to_pulsar())
        )

        # Chunking lets messages exceed the broker's max message size;
        # PDFs can easily be larger than the default limit.
        self.producer = self.client.create_producer(
            topic=output_queue,
            schema=JsonSchema(Document),
            chunking_enabled=True,
        )

        self.user = user
        self.collection = collection
        self.metadata = metadata

    def load(self, files):
        """Load each file in turn; a failure on one file does not stop the rest."""
        for file in files:
            self.load_file(file)

    def load_file(self, file):
        """Read one PDF, derive its content-hash ID, and publish it."""
        try:
            # Context manager ensures the handle is closed even if a
            # later step raises (original leaked the file object).
            with open(file, "rb") as f:
                data = f.read()

            # Content-addressed ID: a URI built from the SHA256 hash of
            # the raw bytes.  (Renamed from `id`, which shadowed the
            # builtin.)
            doc_id = to_uri(PREF_DOC, hash(data))

            # Collect the metadata triples emitted for this document.
            triples = []
            self.metadata.id = doc_id
            self.metadata.emit(triples.append)

            r = Document(
                metadata=Metadata(
                    id=doc_id,
                    metadata=triples,
                    user=self.user,
                    collection=self.collection,
                ),
                data=base64.b64encode(data),
            )

            self.producer.send(r)

            print(f"{file}: Loaded successfully.")

        except Exception as e:
            # Best-effort per file: report and continue with the rest.
            print(f"{file}: Failed: {str(e)}", flush=True)

    def __del__(self):
        # __init__ may have raised before self.client was assigned;
        # guard so interpreter shutdown doesn't emit an AttributeError.
        if hasattr(self, "client"):
            self.client.close()
def main():
    """Parse arguments, build document metadata, and load the PDF files.

    Retries the whole operation every 10 seconds until it succeeds, so
    the script can wait out a Pulsar broker that is still starting up.
    """

    parser = argparse.ArgumentParser(
        prog='tg-load-pdf',
        description=__doc__,
    )

    default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://localhost:6650')
    default_output_queue = document_ingest_queue

    parser.add_argument(
        '-p', '--pulsar-host',
        default=default_pulsar_host,
        help=f'Pulsar host (default: {default_pulsar_host})',
    )

    parser.add_argument(
        '-o', '--output-queue',
        default=default_output_queue,
        help=f'Output queue (default: {default_output_queue})'
    )

    parser.add_argument(
        '-u', '--user',
        default=default_user,
        help=f'User ID (default: {default_user})'
    )

    parser.add_argument(
        '-c', '--collection',
        default=default_collection,
        help=f'Collection ID (default: {default_collection})'
    )

    parser.add_argument(
        '--name', help='Document name'
    )

    parser.add_argument(
        '--description', help='Document description'
    )

    parser.add_argument(
        '--copyright-notice', help='Copyright notice'
    )

    parser.add_argument(
        '--copyright-holder', help='Copyright holder'
    )

    parser.add_argument(
        '--copyright-year', help='Copyright year'
    )

    parser.add_argument(
        '--license', help='Copyright license'
    )

    parser.add_argument(
        '--publication-organization', help='Publication organization'
    )

    parser.add_argument(
        '--publication-description', help='Publication description'
    )

    parser.add_argument(
        '--publication-date', help='Publication date'
    )

    parser.add_argument(
        '--url', help='Document URL'
    )

    parser.add_argument(
        '--keyword', nargs='+', help='Keyword'
    )

    parser.add_argument(
        '--identifier', '--id', help='Document ID'
    )

    parser.add_argument(
        '-l', '--log-level',
        type=LogLevel,
        default=LogLevel.ERROR,
        choices=list(LogLevel),
        # Fixed: help text previously said "Output queue" (copy-paste).
        help='Log level (default: error)'
    )

    parser.add_argument(
        'files', nargs='+',
        help='File to load'
    )

    args = parser.parse_args()

    while True:

        try:

            # Bug fix: the original passed the *builtin function* `id` as
            # a stray positional argument to DigitalDocument (no local
            # `id` exists here).  The real document ID is assigned per
            # file by Loader.load_file from the content hash.
            # NOTE(review): --identifier is parsed but never used —
            # confirm whether it should override the content-hash ID.
            document = DigitalDocument(
                name=args.name,
                description=args.description,
                copyright_notice=args.copyright_notice,
                copyright_holder=args.copyright_holder,
                copyright_year=args.copyright_year,
                license=args.license,
                url=args.url,
                keywords=args.keyword,
            )

            if args.publication_organization:

                # Organization IDs are content-addressed from the name so
                # the same organization always maps to the same URI.
                org = Organization(
                    id=to_uri(PREF_ORG, hash(args.publication_organization)),
                    name=args.publication_organization,
                )

                # Publication events get a fresh UUID-based URI each run.
                document.publication = PublicationEvent(
                    id=to_uri(PREF_PUBEV, str(uuid.uuid4())),
                    organization=org,
                    description=args.publication_description,
                    start_date=args.publication_date,
                    end_date=args.publication_date,
                )

            p = Loader(
                pulsar_host=args.pulsar_host,
                output_queue=args.output_queue,
                user=args.user,
                collection=args.collection,
                log_level=args.log_level,
                metadata=document,
            )

            p.load(args.files)

            print("All done.")

            break

        except Exception as e:

            print("Exception:", e, flush=True)
            print("Will retry...", flush=True)

            # Back off before retrying, e.g. while Pulsar comes up.
            time.sleep(10)
# Guard the entry point so importing this module has no side effects.
if __name__ == "__main__":
    main()