From 9b5cbbf9ca97bab872b7803d51b91b7db4b0f230 Mon Sep 17 00:00:00 2001 From: Cyber MacGeddon Date: Wed, 10 Jul 2024 23:20:06 +0100 Subject: [PATCH] Trustgraph initial code drop --- .gitignore | 4 + Containerfile | 54 ++ LICENSE | 202 ++++++ Makefile | 30 + architecture.png | Bin 0 -> 78652 bytes architecture.svg | 598 ++++++++++++++++++ docker-compose-azure.yaml | 172 +++++ docker-compose-claude.yaml | 170 +++++ docker-compose-ollama.yaml | 178 ++++++ docker-compose-vertexai.yaml | 174 +++++ graph-clear | 8 + graph-dump | 70 ++ requirements.txt | 16 + scripts/chunker-recursive | 6 + scripts/embeddings-hf | 6 + scripts/embeddings-vectorize | 6 + scripts/graph-rag | 6 + scripts/graph-show | 10 + scripts/graph-to-turtle | 37 ++ scripts/graph-write-cassandra | 6 + scripts/init-pulsar-manager | 11 + scripts/kg-extract-definitions | 6 + scripts/kg-extract-relationships | 6 + scripts/llm-azure-text | 6 + scripts/llm-claude-text | 6 + scripts/llm-ollama-text | 6 + scripts/llm-vertexai-text | 6 + scripts/loader | 47 ++ scripts/pdf-decoder | 6 + scripts/query | 16 + scripts/vector-write-milvus | 6 + setup.py | 65 ++ tests/test-embeddings | 15 + tests/test-graph-rag | 14 + tests/test-llm | 15 + tests/test-milvus | 35 + trustgraph/__init__.py | 0 trustgraph/chunker/__init__.py | 0 trustgraph/chunker/recursive/__init__.py | 3 + trustgraph/chunker/recursive/__main__.py | 7 + trustgraph/chunker/recursive/chunker.py | 164 +++++ trustgraph/decoder/__init__.py | 0 trustgraph/decoder/pdf/__init__.py | 3 + trustgraph/decoder/pdf/__main__.py | 7 + trustgraph/decoder/pdf/pdf_decoder.py | 159 +++++ trustgraph/edge_map.py | 102 +++ trustgraph/embeddings/__init__.py | 0 trustgraph/embeddings/hf/__init__.py | 3 + trustgraph/embeddings/hf/__main__.py | 7 + trustgraph/embeddings/hf/hf.py | 161 +++++ trustgraph/embeddings/vectorize/__init__.py | 3 + trustgraph/embeddings/vectorize/__main__.py | 6 + trustgraph/embeddings/vectorize/vectorize.py | 167 +++++ trustgraph/embeddings_client.py | 70 ++ 
trustgraph/graph/__init__.py | 0 trustgraph/graph/cassandra_write/__init__.py | 3 + trustgraph/graph/cassandra_write/__main__.py | 7 + trustgraph/graph/cassandra_write/write.py | 144 +++++ trustgraph/graph_rag.py | 227 +++++++ trustgraph/graph_rag_client.py | 68 ++ trustgraph/kg/__init__.py | 0 trustgraph/kg/extract_definitions/__init__.py | 3 + trustgraph/kg/extract_definitions/__main__.py | 7 + trustgraph/kg/extract_definitions/extract.py | 193 ++++++ .../kg/extract_relationships/__init__.py | 3 + .../kg/extract_relationships/__main__.py | 7 + .../kg/extract_relationships/extract.py | 252 ++++++++ trustgraph/llm/__init__.py | 0 trustgraph/llm/azure_text/__init__.py | 3 + trustgraph/llm/azure_text/__main__.py | 7 + trustgraph/llm/azure_text/llm.py | 213 +++++++ trustgraph/llm/claude_text/__init__.py | 3 + trustgraph/llm/claude_text/__main__.py | 7 + trustgraph/llm/claude_text/llm.py | 190 ++++++ trustgraph/llm/ollama_text/__init__.py | 3 + trustgraph/llm/ollama_text/__main__.py | 7 + trustgraph/llm/ollama_text/llm.py | 169 +++++ trustgraph/llm/vertexai_text/__init__.py | 3 + trustgraph/llm/vertexai_text/__main__.py | 7 + trustgraph/llm/vertexai_text/llm.py | 254 ++++++++ trustgraph/llm_client.py | 71 +++ trustgraph/log_level.py | 20 + trustgraph/prompts.py | 138 ++++ trustgraph/rag/__init__.py | 0 trustgraph/rag/graph/__init__.py | 3 + trustgraph/rag/graph/__main__.py | 7 + trustgraph/rag/graph/rag.py | 172 +++++ trustgraph/rdf.py | 6 + trustgraph/schema.py | 67 ++ trustgraph/trustgraph.py | 108 ++++ trustgraph/vector/__init__.py | 0 trustgraph/vector/milvus_write/__init__.py | 3 + trustgraph/vector/milvus_write/__main__.py | 7 + trustgraph/vector/milvus_write/write.py | 136 ++++ 94 files changed, 5399 insertions(+) create mode 100644 .gitignore create mode 100644 Containerfile create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 architecture.png create mode 100644 architecture.svg create mode 100644 docker-compose-azure.yaml create mode 
100644 docker-compose-claude.yaml create mode 100644 docker-compose-ollama.yaml create mode 100644 docker-compose-vertexai.yaml create mode 100755 graph-clear create mode 100755 graph-dump create mode 100644 requirements.txt create mode 100755 scripts/chunker-recursive create mode 100755 scripts/embeddings-hf create mode 100755 scripts/embeddings-vectorize create mode 100755 scripts/graph-rag create mode 100755 scripts/graph-show create mode 100755 scripts/graph-to-turtle create mode 100755 scripts/graph-write-cassandra create mode 100755 scripts/init-pulsar-manager create mode 100755 scripts/kg-extract-definitions create mode 100755 scripts/kg-extract-relationships create mode 100755 scripts/llm-azure-text create mode 100755 scripts/llm-claude-text create mode 100755 scripts/llm-ollama-text create mode 100755 scripts/llm-vertexai-text create mode 100755 scripts/loader create mode 100755 scripts/pdf-decoder create mode 100755 scripts/query create mode 100755 scripts/vector-write-milvus create mode 100644 setup.py create mode 100755 tests/test-embeddings create mode 100755 tests/test-graph-rag create mode 100755 tests/test-llm create mode 100755 tests/test-milvus create mode 100644 trustgraph/__init__.py create mode 100644 trustgraph/chunker/__init__.py create mode 100644 trustgraph/chunker/recursive/__init__.py create mode 100644 trustgraph/chunker/recursive/__main__.py create mode 100755 trustgraph/chunker/recursive/chunker.py create mode 100644 trustgraph/decoder/__init__.py create mode 100644 trustgraph/decoder/pdf/__init__.py create mode 100755 trustgraph/decoder/pdf/__main__.py create mode 100755 trustgraph/decoder/pdf/pdf_decoder.py create mode 100644 trustgraph/edge_map.py create mode 100644 trustgraph/embeddings/__init__.py create mode 100644 trustgraph/embeddings/hf/__init__.py create mode 100755 trustgraph/embeddings/hf/__main__.py create mode 100755 trustgraph/embeddings/hf/hf.py create mode 100644 trustgraph/embeddings/vectorize/__init__.py create mode 
100755 trustgraph/embeddings/vectorize/__main__.py create mode 100755 trustgraph/embeddings/vectorize/vectorize.py create mode 100644 trustgraph/embeddings_client.py create mode 100644 trustgraph/graph/__init__.py create mode 100644 trustgraph/graph/cassandra_write/__init__.py create mode 100755 trustgraph/graph/cassandra_write/__main__.py create mode 100755 trustgraph/graph/cassandra_write/write.py create mode 100644 trustgraph/graph_rag.py create mode 100644 trustgraph/graph_rag_client.py create mode 100644 trustgraph/kg/__init__.py create mode 100644 trustgraph/kg/extract_definitions/__init__.py create mode 100755 trustgraph/kg/extract_definitions/__main__.py create mode 100755 trustgraph/kg/extract_definitions/extract.py create mode 100644 trustgraph/kg/extract_relationships/__init__.py create mode 100755 trustgraph/kg/extract_relationships/__main__.py create mode 100755 trustgraph/kg/extract_relationships/extract.py create mode 100644 trustgraph/llm/__init__.py create mode 100644 trustgraph/llm/azure_text/__init__.py create mode 100755 trustgraph/llm/azure_text/__main__.py create mode 100755 trustgraph/llm/azure_text/llm.py create mode 100644 trustgraph/llm/claude_text/__init__.py create mode 100755 trustgraph/llm/claude_text/__main__.py create mode 100755 trustgraph/llm/claude_text/llm.py create mode 100644 trustgraph/llm/ollama_text/__init__.py create mode 100755 trustgraph/llm/ollama_text/__main__.py create mode 100755 trustgraph/llm/ollama_text/llm.py create mode 100644 trustgraph/llm/vertexai_text/__init__.py create mode 100755 trustgraph/llm/vertexai_text/__main__.py create mode 100755 trustgraph/llm/vertexai_text/llm.py create mode 100644 trustgraph/llm_client.py create mode 100644 trustgraph/log_level.py create mode 100644 trustgraph/prompts.py create mode 100644 trustgraph/rag/__init__.py create mode 100644 trustgraph/rag/graph/__init__.py create mode 100755 trustgraph/rag/graph/__main__.py create mode 100755 trustgraph/rag/graph/rag.py create mode 
100644 trustgraph/rdf.py create mode 100644 trustgraph/schema.py create mode 100644 trustgraph/trustgraph.py create mode 100644 trustgraph/vector/__init__.py create mode 100644 trustgraph/vector/milvus_write/__init__.py create mode 100755 trustgraph/vector/milvus_write/__main__.py create mode 100755 trustgraph/vector/milvus_write/write.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..fdd19e0a --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +*~ +__pycache__/ +env/ +*.egg_info/ diff --git a/Containerfile b/Containerfile new file mode 100644 index 00000000..e01bd7a2 --- /dev/null +++ b/Containerfile @@ -0,0 +1,54 @@ + +# ---------------------------------------------------------------------------- +# Build an AI container. This does the torch install which is huge, and I +# like to avoid re-doing this. +# ---------------------------------------------------------------------------- + +FROM docker.io/fedora:40 AS ai + +ENV PIP_BREAK_SYSTEM_PACKAGES=1 + +RUN dnf install -y python3 python3-pip python3-wheel python3-aiohttp \ + python3-rdflib + +RUN pip3 install torch --index-url https://download.pytorch.org/whl/cpu + +RUN pip3 install anthropic google-cloud-aiplatform langchain langchain-core \ + langchain-huggingface langchain-text-splitters langchain-community \ + pymilvus sentence-transformers transformers huggingface-hub \ + pulsar-client && \ + pip3 cache purge + +# ---------------------------------------------------------------------------- +# Build a container which contains the built Python package. 
The build +# creates a bunch of left-over cruft, a separate phase means this is only +# needed to support package build +# ---------------------------------------------------------------------------- + +FROM ai AS build + +env PACKAGE_VERSION=0.0.0 + +COPY setup.py /root/build/ +COPY README.md /root/build/ +COPY scripts/ /root/build/scripts/ +COPY trustgraph/ root/build/trustgraph/ + +RUN (cd /root/build && pip3 wheel -w /root/wheels --no-deps .) + +# ---------------------------------------------------------------------------- +# Finally, the target container. Start with base and add the package. +# ---------------------------------------------------------------------------- + +FROM ai + +COPY --from=build /root/wheels /root/wheels + +RUN pip3 install /root/wheels/trustgraph-* && \ + pip3 cache purge && \ + rm -rf /root/wheels + +WORKDIR / + +CMD sleep 1000000 + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..f7fbdc72 --- /dev/null +++ b/Makefile @@ -0,0 +1,30 @@ + +# VERSION=$(shell git describe | sed 's/^v//') +VERSION=0.1.16 + +all: container + +CONTAINER=docker.io/trustgraph/trustgraph-flow + +container: + podman build -f Containerfile -t ${CONTAINER}:${VERSION} \ + --format docker + +push: + podman push ${CONTAINER}:${VERSION} + +start: + podman run -i -t --name ${NAME} \ + -i -t \ + -p 8081:8081 \ + -v $$(pwd)/keys:/keys \ + -v $$(pwd)/configs:/configs \ + ${CONTAINER}:${VERSION} + +stop: + podman rm -f ${NAME} + +clean: + rm -rf wheels/ + +# sed -i 's/0.1.15/0.1.16/' docker-compose*.yaml diff --git a/architecture.png b/architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..deb30c7caa2e89973b78f87e7d6a6746ef5bc131 GIT binary patch literal 78652 zcmeFZWmuG3*fuh#;U6(nw3I!H2#APCOAH+j0}P!4Dgx3ubf|Po z3`h)oYhb_6@x8ykd);eYabD+jt{tJKB1cR_O$32Jh@tY*>JZ4~ zR}cuk`qeAoH#?#;z2INhT;=uLA&}c1xPS0wtm%EgFYkKD=z3_lSbKPzxmiKHy}h~Y zoE_XP%v`OwT-;3Ngsy2z9^R2Ze+8>-fo5K zUJV`_y+l-n+B(52mA`qPK;bgc(*Vh(Wh&2E1FdGZuZ*j|8=DOXpWCm)<-*zktXD z-@JN$m3j5%{~heIV<;DHHFR&3DXD*xmLBjhEb~ym%U9sYh16wEXio20h!=2wmls!V zQ0%5h8_Sq6&D8REzI1H9a(vf<3n`VSf>JOtzROE=bOeFa>YcB$?u)^?qAL&QR*9n# zlDs7YKJsxLpP6G{C^J4xk(cF9e$SH7!=X=1*`kIRLIw{Z)eBT9xXwJD%=wwx$ zDimXMI&HkU^@$;nDUKF?<%)h8+$2Ze*{F*M&E%9I%rI}{(0AxGTr8EO2A~VwhFM(( 
zAJq$VqD~8w)jlgkow7M`njq~}GyC)KAn$ITn?qt~*}r&|D9^QF-5ULn01^6iEs-Qn z5536j??&02R+tIBIu%i5N>yIbGZOa5Jv>Nc(}aGLws*;RVZ&;*y-{XC^6r z819V0-6r8X6Gm>6v~Iop{$9FHl6}tIj zRhD=atkuB|yvx7+&MNj@0U5aNoc?2iSPP(lRoX?}Eh%s1$lr%kC`iX+Z<>wvc3KCx4T;yI%o;cdU;&U|^sDnliOlEY&ux}Cw4lBF z#Z}{=f-0=A(W;Oep$2bOPe>2$oR$zS+Lu9kmdz_^WLav?m3<6*_za^OWoK${7~y!7 zPiGTVW7yoZ`%-+WHgawS4?=(Se3P8(ECq|fxC~g0_80=ELpQw*`>UGBtXy>rQ$iso z0ktaIk#A?uL8%_(qZFVqw+g2ou@tM%Ja$Ie4%1JaLIsjExnXcH&(QOE)(qz%Zea(0 zqmX?39xc9s;c^DZ5ql}$M(Rsi1e2XrhVNVRJ$S}Y*DV^UuSUuZ8aH>@` z4tSdPhX+#4LBDJ;N?qTWP^6Gj-t)NiD^cEZ=xBY{WW<#J$x;fs)K+y`mbPd%`*uOW zeN3@*vY|>rxG2WDPcKxXOc?i+prBcly-82l>tRBOuHJckb5-=mtUthRBxiKrHKF-D z7)RrxQ`eAt-?R7%!cjKduWwm?mcZzE?Ko^u;Al0cxu?Sn5`54)OAZ{UmZ@pzrBCnJ zi~tR0g_iUdBtV`7seB-`HS(x>njzI{AN7|uE<}W9P_J|X0%0dS-x7(+ZE4+a(v~A* z6HgQ9rb|^Rk4~p}X*$<1zv}CHGAADq5~-6yOsyWAn_jrrgE1kSq6q^wsmo7m^rKLY zy1ViowAeefQpQa*VU*<>M6$KBPA9e9uZ*94V2!b?aY7!@fTw4h<0*_I&Q}Wnn^B z54AX`kN($bDew;Iv>^R3JH7uc*OgSh`K%=G{@G2vcfnw!TT->Pdn<3qmshn&(e)5IfqWt3Lkz!s!r_V)>_kYt0cq zUT>MqaSpvY>Nzr-jFBm>&ViqFP9u%M+%#gZ8l1S}|z&alR>o@Zy?wMn(thl4{ysM%^2 z3&M0nx5roa!=URubFK@skyWqQ(8uhF8VnRr+6utpGW z7SeBnku{exoMA?bI~4UrT3zvopeM%~*y-duE>vv##F3riOgp0DZu;X@(=DDI|zI!86$0V<+wsfm>mzqEg;3Io^YVCPDe;#}~wXGL{I=at(L%-tfG z(>M(4A}cF>9r3qZ56uA8ZYkug(YZ*3Isz5@s6vh2#D+AxRn=$-kbzkIvBB%--D>lPR{+aGOtvzm>ZiMKgg#XOg3H#ws)kP6|Sfnoj0T- z9<59n`5>~$P1osgshT>BBTR;?VNQX}zCn?$LBkpMr<8Gnfl6vX#|)X_tV!cRh%f$q zsW7jRqM^nf(Q6B?MiSu$4Gu!K-Wykph=|wvTfGHt!_x#F!Mm1&eAj4>NDt0ZytD=H z6lwpmKF>p@G8K9pY%gJ&fA-{4Xl`fKei`;t@2hU)v6*tNT3J!{wsp6Xdbaw#bC~?| z0BYUWP;$dxr=4)Z+DOrg@2EwnIX}o}-30M{fm=7~YfmZpdGLkQQnZnoVaLl{`tBZx ziXTmF%3TZo`wAF!bkS^q+~`)uP_6i@TgjEDKir^&@( z?wS4ILh)Cx(V=KFlWAG;TIYkbZJi^PwPm%*F9&#uG#59q8@u>uO4jYoq-{q#i+mq9$}-n3SfJiinp7 zW~6Di7nN1aiHnGPAxs7EuK)La8O0@DkzseYkxyn+^(bGtZ(#5R&sq<^mVDtk?r!&o zw53xQwS(?}N|cua8%3%>byJbbf^+pNvzP0|(=14y$jsldqDf%uf#C(Fn9L`dewWUW8uFMp%_;Ri@7qj(uT^ZHGY0R|^*r;_EwWV^LG`d=m0IEuVJd2s!*N zo(lQ@D4I;!JBX&~P6ju(9)iS9I%5J(zXtzKmwFZN%DNrDtD 
z6?3h4JE6jLi3dxCju1Yj(WTJZ8C0CK$-aOU0eOG8*SX^p&sor zh60;Mfb?YYDL$^8p>@>B6UyY?^nD)o?Uu_|b5l_&Fh(g6F$oD}{Tr<=er0SX=IzX? zcoJ#wB`%bpP6--hy$NkJV?hrW^j$&R(dJ=`i0GivuR@)$GF$naZN&2=ydwP~9Ef@P zq9a9*SczbvOw=+(oj>m5w7o_slJV>tR{u^&$Z7_4Lf?vZakF97jGJEKwnFI0i1u{Q z8TM$eK(d%S=&&o4%LXkx?D>#>wYJC$*jaP6j!=(&<88%97}cd&HC0>bd?jq<+8gle zJEf}&JulITbQ3(HyBOKa!7Atf-w>x6V3>`kGpvpI^rA;Y{kHLhc|+L=fX5W?*O zRZ1xZYd(Od6s(2-lwR9ypf~#NZCbLs)VKDEkgosl-_oX?1k`e*$w_v#^R#|{igp~C z6s^e2EF08qGVQ39_5j(YBVm)?zBydrC6*Z^w)>7tnS?z>fh8GgB4o?qrPItUyc?#0 z=YRDg`<3-Z@3T;m_WAYjE4T8owlc61%P@PG|IH3`o)jNc`eea#Lm*zW%bp4pcGM9n zwk)@Y)fhjIHmI(Yv`y~CgcVaqhFNyT1E6f+S@CBj*BtTY;YuyQHB*A&C(g}cUIilw z#d?3dO{=S`1;tPPwoud2CAR$TujrFdgm_$U9fUScVbU2pK4ujN23gKPmU)q80|4sg zSYV5ZYW>|WWfmoC9Q-J4`}1JIE&qh1CZ)@M-n|xe{7H|mp!Zks>+RBY?Nl~#Jam%g zPkUXL$3be}{tb7Z+(sWFEi1Y=Zp3zj>fy;}wTEn}t%bJ>bU3Z56bHv9%o=xQ#yEDH z6T1NDOA{{Be17KSHMBD4Jb1wtnaZiV6Oi{K>VN`I=HV-R5xQ9aWZO<(`za zN|C)EZTJ)Vo_S>$UH)A4nzMX-N{#&98{R8Fe#uz%q}|g_H53d_({5-(6~1h`Y0F4C zI54woPrs$Xn>1jtMrzxag5g!xkR#N`=pV3JxsB@`Yzt4p?04lVODgyCS1)Vxo#%!k zUAzLJGtVmS`^_zUYxP}=YF`&L*izBa6CZ5F1ev0zSwTfn;l4-*$}X%Y4aR?x2Tr## zD2neiVLZ*)Q*=+iqzy)h`RZ~SO-)7haoTwl7!%Sa-8aAAs#pY0rjz0nWVu0TVusP) z5^jX845w+d@>jgx*l5}XK2kqdxOBCxU~fHHC*fzVB~@7GG}F7hKkxb+U!B-3HGryr z;zgrMEPu?~W%|A^**?rnQekXjfuQ~dRd`;--Ns=|dYWK^Gtzm$oBIo5f4j-oLD^d4 zC~(b;z|a_d$i9br>R^C6NcjeAqim^Nc5D<{EVmNcH%CB$U`-&Kl;^G&aBz~kdUd~f z>)0aW+N<989&0~L8vw}X3YE2PG+B~usLCsRc7|PdSWTr)J$U&LQQH)->4_!v%TP=1 zI%w*pqjpM46`A}yIL2K7&8ig5Dc-s*cxX3io0 zr0D^4M;R-&C*zccfKYY^ufJVlzwCKZ6&uqa6dF6ZsCL+0Sdoe=S=gr^?bdh${1VX9 zuuwhe>0Bw-S3|2c(!C--D?B|@s;6xMtLl+7@t2EGQgBX{%9uI)2+A^EtJM?RXcEsR zq%9kd15w?jFh8&a541+7|a zeQ~}X%0vWC8f>WDK&RiVI*CjrSAB$0AN2zXr`SU<2l(vXSQwUm`Iw+c@V`jO74)W!A-#6NSb%u-$u`JIAYxIIK889=)Qy9+;MDmAVpTaePzU4J7a4Z5^=6%QJ28 z3Y8m^zcCH}UWL*GLxA)w=9QE_Rw_9=_tUy;^;@F2J?)kT z%PlQ1s#`uHEjbkf150_rg}MRF;lozol7GZdj_8PU93z~1rIYY!!c4G46*lk?UZJyr z{k-uOa0?5wvmHK`+K@^__A}DrNu>Hg0y-!{Nx#@4pwKmGIYPN+N$2?AJ?8+Ap$&W5 
zwHM4u8cXwE2~Pv@ki`GRWu(p}BdQH8BWUOs!5a0t%Cu_L8ZhJoCak?otY)+sj&+{bCIM2yZ=3uC%Q)9%}(YpE( zfn)pC)tZ8zAEx^}CrD~rX+g?>kpiY%+>=1e=AE>>taR0ZDlE{tYuf$NeQ2kV=TZ8!`H=JoJdWCjLZj>+)pESb;~QR16O~Y|B!^zZq%NkK52L--(7NePu}e?xOEnbUjua6bn?G!X%h7X@Kl zosVw-fa~C(73YPnoL8ks?&>VtR8JMt*U-Lrm^!lZTLmP?crz-1!W;~=a=g&hKkXWC zze&kQeE1bSbJtPF#nxwpEv<|BPt1wJeK7mQoxb{}CjTgs5f^l7qfLTbn+Pk|^L_|9B$Dn{ZO#CS6~+q)u~n zhmUl^%vVo(d=|?8f=`JAWy6R`2_VoZTOJnl~F7-jsn^qto$I{B;7T0I?GYD|07*6ah9>g4p(sS z@X!n=p&k4{8(G6-FMPD(o&7FAl7Hp< z93XJlbk7I^U`tYmoG((@X@=EyhWAb)Ru~qPhlT-63Hkl9aoYP?#>{pmn;4UVwFYbx zmbIzBP7#@Ck1blH{lOwv2_r4Wlud8_2x6*?8^q_UTMk<&qG z*N-eLl~RS>xSR4x7_aB?dzvsIa9_IU?mO8M5v$_ijfB2U?Z>w|!WHA7&+k9ISmll) zonkiMT&oP=!!}0Id2vVWd?*~`jh>txaaj-2N38E9W~~YM2r6s1^f;P~>%MptX~rU2 z!S>TEwUM{g$$jdvl0K1@pZ*@Z$?53PS<~JU_1(JzQl#R!QsE?aOTBP{Vg~^n313yC z$uIylN~2PfbNv+%9U;j#1=5j18r@-YC_(J(6tzs!a-2mo(dbE7NXxF=X zA2?gM{{CuK;)(;Pb-<>u0S$%w z7^odDU9#r}j7!MA`HQ-vm|%b;YFdDOy>aA#>kA@AS3b;a`9qk~gi(Vr-l)fHrVy-m z8KYQWGt(7{X}qM}5L(=d>;EM_K$>-g3y%sGB``)$BQmC8H6Wx6Xg=Q}sb)`r&yXZ0 z+cepw94&pA11bKeUBy~@{>ptT*kqEmQ-+>O^;DhQEOyg-!L5#8M5OR|tJ$PInx5N3 z4+*IJ13{x}qk*o!OM#J%!h-^f%K*PZ)xnBqOl${(t=*Mu*KyHM zvK|}_aIEP;OcthM#^Iz(?XuZfYrWp%_dpGr+qVg0JTuHGgSPTP;q+@Uj2o!Q+;+kB zjH>b;cP;SK&^2W!P7#&PRQ84<}Frc58!2<0pW)J{E0 zk5j!@nik=Q5P7}_V^$sf4{qMS2(lg{$)uGlQz!H)_*^JZ*f2uxw83`i1?O~}=sV}j z>mQHgG)r#3R=MH%)12yPxh;e(S+DLfPesZ%JA=R6EpxbtBn}xd+T~bdBsVAuB09_rhJFJM? 
zB)@!5I4H}P#cd~T9S#W~yD z3XCV1p!BY`ReZzm!ib|-HlrIPY*eZ0isRH!bsJZ;zVrZ@Z@Rj19k!<-rPKC9azeDj zi=9=mxkbrZ9@fZrU0zu0!LDyP(GrDQL1)KDW9<_89UV~d{V@v|2CWlWBG+&3=id?t ziX0VEZ#DoKW&7?n4!Fw#3=dkMPdu4)%cv=Fbku&)_{Zkv+27h?@Zr!hV-O${TI@B> zhHJUrW9(UOr(p~W!bH$y)Y_`Hx?3b(%hiR})8nP=okR_Oq|~o)VysllnG0GZ?*!nzcY=?Qm2O&w2r;sf#xu@J>do zp%&}bSDAaY($(vnRDgAQUUvOt@%n#h0gf@ijVFv7RVZ8fK|C#xt>^bb6ER1cy?A`9 zS5o!Y6@#A&X*$~2dz<3O5Ml!gWhRhHb#3J8$1h#!4!IyUQhMwY%I$oeZ}!JO$=6|E zEVrYz9m`hCG6N_Fzz&w4y?s3mbwuKz{&J zzAOs(7e$2&E54vJru^O3UnUqIjCdpQvPe}>^w2-w5m|(c62G)PFQ=2Wmaa& zSxvq-BJZINVAZ;OP45SfE(>qARKA2XyZ6Q$*Os4RDTRv!X+KZqf{cZ$Al3>VB)i#$DR;m>q3dKW4u_V^681`iBo)xk6npkqkAnc|*tyFf0Ft9gIaNfTzR z^|9m{tVC;r6P?&u1-ukFQYKOL0n;Pe=j6U9Wt2JgABTHclpCMz>K3ZF6a6;ppw!0MQpt8D(>?2Y~dO zDbjZA=~k-|qX|?7yplm6kJA}9tCFnkwSOZt*#A65+h6wTI(3F{@3dV7GS4weFn$!- zYx1DEg|h*jAIS;2XcioyvGizA#oN=Yq5mp91x!m- zom(-@mrzLRSf&J7yCYpMXcTJ>Mpc`{XQ zP%>*s&BA<;jbHJ_L)^J zC;ux(D&~-%z<#*0LKZwtSL6XI0LDI!Nzb`xy=*#vQ8pqA0WJ1_Vvn1%wha=q+Ls{4o;c+O}>j;dKJcgBSkuFsoHF*aK5E-P|4bFr9=t& zJ0&?Zg0MacynC&rYI62KV3Jdt_VcWN%TjH(y{<>88InZMQHwWKUC^f2-kR7{@H{V& z6BP86q$O+kxq`CkP{$Uc*ki9d-~8>G?rzVlnsJcEu0iKoWxuB*=t+?v1HMI{J)2s0 zFlH%0KPdY}So=VQ5Ff{oy7u~)Hu_78K_IPR2~R-`)xDJ;%LAucS9?sCh&F$bP@q|( zCDaiVG89s>_(^zX+N~#M3Y14#mP*Fqil8e7*kMQQwX~cD2E-ec8(y|zbg>Vlw9la`;N)*8FUNcZaGl5rnBRwAS8(L-$R z94(x>o&`{azz*ljMLh!7W0~h(D7WQ^40=C15ZKTJfxo7&5!Yc;uh9`j8d&la5fx;^ zY%-0wjs4}cW^lMIHg~Itjua7XDZn;NI|m669FT2q+IbG0H(dsSTKa%Lc3oTg!EY(5hy_!+Xxjo~t+kL@ghV z+@;BAa9^i&6$p*syR_w0VG^E``1&-TV;QF>D>Xob?H@SRu8KNXY08Jg8qQunM7#kh zJkV~5VMloCaEbl~A*eV;BfgP<y$sq#u_T*cqcuHUDFvJr;i&)3M>?mh!#{56 z+s`*`78)XaQ%umKu|?6UvOk^I6cW;JH@;k{{F9suD&iTMCKl*~`Z&P6j2-4w6M940 zAV%YmXF|_1h~)?oh}IH=Xd;PH%rre=g1#Yx5HDhVP>BAyjY{e$1Q=6|EwLH?#d?yilD`%g)^y#OKOOIXH;i+MSe>d zs_t&rlh)pmA}llb5*lm9Lcgx*yBKI%K}}ibL;}ck;;@jzm%w`2RG%(5H-GU;GW3!= zJseF?mMj?kJ!s=Ck$Wzsm_eQ*UIHsCbg|cT!K4oX!~4W(7MVFo`tXX?JXJ;U|ug00w*E9o=y!k(E#VvMla;JEu;XfCW7WIxfwu!$?RU}@LZ8%PA2gc+C~I6sLpdihdR`&iYaFQo%$p~G 
zI?*m-f19FTrB*bSzmQxP5p*cB_>yt+YGhkH;)4TqzX37OB@}V$Dt~=mDsdY`vo7u8 z8bj@t>@>T=0PE|cRRFmaI(<Dp^{QDIzk9-xX=U1;{2Oar2>QFv;vRl=uh@v91K zmKnn8O`cy|W|kxY*SDJ^{x0{nh!6}KqiH^ZN!$^siZ6~~%yUa7X!hcmdX*C8-q!ox zYv?|8WWEr8w%S8p2S-1dxi1g+OVdhk<7A3miC|?n1=cXVPa6XJ_Y()}@F$GXpb-Q# z3wt<55fBbqGwMz#`@zsMIyh}zu1HYANWutP?1webIs<`qo_@%_%vNL_}xe=&v(n zi-=ArOF47AYY|^sd=Hdf4fm@cLKir&Ya(QA-=3Ea-5P-hT6LU_KHIdQDpdYa08}QH zAR3trso)p_SC8jQa(;Wy+XwWb%_?-!hWa_wCUy4ydHQqgI{Xx)el2%E^P@x0JqlT! zc1TMH`VvxP+ob?8ueNW2us z2oVskN?ZjX1p_LEWBgd#G2N~%K>I2q6y*gU%-^{p?6lP!MXLL`db{Za8k_`jg7}p5 z)i(>#=a{xD1;mCvmu8(UVw2&4CT*ZR0Nt5#YDy=Bu@bwI8HP0D;#!!Xt7k(C&(Mzo&_U_ZRp_E25) zv#=yw!$kg!0LUp6q#s#Pg$e2iE^oHc7Val1#0EOe%7M=R@OX?fuB8FCM!m)abO}~J zR83k66cPB6Mt-yCIo|WOF&UQUh^N0^U=GWg`y^4l(EEBM49yVxZtd`cg?pQ1(3+vOMm(V@q|XhGr$#qz}jha>M!qJ1=J*~SfLp&fm@_&AeK&f70}s@ zr_$3$gnE2w){3ok=}C~_3%ddE=Kz4AfFY34c$rXtvRm>Pv7cB7n|2m2brx!m=YG)8 zS~xp)n0q?PFq*#5f1JENfk_FsFe?vr#Jx<+4n&Xv7i z->LuE`~2hZ_i1Z@oO0yMUE9&^Q|(suYXpXLdG5k++D&zlB!Hu3r;VGfW{!{UIUnBJ zBoHUX>mP*W+SLS31sJx6P1r;=kGso`8)^e3TuRk6*k>U7u<2{c|1Iy@TQxg@B}WFA zn*XiA1%`Jjpw!3h#0g1sZagUbn7PCk1^hlj6VrMVA;MI>9#XEAM1ljU!OxL7KKzc>S1;5g5C(c zsP=pyu>~MP2i~0BpZ8me#rtqCXm*$Vs5t3Nc3^MaNfhL#BkrvedA~Km6;P!d>NUQi zStgEJb=|t_sdA&oFTh(XGBD_@kJH!aYxqTI%?7v_$A~{X(9;r3La%XeVqZ7yP3TZS z+ii>#m&$RJ+J}7;H4A7JfR>tb>>3d*3=N?gKi=tar1o2iWdj)**M|d#hqvlBr;N1L z_G}LPjX(Gv#o~3{l`I3x1JQ=V6MBwRNwHz4zj{yzA)uP6w@bklqj~+V&sJMHF+RJ- zSk+_6XOK4(5$R9Q@5xJiT6}ssFgr5DS<`h>Jl*+6_ac$e_wqZgK6`S6LC=Ex|7u)* zs$2H211AJ8wfA*Y!S)eKWEB`Vj|K^2`$Pn%nu7f(k|N09_Gd`?&Q(vlS(DS0pIz4O z7cU++e3WM@7L}E`BmOr&w%rVRb~H_$>AVkWl6P9<$8qVykuJE#5>m zXDe|az|SjsI<4uU8qhR-T|q_Z#BPENr}17H$QLJi(iqBht(NIRwLu=G4D`dA+Njx? 
z-SVI=DJm_Eez!+_gS9U34n(`okgEXqKzayxlt~I1#7J9TzwnN+&$9;a ztwihBnJE7Sc13;e;MQ|n1k%Q|R`Y=+i34Fh4od9i)@jM6J+485DK1_y@*#+jb$lq66b177c&%^z$~e6WAUBDOh#;n+|4FY`QmKb# zGkl(btVLwh`A(2Bbx=sPxu~Vl<~r_c1bj+E{MU_el%;kxP#hnt0WIDw4%@mAV- z4LDVTS@7hwX;^`A^u02Z%-4pR{3D+)QI^B@Ur{E@nBCtx>qcGyN@ z_byajFg2m|yFQ+2)t|Sx6CVDy+AR;8TZ+%-diz#Qnofj7?yST4Z0?JdEcG_!gRV=1 z{pebCiuECF`s?9_G%GhfpjpMt{F3nu5A$pcs_>k=cT~|3q_>7&rD)6mM7HKkxI-1r z2u;BpNy$?u;5NO-y~TeogLtAs(@CSbh5BelzC@Iz%a#=x7gGO3A~TWkD?z1noc$7>S6xu03g%=^C6?7KTgq|7VcdC zFa3AqAKiJ--KbAyK+gOwJPwo*C{fV2 z?wE50>(t@o-U+Vl+DVQCD?Wu*z7);9M6nrj2QI|GG{&H+**a%m8)Z_}_z?mL zy>Q0YtRU%0?@EXdd;vX5*$rS;H`j*_O;KEzkB7wtveS~;J(6oQ^BSZLI%z7a{EMsO z1#BIBLoTVfWOZGF{C7x*<2op`COb#%4G#68-tw`aSvA3Ygys?ce7albqqnby~p7Z=A}cnehV3gqy@ zb->vfKsgm2XxT>9yf>Brl}~tcGVrm|KUBP$8`nNhkoBh*CerjOgCii8gFqt%X#36% zO@7)WMkZdnJU1O}^1HnA-nEQS6~NU*8pVQZt18`jy=U!1_z+jli#~uoczdsK<-qWU z{ydN&)b5FbM}W`&k@HetwU;jihrZ1xqkgXfY=JdKnW8?8;+u_khl4kw*MKXvRe#y- ze0l`3SKnB%CPH!QW0+s5)}!`%{@d3n>T@Z&>~)7V)r$iJXgs{91viR~xg*WU>p5_D zeZ9?a`FlC<&1(FPj9irDBr11(|X{rIDD5{-NsfP7X$275fW7^bmA4KndiE9 zKxhF4@vZ0gIj)1&i;4Jg_Nl?z8dz)~m)q_>Wil1&U;|qqkk;hfVWa6x1XDkf>1>Cnw-(G#Crq&)di9yZ& zL%p*pnOz?@t3ov8Do~`~210ecC&SLq2J7B`N7Tw@lTm+A`pMtpyM&z|yymGIwTwe* zY=);yD6;dh_`z6@<-#ZSj!|bJgLU0$4q;}t8ZwPuqt2S{cEnNdC3B^j^G3>yX}4V9 zH631)(#?Tu3(W&Tf&YXSDjKeEJ(tt#SP_kAAqYE^i2md<`CNbE4@_3(K**EM|eRM+k7 za8D8mz@p8`0Or^e)P8pzvHBi>FKTw3#0R48RQHYX5uv@!pRKQlaTpMyD+X>;H_9># zXu$qW+isw78EUGJB0wN?K^zP4--Q?MU9pZz<;85xU`yOa?HIvXGH_S&)mNptOSvMLLxK_YFk{0-zJBdSHf zVSU)P=1LA30rK1)Tpey*GV8bp62Fb(7WEN?lTa{H3ltGO)O=Q}8(AZ-(dXwHYpcMN z8G#N4a3>XQpz6R8C&+6RP9^%HHtRp+Z2-fAd-xO#EXeJavTLn;2t5l2d@WvS?#u;q zaLj67m^eEeD3NjB`Zehau=qSzdtjXPbQ+M>aI!PE-(1>m6W zZDOPR4`qKF9g9OP)!kEk&%H#O87xMGrJ%S?w{;n1E~Q~cK>AjP|Ll28`PvD9;(eDU z66Jh5%1m6f1o>Jl`#;As9j|}F&wML>;+Vyb`x|p=QU8Uth9+P5!tbx)^W_T84Vnvy zBLul{twC{z0vC%FfKqF-;uzEuY0bb5NNp~Ug*ET+Yp+T@Za}?ln5ZGi1iO@S 
z1E+Fo_X?E;G;?$h0uZii3Tu@))^Nu}(!_|AeAbp<8H%{EeuYp+K}}#|MZJ?P0?@N6}ERVQcQ6t9n^cfiu|0YZyyZ>UE3tRVtdv+Kv4Y=Z1!j8J#Nq;*loqJoOE&rPG(g z;8@;7rBhX5?&+8G_Gp6w71{tMwA-qCVDlC?P1-5LPj~BjqFdlory2N9%IBBgNVA^= z{+4U-m!C2#+uvfI`IM2-i4wrc5XbMse@QjJLCxMEb?K2#h)jBZU(erKsYhR9;0JNk z8h5Qj-;c+$lK(g$Ql~GU4_xJJfe+u&Qjd*0GiXM4$4Jg)b;#Vk6!8}K=}wsNUu!<# zZ_#miV^a0qWHXGcrQ^6&>0dPW^AGr^wifw?M*ifNI{}m5(=8YuzZ%-ic(H$Hv!=Hq^P#1uJZBofcsl{5ow)Mwm%i8uy{3PsO7*#PjA3 zX$~ZAYHGKq6d%gicq3rt8Zb>j^3Wv`ytc=TO32cO?QcJtyToKJKOAAya`~xRtVa+T+SZt5xYIkiDT~teZ#_NzE2tkRGcHI+}%ds-;b`7=R7~B4KcPE$cnvW~; zV8SkYm5j~@VV77BRt};}Rjh`j;^yLgiO6a~&4(WJf1Rh2lq`#s=Xf_=s$rt5a{snv z|5hu%^1fF_c0as5Y&B2JYfq;Bi&Vt%qR5o=+>3|XR6Klb?oNa(yX1q0>iADHT;fgO zFyio@-u`(i9(NdVtHdlzT+FXYXIIYidB@t(>k;G$yUuby5Su&l8NKYkAn>mtl}~T_ zPv%W|O4|1KH<#CKxfS0b5!@WAj=SL@_f}ozs(Q)Zn43XL9CYQ93vMO_95rRxuGX8^ zsd)dHo%;q&Yug*HNM?BDl{T)*NZC5q307(|m*o*@FxS>i)tXH!Teo}RJ>OErIU^t} zqrVq>7j#PVvu`Pl4F$yB>81Pe!%}s$L;C6-Z0Thfb}nMe%2Z89S4H_5YZZYNQSiA@ zSbnQ-ymq?J{O{y?$7J(M$f?J5eoEuzbT^)JBP|_&d;@=oe`SYcJ48zKGZ#x9W425}8s?XAc)mbk1A$kA8?>f-q_MHlr;YA|cX7 z_%@kmJq(@|EL14e4}#Xk{}(g*@K;$vt$`L`Td#DyxzD&^$~SU<6acYZNHHQj|n zXIt)&^z%qOZ6AATuAtxjo{#cVE`Lq%KyX!<`AcWI1D9Ij8mYCKNL*<6`Nzyu8C$oeM|DlhvaRNy`*pRdU1f|^e|#I(eqba>vH2$ZmX|l3 zsw1fkgV?~12s5*=*CU5c8Z-MJq#8LNdZP|@dxQdeqCYf!9WQH&x1bq&T>3my%!Z`N z72?h|TGFd%{G0vewU@QcoZFtm?|H<~R+T)0Lks@R45{ZeejdN+G5{(TR8=e8p}5tKNPDL}LwmQOsd6gBu#2}8iE z95>W5{r>V7@KYJRKcK7GiiukL%w2TfzHMya=t>!4#S+O*4QzlpKW=07$@8=)zir=q z*v8WjYq;7+6=6vw*p~l1^OBCUxpd6=di*rknpZd#p$>ma?v`Nvh2f|Q7)H71H5mpD zRz%?B0cS_Y5=fXn^`7+kc5bz2j)NOI!zHE$)15y?RZ<0LyB|9mYBY!a{(9L)4#vZ? 
zLbX*eCok#MB72XIN1!?HSG4{tf7L49_nlA95_wA?_e-Tt zx9ekuRqylis-VZt2)%Dvr+h_h$VamYU2>z-?}g?IJabd=8;fr!ePEJ3t6e9l9xnRT zmQOrZpnbQY+Kdwz`TB+k6y@^CR!sJ#3SO6@gRZ{5qR}&7^UiU?QNw630vqa|(b1}P`6@XPJO9fcV*&+7FqkaW(9j{)o3mY8A%og|v3s~#ybb?70w!rV?p;=}6d z3-a(cvxjfE@fS*tr*7TPq`?c`g!04sERZ6 zDKGa<+Gf1dH(M5ls-K~U4Bv85MqY#}sp1xxbNhJvbLz!a#@%dY6r1=9@KBG8U zT+CPL!RnMRgYVG{7gKUqeDJY5a^j>sp4H4~<{S7u;tJOI^?>VOHB)H9U8LN_8ZIV2 zzn^@^tEH{Jye-!$=NUcz>GpdxV~2j2h(Vi`C?K2(s49@O*-+)5dD9Q;g61rQY>EUFA!s@^9}zSq>Z&&KJm5t$EM1&WmL{-;rZV-PHBLJvJcQB^A>c1 zjyKJ9)1=XD_R1?Kv6IEw=+9vtj;^!Q`tS9nJJCgAg19{PrBF+@+v#pX zbIa(yL|8@`uRSm=D6 z+v6K^uiNA8%5>8rN;6J7UH~wno+8<&xx%`*<#I%>mHx-K_{D#g68-#s$LEIBnit6i zKTI|bZ{<8QnFt7DZoivyFXLc>>rN_L$|2HWtKeY`%TV5brplC?{xtTbjyCGovG=L) zB#od|eSDX~hn7|w!KUb-T1+$rn%PreA)Y_JpZQHh13(K~>&TQMZJG1S6$L{OC?(gT< zqknpI;Ek7`&-d%dQ@&U~P%pZod;0GzHGqvO1vlu?zaH-7IY^^iq}U_OPtA1>L@{qx zxxo)CZQ0f@Xf(tU9nOMVbSdvgmn9SatFM=UejDTgtMFp-oc1*FAn>M_0(ipY7EuQo zDrSV(ZcBOy>(g#ROr548s`nqSRfz_Z1~fdlat%VH_k{xt9abhc1^-xU$lph`5I4o( z_SBTpm5->P`g`3?8(h><;Cbk%=D)`RMjI7Kg%LQFW8*~pgqav-(3oxN&Y%DKoYwVQ zY7R}pE^{1?;d^E!hJ_lP%C;t;-2agZyEoNgosrV*_PaXXBBE8*T_2zJuG+}FNc$%< z^X5_K<~mx2Ha;_y*?bjUB@O)WU#;JRNZ+Lh{i)iba2kc@7T{f$*ETSPh7Ai`pR=6x z&hUm3Zh(5(;w~H+b1)aqe$Qjhy6Qq zOiWk2R(DuZFEqWKz}up+V8uMy7<4#hn?1WHu{XUsoQNb^ynHCDHV@BQCjms>ux&4Y zW<=FQCjrg<&x|h6Ja-YF2GG7Hyl(Ta*V5GIK^0%J%Pv!3^K9I)3QnN~KgfIU?l#*z z%53CCc0^~R;L=A!aQ>U6Hp7h7c4Y`B2!k41(MVNb0#7)31_ksubUYib*9LU&PUK3^ zrzAaNY{uHUP7tcy62I~j2K*CgUvOVPtfOa0r$O)h=^(QX8naLWInb9e`?s}1m6v~O zIliLva1y5}#v1TK^8H6cR`bW&5dD3(EuCOqJQ$W$5{inMS*!#Eo8hd=Wm=9a<DM{Kd?26(4=S`TSuJUy=CO8-|9f$=2 z;;hCz5`&;Fp959zmy5s_zQe}IiE-E=LnN7x_nj;9>Bto}`wNq#Zr0JGb`Kf>C8NGKJprB%*# z)q7YM^gXG)+u)}TFUP2jI(h(-+z_GxqI|W75N+LU<0!fcO2O58{a;AtW7r| zJj8BsYT0a`?56Tihn6Xg@(x-T1txwb-eyTr!baeFLY|6fFX3f{*C|+I&U`5sC|5tzvpj_^C5OT0Ten+;i-+jQv!P{m%K>Y(O3lj-t zwNnIEBSq-Y{<<3g0d$(99V*4b? 
zxy`&hm>|Wt9oxsakyu;^G{&!#QVYf|$RFG52Yx$^S@BNwI;N~V`v4oNFUD)!iu2hcGDtQ} zmNGw)M==7`k^d+i)lkiKm~OJHYIFjk!zc#|9m~Zy*ScXhQM+y(G;u=LD1xMeX> z??fKyE1HAKynPx4ZPotss?QFnk{BVqw*1xHQD15P-_qm&Y~O2n-(ibDc}lw{U&NeI z3=kLO{u|V|r1&EIf_t?<5zDx%ek*wm?mZ~cbXIi8Q2=P`(dirS71Rq8yS=ON=j5Wu zr=L?QsKu*T;*w>v05?iP?7zDHHU1K=6f!r)(kGlUr5h-;VG>qZ)H@MaMaYxkobCA& zZ`A~t+Dgil07*zybNcR+J7a2G|1=2_yl0UWYVl*wlMv$+@ z(J9LQ{eLeIARE-lRd4rnd*kfGRsp1KyLsroXc?rS_0f_9n(KW&GS)zf%I@m254@qK z$@mG(9Eh z+yBFMf^3^_9A@95_ zh!~&X8I?&FkhX12v7w3(iCvzeUEAv1t@BIpLzYA^F9j!ay!!}rSJ1{$%BRZ+;raGsV-?q^J3+IIa!V?tyUIc z+XrrR7v1}G&5G`Kk|_p5?M#cA^*Tj_{0f@qKbJ2(?5QN~?hJBq;p{YE*9ln6PFF5+ zN|g|mgu1Lb4i&v0>4HeE<*NPP5zGl}Z{71OE`Y(I=xdGq`?W27WuSftP_>k6pIqFm z6O3Q6+PYy#X)v{?TphqjC#FqL-42@CeJHg7Xp>&MvoqEN>&wkGA!Q}zbc4LMjS|3>5gIApMfYb8R{N-rR`NP<>MO!P3BHTD6(99ex*>bP6WCb*dk(k}F5 z7MH%{Crc!}-FceTU$lhpF$L;H1H%9BDdkHYOzMOS_eKINWJl`|f0B?Tl=UDf)Kb+S z!p3XXon+9NbTS(e{dzz?`kyx&K&jMK`yGHiRt0Ay*3PJa_!_W4+!dwS4LfB8J}sJ+ z^)p8e?Kv1<4cPv5v)~Va4YJ!+lwi5PZSYj*P*+xzSBm{*>KD~`mhhADL+$^3&qB{J zSYAGUELUW>P0-87SW6^kKU&lPjqbnR7!zUz)fZXxr?E@fTCw?-yX9)fdtdZyu|Riot86QF$Pf`uKk`9z#-{%eXEmxW^C zQv&(6vqU zFD`m>rVrvYF%kEJS<>Yi@?BhqhwNAKA^qzAn|b?Hq$K!q#A%jG`H!tDH7MT{d}xE$ zb8p4XSR#2Oz`3(4`iP-iTGX-Sbg}tjj}q}fkc5bV)Q9yLE(J^9@2Hyy7&+5Z0!oYs z8Y`B5d->SYUf6WkmT7j#B4L!w+GZbcwgxDDZBF`ik1FuqDt;EnzTT1CHZ>XS zp~Mv@rO~38`-)V=IqhAtWURYVpizOoDW`?kGqx1 zM}0-zO_wN>RP5u=#3uX7h*Z&m)A(xtNR4k1^9VvaS=hJ`FVqKee5iI zI>X6eh}}Fv8X~j~sLo2g^K`W~D13;asDhCSCqy@yCd!0Zn>=jANLGz?1*YGqYBof~ z)(X)l4%HASnPWsu3i0}S7YA+Fid5F~S+}8MHt-U@`6t2#w=2ZD6w;3sIQRFR-@H3u zKqer*q_wg>CCI5UkIfe3$$#UN@=@Lqv!Lk zn=Z4ncAH5Ir=a@mVuj<7kuiZ87r!7YG7e3GYhH`@Mm_oRzd@Z~Mk z->~P~vyUj%V-f9tV_9g&YkqV3o5~^^9L#W8 zee#p_K+AknF~Z4*p=>DrD$53))>4r7{ZgpDM?q2Gk(jN`r=1%w^TvhR87|O~z-ku| zC3P1B!6*H-)LylhR6syNi4&4fEF;P;F$J`p{rkiG$xJkw1n}1Q1n9u>{@APROv@&i zYB3usjqCmtMVD`?r=dgjyGxg{4?<2HBBx{mCt;P4hC1|~Z<)WnlEa@<5x3IG6wY7Y z3#|_AY|EN_C-aik(f+*|pWm41eyH&R?=#;DEFCWcK@1i?3{+gEqf|}tv 
z8^L??CMYY<3tXswb$rU}%%AKmY0M+&c&YBPVaziEIIf4FZzilDcd$-C(|M`AWr3;~ z7?LisyKu~X(xNLi*a{45?LviF{2)||yKDU+{F;7AnLEA`Tp7lu{quZ)9EjyZk?lEz z^~}tmsslbb-$ZO;C0QT*SF_k5QR1ZUNFy(o?Z<3STW-NtEBvkJ;nwSy_a@RY84i!^ za)wUyqs~(bilX1%!hT@Ges~^OO8)i#%Zb<_JZ<-=Yfn2os#Qj)AX@3v^{R|#Mn^}- zrxm;x{JFD`+-I2xgF@MFhr=odI4w0vDM>WZj{gQVlJty>+gZV+n|l*ENAs1GY$U^n z*B8g$_itOb>zLdqn#~pZ}IPssm z|MfYsE`p<@X#)4Nb*()tUgyZ!6DFIr$;pIn?(R@Ff_Ic*oA97)TbFIRr-G6iB*lCb z$>Yo?-Yg&K&{@LP+l zc~M_zt&v7Y&;ipK3uH$un+*KTt=L)n-o^xM8J+m%@o{4vlu8Y*fiK_wQwIv9Tr&;-tttnttk{*zXgRqwHPwagbq4cSN^DM+ub!CemPT4`SBHQe z1l|0^u%iXgbe?r#pLK1`isjV*$l%)mca?>o zt*g~r2jXIBmPeqe<_LH>4Z4lRHprzRv{fMqH>@_wCVjK!IxwCoR`j@t$=H_9c))=` zAkFtXDa|hrhoGl~wmbV9g|0d@TC!8)g3NJ4pzKi;Ht`aKLCEnTh=L=yFksy51URvu4N2oU$Iw{Gc6=B`bY3fQO;ST87G*BpT4wZj3J>vd!6e9I(Uh8z5-Dc_E( zqZsbf;iHc0w|s_%YQ211o!1JUGEx?H5n}GUONRA|^0w-!qP5Y&j0JdjcoQ=-3Vf6c z2#a@8+tW}R?D~jT{WGl+uh7!LkGpR8!nUwa5BGY|O^xf~wCySRhW#^Kf3_$eQma(x zV3XAe($L<9bs9UVkbO7w>XEwJc~>IRr+@n?PKjORN^bVVa&#gVe&Qu28g!se$OvH-d`i7eei5;*$-iX$vL*!WT{}hiz;byA3}i$6%-Xrj zQYxu@XEG7-FKMD+@$a;034>o-(a}&;A}gc$T6Hd@zdR4$gj;*~>#+Wu+TWhacyZ8s z#8o`AzHJxxf{Rk^*ciaPV_{ME{9VhV*7BRV0Mjv{(*XA7rnBl8&x!;LhJAVV2iOD5 zRQi;@Zk_<+p$zZQ0i_f^G39+}iS&%?EcU`y>gjj9be&=Tk0&YBK?S6wev~k?Gi443 z$mkHcWkWnL)_6hIljI>_A#rhcFga5+?81PVbGd(^CiWEfa!o%0ysqWfAQqhKzK+2-P zTSiOE)C?Q=dEK~r=pT>CuxCotcJ{)sra>S=XOeq=(0V6&#hx1zqj= zRHhzmQ3fUziA%pPm1X7%|0_~&e82eFEXhprWY-Yb$hXWW)Trgi?}YJKXfC+b3O(aa zFM7;-wq0*%|6tn2pd$u!es?sl&C?v$!-kL1Gl5o}oe3ilkAFQxDe`kTT$L$`HIJ%$OPkQ+&BDHo6!%2L znhYhB3m7$BB;kd^Q2$-?Ri*RnVQEuSia43)m1g)uQdv%p|Jq~An*Qy{N;7b>M8kfB z%k)-2&;;&d0SgArVBYn*+TB{+zK1^B4@Azo9U<7z6r0BH|QJH?U;j;@J6+nv$W-wjxx!u@&l z4fM`4Ye!%5(qT#vTna)uWK*jC`6bsNW$Uw&{kFq12I(r&>sGv^0#{wDBmT=(!}Kv4 zkzjrAgHL(Me!>?j_{cQBg2^z7YhIn*KAjK9x3azAvqCi$b6-Kse0WB)9ty~dWExk$Mw;^uAS*LuF9$NZCSqq zwO!F)gK|j<{e0_rrmg*Lr8OT%*g5d!_vJe*|5M_cHmRKB9sf5^PqAEP`ZvFhQ5}`e8h<~+EMK1f$q{mV)Et?$H+c7If!t$* z@v=))R5ImrP3ZWV3`~^=aMN6`@r^d3zQ`{Y45eDR&p_jta^}P4ckJd=ww_SU_ve8^ 
z*JsEqs#2N8OF!OZtYjnWU%tZ#v?dOCHubB2H79yHmcna2&BMRuP#z8In7al1tl4>r zL|$vV8Qn`!xDqQ9z`x6E;Hr2x&c|Z$qGfjbbPYMfQ)W)rJw)!QL+co-@*{_Fxp6K_ z$6~j97xbYO(c&rHG0rmo!Q*bzarP^Dl88)or_5w&;&o(Q7a7wyZkm_fq**rqQn2!Y z*|G<+=thN8ftD1@SRiGFMqb0>>U|**K3`S zykd($Fv7;7w}d9#QL@@y%m|?_cZTE?VbP15%!N}hs+Ko-gEhnhkXHu%cXtreL%)G? zs}kdaW)8HJ8IGHH_Bo#`VHYa_W5_jWtD+yE|i=g~ngTeUkcYE*66}lu}?fM^5 znwMx=lg@fHi=brfF`v$9;67ZAu$Hd5XP_?{Um1n=ZnT~FX|M1I9qeYj>}TxlC7v)u zq2_q4ey?02G<^-Zo}JTwci36ale@2nH?$RiRd@ZQQCgxMI$YrTI6uLstI3r<0D5t` z?0h_{QUM5kXK(L1hh`PGFIV@f+_){RhYcJrtYCA_&>!XNMCwNg3CJ(UK*BX0NSX-G zQ%eX>XU{paGgpDzmB1O<$k3Iuf$OB=ukVFs^(}6u{F1Q$|u9k8RpXn4?Ly z>GvbjCr_(&E*nKhWY_j=p)0bbxUOPU{yWE(CkTB%rcTVwb$g(j?Wz-FchnfPhHv_# zBo!#_Ap_jz0~bm0Z&ig%sO&YI?&c4skJki}L(Epw#3!epHsWSy_yt-7(j>)_vQ=ETi@~oX{}dwB~*Q?;Yi< zK5~@xYDa{&&=*cZY95}1Nbjeuqs5wEF|;xlfh-=nAK`!k&4SMAr_8>|tIm2_sXOw^ z(+C=yF+#QbZ33=J_5MkrwG*v;NK93S;*2bpx!4~Vn0@7=A*JV-v%ykR{vNCPN@022 z0m<8ZSL+QV1JGofe z=GzEWrFD3rJK7&n5WBGImU3y}a8*y|xX#sN?_ynl;G_gQIpr@22((Na3XK{y)=Iam zfO{V7=+b8pA8OJAagW*(ckVOckimW7`&wQ6lgCME(kSY>Hu~t{l zCq-B=uutAyYK6PutvYZh?&d{g#tf@!L*1x; zmcX0j``oC;v(Lp?lIktP_MMO|xlB&P{c-g{7$Csd?m+Z6ilU}1C1#N6N^;d_atxuQ zWTH%&_S3D!k_ttcbBpM%VH6tM)N8-rE{>P>sVJ$cMK?07xZ83cq<+;Z1qro+r6X37 z-*QjwPfR0wExcc`&*B>zlaR^;N{~|;bZro4<9BUAAwLb8fc1W$k*`w(a&e`iv)r3s z`%;(AF-HkE_wflK-*@`3o1`x<@uacBsh!g!dpA>$NdSb4JneZEc~u1a=*z)-OXU6g z*!dkQ7&i@?maj7FvNDb0N{_1vh&SjtO63nVY`?uQPyG3{HYFi4Xu{=yyM}f^ z*olah5;ipVzUkYI#I17`)23mEhSTAU0lQ8bN$)0&Oq&;cg-Frl8)?0Xq#Po`mj`p2m!@vc6QR;LR`le$NRZotkbZe;X zi7c`EKX$|#`{q8mqGDqXIQ??*@cF5MrZKu58C;e|85V{BRBb%AZ~h_y97-tw7gWQB zzx1}zMB^KP0yhf)C3UJCpgP^}0_rl7{qfs>r2Y&tk7@loJ?jk$wg$5RA2Y&6A(utF zZq){I0H4#W8no#*%P626kEx1tB>y>X*u_qNuig|3@ymb&M@CtTb1HBrF*i}5t5qr* zQKd^N1U1k4UWpnw!xx_kU{~ zTip@@3TU4h)4dF1d1)er?PJfTHKNly-T(-sm~Lx|;|PR*0~a$2Zb+C*#B4`Ehw<%h zh-Se)P#|tiQU0N=YI3Zfd^GPK4;1vKNDtpz|2ChU8u5HEP87rdw^$fvq`EiWp%4Nc6=0L$;qM@bxVez;QRaN|C3 zTTgc;8R@a&T^tFF$#&6Fqprwz5$xd>O%g{7c4SwNJ>3ED0qQvjxmB_Qy5!kK2U<`c%+dVP7gJp{s|5eK 
z2oAi#;}o_Eg1^I0;|kSwA*|6}xxV9o1^9Doj2vqHN2 z-vP%PnoXk0B?^Hm^%hIrSq^}-5v311@KLCC@!>+_?m~Vag25&!y5q8n{hmA(6u_Y0 z^6)tr`RH5v104Nq4<25gT1Zz|490pXHy~=;#%0rF2G6$y(VjX60kGtr2d4n)92MCdcVcpD} z8P?pMKtlf5NzBxNY}XN%$uyQi02x92ZPLq&*WaG+7}`DzfvMuG(TlGtK@ z%z^&b7gqdjpoKg|OibffiiF;DA@{r0+t!4A_MO-_qWZQPXaf0f+PrGGij-H^f0Ft1 z9(jeYk+Q3W$GaF*2o6cY;qD>xscYEJEwi`*( z*j@?Qd;*&ELx67g>*?+%1Igt5uuexpLnXmYJJ0U7DEIhjh!?%noXD=u6j6w`E6!xv z&j-p4k0(ud4j9uDr;rTq%8ZsicXrLt;Xl@E!mF3*w`WB%o}t#>k4s6+yP7EkSAUTu zVcW4U7j~&vkcN55?nj;PfC0B;`ej~zo_p^2I_Wf3tpr6xTvKjPZpqxo z&DBYZCG~tyQMn&8-+j>_uZ@7y+{(b9RGL{|p9pjzJZXKjaynT~+K+DM^uwM*TMy8>qc>q2m*O_ODBqFnOiP(1lkGj(u%>>1`bu3HpL1f4_+~$kwZm?h zpaidcF$@Wg3pXq^XO>}vdr$)40swSjJCR1IPg!rCT8o9w2oiz%tq1{pQqrC0Q*RL&!E@N9I%|L{8l($A z834+Ne+@q{CPhNxR8+q*Wu1^^qcGCD+dkKN!v#>_&9xs1o1Bg}fp(i@GD6olvNi*4 zw`VRsv*nmi9dxdI(q;@7MJ19Vo<^Ny`PTE|RuM6SSYvQwaGg>3F;IsZ`n5W|-x-ba zTaN2ouWe|7S!~ynlxqDL^^F&%z&vdBEs~p>pFSASe=6hHg6V0xTq zQ-5qCy}KU|BVR< z?eyYUW$nA>n0JFn_V@E7<}B|yqk9aUFs}M3BSC+5BJn<8aN=t8O2w=-uf{vb`}Xye zsi&y0Uu!HiXNOh^>4)9>d7RD#(+@gtU5X=$<%>T6AS4#q;IaJB-WQ5SvllB(Iz&GS zB3QUCxlxC3y}E@$x^r12PLkz}*AH9Qfr*TvHu>hUZ@d#HJBb0DnfgmWjbr4+rY~Cg zn#x#lFF1FrZ*z7qJbWfF#5ABST)+3nAf7vvC%}Q4UwB9Jqzl9v02`^)f=-H%+O6U7 zK)DS`-go?-d{4w{Ytx^6iqrMDu*Pj2z^W%-gu0^eotl`==Uui$St>~Ozwdull20F7 znPchOO7}Vy@^bo#cbh@U&xO=yT32Z+hC63fNQo74`9x&APjCgr%8$7nMT>qOwKNB? 
zeh@wYOq3Pz#yzsi{3f6eloD4g*wJwWE@c)y;(^b@$j5i$8#i;w_9E&vK&Kj4(Ci*B z%+TcMJatgwj{PCXWyfTdEZ9^K9v=~v>j}gtJ;~INWhY2<>{`+o*^cZXNE1fOi~*_M zR=!K!tIfhm7;&-H)BOjA=y4~qpP%lb5R36_!-kYPVHt9QFKZK;v)xwy7XTOs=tX{~ zB94iMj?@5BzUbB)ov+gt^Wg{Jwu9lwpwmv9u#?YyS#ToG?}nJJ!tbz_kOXIOw}2g3 z)g^0lD++X!V5o|1=tEfYi!QZzM1UYWXKiRaF~e9(pexu~6IGtpVC+QbmuWIe zI9#;w6Pi0rn2WzcEaM4-TeMaq`QG5W^&M;fsFsi8!~2{JL=xD_R1&}J7XDOI&`+1r zou9319=cvCkQq0AAs|K9!H1Wc`Af+W`)v4e@K0kbWp*H^a>KJ05JD$IhF= z7S4~dx9hMn$5v9wqsofOeeM;S@zYy+a~sS~Cn5<~i|%I*B4mw*xK;)JzH%Y}?drMU z;m4;;Wprn|KY1Ad0Zeyg5($WufDNSjEk%#qFk!Tv ztk&07rvMcLq2_ZawL;vEZn90jdu#%_)B0sF-y-){RvbwAVSMGSi8BRg5U`l0=9COX zgNWjKNBolm02mI?7ZIe@$%sL~!hjEdJBp36W@Z)tJeU*&7$-5ltYs@XlG0q|j*^-! zUiUiUn*ry5WB5i}N^>@EPrFBSJ|V>Wacmv71{dzHBR6kPxw6NDG!-$iWH!?!fH&*X zS8r;2axfyXB0sS5)&*-7e)_1R^eah)*WF9x^`YW*&>vm6A6rJSqm`DlW0t7vT;LUD zl+VjzI101k8j5CZYqQ)u{(1IUh)>#TdAdCuSYAob`KECMr$~Wkho7amksk&9Oby1i zOl!H!XV0ePWzPRbkx@~Y(647+M;Ui~wYbhcmM33ofUOp6D2bD$3hg`2EeHFQL$N$l z#t7bENb=nJp`rWc-rxhGop> z=m3?XUfaqBQ$-GZNDLWZbtR>pD=o=!H4{zliYF{iKP20oYf=m8-Mk8yr148@-53xB zDQG6k0C7oekxvN_9MEc8LbgtqK12+45Yf5;L>o~$9BPbouc&>Zn-NV!9Atk2+GMc` z)vCWek|+qnFRm-R)AX&6I`JEkcgbF`F5a(RcmZ@WfOo1t*CBnAaiBjH`rgi1a>adL z2fkLrI(pRQ}Ow~SKlPELw zSaI+I4q*2-jrwgQK-d~8I9(b3n^cO-h`09j_@Al<>*710OP^i%qDkspN3&bC=)I9{ z3YVoCltAF;1u{W<_c1+ucz~qDJIgIPH~qfADWwD8jvl;6Dx_!m%R1SGDya#dSDOo!x-+g$SN%SS&-qC+;r0T3!7HjW>1^6 z_uLn;_D~r4CXkIHzmzTkeYu+b*mBOQ)cBhv8!k2JLlEu#Ju)r8rj=*_D5|}*pXX8i zv!93;8^!tNL?YO@E`XER%rZc31eQrVO3eR8g_YxO;#X@`0<_FDB?U~U<7PlVtB$MZ z8{8gJQui0w7uf)SWDHa}RDP%rHwd$c-CJ3$)KanW_Z@@n4`*rAMp3bWym`)8^J5Z< z=)7ZQS8<2c*7F=oK_$Pd+d_1s9;r9*1ymq820X_}QA{(y&MMbR@aUNY9lS@zf>P?6 zmOauvy;5}tLXi>m6q9l$>7^ze=5Eqg!Ai~7_QMY+w9FGcs<2+H*V^2nZRZsM=+7u~ zYmx?#pP3VH4%svnlW%KJ4>b(r$@D9pIV!!q6LraNF98s8Gf`J$!ny@-Nin1QeByL; z!PfiIjVSAD?1dgWS?dhp#8w*0c@>`YrXAJmN{V@C!`6WjKrUWV(N}!;`m#wcol2s2 zSWKuPzci*zo_O!(>Q#KaC(6neKE8`i%anA4F6IKX*Qk{Ve+wRQStj82*EfcDJ#Xhb zaw4j;)aqO`oYL(%q0|fk?R70F(?SnPF%;kJu%Kh0hyzZfGanMy>{^^4JD;2_C%37q 
zX%A;8)-XM~fl%GQX5Din&^zZ_F}5Jb!c^tSv+=4Fa`j*+fJu65mzaa9=;n)LIN9;Z)0g`1~$d6vyPgiJ< zE|;P#fF58`x&stFAly`Z6>+~MA)WIiy?b%b*Ms+8Wsn01aMsyW#kW=qQ3gX?t>?XJ zv`)#^SZgDf20ZSU1`rGGl$H<@iP3Uw0V_NC3M#P>tZ#CRdT@w7dx3=<_`@jq8IOHQ zeBC_G&$IO`3JO zjPzkEW9)HhQbO^K$g4PAuy??HBzl26m-4U$r!2{13S1ectfG_We2;zv%`@*|>c!>F z1Sx2C4sT2~f-V3<52AkaF*6A-pu;90QqQ>sCA8C%XPTPRIAX|FWnoe!JCfjp?{p`f zq{d^v1NBWLazH0am>WL6?co%Vr;=yc#K!D?x%x&jSHUEM1cEAzLN){$#e77UhEj+# z;$Y<#qr&RhMQ7W%;b7yFdff{Q%b>4t`nVaY$*Mm5g|w|omBkW!Gn?Y$nT#J3%L>Cc zthV|1+Y9#HqZTt6F5aDq$Nc3=OI)QYCzrv|>We%C?mcYl&1aWliK=T5#s@=N?uG~y z4N?(H&n>qpb;TLNoly(Fs=iFS7=QO?xhLE zDhuV=bcMx(OX6<$VQ^Pui>d{;6%;qCq(2RkBN_O+LF5*~yORBceKy#K50KAlUFDd~~Y_2Q0tZC zC4@Ar7$d?`26RT{lKO1?EWRC2QY|M;IQ%+19=aA5DiZ0;pKF}5QeDYz+7g-#7ZcPg zBw1fj^Fe`;&VK7*FaU&6NhH<%eZ|;)SA_*w@8k=O*%?|RsWfM^jDl}6KHi?!upK%9 zI=sV22iIQ}kOQDfD;OF}LF3?Ef^WA-<&DI_agL5+sRzUmye9Q4i*wtA8?6%hF~}?v z-mB!BcL6F3@t(3JP~QJ?0WkVGDf_Vzzg%Ho9)MGGv1^jd2JNrO$A`}^HonHj00%E_ zA~<#~sM8pWDl;X=e#c`PaMD?5Xr^9pd2qO?B}#pH4nSES-~Wh~M%uhLmQS}hu%K|{ z*U(B~EytXUI_C|S_o8%QzfobWb(Wz1b{Wd$psd@HN4fpf`rBM+bj$Uuy2tC|nz*83 zY${S1`g5Jw8<3QwwEhq)ZLE&QcaLJJ7iyYoO{roeeP*%-RRVhdu6;d|Mlwj9x9+Xv~g^|_N)YQ|p_A8gArJ7TY?lUc$@Ewox@%`AfIo=A~LTzq3UOqv`%br-T0p9MC zt?Ls?gqsQ%B`X;U4ZN?No+>fC*o6Ddw(*jRw(|x><6Sz^ z=sO@~21hCt#p_9EQ5k8zT}B=0y1fRR-7f{>&BY&1i4;^l_Ih5{_XF zert*RWvp9RdAnauhaHb+3AyjdY|l3=pu5)-zwA8KKuJzQh``!C+G*<} zGib1wh}CoRbh4!s&bF^s5oddzcbUJtP(B6T2#|?lmYtoE8N6d}n*SJ@myekQcnC41 zZCsWXRsvLxyft`yXOR+5dm#4KGVR;%1p*Mm~DFnN*+GARY7jRd0@yn%Qe6%f+ zBXmrxDJYU7h}=DM>05uI4dhzUJCYLQFW2$8700DR)6nJO8HLDJ+Z;!Fs7WQt9)%%WA^CTGjZt zZ-fTs@Gx}V8=Mj0MeA%Gncdqd00~5^<%z32?6P_gU-ob@l4Us!*LJ}c2vWc?jEXbu zu;S8ZBe;g23ZH?yLNh1$3`nx8x05hhSF$_Eq!Ep@&9FmjC-g7v?P+E0nIud0XX+D$ zM_Uz9o<)~ntGvD}PPw-FXgPY*U7;m@9nS~e?Cax@TSuh88lUCl1CNuIR;$r9S61SQ zHM|=f>{tr9xUZbW2yeHH-}&k!+E5i6pNLXSx)zXL+>Mm*cVSdIFQG5`Zw=@Ujj zJ0&6~N1i)BM)H;bCx=WvJLBS;#Y2R#4F?%uf=;Cj#-5&0!tZZvhJJe)+wf!1;7qiN zTvANo8cBZuCx*EZ_)lb>Z3`Xt4~^DuhrF)O2MUb-;HUiIx|3vlw>Z}tR9wu-#dY4j 
zcDk{|giHpaaI($refSZ^?|FH0uFpyLYc-O5b*|2PUWyk>3X%L*Ll4Xc;4>1i>p<_* zvHRHC3=8_N~Ye1z0F_xP^qa{L$32NLhAL!IpW6dHexX#T_NggtbXO>^}!^ukc|mo zrX~5qezoy*MbQ-_xkotIH`?R)l!fP&mP@9ctHv4|58`b<1@J)TTBhDL25A5!>jTv< z5x6etnCPJA$F^4iP^8EUZx*e0r@4>>3uu~ad-JM6&T3J?afYx<`=mkt^(T-9b}j!Y z|3ou}^sXDJ=BUCijAM|0_zO(!yjXjKm+~hJqgoD4{f9OQWD?$$DL2&D(eSt<74>oi zpPzY$eQ=*KpL37&ob!w$;JNW}?AU8yY^jR%*obx=cX`rnyOD(Wu$7IRKqh19LI!xu z(x9Iz6`?Ma(|4LTdGCe4$)-g;(=szFg7)?cDy-mTH3VUFjc8CSpN|<%>n^+Ir0tsZ z1HL+(w56?bh{5@1YhJ-PbSJ}LFTJLRhD(++jZu44W=@7ZzYG%rjV+xlfK?a&tSBs} z2nk2$e-lLUMoNWtc_61=K@noIpegfix;SwTa z$z@I=)afzA-=$>?-9~I1$rqOgzV|j2vIrT%RBm8gMWGIB_HxB|-o8GCqY}*LpH20|$ndckG%n+HfsG=;2~$|(o;vywk5*uEuB5QV*{}Q~ zUf<$afhFoE#CLCJ!DF!OV}r0}#Z24t3zx8;bhYN^dRLarV@?dJZ>4PZ!e%4;^w`0z zwO`3pQDwDaRU+2WgZOxUyI^koQBS;r#V5T5oF+|LH+q;fwFi1-RP{=w)+b;=b~aLo ze!%+)n-LkUoeNv7subOZePH+PW-NmK05nD(Z*wu11^rq8j>t^-f9z&1jBpM!Npge_ z>?9TlFG2dTx^RWiReckdQ@jW#WoSE_3iTsFoMFgeo^hL(ow+1e&@mZ?T z)-s6Yv}Fq=nv%N)hM9Dss~)$Kb9Aas$cAue5#4SEfSUzmCSm-KH@D%Jm)>EYj$y2< zrCOya_to$vqV+U=wOMg7xwkMm^38uL!XKTc4|2AITFC)joD$+301SO<}Q}z9Mxvx2{Od zq?YJeT<^%NG6h;tBYsBld~P8|9A?;lb8s?Sw-V%%fwu zVBVA$OrFPxTRGL#o_H>5d=#7YZcrDtR?EJthEjeUf>kM{h=Bi%a2eUGM&I3CE1k&F z7rBVEY{910sye%b1}~lkLDG=Ta;!Q z#wQ@=UqJ9R;to=pIDx*2tS4}M0QBQ-U%h~y!RB{{c~>rw@VH)wk%{>V)Kzrq)=?R- z5estl-{k-Bc){?@k*~g$rizU$3U$in+b5vL3As1~u$A(Luu@5eQF@UyPieOxTI|`P z;_>QYpBR#y)Qq!nU?VQw$H4abxOs4kXEZlVrd#>LfCyGa;2Nu_rQhG&}gDhQb3ynA(UmThD!lfcyA4i~kB znr147546prAF<#Jkdy>`l)L&>Q-9~R1vrlz(&L#&D@w7q3$Nc_178LkMIivM1 zECWN+ERsgj@V|zJ|NKtU-N~Q-c~(;sm`Y?b39L(pJj}xuu@f@-iAH!fom(X7HCBQq zS_)jwgwFub*$|~heFU(X-5c@m)ZBX;4jTzi+A7nvPlbL>35~F8@|kd++=I*21+|a-Q?d%98^bdgU2VPs>&tZ1tHBJzH{HMu}(E;M^m(^c{rgy*mw;|54_tA2fbPSYj@ zcjKq;%xRk$j1!|aAc(lN;$At?Wx?6l-QyrYwkvn{jhKQ&U=Wbh&Up~lu^t-gaG;Mr zUN_4}q@?@x{;6txC&x77=UKcKeKuK{Ep1mj&dcc_c1^&?)3-Y8gj#4B!~705Y+Mtu za*?s34EW1VUGTSMc5BrOHrb7j2%CyznwPf|j_lTUok2iLNsYNS1CpGX^(+B(E27>X zytJ0QEN}@r{_9;jQroc_`#BQqL6> z5bgE~i>StYMAYzl8E{dkuJhN7j1Gbl@fcpBfE?8ExU^;KcS_M|bRc60Pe?E~W=_0r 
zjrLV~1C0DR#yQj3AF<(~uuNjww#^_iSs3!#^uEk~{E9j_Mw?>(sbQJ2)9vZxB|t~? zKl8k3D?os;yNeh{xljp{3h zVy+u*KSJ~ojjt#fvONR05FVd<-kpRX`XhC`KY|Zh@JWgYA(6nQj3b$x@W(lDQy-PuGbvhHN3mhX^Ygcxk_471KMbNi56Z8! z4~2FuFWbBcpPOsJgX6-m;)ZRjba`x#Qs;E^+rech3y@fRzj|l-o2h)3*)O5}z{IZK z#_iBEyQTN{UsziLM+06ew8C37VKkoV#4fOC1kD$k?<9Y(CrxZ{9NShoOo0+p%!0ah zMiTVps8uGcV8pjnj90#BA!fjk^&sMdc=0*1)>el$BVL`+2fE1;njE{9j?&SPL890A zL6HVEA7zpx3sJSOe zj>iKaLHUK8c=El2Eic19b?ycZh48Q%ohzhv!56;iaJdOrP6Xio+Ch-xI;- zDV{l@A2~jM79hr1uDacX7fJ^waM&{7(xfyw6c>PfUd0T;_zuU}ixUmA_>(JV5Kw_+ z7SQ>{X7nUnqhh4!>-LkBT{HftwBOIA&oc6<*M(Hvj;dL&jH}e9L z&}Of&GW&=Uu=WJ65MUND`6HvgE~wvYi$IdvKN-LaIRCJ2Svsv zM;tJls63kd{i7)@%{${mc=jG_K)-H!zCIzzl4R#Mj2|(Hv<(Iw2G{C6ME8e8#~(aT zJ|G6w|LAph2JwlaZ-< zdZ6$OOb?=+If#*@vitken0>xTOUY?|>&U1v?#jy8+q_&l`F>Z=iAuBXr!uCrcJ%x3 z!4?2@Hk;UMoKMxxOrSprmt5=qF02yOX7lVN_vkxKKI^dDd|8czHXbfhMqDgS=X?BH z5=;Ni#)pJWhyI%N4jS+P5$&pob34rSjr-aXiR{rs=UM99cc_ob3oFk#o%fxHn%XQq zPCwCv&oB74SuWpV+$QbePEM+SGs@xMi>k8Q4e&z~pe;5adeRe0;3gLo8`2k>PHCR| z#!%3m5d@~WVzS?jtAQfEEtX=yY&B+4w|$Z`t-mxuH?Ce&oRrckDy*Ri5*?q_xlY&X z5Sm1&gUfRRPovOut2=6)-j1?%W>WC`ceDeuMBd0(q(Bb5GbU;ms?}4oUPek-pdRR@ zy$(f*Mjz1yGHo9%CmoGq?1VdXO}xD2q%$Ouo*M@_U?G{M_nYuPyk1bctZn%F@*)}d zp<2$W_Ja+Z@~|fnN;#T3;v?DZal7m!_zvnys?a}h;+L#68H=NZe!K$sfDGTCAS*EH zQWAQhn)6tP2V&oLg$Od+T3cF|kr4yCZP83kk}@LH-Dub%{sqd$NQZ=WQkPJ?39+5u z3o?n;>V2W7KjggcBB&6{E#_*|KJ!!d1%*A=bSX)mDaQxI-{Y=IdlEIw4+2pQLHbUL zobK*Q(6z7{r8+1Eta9?Ksc|9Ika!903k*Xa@6H4P_XM5dwx0U&4t$PbX|2MWeWz>q zJ8hhR{+~WvJEz|_NZ8qjMtC}BW(0rnUzL=znlmK5Z z>-Z(+rqR(TyA&f7;b1xb-R~*QjPvPAwf;Fr!TH9|MGvvyVEHo`I?RmWmi2}6hB;v` zVBxb9U=k?fx-$Gk!mA1CN-NleD)jHcEesx>anPx_<@tbyY4=*L^<%2wPdW0T2A%!% zJ2KypaIrPYHntNLV>55uSmoD`C(`KU6*{12qZ0-5&U5A2q*OR~&jjKnV1+5%UEtz2 zKc&VW8?jRc^VW5NjIPGwy@0p0`Z(xIHbG%wsF*mNdHN=SiqI`sx|ty4bV?r6kE>>!)S!0rZ6S;Nrd_t# zmsXo++h150?c9r=c!3lvLL~85w3YNZ-lAm#Asbua1lPB@_pUvj%ZqFULFwvT)A5S< zY)Ir4@AwW2DSK9?R4h)+MKw2sa)u?Gt2UDgngfQvYgfb_JQb*Ide@{SEo5^fnnO{o znDMTa$81~kU2!sg!_$WTFWtGPTe9-GMZdBrCWvU?%#y1h0UE`9ArMT@?};yF3^dpV 
z+8M-VGD9wd<4WpVRhg95vl9yT?Ut5fN*hoWL7T%>eNvY+8F#l}ZG&JvhL-j<7628p z@woZ2%m-`FTuHy*@zR4P{xG*mNvQGO~AYAgs8G6V#))qaPuF=D*BCZivFyp^u|s< z#br_mLZ$;JiAJPrqb6VIyv?0ANmi*hkv?oZlpywL{T}POzBzUA(|u=Cox-cPDA`LZ z1S1K&gfxUmS`3M)&RGxZX(@%kYEEv;IbJMI1~}xe|IQsjSi9{Vrp%d118eR|KpnyF8%X>J^h{sVOG4zZiKI zXlL}9wp94zabv`*KcES~M-b9vKtH4Dwzl_{CdrM8YK5FrGvHtbAMfFWs(O)W#<0!; zK?d&fY1H&Zx2`%R{(9Bp&5O%o=0?ZH@NMOnm)=p2ugp5~T3jhABl;98g6eS{)sug> zZz?D#?8|6>2{jJmqvwru%+Ad}YK1-mavB#n?-we~^IQPl#v)`9Gw{7aPjI;`tXI-+ z6L)lM{>iT)j^U>Qg;rmISLEk7gWlQOMMTKw&#BiX5;7fEPFnA3jXr4qH8`D?u*m@h z7*ZS*v7au6hNZB&kFt>N2{xi{2^!0rNpwtcyn6BYLqokO6koj9_hFGE%^#V_i4Kj$ zdTZTjIrEEA4*pdPJpv5ED|%{P2dn#TkAYLJ__(C(Cacezi=4e61PpxYU1*4rh*v4R z>l$`qeKZ&7Aqz$Fbvph?;TnEsU2E{Q-j+_=&f}+`**S-df`OZDg4V7{T9XxwPx?o> zgf<7WUYjX8kX!P&+Az`Na|#Qe@qh5=n`cIU0th|kQkX@-FB|1k`BRO62H<-$|4!Mk z-vsFi2mj)Cu|Fn8K1-PP*nU)q20q3p=hB$#6V9BQ8^e7SUJjmL-C@spkf5rG`5<@w zx++^im1G_Gj_2X$j3g62`T6)9oGsy+d>zAzMJJaT)jFEa(nytam1U8G3k_lE++4uR zd(*U-=2_BFm>hEr^U9)BfEJ!*@{oeJlDN%uBvDA(1!k9bE{m^Ir)EozbylzSrLKWK zpXUxT$iN|oI-jJr;=d6Md2dsnL)Gp0QFDv`sit$YPaB;eFw}dsdnpg;0$jUG;LfX0 z%74=7kL028MH8P$|K^j`1FJwLXx}YkbX%i;NH0yrXQg!D=4h_bu>;42`||vt=Pt~* z#9)^08z#pIf-}2iu67Uh_aev7HQ^@*Aev#ac)?wUAso{E3z#nuQtE4HJO=~!oB>&^ z2)D;T{}}6GOX#V~8*k~w={`_}-6FIdp5RK8CMIEXUIQs_WsSV2(S&m29 zg$e{Sx?j~Y->i%*ES^Ko=RbBG?^iN%FR!nkbDeM%X=SwkUz@jjog+txW;W6wQK8D|*N^P9mfVZ#@8m9VxR3pJ&~vt`S$6dvip zOHn{h#+R56skzi7VHr%ZBW6w~7carG=lkpyh5BkyY}!i;1P+b$&5tXFyP!`jp< zjq#acNCc?y$_6j8A2xT^rqsDJf7;# zQWx4VlUf5ZQsIl!5@2MMhEJ4)%dIjU`Gou?K=StX*)jQ~TLX6$B;;*(cNsw4+tLaG z!hAc0N0UW#Bg?1|{OZ`GlJPoN*TLMOA#jc3MCXWarDMSSPm&rIhIJ&SjicqW zr$O&C);O6!$xI4|OfzAB>G-@T$81Pwzci6b>uZt(O@GzXZyQ|b9`ps%?HG=%c8wNs z07hW#3 zOP>gS1ImJ&cK1Z5>@DL{PPQKxE&jfneS>8-`=<`OoFk#b#^3Wv@lyHTYdCNF4*-+_ zVN+EEPlelkq;@BRgqg)ru4^-RdYJa@e5r_v4 zt2dw?V0!Y5bm=3vm14uUa7p&4^>HV#2a*A$s$b+^7*&`!lo6ho{a?n#6Y{14d|3k2SLfkm{fVtPM0eas+;%Xzy4%4KTR{RG^XZohLC}e4; zcYa3#a0+CcS2k;axAorQkWbn`v-=<*Q>emB;w3Bd&4pT7-8s#NVqZm^0rM4p5cw1P z6-*&K#%5g#f{#rtVc 
z@eD&CFw0WrGd~Jd*wZFUWvyDA68en{_PR-}p5{d5tBtG1EJ6Q{_x=XX$ge7m;_B+_ zcat74|2^+aGUJ~d3^nx?he|ZoYVVmj!g${`eSfJuy1@_f4MijfpiO{7B zEXuD`&)_rAMe!db$ai&9DW5W)=?MD>Sg6of8RYQ+NagdL^-LUh2|L#Clx|Dbih5mV zd~=ta$eOIH27tb(bqlerT~PkkxHmSX+gKh~5=G_DX+bU3CDtBo+6T8&c+7vCf!nim`&G-%><9p6-Pv>GNru- zwHNnhf3n|?2!o61R7$TwQ}3ooGN+VSYw;F&|K!ig99`b%CkQ>eC5rp>eD0ohmd!%@ z`fQK;dbdJCI>-1WDe$c|Ulh4qhwMbwG!195lD>dq1h>2yB)7W1nXxYvR;QC@qmbt0 zv?qK^hDW*QF_Xak`^v6g_1U-eH6*ID=X`uL*ih)izPIBwdJQh-9#3`BoZ{2*imM7_ zmnDsE4)KOXdoL?RlZE+$n8niYH(4BS++TGJlD~*$e==C3bzwDXbjR+G_am52&g`rE z+OOv+F56T+y}i!h>Q4>_v2q_@v=v)ARW#!dxO4Im4;#C`)<1e0q0G%^;kT?Jr{9Br zpf6XiNLuW#%jM=VA(Wc`vpLE4`xjNI#2S~#YboW>gHt*?_pXssA~tgA5va9kt9t}t zD?GO=cUc-!3k6l`~EpU4mX`zu8-WF*H-L20AptZflrV$P7MB~=5? z%;0J@UDkWw{mK4d&o@Tgr6FwXd8QlJi=U;3aNTmbikq$XcJ8*1-O5UZ-40S01M;+2 z&SvwM`YA@L@@b9>*e>RJ!Tm0cn!T3jAv;Bc7d4ncJbdmr9%lW1_q^^`EPPkP;ZA4M zMmBGgug>P?)Ol~A(#H=|VorA^-;CsM8ciWJxKz5^?>tGeGubqvhlaa|3O_9``fBD^ zuU^zOZ=qoA>R@a2rp%V-68$Ed$7XhFGRjIM_)=v|C#6QA)ks!9?j|2aGs1%`WV`M7 zpo9vwQcZlxkv&^aXvMUk-UmVlf`J0%1kcy z1HdOkW+}e+UqT(?PQOOA6bfi}BDLiSCYzcdvwoa=->#2;#V~KfIs`3V{?A7$mnk>DNy&N~A zj79RM)t*1#Jd6b2Dmt7ZOSFc0PQqv~Q?)#e)eKdpj6|?^pmRZkuPh5c z=Y~tc>^~3QJ(wo9kR9MYD_dczlxUG2FpfBx?t^t&FB{89?Knb#dCt|^v~DzWKNw@+ z-}SNzQkb-+so^enF#3XnG2v=wu>t$8BrCe;u8hYH|4!R+yMz_nI28+lUQ6S-LgLVg zy4iNfaS!d(Jtt+iMAvpPy%x^z>!W$mebxkmA=^BurIV+Pl)r^sxkB4l{X4* zZhm^lE0jH9TUmomcqzG^;*6zoP)9x`pA;5dWZv~*sf$i^^n99D*a<49+C8{_aDDqw zMtM-BVD19Vve|64#cF}pEw$Fo3U=AFdF*MUW=ZUWZt%R@^xPO%(#lGE%6x{A6+g{nqY{cp^>P~nL3Z-w-v&XcaF)2sA!R;W5rN}h6!+$x_)7dsW7VcV# z>Twtp?Qp!LWN|QCLT<#`sbJLBA6;4oPfd2ctA`i&c~x(QmsUXmQid0Ge?`pwgZy-I zbEhw!`Q{*iuaSECot&JUyrSaDr-sWmz8k^2Wum)RP{yc77b^v^gkQDgD-#z~3`<3% z6P@G3PNnO|v>{I~CQc7hts-gHqJ=;6D=E!<=7+$+E+1lV@J57I;H0$|3ps_-Z29V? 
zy;mrDuWf!ck|D(`ad3ORG>XpJoV!l=1f~dKnWTB*?p{{{)}lVHt#IA|J9Y!b6g#it z+Mv(y<5Kg&6VH0RGR5lJyGxRb=JNsfWt>e0!8DBq>)5DDaikAbA;-`6=dg2a6jTm= z=EPVLeGjpDx)g-HG=CSuH}KuqyVD#=c^$?~bHmkJ$h`8(aetV8OeulAp-Ol-b|%&c zCyfJxNm5(cyE&kM?&VV}<=KGCH;id4y)b+p@V^V1TuS&-cfY@JTSl7NI}70*WdCu9 z0!FRe1r-+3UR_^z4-8B+e_AM*H#bWbxotva-Yp!WCQ#h$xg77pdL~bV@7uN+VfdySU6Fo9Noac|MaO#y6#n32!M@wCdHpLDgqALM+y~NzNxn2~{ z%7rct@uwEi5R(r~Z0!zBvAP2rw|fB_bW7DnALmYAdOQ1kWn0!ux^M!9zo2~c5QT=H z196cD*7Mj~aic-~y_zOT{iX>rSF^uPZxmE>pmE)x#j~E!!>V^B8pmU9_d6V+lh>q{ zXR^OPtg4zCBM!3LZ-b&F>*`?Ii>N(SRv%>!B9t~qUa=IMyzj|5p4FThG#E-zPrQqZ z$(}z6KK5H)lrmYxkP{m1h|cRC9;v-w-H3M2ae7r)NsLw&HKx1Z{DYd0tKoJcEz zAjKSv*UOIHQV;RxJ_zrsJX{x~dg&QVW9J_KLi$b{dpd4HVU2D*ea;if8y?Ymrdxf&8^JDc%Os%t#HJ?WIKIgtu=PC zZQoLCKXatF8`b4>Tu@m5YX6>p!+We&b*FuV;pD}LxJ+^pDAnCX-v z^({MZaWu}8guF?V2G=V8&LWHC3anEWmow`p^QcFRN5;GBDSsZuqZne4i))#M9cTL z48vhHr;=i#%-JP-D(K$C4G2th9|^hM%*$j;-m03mc3kSix!)b98UYiK#N!%sP3ROT zm+Hhxbugq3@vL5*K^e&D{i;+z9q(S`#vDx~FiSrR&Ko3P;md9= z+;|B+E_4<=uJP=JxtLgAy^D z&+Hx54+ae~cEQBE_<_D`v*~V`i`i#7XXATZseX7pBURKktPipiiBkn$18S+gUnY0f z!DLf0fY&m%uviE426Xh_*Q;w4SD8&7os3E|19XnvAWzr4<06Vz@47Cy?jDkxFSa(> z{Hzrzk6%lb=FxZinU7B#70ImbQv*iJUjCEJ{IicQU?04PLDHp;`9>hK2n)kL^rBleoxaGCN?z%vNNv}xLt783-WT4xM$>I#FNrdb zs&p-UURY6Vt+h9n9fa_TeRp!)7WWMX^L3mg_6buyMQf&SpHjiy@RcKQkroP%z zXK=ekD}3W{bTJk}*@+06$ir&@erm<){>%ykhQGhRe|Nq?iO9&3<)t$iE>X80qju6& zSzUdE?Rki8qOxy0K0V!C^W&6Gu}HP!p?h8`l^2B{OqTZx=LubJM%KFwv7-#rr4^o{ zSK$}$3|;S@Jv+N$x)l?X*YKu{Ddg^NSs(RbZBfo8R2roNo!I919NpFXhH6I<{24_T`CJib@-D znp-00wi`vnk6plU-dT*#{7(uyL)>H0CSkk(BRZI_xw2QYDAfZ=2m#CPz}8xuP_xoF zs4Bt~5)yJdPF$1#tvXJE-QhKbOS9WNn-rJ17IC1eKHBuHH!@%E&fFKE%#xK z9LxB#AHKMr7{Sn<4A2086X+}` zPR%BWO;3G{L@P}|^>w}$VRvtnQ|^ZqLm~h`wya8ivs?f|$lkmmg;+{^VK11h4(yoz zDp!L9=2O#Y=-H!4q9c6h?k*qs7%eR{+>uC%BJIStyuSufEk{Uv~&IyRS*SWEcg~)7U*sG@j7wai68t>2$wHeJ~T^69k z`}jK@dq}pu+0+mK)M}n`hmHM^$c7E?-Qf5+jF;lsoU*h>Sm@BcTCT@YoJWWtHMU+3#jdLMCA75**zcwAs@dPfw9wt zY8P6WXs^BhU9IvfeZ;9AkoZ;6i0A^)(KEsw9R(HCPFx2mkXL^oAn5Z!Iqhh4EyOP2 
z^987k980rI;uwQbP=r+rs0WDuxX{xMeXhbro9`I06kgyHpr}jvtL$(gh!>PqO!f#i zTk(|a&K;^it9Fjn6i9(C4XeG(r5zy02|Gx199T&>iqc`UInJie53blEN?!39^*ITz zb+%MOYIblXx#VO6$gC*cf++Si3yf}9j8wtY z`{m7sfu+Nz)9#sx6u-?4$Dn@7`GqLG&nKzqVO}->D zNoA20p(td#b06!pn`vIP-~0-6`m2i+;i=%%oU2)40B|k-2-bLy5N-)HoI&suvE{y6 zw6SJARRwidfCuFmdD{+ZR{ovbl!!nYcX18##maGl3^vA;)-{f*`pN0^CSi7 zwNdh02yNRIuP9@fDJUGm{jANtnc3+-bb8gk5-CCaj}qhrFRo*vB?p*8yIO<5XKJwB zwtah1ab>Gh}-FWxFM3&~gD zcStOc6F_YQVT)%h`asH zZ5YseVm#M&2LJyrTRDD%DS~S1P%JeL7iIND{k~4Ttkn2FTy)Bp09+?B6upepH zBurP<`vR7yo7ba6T;q5E%YZ`ON6cCsK{7Ju_^ac$yDxw&T}z+HXl^Z4(~V0wKL*o` z$^f8dpJ1P4)KKRw+K4NhMJ|o5o*+WkY?ar?7>#l-Uu9}ic#!S{D{Du5wXRJQJ8isl zb&6GKA8vD7ADmgD)m#h%ihAC1kL{mHlIveRv<57e2m9x{K|WdLt}W#~8t9{h_6!az z#q^I~i8$-V^8=+g7;-&x0Q>U-<`sbTrq(wF0hs8(ndFk~b$kLUK&4XB^y%9PA~=+h z5Bb%70!*7%P-~B23V8ABf0iTZn~w$_fM1nYmDvxJSeejf%;~iCfJI9TwQteNh|8pyCm^ zVw9M#+FT4`{eSQWR`6h4+`Izo&GJJ9dOG4F6HWX--KP916MuJR`842xM~fd}%P4dh zvwT#E>j3>Rm>bg%+K5Lm3JvK5EOB+t(VYj>WgY>9tMk2n^WE)>sEGO=_3C1(EQU$m zO|z6;935@@;QSSH&~yd&9RLhL7uNHz_ZUF4>#e3G7j*acC?6~apy_)2$5c!d>)Mks z)}%d1`_+t~R#>X`tGkkkdpXOIeaAfT$*V;_M=^eKQzPP6s}=q8>a=IxU*Jq^wbDI(BN@qrYb z>I+zYHIYn`IVJC$Gs22r)D{)+>Hk}0PfuGc&5n<%AU!= zKWSg)A5qcRZ-s}zIYH8&V~xY_-AX}7yltRz0^RmTj2%@X; z-gO=LKFY`c!(a*=@vLR5Dcg3IM&4)s!_9<~CxO{c^jOCGi7k6bn#@+^wls}QKXV81vKBvK3 zxes-l!424KowWSCll4VACjZx(Jt(>ABUZ)r`U{i59iGWlY5pr5JhmVKh)-1r&jfN}SAO9B8<0KLgA_?1O6@&0YE zy+w)nzxsRPj<2x=I64@ZD#lviAN{*3U?XfumEJREgde4ssr45Y(C7ku z4Ct!xk!ejyR}WFH93Wj?VP6|kGF-6KX4i|s5_iL z;a9Km3WT)Au2@Azr)0sGt^F63V+r)MzECtd`erYjJz5rtgirO7Ehl8$JiVTUV!l_~ zX<{++7pkym3)c=GQ zIB*AS*ljT@?G{mwobq@yXwci4%I|I^j?M;4q7M%Q`bdf3{uYnu^&a>j1 z-}~dk#^JrRs>$x}ii9uPETF0pXyX5=T(lJ?xBN$+zL8mm#vjrWafr0n(o&E^P*a89i-P#%Ok9^O|tE*@Geo+o!L@|k~K(tJ%tA2c%>gEd>W zu3T;I9L^%TIwR^rPpb3%LB&Qw{{FDY)){a3x^ zY*ifZW#^EH*$x``AOOukHAd`7dg#JS=!A1|=K?82y2Vd8M-(}0NGlc?Op3SU>f%aO7c8l89-hDsmvw`B$Aoe{M zvV_joh&s|$*Q0BH%Af$3T|gk}FoGjmniM<^p47-c`|5uvx~#r~A=ajb_XdCTa{j!o zJyD+}{*iW&n^ZE3)6vJ=oqmGWEq;4}rKW+xz+Q@v 
z3?7bKRYqlVUE|7n)1-B^3kwZ^%OWE9w(vXNEq=;eC(MDSZu4*~bn&Ko?L<{$gwvT| zBnB3m7CkcGpSSJ=$qqwRF>wuJwaANhQwL?r5We<^mKu6p0hY^!tkrBt4inN4Z8vOmqbL9ATL`K ztL32*d2%`EsR?eC+W-HaTdAJ49F3CZr3&xL08@15-H!Wl-+eVnK!o+F-zjdW1oLUB z2q5n3G+Je)rjQo%cYinaO0CASAMrD|#fDV0TR##453!c|=Kv}kb`)((WBCRaKZ+vWE%nYqG)xho(bC_d7G`?cRGR?^B<5 zRP#t8k-zWPue-oRRCU+SshZW#xbADl$&yD8BgWQeU9(=Mo^;`5 zE~?S2a}^aMLu?=O{E?Z;Ge+nSiYzX|RJ=z+RAJ$_A_H$Kf1UDO710~?)IT0c{rz>q z#5Zlf5R{-mfPY zO?kp25U<|wKU~GR*J~xF<>GLp=PU)?fQ3Fr%6fXqtTBCq5uEeVoNtuc;fn#0XO(#U zCIVE6D2Cfj?vYn-m{t*Z?=KZT4&vJm(nnukK|5Wb?6Km!2`{wRGz0A01WZN+pwaZu zfNL^O+T)}esq7g4+nqf{1q8@3_McPv8ik4?>UrXJhR#qUdDRCCb(}V?2*E$V2)6zv>#)Kwt#)rDRg=y_U0cDRE^Ca&{cuLUB&*`K zqhO@<9Zf!r*Ja4xy=`^{WKPyIcOeU2Wzb|AWGN~{Cvty+hL?yv6)*}W%Nn$*$)|?I zrT&qVuX26%%v%-n3zS7A<>{|6>#_zdzU~TdT-fd95xxTl=JWSU*RwE2yendGqn$Dr zl?@eyzGh~pk4c#uU2nK5%0Gr2WBg%0+n#6748!NAziq=kF`J*I} zL;ransE|6(>O>V6ROG)*%KJs9p%TyeB2cu%K}F#?r0>linfZlVlp*U#Y-er|FW;hg zP|>LnwcB@Qn}Zb%X}J-65ICVU0AjT+SIjU4i0@GG6=){A6-XmR2ga*K7LV{ ztG|7-`n&^OR8*D$UykE#Gv3-aC(;rUJM;2E6o~cm-vu#urcdkf#$rWF^gD56@89W` zcpR;~ijVZT=)JvKsBQ|*5lp+xUn(5b|NowP9kk1sqa#En$IM13Gr}^?&Qh#R8Iyp3 z8D!=}Ywln&+T$>RsQ6Hoq$w?#U zzMe3IV$FdIGn5Q7go0)FSuY=<%>3uqD53uLwhY8MZQ1qt0_$~hv&m%WqNm}$YqnGJ zvRe^4eJdu?5HFJtfAx?oTZ5$ktN3ipAd0op9P*QA#huCAuLOalW^4*IKmvyeqm8zjq|WQVt)Xp}F- ze?cWQh;R^kENe_mH1hD-1g}-EPmd?Vq_~C42I=HRjEU1NN(jc2C+UXM6LfnuF(55( z|LQ>64f`pq5DF}?C~iISue0%21#S_8MXpxL*}AD}k>9>m-a9puLSxQZO`P@WNTGE# zst%aX*KMj`U<7 zfiZGs;`*6JnDf>`$>#oP{0#ZUy!R5#$dDG=e+-rGfc*i6IJxv2IB`;nutcL|Eybw+^{U39sn-k;Jv4X%!a2B+)X=}RimGr4V*%v)tY17`z z;lp1n3P_N+{=Y^?rO#%=wp4C%Fb=`K+IZKVA2X<(IXtT^lyMjFy(i>|Y2dsTzyA2j@Tp+g7mYT7;o3+;Q&^dIDQ2BoaF7<4f9RzsyQR>L z%jKa_XW!a>njd*1x<4wfk;J;;zAx${I=82wyfAl4oye=Lt-#N@gC z_L?@o5O0mDbeQS=p@-;*0!@AqJqm=Oi}BAF#|f=;XOfzbqRDW3Y`aXYrt=qPDV92q;w>2|o_xL&b>vm2smi+V?99AfW{F!@RfB#37G$k`^rCj1g_vvjGN$AIGFVzO&2PrzPACp%rG^~z+^WqHsD;STu-Ugf& z)TlAT5LBUVRq;@q+*Kh}Ro996lg5%@!3*gjth*-V?Lcwg!KuQXjQkG!(q+6uIGmEv zbQRj)tDq8D!hYsJg&%5m8%e##Ig%U28I6^GtSr9^^bB6Wz=Fv 
zDU%n{Y>G#wG;x^0`AWXV}iD|X^d(Ma~` zDU+lem?`<{>1UZ~3;$x}YW{HpUV?qX;k(t_8P`JTf)~*R<)og%i+qrl^}i02L`J5` zwqK2!K|*$dnRw;tOh@@FvZ$0m6KCtfv1=BfS!59 zjq`Z zm`w(DewL}z_%v^?u=?J~u4kr0!DZh%RTxIc;Uuhk(?K*c{jeRX^6oG*zt_!WDMleO z^Zo0*0p8~Nt6S4O!sroqH~+5Jwo|g_Dpk1x>{{baSI#l=P#^+;KY1!0s?-O@%9+6; zOBx*otoHUt5<0Nz_YyiV&T$dmkBxI>4?JoXhIuYJQp(5DzYlCi{a=w9-2yTaDX%6& z8P0WGvgrScj-((d~MQC1Dlk6()|o#q>;hxE4Ey6r%rRISQ`zjF9VI+iDKFeM_1oe zQ$~p7mt`x{;PooVZpnyWe}CY?L+g|BCU%HdywD#*Cy|lvxCS!6bl?z0-yUph6=3^m34+7{+RoJ-9dn6%vVVc!9=cy4zdv|xj= zWr=-}jc3B+)0;}KBgU|LUl@*f+Yq7u3t+rj;k=r5vP&p;6rK~~N>@nBvFaodFasxdE}4LRZwP_Vc1REdHLm^gvXS1R z$*?TN`8LkQAI4=(3GUmbP0#8l<1r$|*Cst+W7TFa3Wx`@JY4Iw!t;XS0Y4<|$8@a^ zi4dn#e5(Al>5x~jSWMWbWT+8Ss0d^8Lf%P`GrF6E;k3DrzW11iM=nnH0;D z#tfAW=tSq#IPzr`4$;Jp(ua@o4!fyFcKYPF-dr?F6{*Un0fuQK@R>W zkqpuq#73BP6=Z$0^Tde8r8 zZXn18NdFJ+Ngn*vG$Uo3RsU?ZUp{%+(#|JyJcGVy}t#0 z;?LI@sXk2%XrI$XG@2VdhQ$8O8rb$^`;8<7_mxEip6mw{<-Ohe?$$K!5+H;Gf;){hkf7a-1ZkwPZv(mSR87^)kEyR}YKH0`6n!{n zpS{+8_FB)9T`nh^oU~BKBSTSCTyz^}nfmhD)H$tWgzZG!E=C{vYA8{lEpgXlQ@Z_E zuM|r^1jtppiOi9rty4P9?Nl4Io zO@z~Hcwy-Ll#!Fo4QS*cx|PO|{JCYXbhmFmx8jrJk3N@thUUS?Cx=WbomzWaQ6+Jr zO>Lpw!2Qe_5CyB#d0Z+%Ce0t&`X$b9ZAlTi9y<*D$c~RHc7a4h{cH;y$dcIMS`DZW z`=J0_P0p~%ET8GxUqAP4>Rv`!%)_AF0wEQ@zFLM<)_5=Iqy*kJZH8QBhRDZT zvA2KZoqitWNY>el=RZ;qZO3;THrbc9Ka$tDh12lYE8FCv$d&vtyeYHrPZaT!c2fS$X ziyj-p5w|zGaK{}J6!26bwXc!D`CJiH!H;Nnf-$8`C&bJ0jmzw_MT;uaPcF-L-^AIu zexY)X1QC+M8Tx6BG`*wl%akT6k9eD|W(911rt*4K^hegPwFhObSVg81t;@J0+xJ7I zWds$9^Qi7}|4}vKQ#r9o4lORFnGUQR4cJK?c3`e)BnLLT0dM}wg&Ke_1>Rnmc3=VjpKaFvhc`>SdPEbT@>_<9@tj_LJ995((FgLL zW+8iP{EF(li+NOpBv`KCD;(!eEt;L$5jrnQ4Rx-4@QSMQm2!FMy%poHVMUk!?08(f zsjq&hq0T7=Kv}iAng;Z1HnyU1F0DGh00)w7p@-VGh)g?azC#l9@2$jaSP_>4y*%=x z``=p}u21rI3ESA3U<4F9;x2B|!0NMhgj;+$h~-vl`E5V+vNscBlGv@lw+vImkBFMM zI@7J<_wH%|*Bt!xDmkRZxu^bo2@Z|($J?seOaHoIziGH^o0W7B{OQ*6+7@`pLj|k$ z{ah&mnXO!+v5Uv8k;uu;i9ISgE8V@jDQ%&E6Z06EQYTAM#LVbO^+fI(>+oi2bVm;K zJ9f=b1JH9y$4}hWPR)_m+yGXNl51p)WWT$((Tl*uQOeJ=XgRyH)5UvGG48_#UJ2|` 
zN-nO3y#GTIK*82u&zO9qz9@5s)KpbxsnlS768FY&U zngMC+=0w)u<^|LX_^hy>fB*Gxj6`;>UVZBHHMHS7%6geC;r$gz2;#G7jUx6gOjVcS zbpGwG3VmPJ`D++)DyF!qzTcg{jeIYsTF220)TyV}4nn1q)Oe6H4AUQOxq@6R=O&MT z^-V()c#6v@CX&PVWT={U1HECeHoXxz{W^evEJbHe7S>xk8X;1Q&T@S}hNo^Lr*wGx zldL|7f8}O$l&Ry%V+GF}Sqh+vYoUQM8cyv!KL7unIJrhXzmqMWyubW*qU{C8`u`dh zoX_F^HemVx-OU`e^e^5CsNsvP!l3pm_)=HNL#}h*|Ht~-8r+d4(@~f)fSb8Xjhjr9 zhANik#v5Z=1=TH6y{wmNUIcY(LHh3P$}J;LWw!bCoTt_*$#|E95Zx~I>L>=~3L!DK ztpl}X2IXiWL#n2c-yX()!%jFjD@00kuFk&F^vBnraz^55r&ru=-sBr7W*`-Z=pjW8HnN?dMS`&e>@>wxppuEGv03A!Nyjj9p z25;P^i}%7SVRh5F+!}E{o9x|nUtKCa@h&;ajEvapJ=mGXPf0~;^V( z_svx*Uh1hZKDtexef#CN+drP({_%u7>LXEj5K*`kVK_K~mrU~!d1E2mq+6}Wu|OwC z2*Y;dpC>7aS6%Z<7BTC&^d;%>B^QVDRk${wTtduxGK(_pDE`YQGXsNQIWsRwMhsRB z#iABcC!U84Yd6nGNLH$#;GLvoWk4J>NFNq;i=Fws38@cyVM0!T%T2m%^Nc$`E7MTo zaqG9O)wqnK1IwsWs{yv%KP?tDdH&5*TBlc7SalqsY+WjU4TN5a=C*q$JF^zMw$n|V zJT-ahLB@rnx`n8B_gR<=)%1-Uy0`JnEKyY+#mm(~YD-M&@nzg-tw-d;Ar<~^k@$r! z)rQRr?VJI9l~q8Vc6OVqNwxY@dUdY?>Q7nKmxz=Wm=Z<*dl**~wU>gS-=;-qK13xg z?6HJ;%Y7EslGi3qdBg|Z%a}-QQHUl}H@YVKi$M&b;`dPz-onTsDoQfc_}7C~flZt4 zQ9Sy+)xPw0Y%EN)P!|OsFj|6i9-o+=12oQ0j(@nIXImXly-Q8@y#`>c&wzP#lclT> zGO(0Zfi;H@u~|=1MO|~%7z@paj{C4h5=}SYgt=bS^c|%dox|#I9$+$L9yE$C7-c;Fv-#(aJB#9MoPfkt8 zxMW%Tnl*YH9$^9p1??QBOC+w*S9&v@{23&$E7r;0XVJ3OP9ynd?soCYIP=5bW1P+f zSlNL$!a_>tHDl8r7S5pXa{sx3{hcjNHN3EFD}=Nc;tDxZ@e^3 zzOR2kK2|9hNemLl8`=4M(YkZPAdkNk6itb59URu-YYO5tg5K^mUOVQ^ zps12d?P_>;@KJwIO2b}{#PYcV+l=u*?v3nZ`m=Sv+ z2;ufymee~g&3-GcfC~~7nq`i}crU{|l&How(oM7%;=O1SMqU2rxng35Xm}ZfMg-If zG=j4N2t!`>d!6P)i55lqt(ky+dyGs>s`~nfI~rk=tGHqZfmFVKp34kJ`S0d{@GWX{ z6Q01LTMdrrBn!%FJ!)zlDG)oc!ei|C_{@F9B7(LzH=X|=)W8x^qUDV0!HH%`{bV&A zU5Du%s;hFp6-yI^umY0(FSLB8eGuMy>gI=1&4{6~o32&Zp9^UH_RDd+Axacmc>d%c z3r=HjI2zPSjQ;)+#!^UL6@svbpLg^Nd9+&5Nj2hdGVWm3dyAZubg++a?joXnT|DDt z&4Q(v9MT;JA;f6e^ub11ONw?d6%oK~M#N6N%hn|9r&6mWh?L_|bRMMa16t&AwD6Epw*E^0`M zx=-*G4maQV0`psM&QLn^ZD&{0)PfQbxmmN8(Ec04O}LF-RfZ7kc@$2Ic|PAianKA< 
ztK%^cu&dE$?Y&GUAZ=&g9V_!!kCuLUUW(%ex)v}XdL(1W$;o$rhp`nwpQ=2J(q?=kJ4?HQrr_}I=3sZAJEiBZzLEDqS-PS-nKyY8kgd&o4jKvcGR~8f z+XBr+Y=wi_zi;TM>##x6$|_XVmstjleu^PEM!3f~reKjn1|qPuqFy!)Kc{$Wyzc7q z@+b5!vVJ*=>*riUZQm`l%hi;~s$f8rJt>nuX&bUCa}fMffD0yyKPt;nd~+9&%7MIY z;TRY_f3frv_t~W@TtB`54rm=chB@8{;c$?QCU0>pJ2l$($VIF)A62_dtZfVGF}feA ztuVnWhjsL|8?^d3!##kGpYYMF+0#b$>#6~t`)^Au~%Zu7zAJ>Od97L_b5HKD0}`NopNBy5M& zD4%F{8gd$J5H%moidc%3Ys%D?Ws zYdG3U?WG<#@zJlX`kiqdbStaR`tT^DJ9_gq#;(doK#u0#NRR#z#u@Iz)#tAl)*F7Lu$?5pQ!JSeiD!@{9AOEFbHH$Ds%n#)xT| zS_0Y}ZA>}^pLW-_$&k5>OZETh=psI8MJU1SiU&|C`Mm+Tx|C5Z*}r)rikesaG$34N zz!ag@cYiNh`=)Bf#nfkMXL8af&u&lOI?U$N_w*`f}T^h}}Y@H|U zoF~>QyFT|sW4ShKh0KJW3d<(SNR9+5@QW;PrprG|mu57kBMg@$Jh!oed8qK22%{KD zBc2gP>5aox*81S6P`Z*LTV-e+`@%%aY;AFpxJw@WzZW*`=h>SF4j!VVz(Uc|m+jYH zWg0OwJo^WjLq={4B1aXvQF4oGpHvOheJp(30~%wla<-C2 zk6EnX>-SQO?kq4f!>cJTf3f8Cu{)#PPu)D(Dw#Yi9LC09wD-$^l4?zbCK?fC!_8Sb zurq&3s+)gt9;sTR9`g@q1h4~pIHZ6oT z5e(y2WD&rYBkVNYy3GA>sofu%tpx+ct-90(-p#ab2G3OUTPk<{5pg^2lZj$uW`?_A zY(~{oBnOh^=G_|Amzc(qyXk(!%B-*3jlt;mx~Ts#PbHC~+l4!bv5QH$p-!3S z;}YZ6K{0FJ{bTC$gf#&eAAk11dZI86aHpo!T6NH{l%=160hH;AV3MA+Y7rg&n9^#J zr_Ay5-a4>%!@bGXFPEHu>a;&n)Xz4CsbOM)c@+82Y;WgY`HUz>k1sRTfX?gRGJ1NA ztB|3nA3ExjnIw^|Fb|(d^M^{wGABRKVxwXXhY~!*8af&)zD7Tl0UCAha31~2HMh9I zLr6ZN{4DcVc|f+(fG2aY5p%XObN2T;+@p0Ayh{Qi+k*Pr4Pa&Dv*tbMgW;)tzNU*X zymd((=K@PxEC6$a!ZXJe$HVJ!I}26VuCTSx86{CiEubBSZ0PsUOM8BtZjEB;25oC9LV&kh|7Hed4Wh}6-&hza&k6iVJvCaVOc;ms zd?A2_%WXzg9EW#r|RR_gF`mS|0ig{=$hCMqL`zGp`6{BC{Uh?uWb4dw^&=xAw0a}djQ zV5EC`62`9gzPj)celqyAy*?@O^XN!4CT%o9jyX>ppexi%VY($ZT86_bwzLku=$7eb znbpe$2RCtc__9XL8j97`)@C>W6EHzi(>y|KWH0h}S3^}S5j*0b^&xm==k9Zt!yVLM zO=jL`*>UCnQO$Knf{(YPhy2V7ttf{QfUF43_K;62H^)&fng^oZsqhe4@IIgp2}h^3 zn4FJ}_wVS~+%~{csyyIaJ zuD(bF{TdH$__Mk`S0^ZX5{~|Aw8#2Zy!3A>89(n-Yz<-YR`Jr)at&3Py%i_VT%TbV z3!Q=hcJxX?<6Hc4iwD4+f8*ibq16_OmqmBw*f1lmJsrBS+grF}OWO$I+hEXW4>Nx8-YR>|~Y9 zofefoL3iCf3(iRk+{GvUCcv$GWRb{O)p3U`HXRR5`f3>-mz#9b0K!@i|1XJt_PgI> z{(;Hiu=fnzxC^A5`05|7EFz2G)=@;AnmFwWi|nZDaoNEt2s8jy7~lfwJ-gJ 
z`d&ayHT!HA65LH4bmV8}*l_WxZajBvA!c^(38aTTJkC^_1Mk^cYvlmfRvszZ8Kh_p zZbbt%sSZeHy3P){ytt8sp%oY+6VaPgjLK3q> z4qk%ZXj1Xgbn%uQCe=~PS4cAEYGF_MT zJpes=f|gpU0`* z0%%d9Ri(W&`^3*=1#~`FIO!+SRZllG{RL~6|I}gf03MqU)kjF3gWpQCQm18^VCPLb z`)RvZ;{u(*jhKTr&FcDU9csM_B&Q}m(}#9rFX0dE^^O;7;H%jm;DXUzeN@&C4rJ%E zG?SF*5Xka;x#>zh#I$~7+C5sE;{XG`FaT*D;(wm~i!T$Sb5l`zNrn3!ga*(WPp)@g z*)kPg*vkrh(3t1-eyyWnf5)q2LA+yKbp`wMxqz*^SDHPzP(wILxktRp4Z6k zZT1K5YZF za5p?7Pg*$LEd%O_8iWao&g7upah~j9b?HScEHuNeR(uv6tY<~9uqS_lnf>_?^um7< zKZS8SR9#aoOSs@D8`Y&QnxS+)^Y%^ay!ZqJ);$R<-k<|SjrABKOs@UmWipW7yVGfSiDNa#n5aHi<}$X;q3-8+g5c$lLU8sGMN84hA(1?A{J_9TqFx8xoaK3F$0Y zuAB-tdnJ*@5%KEXwtxuZ08@;yX|PPmQ;{k6#ggJlnj`GguI!QM$uf$fVDu+x><)kB zwdnC*REWWPUK?NFK4b+F$P8C*3xbnE)P}ju_||k(G+?;!S!UJz+Xye`jC-t-u^g1t z8$+tW2H-q}kPTh`=9$Q@_NH8`B6{{*YeVR78P%BcF#*q$mTFGhEq$~sax;d8g%PUZ z_j>Gm1rWIiVms(Kx|ht(y*+=QMK50g-)5j`5kIN4_0;?PT$e`R`j5`dm8IA>%1qZe zz`0jiAqv7tpN*y6+wI*L1TM2U>#vuz7f?JegXwTD9jTMVL50hdueEWw9pMtmyj-fz zYas-5Fj>&!CS%QS^3TYl7}DiY6(`eUyu(6Uq0V!h#I*M#%ZG@P7+z{5EE2mq)e3|r8W~*(X zSB0`a8bE4<6o|iXd;ReN$BpdAB2IsZPELb_%u;B?zm=_&PcBwZ63L~fAG&d}=bmoe zQ%@x-Jx9r0tL}A2)E+Ix7!PlzI5&62E>nW^Gg2iMgrJL+F*B8_gOL_Mlg?nD-LR() z$WtP?g3bUNK`XS`ucXmyh{)NDS@VwhM{)XV5)a@xWC5>LCK3`?;(P zm~w@O4?ag~)3x#+gW$u$)M)vueED_DX(rYDJ7iPJRWDOg#(RvbnC?d*x300l{Hiqp zEgpF{L(RpikG8p>p?ATRiTIy=uP!Sf03l2H&=M|fsmF4Se)Iw9_*KK@fsLUo`}K93cpA*j zATDz}$^MBSw%PWskgzi?YQZ{;+0+4Ykbj0@O;|y=$?KQ{(@|@@)=b3=tPTQ;I=&0VO z0_b8B@e>ZaATw4TXe)8-B- z{m$8KBc+Uv)8nI}fccy1cKrq~JXxc)P&>9D{vqif4S-$P9WtW`5q2-oBPf`0FPI^K zgtE0;#Ph>CCtY1~&uj;c#6DhRbJSn&Vn}simB#_q=KV2Ht4fW!k&OTUmyT2-nS7sL6qSdn4h3M$% z-9@>0}5 zs1KMcoxw_|Ij|C!%rHevMdz+4@Z496&CDRNN%e7JX-U~bAPKVfkBz72L8Zv)(tQd% zCKdffvjq_ovT!kSwJf2{FDn_G#Q&hQ1cjX@)^z5X`c@J5mFVaaz_zs& zzNJqeWo_ZnBT#olMR3HGfTJ*8?Byhbd3np#U3E739j7{0c9eotcPCy9i zI{y8Mb_IZFaXh231X+Cwa~K_ADP#H774&}b77^$JZ|4&4OSx~~l!UCDG5@*ur>Z)ajK%0?rwDjV;ehvlI zr%#`ft$cX#_+$GoJVB}04@$orwX&2gf7kvh?}`?~=)<>1HqRU<#FmcqLRG6qfeMh{ zih%O5GlN~%3`mysS)brp^X!7RXmjx&!m{=}U?0?EnagxE;_hspTpX@s-Ak 
znX{DB;g?s&S#J`>!jSU#R#5hB0QAE`FY#eng|ck58^G6rl0Cj8+!zf*Qr&}Rk)p+( zoE%#p@kr>?Pm4wR)mCdOvkkzt30BGHRm$_85`}d1p2=CvVz&{dVWm2VA{0 z3<<)N&sbSYhOQchcwcQ7091U2Q)=pM6G-k|`$+mQ#hYc$ix;||8 z-wG!WgY9nJLU&pjK1z*e-g*I^rpn?(s1E}feCZuD4y=`=t$K%4{@jQf zIXBf2q6bY6PS5r>xW82F2=5&Gm0!Cqet*V8gUH5EJ#ScJs-!PwZ9;acNyI%hS7=LK zsc9tElhbACy^o;GKzuI`QnA2CFm0o{guixLJ9i?6rAwzozev3%R+1RizX&PZ~0~D|hZb-Ob$}`uk8sYh}g){F1$xmzWqI zghFs58Pob!|9ECho#0Omjrw(UJ|su0JJ=I{M!Xs8Yj_2#rR8JjvK0*2N#{|>tI}?E zJx%O*@|=v*)nu9ufoK*AFd+)>Taw81WTf@`x)_=l;onaC;?Z0TnS#rfR~p9K@|>-z8+|7i@UvT<3NRgdT?C~hdfHe*LQy}g3f z-wY6oyNzFC8U)VhH8y!u&{wJ}REr#swChgP#_x|re>{Cko{{nTN$e=zH7QI3rH5f# zlltU~tk0sSTTy5X+z>*8I6G9bI*p}Z^MQFflm<-9uYH&$6JRMDd0N)aQrGtw*n6(L zInaqz^M!kM(9OZa$$xH`7>js$|MH5^yS7s&rHG#CO{7sB8QQnd!rYW{wLMO4DMy<) zPd~(yQ|H3d{a$vr`q}&*Fw9EY20zS_3$Qeel?pYvGserHlUh7zWKvL57-UJ^ibYGa zu%lWT0VSNRzqNou$2+b&7jv?2FzU)p0~ssr!%r)6QfEk(UG3d1f3bDwTPOFTj>FXFM038pk^ zpe0L<*JWsy9KbisVxXC6$z1WcyH;pt+Kw&lpEw=&o%HTpS8HeGnCgZW{r!?TT}Q-^ zC7o`}z`-Lq$ZjwFf@EjkJK!Vo2*d@X53JW07$K7rcz!6Ft2j?1AsjG*rbKNg0 z!A~PkPZ^aLjxn^a6CKp~w>qg|o-VepyV6uRNruGFkY((Fod$wB*i}l`(-2#W>X8Doa zs8OQCw6rmayCS)Kb9#xiJ3_5HLQ^L-!5Heu8@+xw?;QXZLX~|~ zoNfn3ezQrC-&@1sPTFAlGQOagP`)MOBda@TKW1`hp;CN_-Ge zK{GXNvQ4-Z|DYI*EifDiMg_m}iSIXGa;hI)E7Pq{hsj3SHS2fDHyU*e>QvVnQUXH4 z5r55Ly4aF23mz2N!>s@oI!jtgLnbDWm;oNnog{9eP*>b3Da>>cmAx?~UmXD5Qri=Swa9s&CvnDgWF zu(MD|>8f{FU=G51q;q^d9$NgQs~#-NzK$odrZJVI#BSr<&@SDSyVP}j?#NU8qiuLY zvx}6!`yh$?Y)Xcm){-N#Nh*B|2{r!sJNkG5vbV}%x@v-U7)(MMOU7`dB?^~84?l`` zS##_H$#y4_eO%yB=v=v-KobOr1giB|XN5ly2cg#bRQIhz-tjslVu&l0ZUI}xcWQvP z)*aPG6h;KPkJZCxW7?K3%`w<~>4yHc;EeaiOjCDqe!u)(w0vn@w=>TMqo4K8x}<_K zWR@_A6VLo=QVe-B4>`UPdVW<~8$Zle0|wxon_sBZb??LuVmJQnW?>$)KCvm&7GyI| zp#Z@fOfJGB2z}o7{i*lwQ5);MacgJM+|$X|k_o*uoTfh;zv=rZzi^_W2MC7fqi}U} z%`&cfEI~Q;QLZh!;VpYg-?9g(D(e>x)1T%Q0E|{3-Yn`XPz?`-Dy%s%`A3SNtcac$ zbW7j*t45x3nxT-hbv#6PJ!VztLCdmb=d52Mh$b_hVnHquus3Nd^bVjF7c+UU>$ut+ zxz`+`fmZL@&Iuh3TDeF#IXsQy<>k#k+GtYhjG~6noPhq=SuL0n2fsp_4w81Wf|W{l 
z+?@$Liz7GI!*qFbRcGtR*R6NpV8t-Sgj+r0e$5@`7~S_qT`5x`xYL_QKnylHQ(TvK z(l2}-C8@L42b@wof;OW)E#suVISaPh`0L=?em>9sPR*EZK^fxDXM;7dV(=n|8Dj%! zLWSw#DWVOBYRC9B8${tA%Db7@Q_mtl7|*^Qzg_w3`bd8VrZKCg!02!^ zI}hKNg>zBs{%!3$5_jz<3O1>Io^Q#^mf=&fd4Cx}eI7s#>m!=Dsk{OpSt>mgzWw_t zH+kYG@Gb0R4DSG(!PJxpK6p4}>8V3cH?ZQ+O{|R%1NnM;++`LF6T$P9LWFX(qbL{? zX={~D0oME#QK;1DZ@iyq&w#yf2Ci1M{8`l!#$Z*}6p1ZY&Q@+4CkDvUljh`FUe`s5 zOaOz<(_v!m{is)n@d^d7jcAT@W0wDPy<4dB*cjdG_~@_2S7SG%Wy;$crf2A=2BJ0K z`SNBvXLi+CRCTM}NU-++V@S_Z4EPw6Vq?V)ANYZOq{oBs@D=m1IBYMQ)L4xE?30j0 z-v$GTpgUul>OeRQgkgltZhH0=4oZwE{kSPY6mYA6Bl9tz<4W=$SN4r8pX_VtcESJc z!j$WfE5aMUV0Xu-<2ks<@ds9$xBMxoHdR5s<@v7H++b#HbMI0bL_F$asF8-*`&A)c zW>sVY#k^jW>37$7DoV0cT|&Ho03v3|*DGw58DkURzck|gezDEBx-YbQ?|&mp%SamZ z6)d&3h;Ot>9#w_scNCG6HI$ayoqF?Vj6F+NETkXQ*wv#Pi)d_i3C=fhp{3B4N?^v_ zS+YJGE;3Ncn(@B%yJYJ@vVGJw&eej znHs8BL>5CGIg6Tm65g@6S?sg&cwG||o-LTv(gG$q5OXsN#hCt%f1hZ9l;Mn9nfdQ~ zX=zkLzbXtAq*mfMNX&~TSv1z|kwXamCfV7Eoa*Fc^7G~vV_XBmIp_V>HJ6gRW|)(g zxO`YO&AV*B|Ev@>shzQSsQ7Bfuk2mLH^k4FB(;`(Efe(;mC53F6#|pYpzw8!Sj2Fg zn}-sA>6-G0-YpWA9`{$ewzVzwcUVl!x940m+Kb!+nx{@MvZg9GdIrZYgG}>J=gX%? 
zSD^-NNt-Ztb!1yQ#f~AL+SuwZ1wNA0s=-g*QLae*pU!eY%QBD2e7a9~K) z9HDofzw3K5l)g;0Zq?!`{q}C>$}ZtrZ>?GlOcvL<&mCg3GY0G1Kdl{ihm{yj_67D+ z3Jx|bWDnSoc7#H%+Lb!Mm$AQ>-sFuoerbf-RoPol7vn`+-N-RkpG>pf=hJi5{+G3+ zrRB8fhIkC25@>9Y-@p#D<&ajis7ek!-;}L*sfK(ROG{D9VTKkm64<34f)!`Ereb0_ ze+E=Y*Uq-U%+%B%1MEjp2BxLNk&_Yla?O1r)_+D<_5Qpgh}wtdG(HyVvHjJcg6&i- zgYtPk&zSIq-64~;sPgt%jI+0&OAgR?h)Nyrg0{`LPO~258pXGFd8FB)08+(l?1TIP1-H0Ch3Ts1j$K4ZyQu7NV69-4F@EWC%87Z-X zltgHLOgH0Gm~D}B{Zg&KqFeovTm7PkhLw#(0dOloFu|^Lles=}Zt1m$LhSN=j+(={ zjQE7!T*3k7U64tJ05Y_+_SfL6-hGxD&OnOKlp0<%sS5-4Gg0q{BhLZnDJw3Rd%*j>+G5}Q2oUzJF@f1zt`9ZL-vA5Mv~H@prB*VchkVKbcn%3z z0<+%YH_+UoHAS)>>@|iuirMuvA-7DI;8)A}gfbn3jz?qjP*$%~or~@oKodmoE2ikr z)gvLIqda;VKCdNq!z#!V-XV5aJ3eQ*pZz$>FHt9%OTjkVo0CT{B`Ji8c_{q86nflpAj$U3B@l& zUP$A{YB;3z>>h(0%QKevK1)$DHc}=`L%c*&S&OBuFsE*UjyL{R-OM*}jbsn;l^}V|L6ihqr)*~6l^9@S2QRt0^Dlhn6W-*o%I_g*Qyq1DY~+S`}@L)BkW*kW;vF;HM<$*KW>64 zLrKuA%CYw^=;?67I`(+&o_Vn;;&auUme6QO(tMsUy4*M5WQu zn;^Wov6qI8NV}`QgMEv>tPW;Om|h;Fo;!qLA#5ikY3U3zl)c;vl$4wBijXUA2NX3G zrRu;`x-oN_jrif{gtUECwdEBd^$wLmHZqAc{=o%}SrII8_d6PAvo=vc^OWt#jy0TM zm8g$cP8`_IG}CSoJ3OG4qu=fS$mPBAjSSdoJggZYPOwI7qm62wrQu}w7|Zby!h_GY zq)K5w~7g_DxQjV!+~W7If)| zLk@`TrA+BfYk=?HJ(k#aUmM;RJuSQS_ASvHy7LxopPRCdza=!edK@X96qqQXKW620 zC+AtX7V~o99&w-SX;1o5x%vI-cZO8GL7dW^boAr8X&yy7L!8IfiZ;g$MgI(7ras1= z4;;C^WnjiO+D8w;$1J^EGne+xB}4xxHI-%-=|0aHqK?C26gWAT&{h9YC}(bVZcduq5=HD z?RsBqz~(8&8FCFrE6?)I#U_WNeB}bS!NfEEPa*M*%bXh=oI;$7RSv!+`mc~U^XlaT zfKhVU{06MY+m~I4fD-(7Gp+Y@*;&qzayo^V7Crjkh%U>pag6Z2Z4Tl9*X9hV$d`X9 zAt*eqyTMsX`Zux3Mwy0e#3ND|_NxKv45_5sy(zT|mr7TClE&!4pc#Hy%KR3s2WfrI z$LGLE9z|o~eZ1=8ja{1#r7QgljQdz-RU?!lk%UHD{U#-20pSs(mb+duqCwX*Q zG?$=&mx1;OvsZtaeXF72oYNUk*}CjsgQon|@m`A(Q<4lC8R!ufY1SJ~5P?ArYL!j- zVy4C?^3tbx$Le9-DrjqNqBK=QPz$GU$MVhTFn(rh`4a01D)OjQ+fMV^e{=9*tbFzC z%VE{jeM@)-wU=y&Jo1$1=`V(e4+Xo$v+8LDIuzGRrv9~5Z)$x`++M%z29fLNwX}3s zv9Mq*?e<%yarux+O)KCqBCDTeID_#fQKsSHDikQ z&;mv)tFQOr-d#?7%Al^@nG5$n6Nk^g$iOwtl=# z!ezTcuwF77iZohl7q0mEb)E`On= 
z0j1yP>YVxRJpNa!kYYW(TeH5;KpM4LGZgD>u~w{jGno5f(y4TYSTvr`Z9hkeWmw-E zQZQ&s-AoBS9*1ETmK1;{rCyZq`+wDshPbXZ$<}yZn^Kw1DhYJA!>_hcwM!{lD|LprDg0dyZ-q13OHz{gkon6BhZR8Rfa=Gt zbc|9b4}Ltf(*Jawhf}Pi;!xYH&sN*UKy78urnyz7WcIBN3^j_vFYKdGZmMhfeKkYn zuWY~43KZ9VGy()~;JDq5&f&@M7eEAbImvRpHgxxfY@al;CnUo4IE&PHOQbnjWq>O! zYdQ?ZUQ`|e=sANX-53C$;6p31J&CB)wJmb@R$DJt0efPNV%f1`l=VaPB6khw6f%0O zT(4?>(fe;zCQfN0Ajs0D%LMCMXY%H+{TmIoIq>s;hkO59LG=IQH>+0!r@EcAQ6C2m zBX>iocNSxnP&IIaBWd(i96jp4X#5p7>{X8UVzktGnG|YuD98JFtJADX9}7REPxpM0 zaoB6I=qiY*$~cj)oApNXz({b)vFsQ9(&`EL(-dp!>f~bl|GcMO-N`sRrnR_!UV?{R zvk-%sa-R1(MWIkOS}Zt5JeS6WBW1Zx2^Q9hWs(_Qh!QKMK3SED$!$GMX=M=HhZdv{ zn?#A^zkHIFpHC-pcDyrHYd6hPMut=Vw>=d+B<#j>A@{YP84@_xaq3CixfQBrB+r#X zt3jSFr_3J%J0CyoMdI4lLg(1%Xzt{s_SHu?;xZQy{MGGQnj*T#@U*h!>|}GL^Md!c zwz3++WBZcXm%<0Bd4=;8bhtjH5>L*W9S< z=wGGA&JNm;C^E!1AF%2znVVM%Znp+V4YkRT5r^#^Z^0Ms@#C6`vS*3g_k%6`I!gHYxtDc1~N2bZ51X2+-bx<3 zi4zZcgaBC(+0k&81Z)i(uD+nr+O8ilhbJ@=U`A`8eVS#A!RTbTXS19I*LfWCtT$0n zJ{|7rTDs^3B42Y2TO1;dr};JpPjS?)UQC!;F>)gHbaE%afD@-u*ADOzDSI{cF+J97 za(*_^461%@c-~I)g>)+xVUfD{VW*4zAkUhM>mAO#^@YtNE(s#1Cp#pkRRl`uOoHQ! 
z_^Xh**-~pwb!}_dbuum(9;(%D8 zgy9s1<7#G9A}|kT-F$Xemvv2fZhPFsqc_1eYqsX7@v7Ehag10l-P9j|NJ6z{c(z#j zy7+<8q%we7c-!GeN5Hft#}|q@ao*%Dyp) zOx)}|9LnQegxB${D=}SoUt{w8^1qTfGt6iZ(EmCx7+D4EEc8vNw6t)FKB+NnsFm!gmj)VKdJy}03r%jZ8$P(+RPI<{$j zT{j^fWHkOc7}-&pz2!lVy|^3C<=se|t8!%7bZ?LAkLcBUE8FUa3@o@h<}%mde7&TG z-iE%m)_xsZm(N_2S^dgEVTi!)J?Q=X7WQuaJfB{f!~DUHqzR9rgSK@;VtCRJaHTt* zR#objpmzBPFp@cn9Q4a#z>sA{Liv*T{42$FWU)5b_Ta#(G4fat{ zM+X^DU=%}DKuVO3B0^}=LN6jMgsKq00KyEfFoL262}2MNDWQd41Oyxz={*F7BB+!A zp%*Dq&d$X1o*!_&yzd9Diw|5}&;8u@z4zK{t-YV(fvyt$aFz8+f&Dr5u^pd#tt9psV*$5bR|7NAssk9QqTjM>}pT{6;O|y zfIUmESr&ZofxBEOGm^AE1QT|=@LK|`6}~y38?WIoPF^hP0XZckWE%7E%W@I#X^&ht zGZ1aum2!RlGz2Lb=j~^Gd}W3P1oK0mVn?I+5+j|wQ+;>LPxfjBXgAb%Oci?gHxiFT zk|K0;z<(>V_VsbAUaZ_c>BsN$xGZF>$}HiKvXM(OR`zVqTC{RLfCOhdwdB78qNa0( z?RH?*E7y1J`FK<*l#ZH9Ks|yb?_l89CTH_lEeco11unh%pU}va&Ca{u$?FGOGw%-aMP3+AIo?w-D)=n!$C2Pz2(n4f@74 z6G!C-n)K_3{#p`8n8&aKqqMvL*+2>%_ zjj9{+O(eF^juL!x>o%Cm6Y-|fF_mNY5uwae>{+Ky(wA#b^sJxd$J1r0>vvI67~+dd zU89Y%dMrajSjrDT0R&p2n(a$YssTY({xH^z?+g(5W=*L?7WL{-@5u^Q< zvn1c13il)ShGe*Bz2B2Zt8Q_?%?_H&NRuR&RWr`qcQ-uP$kVywd7%m)4^#XqrFWdZ zt^BPdr3^z{I%lASpE`k+n|W~nu|CPJ4xGLeNUR{lO1Gxl=Ms6J?pwCIrNQv^Bi!JT z5l_I|%R_L>>^}oJuF)Q!nbMrk`Ku{4$Mv~WQ|s#w;|>7HGg_mIg1Xt9^i6f){@DtH23fsy z>8YTnug1ZGd?Bm(bIp6ReF*b5c0v3CO&t_QM`oOyPN?v1@)2gJA9=dCKGxW_PdoS> zX+BB`WwtXy(Y|;&CN0+$ciYdJ*fWu1^;sN-?ce%-1YDQtaLROuNmp0N!xi2)7f%ir zCzE-5&eRE~4Vf-RDgUb|BkOVe_B+N@o@?{keub;|%Esh@ku$l#6GJb+xvBR>5Nr^k z^he{jVRdTpKZmE-_4`#l2>618(qq+qq>50uThM#Zvd0`Fo?}mE8X$m zq{;pmZzzSLn2RBkY1AaO9BY*3&ejwB?B;aRK@;X;{<=K0vZW??b(Ph70;5KAlufHx ze+%x|%Iq_E2c{6ne9&sM50QI-4X?z$6b;b9HPAj?uanew?_pa)UmwaXc(&Q)1P7w` z7_a(Xc%%P@@V6h{d~Zx}{WKZrH@2C{$2&_)u37X~P*i^V8w?<0lOU!ukevx0lL3at zrX|ahsTfz7KbyGlv4hO<$OkMqnV78Oh=tGWQY+3?N;YqvFW5HB2fi9@Of<2>P&?eKjOh|y zJ8vIhwK~@LdepGi8LrKv-maSJ0f_4H#wA5IdpmfRQHDZ zcG1K(h$&4@#n4MEI9?XFD@6$7Ug>`KBbX7ii8veES+|dAX9iKRN1@03l!Cl`rYRj3DCE^!FAnW9gXQjm|eSrU?VVtK{F? 
zaX6KBj&c{Lv%HLBuDtT#w1J4ZntDo7JQkhPZcOFlw7EBE6PZ2=%oPQUYN#B{ zyzM0YHr5(M?zOx@K{R1wE0EwaR?^-P&5Wv25gI4u2k%yMzqnjXxLsEhgea-HA2v5L zAWEUbRW8A54x!h;FG3Wgk-pkS$F+o0H3+ z5P)r9iA$X`AK_n{NUDWH;UgYo=s``*u`*ly<~Qs7Q{JyjWPp#v=-rE}q!d z_$-C9^irkEM}9dIjGMZL+fF5*rKwJA^2zu#q4UL{i*6vWETmksC{*FYS%jQgn7sZB z;8O7~2%r}4^`9o29gh&s-*kZl8^n$^hn;+Ms<{Q?UfYtL3eb-aW++g@QSNGrS&&_Q%hQ;R?>)lH4Z)(pMlk(0Ac_ zKD6M0c1fShH{A8;`0dUwrSxyGXBMMUzNcv+PXy@)`@3BF6{-)*({Y( z`H5iM_3%Uk(>rd4a!Oxf#=yDMoDQ3nRB6gGRWQ{XR&RGa(9^6dvbeaUnDf))hoAzT z(_`dSQ0|9;w^@7g5iOq)mn!e!w~@%-hI|brQd_;CwzH9u=U3ViLgcx=D9Q3rAec1T zgeEXf{#m`_m}08wu>`&Fqrgz^Q$rLSW&;ZG7Zrc${!;!ic6ZJDe8#W+r&#JV=2+a^ zNWrZ$duQPR2sOI}!)H~p%*9gT&CQ7*m2=s6sc_j{omlvZFD6av&gVBs?$#lCsZRBu zM?m{Bm|i&}?SwNke(H&x>aybxvIHlmI2vldf!i&LNZYlNQ!HTpewHS-`BF8>m=Cug{g77`cI@ zBmV6X^-hhQdb3un+acEQje|b1>DNz1A!S5Y_e+BDY%;299;jiJk*Y>do5}GQE#YN3 zQ^k)O_<%%Dd7t2r?s)em1Xkily1A!fAVTdvJC%t0X2q)&R}|c+h*!?iy=htWFLxE5 zzCl6CYVQJ%D7T0n!(|Uwgc?Wrt>)DO5N{F$1i)CQ*J-3<(D zNS?oJQtC+RCRIj1td!=7mv(Aw?Bj(quxbnbACC&=o!`{Tf@uGp%IV?Dn>?`>6@n%=Dggdi0 zvvJ`2g8ypZf-eSflFOz;>L|EoITDkmy9*!r@E+5H7@ghE-C0A@8?YQX_pL~(`B@5e zm6a+=^F53oY=x8fIotKHZov^lFoDH?x2Jb-6=ZUS%Xe*`JL=LU@=-lySoK#Jy8ct% zY}{B`KbIb|$B<^Qq+Do`yEzmcxB`F8a)Ci)mi?Iw-;;1Z2Dnb6+|S)=F);UJRl$$H zaJ-}2|1sgTW%o}d8NJ;$6}RdV5n?U%wV!Vd8|>lJkF(VI693|WOM@JDvRP?M4y4uo zOTAo5MV+-F%K~84ShDl~max(%usfS$eF>txa)n#c^$sJO?gX8zOCJs=)^zK1L50iy!V{MR z$Zs>G_e~4i#i{!_6AW#)WgIjdtb7#790--OY_8`hBtkxX;(iD48Fckdh#UvwLeQO8 z*dRNHTNcwC|Hon^McZgN0Ad#dcZM&Hpe55nkoDm7%}mcObC=_IXUQHd9eK~LkG~@K z>u-g_pMKwTiHbY-c>oP2rmS79u604C=S3Z5M8Rn`_ww3AQHkKYt*0@nD!-g@XUfq6 z_Z#36|43(!{nob1x-hW7jBTtGYd0$!1{FI8T-Lu&3+V4$=aDfks}!TdZN+0t^08ZH zl&&_=O88GgZ`sd35Z_*J;LfL}YzA9X}n1 z8nPyl5x1$|oel)|QK~{`WhwS844T<}FvbHYkOa#9^VYOg+Hm;_{5X-F|11RC!|p_x zWXm1zsyzs^7<-A`N35}5CckTmWsGcYgqYZ6z7Z_}+A6&FNo9I)RM0VanO0HK&Zff9 zR?)rTa9H@YMg!1$J2Ock8MF9AC&0M9((ElM0rg;~k9)WHz=V)Ce>v}IU_%VSv^u+^ zuwUtcMSRLVi}#N*>%j`(>1;xwgu_wNNpdMSU;S1dWDDYQ<%HZGHRL}4RPDnNWa?(M 
zU65vka0hiVh|(sZUax7}XZfr#Es_)%`^7(CWp=P8b6f9BKH3KH-Ov&`B%QwNwMx2YV2?3D=&VH4vpn%P@}DMwTIDW-$ly*WDO^DSHP$yu?43eVaeJzw1)nTyz94r F`yUD(Bm@8e literal 0 HcmV?d00001 diff --git a/architecture.svg b/architecture.svg new file mode 100644 index 00000000..86a0cad5 --- /dev/null +++ b/architecture.svg @@ -0,0 +1,598 @@ + + + + + + + + + + + + + + + + + loader + + + + + pdfdecoder + + + + chunker + + + + vectorizer + + + + kgextractor + + + + llmollama + + + + graphwrite + + + + vectorstore + + + + Pulsar + + + + Ollama + + + + GPUhardware + + + + Milvus + + + + Cassandra + + + + + + + + + + + + + + + + + + Embeds + + + + + RDFedges + + + + + diff --git a/docker-compose-azure.yaml b/docker-compose-azure.yaml new file mode 100644 index 00000000..1a86ed9e --- /dev/null +++ b/docker-compose-azure.yaml @@ -0,0 +1,172 @@ + +volumes: + cassandra: + pulsar-conf: + pulsar-data: + etcd: + minio-data: + milvus: + +services: + + cassandra: + image: docker.io/cassandra:4.1.5 + ports: + - "9042:9042" + volumes: + - "cassandra:/var/lib/cassandra" + restart: on-failure:100 + + pulsar: + image: docker.io/apachepulsar/pulsar:3.3.0 + command: bin/pulsar standalone + ports: + - "6650:6650" + - "8080:8080" + volumes: + - "pulsar-conf:/pulsar/conf" + - "pulsar-data:/pulsar/data" + restart: on-failure:100 + + pulsar-manager: + image: docker.io/apachepulsar/pulsar-manager:v0.3.0 + ports: + - "9527:9527" + - "7750:7750" + environment: + SPRING_CONFIGURATION_FILE: /pulsar-manager/pulsar-manager/application.properties + restart: on-failure:100 + + etcd: + image: quay.io/coreos/etcd:v3.5.5 + command: + - "etcd" + - "-advertise-client-urls=http://127.0.0.1:2379" + - "-listen-client-urls" + - "http://0.0.0.0:2379" + - "--data-dir" + - "/etcd" + environment: + ETCD_AUTO_COMPACTION_MODE: revision + ETCD_AUTO_COMPACTION_RETENTION: "1000" + ETCD_QUOTA_BACKEND_BYTES: "4294967296" + ETCD_SNAPSHOT_COUNT: "50000" + ports: + - "2379:2379" + volumes: + - "etcd:/etcd" + restart: on-failure:100 + + minio: + image: 
docker.io/minio/minio:RELEASE.2024-07-04T14-25-45Z + command: + - "minio" + - "server" + - "/minio_data" + - "--console-address" + - ":9001" + environment: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + ports: + - "9001:9001" + volumes: + - "minio-data:/minio_data" + restart: on-failure:100 + + milvus: + image: docker.io/milvusdb/milvus:v2.4.5 + command: + - "milvus" + - "run" + - "standalone" + environment: + ETCD_ENDPOINTS: etcd:2379 + MINIO_ADDRESS: minio:9000 + ports: + - "9091:9091" + - "19530:19530" + volumes: + - "milvus:/var/lib/milvus" + restart: on-failure:100 + + pdf-decoder: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "pdf-decoder" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + chunker: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "chunker-recursive" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + vectorize: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "embeddings-vectorize" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + embeddings: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "embeddings-hf" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + kg-extract-definitions: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "kg-extract-definitions" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + kg-extract-relationships: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "kg-extract-relationships" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + vector-write: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "vector-write-milvus" + - "-p" + - "pulsar://pulsar:6650" + - "-t" + - "http://milvus:19530" + restart: on-failure:100 + + graph-write: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "graph-write-cassandra" + - "-p" + - "pulsar://pulsar:6650" + - "-g" + - "cassandra" + 
restart: on-failure:100 + + llm: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "llm-azure-text" + - "-p" + - "pulsar://pulsar:6650" + - "-k" + - ${AZURE_TOKEN} + - "-e" + - ${AZURE_ENDPOINT} + restart: on-failure:100 + diff --git a/docker-compose-claude.yaml b/docker-compose-claude.yaml new file mode 100644 index 00000000..5999725e --- /dev/null +++ b/docker-compose-claude.yaml @@ -0,0 +1,170 @@ + +volumes: + cassandra: + pulsar-conf: + pulsar-data: + etcd: + minio-data: + milvus: + +services: + + cassandra: + image: docker.io/cassandra:4.1.5 + ports: + - "9042:9042" + volumes: + - "cassandra:/var/lib/cassandra" + restart: on-failure:100 + + pulsar: + image: docker.io/apachepulsar/pulsar:3.3.0 + command: bin/pulsar standalone + ports: + - "6650:6650" + - "8080:8080" + volumes: + - "pulsar-conf:/pulsar/conf" + - "pulsar-data:/pulsar/data" + restart: on-failure:100 + + pulsar-manager: + image: docker.io/apachepulsar/pulsar-manager:v0.3.0 + ports: + - "9527:9527" + - "7750:7750" + environment: + SPRING_CONFIGURATION_FILE: /pulsar-manager/pulsar-manager/application.properties + restart: on-failure:100 + + etcd: + image: quay.io/coreos/etcd:v3.5.5 + command: + - "etcd" + - "-advertise-client-urls=http://127.0.0.1:2379" + - "-listen-client-urls" + - "http://0.0.0.0:2379" + - "--data-dir" + - "/etcd" + environment: + ETCD_AUTO_COMPACTION_MODE: revision + ETCD_AUTO_COMPACTION_RETENTION: "1000" + ETCD_QUOTA_BACKEND_BYTES: "4294967296" + ETCD_SNAPSHOT_COUNT: "50000" + ports: + - "2379:2379" + volumes: + - "etcd:/etcd" + restart: on-failure:100 + + minio: + image: docker.io/minio/minio:RELEASE.2024-07-04T14-25-45Z + command: + - "minio" + - "server" + - "/minio_data" + - "--console-address" + - ":9001" + environment: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + ports: + - "9001:9001" + volumes: + - "minio-data:/minio_data" + restart: on-failure:100 + + milvus: + image: docker.io/milvusdb/milvus:v2.4.5 + command: + - "milvus" + - 
"run" + - "standalone" + environment: + ETCD_ENDPOINTS: etcd:2379 + MINIO_ADDRESS: minio:9000 + ports: + - "9091:9091" + - "19530:19530" + volumes: + - "milvus:/var/lib/milvus" + restart: on-failure:100 + + pdf-decoder: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "pdf-decoder" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + chunker: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "chunker-recursive" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + vectorize: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "embeddings-vectorize" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + embeddings: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "embeddings-hf" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + kg-extract-definitions: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "kg-extract-definitions" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + kg-extract-relationships: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "kg-extract-relationships" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + vector-write: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "vector-write-milvus" + - "-p" + - "pulsar://pulsar:6650" + - "-t" + - "http://milvus:19530" + restart: on-failure:100 + + graph-write: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "graph-write-cassandra" + - "-p" + - "pulsar://pulsar:6650" + - "-g" + - "cassandra" + restart: on-failure:100 + + llm: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "llm-claude-text" + - "-p" + - "pulsar://pulsar:6650" + - "-k" + - ${CLAUDE_KEY} + restart: on-failure:100 + diff --git a/docker-compose-ollama.yaml b/docker-compose-ollama.yaml new file mode 100644 index 00000000..d7d0f36f --- /dev/null +++ b/docker-compose-ollama.yaml @@ -0,0 +1,178 @@ + 
+volumes: + cassandra: + pulsar-conf: + pulsar-data: + etcd: + minio-data: + milvus: + +services: + + cassandra: + image: docker.io/cassandra:4.1.5 + ports: + - "9042:9042" + volumes: + - "cassandra:/var/lib/cassandra" + restart: on-failure:100 + + pulsar: + image: docker.io/apachepulsar/pulsar:3.3.0 + command: bin/pulsar standalone + ports: + - "6650:6650" + - "8080:8080" + volumes: + - "pulsar-conf:/pulsar/conf" + - "pulsar-data:/pulsar/data" + restart: on-failure:100 + + pulsar-manager: + image: docker.io/apachepulsar/pulsar-manager:v0.3.0 + ports: + - "9527:9527" + - "7750:7750" + environment: + SPRING_CONFIGURATION_FILE: /pulsar-manager/pulsar-manager/application.properties + restart: on-failure:100 + + etcd: + image: quay.io/coreos/etcd:v3.5.5 + command: + - "etcd" + - "-advertise-client-urls=http://127.0.0.1:2379" + - "-listen-client-urls" + - "http://0.0.0.0:2379" + - "--data-dir" + - "/etcd" + environment: + ETCD_AUTO_COMPACTION_MODE: revision + ETCD_AUTO_COMPACTION_RETENTION: "1000" + ETCD_QUOTA_BACKEND_BYTES: "4294967296" + ETCD_SNAPSHOT_COUNT: "50000" + ports: + - "2379:2379" + volumes: + - "etcd:/etcd" + restart: on-failure:100 + + minio: + image: docker.io/minio/minio:RELEASE.2024-07-04T14-25-45Z + command: + - "minio" + - "server" + - "/minio_data" + - "--console-address" + - ":9001" + environment: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + ports: + - "9001:9001" + volumes: + - "minio-data:/minio_data" + restart: on-failure:100 + + milvus: + image: docker.io/milvusdb/milvus:v2.4.5 + command: + - "milvus" + - "run" + - "standalone" + environment: + ETCD_ENDPOINTS: etcd:2379 + MINIO_ADDRESS: minio:9000 + ports: + - "9091:9091" + - "19530:19530" + volumes: + - "milvus:/var/lib/milvus" + restart: on-failure:100 + + pdf-decoder: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "pdf-decoder" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + chunker: + image: 
docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "chunker-recursive" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + vectorize: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "embeddings-vectorize" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + embeddings: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "embeddings-hf" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + kg-extract-definitions: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "kg-extract-definitions" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + kg-extract-relationships: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "kg-extract-relationships" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + vector-write: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "vector-write-milvus" + - "-p" + - "pulsar://pulsar:6650" + - "-t" + - "http://milvus:19530" + restart: on-failure:100 + + graph-write: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "graph-write-cassandra" + - "-p" + - "pulsar://pulsar:6650" + - "-g" + - "cassandra" + restart: on-failure:100 + + llm: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "llm-ollama-text" + - "-p" + - "pulsar://pulsar:6650" + - "-r" + - "http://${OLLAMA_HOST}:11434/" + restart: on-failure:100 + + graph-rag: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "graph-rag" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + diff --git a/docker-compose-vertexai.yaml b/docker-compose-vertexai.yaml new file mode 100644 index 00000000..e7e14be2 --- /dev/null +++ b/docker-compose-vertexai.yaml @@ -0,0 +1,174 @@ + +volumes: + cassandra: + pulsar-conf: + pulsar-data: + etcd: + minio-data: + milvus: + +services: + + cassandra: + image: docker.io/cassandra:4.1.5 + ports: + - "9042:9042" + volumes: + - 
"cassandra:/var/lib/cassandra" + restart: on-failure:100 + + pulsar: + image: docker.io/apachepulsar/pulsar:3.3.0 + command: bin/pulsar standalone + ports: + - "6650:6650" + - "8080:8080" + volumes: + - "pulsar-conf:/pulsar/conf" + - "pulsar-data:/pulsar/data" + restart: on-failure:100 + + pulsar-manager: + image: docker.io/apachepulsar/pulsar-manager:v0.3.0 + ports: + - "9527:9527" + - "7750:7750" + environment: + SPRING_CONFIGURATION_FILE: /pulsar-manager/pulsar-manager/application.properties + restart: on-failure:100 + + etcd: + image: quay.io/coreos/etcd:v3.5.5 + command: + - "etcd" + - "-advertise-client-urls=http://127.0.0.1:2379" + - "-listen-client-urls" + - "http://0.0.0.0:2379" + - "--data-dir" + - "/etcd" + environment: + ETCD_AUTO_COMPACTION_MODE: revision + ETCD_AUTO_COMPACTION_RETENTION: "1000" + ETCD_QUOTA_BACKEND_BYTES: "4294967296" + ETCD_SNAPSHOT_COUNT: "50000" + ports: + - "2379:2379" + volumes: + - "etcd:/etcd" + restart: on-failure:100 + + minio: + image: docker.io/minio/minio:RELEASE.2024-07-04T14-25-45Z + command: + - "minio" + - "server" + - "/minio_data" + - "--console-address" + - ":9001" + environment: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + ports: + - "9001:9001" + volumes: + - "minio-data:/minio_data" + restart: on-failure:100 + + milvus: + image: docker.io/milvusdb/milvus:v2.4.5 + command: + - "milvus" + - "run" + - "standalone" + environment: + ETCD_ENDPOINTS: etcd:2379 + MINIO_ADDRESS: minio:9000 + ports: + - "9091:9091" + - "19530:19530" + volumes: + - "milvus:/var/lib/milvus" + restart: on-failure:100 + + pdf-decoder: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "pdf-decoder" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + chunker: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "chunker-recursive" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + vectorize: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - 
"embeddings-vectorize" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + embeddings: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "embeddings-hf" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + kg-extract-definitions: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "kg-extract-definitions" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + kg-extract-relationships: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "kg-extract-relationships" + - "-p" + - "pulsar://pulsar:6650" + restart: on-failure:100 + + vector-write: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "vector-write-milvus" + - "-p" + - "pulsar://pulsar:6650" + - "-t" + - "http://milvus:19530" + restart: on-failure:100 + + graph-write: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "graph-write-cassandra" + - "-p" + - "pulsar://pulsar:6650" + - "-g" + - "cassandra" + restart: on-failure:100 + + llm: + image: docker.io/trustgraph/trustgraph-flow:0.1.16 + command: + - "llm-vertexai-text" + - "-p" + - "pulsar://pulsar:6650" + - "-k" + - "/vertexai/private.json" + - "-r" + - "us-west1" + volumes: + - "./vertexai:/vertexai" + restart: on-failure:100 + diff --git a/graph-clear b/graph-clear new file mode 100755 index 00000000..9633a08f --- /dev/null +++ b/graph-clear @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 + +from trustgraph import TrustGraph + +t = TrustGraph() + +t.clear() + diff --git a/graph-dump b/graph-dump new file mode 100755 index 00000000..e31a1c3b --- /dev/null +++ b/graph-dump @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 + +import pulsar +from pulsar.schema import JsonSchema, Bytes +from schema import Chunk, Triple +from langchain_huggingface import HuggingFaceEmbeddings +from langchain_community.llms import Ollama +from trustgraphETL import scholar, callmixtral, build_graph_robust +import sys +import rdflib +import uuid + +g = rdflib.Graph() + 
+client = pulsar.Client("pulsar://localhost:6650") + +consumer = client.subscribe( + 'graph-load', 'graph-dump', + schema=JsonSchema(Triple), +) + +g = rdflib.Graph() +count = 0 +limit = 100 + +while True: + + msg = consumer.receive() + + try: + + v = msg.value() + + if v.o.is_uri: + g.add(( + rdflib.term.URIRef(v.s.value), + rdflib.term.URIRef(v.p.value), + rdflib.term.URIRef(v.o.value), + )) + else: + g.add(( + rdflib.term.URIRef(v.s.value), + rdflib.term.URIRef(v.p.value), + rdflib.term.Literal(v.o.value), + )) + + count += 1 + + if count > limit: + + id = str(uuid.uuid4()) + path = f"graph/{id}.ttl" + g.serialize(destination=path) + g = rdflib.Graph() + print(f"Written {path}") + + count = 0 + + # Acknowledge successful processing of the message + consumer.acknowledge(msg) + + except Exception as e: + + print(e) + + # Message failed to be processed + consumer.negative_acknowledge(msg) + +client.close() + diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..7bfcf3ba --- /dev/null +++ b/requirements.txt @@ -0,0 +1,16 @@ +torch +urllib3 +transformers +sentence-transformers +rdflib +pymilvus +langchain +langchain-core +langchain-huggingface +langchain-text-splitters +langchain-community +huggingface-hub +cassandra-driver +pulsar-client +anthropic +google-cloud-aiplatform diff --git a/scripts/chunker-recursive b/scripts/chunker-recursive new file mode 100755 index 00000000..2356903d --- /dev/null +++ b/scripts/chunker-recursive @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.chunker.recursive import run + +run() + diff --git a/scripts/embeddings-hf b/scripts/embeddings-hf new file mode 100755 index 00000000..a7d84d04 --- /dev/null +++ b/scripts/embeddings-hf @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.embeddings.hf import run + +run() + diff --git a/scripts/embeddings-vectorize b/scripts/embeddings-vectorize new file mode 100755 index 00000000..3de1e3a9 --- /dev/null +++ b/scripts/embeddings-vectorize @@ 
-0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.embeddings.vectorize import run + +run() + diff --git a/scripts/graph-rag b/scripts/graph-rag new file mode 100755 index 00000000..a6dab1f3 --- /dev/null +++ b/scripts/graph-rag @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.rag.graph import run + +run() + diff --git a/scripts/graph-show b/scripts/graph-show new file mode 100755 index 00000000..26e6cbff --- /dev/null +++ b/scripts/graph-show @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + +from trustgraph.trustgraph import TrustGraph + +t = TrustGraph() + +rows = t.get_all(limit=100_000_000) +for s, p, o in rows: + print(s, p, o) + diff --git a/scripts/graph-to-turtle b/scripts/graph-to-turtle new file mode 100755 index 00000000..1bd48802 --- /dev/null +++ b/scripts/graph-to-turtle @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 + +from trustgraph.trustgraph import TrustGraph +import rdflib +import sys +import io + +t = TrustGraph() + +g = rdflib.Graph() + +rows = t.get_all(limit=100_000_000) +for s, p, o in rows: + +# print(s, p, o) + sv = rdflib.term.URIRef(s) + pv = rdflib.term.URIRef(p) + + if o.startswith("https://") or o.startswith("http://"): + + # Skip malformed URLs with spaces in + if " " in o: + continue + + ov = rdflib.term.URIRef(o) + else: + ov = rdflib.term.Literal(o) + + g.add((sv, pv, ov)) + +g.serialize(destination="output.ttl", format="turtle") + +buf = io.BytesIO() + +g.serialize(destination=buf, format="turtle") + +sys.stdout.write(buf.getvalue().decode("utf-8")) diff --git a/scripts/graph-write-cassandra b/scripts/graph-write-cassandra new file mode 100755 index 00000000..7fc3d0c8 --- /dev/null +++ b/scripts/graph-write-cassandra @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.graph.cassandra_write import run + +run() + diff --git a/scripts/init-pulsar-manager b/scripts/init-pulsar-manager new file mode 100755 index 00000000..6e855ffb --- /dev/null +++ b/scripts/init-pulsar-manager @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + 
+CSRF_TOKEN=$(curl http://localhost:7750/pulsar-manager/csrf-token) + +curl \ + -H "X-XSRF-TOKEN: $CSRF_TOKEN" \ + -H "Cookie: XSRF-TOKEN=$CSRF_TOKEN;" \ + -H 'Content-Type: application/json' \ + -X PUT \ + http://localhost:7750/pulsar-manager/users/superuser \ + -d '{"name": "admin", "password": "apachepulsar", "description": "test", "email": "username@test.org"}' diff --git a/scripts/kg-extract-definitions b/scripts/kg-extract-definitions new file mode 100755 index 00000000..327ec06f --- /dev/null +++ b/scripts/kg-extract-definitions @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.kg.extract_definitions import run + +run() + diff --git a/scripts/kg-extract-relationships b/scripts/kg-extract-relationships new file mode 100755 index 00000000..91040589 --- /dev/null +++ b/scripts/kg-extract-relationships @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.kg.extract_relationships import run + +run() + diff --git a/scripts/llm-azure-text b/scripts/llm-azure-text new file mode 100755 index 00000000..cdaea4b8 --- /dev/null +++ b/scripts/llm-azure-text @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.llm.azure_text import run + +run() + diff --git a/scripts/llm-claude-text b/scripts/llm-claude-text new file mode 100755 index 00000000..496d1440 --- /dev/null +++ b/scripts/llm-claude-text @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.llm.claude_text import run + +run() + diff --git a/scripts/llm-ollama-text b/scripts/llm-ollama-text new file mode 100755 index 00000000..cb7a4ebc --- /dev/null +++ b/scripts/llm-ollama-text @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.llm.ollama_text import run + +run() + diff --git a/scripts/llm-vertexai-text b/scripts/llm-vertexai-text new file mode 100755 index 00000000..4634015f --- /dev/null +++ b/scripts/llm-vertexai-text @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.llm.vertexai_text import run + +run() + diff --git a/scripts/loader b/scripts/loader new file mode 
100755 index 00000000..a6dc4450 --- /dev/null +++ b/scripts/loader @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +import pulsar +from pulsar.schema import JsonSchema, Bytes, String +from trustgraph.schema import Document, Source +import base64 +import hashlib + +# client = pulsar.Client("pulsar://localhost:6650") +host="10.89.1.246" +host="localhost" +client = pulsar.Client(f"pulsar://{host}:6650") + +producer = client.create_producer( + topic='document-load', + schema=JsonSchema(Document), + chunking_enabled=True, +) + +files=[ + "Challenger-Report-Vol1.pdf", +# "columbia-accident-investigation-board-report-volume-1.pdf", +# "Proposed_CIRCIA_Rules.pdf", +] + +for file in files: + + path = "sources/" + file + data = open(path, "rb").read() + + id = hashlib.sha256(path.encode("utf-8")).hexdigest()[0:8] + + r = Document( + source=Source( + source=path, + title=path, + id=id, + ), + data=base64.b64encode(data), + ) + + resp = producer.send(r) + + print(resp) + +client.close() + diff --git a/scripts/pdf-decoder b/scripts/pdf-decoder new file mode 100755 index 00000000..82b89298 --- /dev/null +++ b/scripts/pdf-decoder @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.decoder.pdf import run + +run() + diff --git a/scripts/query b/scripts/query new file mode 100755 index 00000000..5cb5c0c6 --- /dev/null +++ b/scripts/query @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +from trustgraph.graph_rag import GraphRag +import sys + +query = " ".join(sys.argv[1:]) + +gr = GraphRag(verbose=True) + +if query == "": + query="""This knowledge graph describes the Space Shuttle disaster. 
+Present 20 facts which are present in the knowledge graph.""" + +resp = gr.query(query) +print(resp) + diff --git a/scripts/vector-write-milvus b/scripts/vector-write-milvus new file mode 100755 index 00000000..952e22cf --- /dev/null +++ b/scripts/vector-write-milvus @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from trustgraph.vector.milvus_write import run + +run() + diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..2ad4db47 --- /dev/null +++ b/setup.py @@ -0,0 +1,65 @@ +import setuptools +import os + +with open("README.md", "r") as fh: + long_description = fh.read() + +version = "0.0.0" + +setuptools.setup( + name="trustgraph", + version=version, + author="trustgraph.ai", + author_email="security@trustgraph.ai", + description="trustgraph.ai", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/trustgraph.ai/FIXME.git", + packages=setuptools.find_packages(), + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Operating System :: OS Independent", + ], + python_requires='>=3.8', + download_url = "https://github.com/trustgraph.ai/FIXME.git/archive/refs/tags/v" + version + ".tar.gz", + install_requires=[ + "torch", + "urllib3", + "transformers", + "sentence-transformers", + "rdflib", + "pymilvus", + "langchain", + "langchain-core", + "langchain-huggingface", + "langchain-text-splitters", + "langchain-community", + "huggingface-hub", + "requests", + "cassandra-driver", + "pulsar-client", + "pypdf", + "anthropic", + "google-cloud-aiplatform", + ], + scripts=[ + "scripts/chunker-recursive", + "scripts/graph-show", + "scripts/graph-to-turtle", + "scripts/graph-write-cassandra", + "scripts/kg-extract-definitions", + "scripts/kg-extract-relationships", + "scripts/llm-ollama-text", + "scripts/llm-vertexai-text", + "scripts/llm-claude-text", + "scripts/llm-azure-text", + "scripts/loader", + 
"scripts/pdf-decoder", + "scripts/query", + "scripts/embeddings-vectorize", + "scripts/embeddings-hf", + "scripts/vector-write-milvus", + "scripts/graph-rag", + ] +) diff --git a/tests/test-embeddings b/tests/test-embeddings new file mode 100755 index 00000000..e2bcdbde --- /dev/null +++ b/tests/test-embeddings @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import pulsar +from trustgraph.embeddings_client import EmbeddingsClient + +embed = EmbeddingsClient(pulsar_host="pulsar://localhost:6650") + +prompt="Write a funny limerick about a llama" + +resp = embed.request(prompt) + +print(resp) + + + diff --git a/tests/test-graph-rag b/tests/test-graph-rag new file mode 100755 index 00000000..c6abfe05 --- /dev/null +++ b/tests/test-graph-rag @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import pulsar +from trustgraph.graph_rag_client import GraphRagClient + +rag = GraphRagClient(pulsar_host="pulsar://localhost:6650") + +query="""This knowledge graph describes the Space Shuttle disaster. +Present 20 facts which are present in the knowledge graph.""" + +resp = rag.request(query) + +print(resp) + diff --git a/tests/test-llm b/tests/test-llm new file mode 100755 index 00000000..35177e81 --- /dev/null +++ b/tests/test-llm @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import pulsar +from trustgraph.llm_client import LlmClient + +llm = LlmClient(pulsar_host="pulsar://localhost:6650") + +prompt="Write a funny limerick about a llama" + +resp = llm.request(prompt) + +print(resp) + +llm.close() + diff --git a/tests/test-milvus b/tests/test-milvus new file mode 100755 index 00000000..e95955dc --- /dev/null +++ b/tests/test-milvus @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +from langchain_huggingface import HuggingFaceEmbeddings + +from edge_map import VectorStore + +client = VectorStore() + +embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2") + +text="""A cat is a small animal. A dog is a large animal. +Cats say miaow. Dogs go woof. 
+""" + +embeds = embeddings.embed_documents([text])[0] + +text2="""If you couldn't download the model due to network issues, as a walkaround, you can use random vectors to represent the text and still finish the example. Just note that the search result won't reflect semantic similarity as the vectors are fake ones. +""" + +embeds2 = embeddings.embed_documents([text2])[0] + +client.insert(embeds, "animals") +client.insert(embeds, "vectors") + +query="""What noise does a cat make?""" + +qembeds = embeddings.embed_documents([query])[0] + +res = client.search( + qembeds, + limit=2 +) + +print(res) + diff --git a/trustgraph/__init__.py b/trustgraph/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/trustgraph/chunker/__init__.py b/trustgraph/chunker/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/trustgraph/chunker/recursive/__init__.py b/trustgraph/chunker/recursive/__init__.py new file mode 100644 index 00000000..3b816664 --- /dev/null +++ b/trustgraph/chunker/recursive/__init__.py @@ -0,0 +1,3 @@ + +from . chunker import * + diff --git a/trustgraph/chunker/recursive/__main__.py b/trustgraph/chunker/recursive/__main__.py new file mode 100644 index 00000000..18e14ad5 --- /dev/null +++ b/trustgraph/chunker/recursive/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . chunker import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/chunker/recursive/chunker.py b/trustgraph/chunker/recursive/chunker.py new file mode 100755 index 00000000..ba5eb939 --- /dev/null +++ b/trustgraph/chunker/recursive/chunker.py @@ -0,0 +1,164 @@ + +""" +Simple decoder, accepts text documents on input, outputs chunks from the +as text as separate output objects. +""" + +import pulsar +from pulsar.schema import JsonSchema +import tempfile +import base64 +import os +import argparse +from langchain_text_splitters import RecursiveCharacterTextSplitter +import time + +from ... schema import TextDocument, Chunk, Source +from ... 
log_level import LogLevel + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + subscriber, + log_level, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(TextDocument), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(Chunk), + ) + + self.text_splitter = RecursiveCharacterTextSplitter( + chunk_size=1000, + chunk_overlap=20, + length_function=len, + is_separator_regex=False, + ) + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + print(f"Chunking {v.source.id}...", flush=True) + + texts = self.text_splitter.create_documents( + [v.text.decode("utf-8")] + ) + + for ix, chunk in enumerate(texts): + + id = v.source.id + "-c" + str(ix) + + r = Chunk( + source=Source( + source=v.source.source, + id=id, + title=v.source.title + ), + chunk=chunk.page_content.encode("utf-8"), + ) + + self.producer.send(r) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + print("Done.", flush=True) + + except Exception as e: + print(e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + prog='pdf-decoder', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'text-doc-load' + default_output_queue = 'chunk-load' + default_subscriber = 'chunker-recursive' + + parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', 
'--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + args = parser.parse_args() + + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + subscriber=args.subscriber, + log_level=args.log_level, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + + diff --git a/trustgraph/decoder/__init__.py b/trustgraph/decoder/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/trustgraph/decoder/pdf/__init__.py b/trustgraph/decoder/pdf/__init__.py new file mode 100644 index 00000000..0d8d9c78 --- /dev/null +++ b/trustgraph/decoder/pdf/__init__.py @@ -0,0 +1,3 @@ + +from . pdf_decoder import * + diff --git a/trustgraph/decoder/pdf/__main__.py b/trustgraph/decoder/pdf/__main__.py new file mode 100755 index 00000000..44dd026d --- /dev/null +++ b/trustgraph/decoder/pdf/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . pdf_decoder import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/decoder/pdf/pdf_decoder.py b/trustgraph/decoder/pdf/pdf_decoder.py new file mode 100755 index 00000000..f892ebac --- /dev/null +++ b/trustgraph/decoder/pdf/pdf_decoder.py @@ -0,0 +1,159 @@ + +""" +Simple decoder, accepts PDF documents on input, outputs pages from the +PDF document as text as separate output objects. +""" + +import pulsar +from pulsar.schema import JsonSchema +from langchain_community.document_loaders import PyPDFLoader +import tempfile +import base64 +import os +import argparse +import time + +from ... 
schema import Document, TextDocument, Source +from ... log_level import LogLevel + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + subscriber, + log_level, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(Document), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(TextDocument), + ) + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + print(f"Decoding {v.source.id}...", flush=True) + + with tempfile.NamedTemporaryFile(delete_on_close=False) as fp: + + fp.write(base64.b64decode(v.data)) + fp.close() + + with open(fp.name, mode='rb') as f: + + loader = PyPDFLoader(fp.name) + pages = loader.load() + + for ix, page in enumerate(pages): + + id = v.source.id + "-p" + str(ix) + r = TextDocument( + source=Source( + source=v.source.source, + title=v.source.title, + id=id, + ), + text=page.page_content.encode("utf-8"), + ) + + self.producer.send(r) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + print("Done.", flush=True) + + except Exception as e: + print(e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + prog='pdf-decoder', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'document-load' + default_output_queue = 'text-doc-load' + default_subscriber = 'pdf-decoder' + + parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: 
{default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + args = parser.parse_args() + + while True: + + try: + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + subscriber=args.subscriber, + log_level=args.log_level, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + diff --git a/trustgraph/edge_map.py b/trustgraph/edge_map.py new file mode 100644 index 00000000..55d9077f --- /dev/null +++ b/trustgraph/edge_map.py @@ -0,0 +1,102 @@ + +from pymilvus import MilvusClient, CollectionSchema, FieldSchema, DataType + +class VectorStore: + + def __init__(self, uri="http://localhost:19530"): + + self.client = MilvusClient(uri=uri) + + self.collection = "edges" + self.dimension = 384 + + if not self.client.has_collection(collection_name=self.collection): + self.init_collection() + + def init_collection(self): + + pkey_field = FieldSchema( + name="id", + dtype=DataType.INT64, + is_primary=True, + auto_id=True, + ) + + vec_field = FieldSchema( + name="vector", + dtype=DataType.FLOAT_VECTOR, + dim=self.dimension, + ) + + entity_field = FieldSchema( + name="entity", + dtype=DataType.VARCHAR, + max_length=65535, + ) + + schema = CollectionSchema( + fields = [pkey_field, vec_field, entity_field], + description = "Edge map schema", + ) + + self.client.create_collection( + collection_name=self.collection, + schema=schema, + metric_type="IP", + ) + + index_params = MilvusClient.prepare_index_params() + + 
index_params.add_index( + field_name="vector", + metric_type="COSINE", + index_type="FLAT", # IVF_FLAT?! + index_name="vector_index", + params={ "nlist": 128 } + ) + + self.client.create_index( + collection_name=self.collection, + index_params=index_params + ) + + def insert(self, embeds, entity): + + data = [ + { + "vector": embeds, + "entity": entity, + } + ] + + self.client.insert(collection_name=self.collection, data=data) + + def search(self, embeds, fields=["entity"], limit=10): + + search_params = { + "metric_type": "COSINE", + "params": { + "radius": 0.1, + "range_filter": 0.8 + } + } + + self.client.load_collection( + collection_name=self.collection, +# replica_number=1 + ) + + res = self.client.search( + collection_name=self.collection, + data=[embeds], + limit=limit, + output_fields=fields, + search_params=search_params, + )[0] + + self.client.release_collection( + collection_name=self.collection, + ) + + return res + diff --git a/trustgraph/embeddings/__init__.py b/trustgraph/embeddings/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/trustgraph/embeddings/hf/__init__.py b/trustgraph/embeddings/hf/__init__.py new file mode 100644 index 00000000..7ccb0b37 --- /dev/null +++ b/trustgraph/embeddings/hf/__init__.py @@ -0,0 +1,3 @@ + +from . hf import * + diff --git a/trustgraph/embeddings/hf/__main__.py b/trustgraph/embeddings/hf/__main__.py new file mode 100755 index 00000000..89684e3e --- /dev/null +++ b/trustgraph/embeddings/hf/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . hf import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/embeddings/hf/hf.py b/trustgraph/embeddings/hf/hf.py new file mode 100755 index 00000000..33890483 --- /dev/null +++ b/trustgraph/embeddings/hf/hf.py @@ -0,0 +1,161 @@ + +""" +Simple LLM service, performs text prompt completion using an Ollama service. +Input is prompt, output is response. 
+""" + +import pulsar +from pulsar.schema import JsonSchema +import tempfile +import base64 +import os +import argparse +from langchain_huggingface import HuggingFaceEmbeddings +import time + +from ... schema import EmbeddingsRequest, EmbeddingsResponse +from ... log_level import LogLevel + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + subscriber, + log_level, + model, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(EmbeddingsRequest), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(EmbeddingsResponse), + ) + + self.embeddings = HuggingFaceEmbeddings(model_name=model) + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + + # Sender-produced ID + + id = msg.properties()["id"] + + print(f"Handling input {id}...", flush=True) + + text = v.text + embeds = self.embeddings.embed_documents([text]) + + print("Send response...", flush=True) + r = EmbeddingsResponse(vectors=embeds) + self.producer.send(r, properties={"id": id}) + + print("Done.", flush=True) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + except Exception as e: + + print("Exception:", e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + print("Closing", flush=True) + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + prog='llm-ollama-text', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'embeddings' + default_output_queue = 'embeddings-response' + default_subscriber = 'embeddings-hf' + + parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: 
{default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + parser.add_argument( + '-m', '--model', + default="all-MiniLM-L6-v2", + help=f'LLM model (default: all-MiniLM-L6-v2)' + ) + + args = parser.parse_args() + + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + subscriber=args.subscriber, + log_level=args.log_level, + model=args.model, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + diff --git a/trustgraph/embeddings/vectorize/__init__.py b/trustgraph/embeddings/vectorize/__init__.py new file mode 100644 index 00000000..31596b8c --- /dev/null +++ b/trustgraph/embeddings/vectorize/__init__.py @@ -0,0 +1,3 @@ + +from . vectorize import * + diff --git a/trustgraph/embeddings/vectorize/__main__.py b/trustgraph/embeddings/vectorize/__main__.py new file mode 100755 index 00000000..a578de8a --- /dev/null +++ b/trustgraph/embeddings/vectorize/__main__.py @@ -0,0 +1,6 @@ + +from . vectorize import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/embeddings/vectorize/vectorize.py b/trustgraph/embeddings/vectorize/vectorize.py new file mode 100755 index 00000000..b362470d --- /dev/null +++ b/trustgraph/embeddings/vectorize/vectorize.py @@ -0,0 +1,167 @@ + +""" +Vectorizer, applies an embedding algorithm to a chunk. 
Input is a chunk, +output is chunk and vectors. +""" + +import pulsar +from pulsar.schema import JsonSchema +import tempfile +import base64 +import os +import argparse +import time + +from ... schema import Chunk, VectorsChunk +from ... embeddings_client import EmbeddingsClient +from ... log_level import LogLevel + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + subscriber, + log_level, + model, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(Chunk), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(VectorsChunk), + ) + + self.embeddings = EmbeddingsClient(pulsar_host=pulsar_host) + + def emit(self, source, chunk, vectors): + + r = VectorsChunk(source=source, chunk=chunk, vectors=vectors) + self.producer.send(r) + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + print(f"Indexing {v.source.id}...", flush=True) + + chunk = v.chunk.decode("utf-8") + + try: + + vectors = self.embeddings.request(chunk) + + self.emit( + source=v.source, + chunk=chunk.encode("utf-8"), + vectors=vectors + ) + + except Exception as e: + print("Exception:", e, flush=True) + + print("Done.", flush=True) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + except Exception as e: + + print("Exception:", e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + prog='embeddings-vectorizer', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'chunk-load' + default_output_queue = 'vectors-chunk-load' + default_subscriber = 'embeddings-vectorizer' + + 
parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + parser.add_argument( + '-m', '--model', + default="all-MiniLM-L6-v2", + help=f'LLM model (default: all-MiniLM-L6-v2)' + ) + + args = parser.parse_args() + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + subscriber=args.subscriber, + log_level=args.log_level, + model=args.model, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + diff --git a/trustgraph/embeddings_client.py b/trustgraph/embeddings_client.py new file mode 100644 index 00000000..e464e02f --- /dev/null +++ b/trustgraph/embeddings_client.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 + +import pulsar +import _pulsar +from pulsar.schema import JsonSchema +from trustgraph.schema import EmbeddingsRequest, EmbeddingsResponse +import hashlib +import uuid + +# Ugly +ERROR=_pulsar.LoggerLevel.Error +WARN=_pulsar.LoggerLevel.Warn +INFO=_pulsar.LoggerLevel.Info +DEBUG=_pulsar.LoggerLevel.Debug + +class EmbeddingsClient: + + def __init__( + self, log_level=ERROR, client_id=None, + pulsar_host="pulsar://pulsar:6650", + ): + + if client_id == None: + client_id = str(uuid.uuid4()) + + self.client = pulsar.Client( + 
pulsar_host, + logger=pulsar.ConsoleLogger(log_level), + ) + + self.producer = self.client.create_producer( + topic='embeddings', + schema=JsonSchema(EmbeddingsRequest), + chunking_enabled=True, + ) + + self.consumer = self.client.subscribe( + 'embeddings-response', client_id, + schema=JsonSchema(EmbeddingsResponse), + ) + + def request(self, text, timeout=500): + + id = str(uuid.uuid4()) + + r = EmbeddingsRequest( + text=text + ) + self.producer.send(r, properties={ "id": id }) + + while True: + + msg = self.consumer.receive(timeout_millis=timeout * 1000) + + mid = msg.properties()["id"] + + if mid == id: + resp = msg.value().vectors + self.consumer.acknowledge(msg) + return resp + + # Ignore messages with wrong ID + self.consumer.acknowledge(msg) + + def __del__(self): + + self.producer.close() + self.consumer.close() + self.client.close() + diff --git a/trustgraph/graph/__init__.py b/trustgraph/graph/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/trustgraph/graph/cassandra_write/__init__.py b/trustgraph/graph/cassandra_write/__init__.py new file mode 100644 index 00000000..d891d55f --- /dev/null +++ b/trustgraph/graph/cassandra_write/__init__.py @@ -0,0 +1,3 @@ + +from . write import * + diff --git a/trustgraph/graph/cassandra_write/__main__.py b/trustgraph/graph/cassandra_write/__main__.py new file mode 100755 index 00000000..c05d8c6d --- /dev/null +++ b/trustgraph/graph/cassandra_write/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . write import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/graph/cassandra_write/write.py b/trustgraph/graph/cassandra_write/write.py new file mode 100755 index 00000000..1b56b5ab --- /dev/null +++ b/trustgraph/graph/cassandra_write/write.py @@ -0,0 +1,144 @@ + +""" +Simple decoder, accepts PDF documents on input, outputs pages from the +PDF document as text as separate output objects. 
+""" + +import pulsar +from pulsar.schema import JsonSchema +import tempfile +import base64 +import os +import argparse +import time + +from ... trustgraph import TrustGraph +from ... schema import Triple +from ... log_level import LogLevel + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + subscriber, + log_level, + graph_host, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(Triple), + ) + + self.tg = TrustGraph([graph_host]) + + self.count = 0 + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + + self.tg.insert( + v.s.value, + v.p.value, + v.o.value + ) + + self.count += 1 + + if (self.count % 1000) == 0: + print(self.count, "...", flush=True) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + except Exception as e: + + print("Exception:", e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + prog='graph-write-cassandra', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'graph-load' + default_subscriber = 'graph-write-cassandra' + + parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue 
(default: info)' + ) + + parser.add_argument( + '-g', '--graph-host', + default="localhost", + help=f'Output queue (default: localhost)' + ) + + args = parser.parse_args() + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + subscriber=args.subscriber, + log_level=args.log_level, + graph_host=args.graph_host, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + + diff --git a/trustgraph/graph_rag.py b/trustgraph/graph_rag.py new file mode 100644 index 00000000..4175698e --- /dev/null +++ b/trustgraph/graph_rag.py @@ -0,0 +1,227 @@ + +from trustgraph.trustgraph import TrustGraph +from trustgraph.edge_map import VectorStore +from trustgraph.trustgraph import TrustGraph +from trustgraph.llm_client import LlmClient +from trustgraph.embeddings_client import EmbeddingsClient + +LABEL="http://www.w3.org/2000/01/rdf-schema#label" +DEFINITION="http://www.w3.org/2004/02/skos/core#definition" + +class GraphRag: + + def __init__( + self, + graph_hosts=None, + pulsar_host="pulsar://pulsar:6650", + vector_store="http://milvus:19530", + verbose=False + ): + + self.verbose=verbose + + if graph_hosts == None: + graph_hosts = ["cassandra"] + + if self.verbose: + print("Initialising...", flush=True) + + self.graph = TrustGraph(graph_hosts) + + self.embeddings = EmbeddingsClient(pulsar_host=pulsar_host) + + self.vecstore = VectorStore(vector_store) + + self.entity_limit=50 + self.query_limit=30 + self.max_sg_size=3000 + + self.label_cache = {} + + self.llm = LlmClient(pulsar_host=pulsar_host) + + if self.verbose: + print("Initialised", flush=True) + + def get_vector(self, query): + + if self.verbose: + print("Compute embeddings...", flush=True) + + qembeds = self.embeddings.request(query) + + if self.verbose: + print("Done.", flush=True) + + return qembeds + + def get_entities(self, query): + + everything = [] + + vectors = 
self.get_vector(query) + + if self.verbose: + print("Get entities...", flush=True) + + for vector in vectors: + + res = self.vecstore.search( + vector, + limit=self.entity_limit + ) + + entities = set([ + item["entity"]["entity"] + for item in res + ]) + + everything.extend(entities) + + if self.verbose: + print("Entities:", flush=True) + for ent in everything: + print(" ", ent, flush=True) + + return everything + + def maybe_label(self, e): + + if e in self.label_cache: + return self.label_cache[e] + + res = self.graph.get_sp(e, LABEL) + res = list(res) + + if len(res) == 0: + self.label_cache[e] = e + return e + + self.label_cache[e] = res[0][0] + return self.label_cache[e] + + def get_nodes(self, query): + + ents = self.get_entities(query) + + if self.verbose: + print("Get labels...", flush=True) + + nodes = [ + self.maybe_label(e) + for e in ents + ] + + if self.verbose: + print("Nodes:", flush=True) + for node in nodes: + print(" ", node, flush=True) + + return nodes + + def get_subgraph(self, query): + + entities = self.get_entities(query) + + subgraph = set() + + if self.verbose: + print("Get subgraph...", flush=True) + + for e in entities: + + res = self.graph.get_s(e, limit=self.query_limit) + for p, o in res: + subgraph.add((e, p, o)) + + res = self.graph.get_p(e, limit=self.query_limit) + for s, o in res: + subgraph.add((s, e, o)) + + res = self.graph.get_o(e, limit=self.query_limit) + for s, p in res: + subgraph.add((s, p, e)) + + subgraph = list(subgraph) + + subgraph = subgraph[0:self.max_sg_size] + + if self.verbose: + print("Subgraph:", flush=True) + for edge in subgraph: + print(" ", str(edge), flush=True) + + if self.verbose: + print("Done.", flush=True) + + return subgraph + + def get_labelgraph(self, query): + + subgraph = self.get_subgraph(query) + + sg2 = [] + + for edge in subgraph: + + if edge[1] == LABEL: + continue + + s = self.maybe_label(edge[0]) + p = self.maybe_label(edge[1]) + o = self.maybe_label(edge[2]) + + sg2.append((s, p, o)) + 
+ return sg2 + + def get_cypher(self, query): + + sg = self.get_labelgraph(query) + + sg2 = [] + + for s, p, o in sg: + + sg2.append(f"({s})-[{p}]->({o})") + + kg = "\n".join(sg2) + kg = kg.replace("\\", "-") + + return kg + + def get_graph_prompt(self, query): + + kg = self.get_cypher(query) + + prompt=f"""Study the knowledge graph provided, and use +the information to answer the question. The question should be answered +in plain English only. + + +{kg} + + +{query} + +""" + + return prompt + + def query(self, query): + + if self.verbose: + print("Construct prompt...", flush=True) + + prompt = self.get_graph_prompt(query) + + if self.verbose: + print("Invoke LLM...", flush=True) + + resp = self.llm.request(prompt) + + if self.verbose: + print("Done", flush=True) + + return resp + diff --git a/trustgraph/graph_rag_client.py b/trustgraph/graph_rag_client.py new file mode 100644 index 00000000..6f48e772 --- /dev/null +++ b/trustgraph/graph_rag_client.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 + +import pulsar +import _pulsar +from pulsar.schema import JsonSchema +from trustgraph.schema import GraphRagQuery, GraphRagResponse +import hashlib +import uuid + +# Ugly +ERROR=_pulsar.LoggerLevel.Error +WARN=_pulsar.LoggerLevel.Warn +INFO=_pulsar.LoggerLevel.Info +DEBUG=_pulsar.LoggerLevel.Debug + +class GraphRagClient: + + def __init__( + self, log_level=ERROR, client_id=None, + pulsar_host="pulsar://pulsar:6650", + ): + + if client_id == None: + client_id = str(uuid.uuid4()) + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level), + ) + + self.producer = self.client.create_producer( + topic='graph-rag-query', + schema=JsonSchema(GraphRagQuery), + chunking_enabled=True, + ) + + self.consumer = self.client.subscribe( + 'graph-rag-response', client_id, + schema=JsonSchema(GraphRagResponse), + ) + + def request(self, query, timeout=500): + + id = str(uuid.uuid4()) + + r = GraphRagQuery( + query=query + ) + self.producer.send(r, properties={ 
"id": id }) + + while True: + + msg = self.consumer.receive(timeout_millis=timeout * 1000) + + mid = msg.properties()["id"] + + if mid == id: + resp = msg.value().response + self.consumer.acknowledge(msg) + return resp + + # Ignore messages with wrong ID + self.consumer.acknowledge(msg) + + def __del__(self): + + self.client.close() + diff --git a/trustgraph/kg/__init__.py b/trustgraph/kg/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/trustgraph/kg/extract_definitions/__init__.py b/trustgraph/kg/extract_definitions/__init__.py new file mode 100644 index 00000000..81287a3c --- /dev/null +++ b/trustgraph/kg/extract_definitions/__init__.py @@ -0,0 +1,3 @@ + +from . extract import * + diff --git a/trustgraph/kg/extract_definitions/__main__.py b/trustgraph/kg/extract_definitions/__main__.py new file mode 100755 index 00000000..403fe672 --- /dev/null +++ b/trustgraph/kg/extract_definitions/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . extract import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/kg/extract_definitions/extract.py b/trustgraph/kg/extract_definitions/extract.py new file mode 100755 index 00000000..369aef59 --- /dev/null +++ b/trustgraph/kg/extract_definitions/extract.py @@ -0,0 +1,193 @@ + +""" +Simple decoder, accepts PDF documents on input, outputs pages from the +PDF document as text as separate output objects. +""" + +import pulsar +from pulsar.schema import JsonSchema +from langchain_community.document_loaders import PyPDFLoader +import tempfile +import base64 +import os +import argparse +import rdflib +import json +import urllib.parse +import time + +from ... schema import VectorsChunk, Triple, Source, Value +from ... log_level import LogLevel +from ... llm_client import LlmClient +from ... prompts import to_definitions +from ... 
rdf import TRUSTGRAPH_ENTITIES, DEFINITION + +DEFINITION_VALUE = Value(value=DEFINITION, is_uri=True) + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + subscriber, + log_level, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(VectorsChunk), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(Triple), + ) + + self.llm = LlmClient(pulsar_host=pulsar_host) + + def to_uri(self, text): + + part = text.replace(" ", "-").lower().encode("utf-8") + quoted = urllib.parse.quote(part) + uri = TRUSTGRAPH_ENTITIES + quoted + + return uri + + def get_definitions(self, chunk): + + prompt = to_definitions(chunk) + resp = self.llm.request(prompt) + + defs = json.loads(resp) + + return defs + + def emit_edge(self, s, p, o): + + t = Triple(s=s, p=p, o=o) + self.producer.send(t) + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + print(f"Indexing {v.source.id}...", flush=True) + + chunk = v.chunk.decode("utf-8") + + g = rdflib.Graph() + + try: + + defs = self.get_definitions(chunk) + print(json.dumps(defs, indent=4), flush=True) + + for defn in defs: + + s = defn["entity"] + s_uri = self.to_uri(s) + + o = defn["definition"] + + s_value = Value(value=str(s_uri), is_uri=True) + o_value = Value(value=str(o), is_uri=False) + + self.emit_edge(s_value, DEFINITION_VALUE, o_value) + + except Exception as e: + print("Exception: ", e, flush=True) + + print("Done.", flush=True) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + except Exception as e: + + print("Exception: ", e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + 
prog='pdf-decoder', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'vectors-chunk-load' + default_output_queue = 'graph-load' + default_subscriber = 'kg-extract-definitions' + + parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + args = parser.parse_args() + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + subscriber=args.subscriber, + log_level=args.log_level, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + diff --git a/trustgraph/kg/extract_relationships/__init__.py b/trustgraph/kg/extract_relationships/__init__.py new file mode 100644 index 00000000..81287a3c --- /dev/null +++ b/trustgraph/kg/extract_relationships/__init__.py @@ -0,0 +1,3 @@ + +from . extract import * + diff --git a/trustgraph/kg/extract_relationships/__main__.py b/trustgraph/kg/extract_relationships/__main__.py new file mode 100755 index 00000000..403fe672 --- /dev/null +++ b/trustgraph/kg/extract_relationships/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . 
extract import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/kg/extract_relationships/extract.py b/trustgraph/kg/extract_relationships/extract.py new file mode 100755 index 00000000..a47c3b6e --- /dev/null +++ b/trustgraph/kg/extract_relationships/extract.py @@ -0,0 +1,252 @@ + +""" +Simple decoder, accepts PDF documents on input, outputs pages from the +PDF document as text as separate output objects. +""" + +import pulsar +from pulsar.schema import JsonSchema +from langchain_community.document_loaders import PyPDFLoader +import tempfile +import base64 +import os +import argparse +import rdflib +import json +import urllib.parse +import time + +from ... schema import VectorsChunk, Triple, VectorsAssociation, Source, Value +from ... log_level import LogLevel +from ... llm_client import LlmClient +from ... prompts import to_relationships +from ... rdf import RDF_LABEL, TRUSTGRAPH_ENTITIES + +RDF_LABEL_VALUE = Value(value=RDF_LABEL, is_uri=True) + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + vec_queue, + subscriber, + log_level, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(VectorsChunk), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(Triple), + ) + + self.vec_prod = self.client.create_producer( + topic=vec_queue, + schema=JsonSchema(VectorsAssociation), + ) + + self.llm = LlmClient(pulsar_host=pulsar_host) + + def to_uri(self, text): + + part = text.replace(" ", "-").lower().encode("utf-8") + quoted = urllib.parse.quote(part) + uri = TRUSTGRAPH_ENTITIES + quoted + + return uri + + def get_relationships(self, chunk): + + prompt = to_relationships(chunk) + resp = self.llm.request(prompt) + + rels = json.loads(resp) + + return rels + + def emit_edge(self, s, p, o): + + t = Triple(s=s, p=p, o=o) + 
self.producer.send(t) + + def emit_vec(self, ent, vec): + + r = VectorsAssociation(entity=ent, vectors=vec) + self.vec_prod.send(r) + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + print(f"Indexing {v.source.id}...", flush=True) + + chunk = v.chunk.decode("utf-8") + + g = rdflib.Graph() + + try: + + rels = self.get_relationships(chunk) + print(json.dumps(rels, indent=4), flush=True) + + for rel in rels: + + s = rel["subject"] + p = rel["predicate"] + o = rel["object"] + + s_uri = self.to_uri(s) + s_value = Value(value=str(s_uri), is_uri=True) + + p_uri = self.to_uri(p) + p_value = Value(value=str(p_uri), is_uri=True) + + if rel["object-entity"]: + o_uri = self.to_uri(o) + o_value = Value(value=str(o_uri), is_uri=True) + else: + o_value = Value(value=str(o), is_uri=False) + + self.emit_edge( + s_value, + p_value, + o_value + ) + + # Label for s + self.emit_edge( + s_value, + RDF_LABEL_VALUE, + Value(value=str(s), is_uri=False) + ) + + # Label for p + self.emit_edge( + p_value, + RDF_LABEL_VALUE, + Value(value=str(p), is_uri=False) + ) + + if rel["object-entity"]: + # Label for o + self.emit_edge( + o_value, + RDF_LABEL_VALUE, + Value(value=str(o), is_uri=False) + ) + + self.emit_vec(s_value, v.vectors) + self.emit_vec(p_value, v.vectors) + if rel["object-entity"]: + self.emit_vec(o_value, v.vectors) + + except Exception as e: + print("Exception: ", e, flush=True) + + print("Done.", flush=True) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + except Exception as e: + + print("Exception: ", e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + prog='kg-extract-relationships', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'vectors-chunk-load' + 
default_output_queue = 'graph-load' + default_subscriber = 'kg-extract-relationships' + default_vector_queue='vectors-load' + + parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + parser.add_argument( + '-c', '--vector-queue', + default=default_vector_queue, + help=f'Vector output queue (default: {default_vector_queue})' + ) + + args = parser.parse_args() + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + vec_queue=args.vector_queue, + subscriber=args.subscriber, + log_level=args.log_level, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + + diff --git a/trustgraph/llm/__init__.py b/trustgraph/llm/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/trustgraph/llm/azure_text/__init__.py b/trustgraph/llm/azure_text/__init__.py new file mode 100644 index 00000000..f2017af8 --- /dev/null +++ b/trustgraph/llm/azure_text/__init__.py @@ -0,0 +1,3 @@ + +from . 
llm import * + diff --git a/trustgraph/llm/azure_text/__main__.py b/trustgraph/llm/azure_text/__main__.py new file mode 100755 index 00000000..91342d2d --- /dev/null +++ b/trustgraph/llm/azure_text/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . llm import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/llm/azure_text/llm.py b/trustgraph/llm/azure_text/llm.py new file mode 100755 index 00000000..c4f47b6a --- /dev/null +++ b/trustgraph/llm/azure_text/llm.py @@ -0,0 +1,213 @@ + +""" +Simple LLM service, performs text prompt completion using an Ollama service. +Input is prompt, output is response. +""" + +import pulsar +from pulsar.schema import JsonSchema +import tempfile +import base64 +import os +import argparse +from langchain_community.llms import Ollama +import requests +import time +import json + +from ... schema import TextCompletionRequest, TextCompletionResponse +from ... log_level import LogLevel + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + subscriber, + log_level, + endpoint, + token, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(TextCompletionRequest), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(TextCompletionResponse), + ) + + self.endpoint = endpoint + self.token = token + + def build_prompt(self, system, content): + + data = { + "messages": [ + { + "role": "system", "content": system + }, + { + "role": "user", "content": content + } + ], + "max_tokens": 4192, + "temperature": 0.2, + "top_p": 1 + } + + body = json.dumps(data) + + return body + + def call_llm(self, body): + + url = self.endpoint + + # Replace this with the primary/secondary key, AMLToken, or + # Microsoft Entra ID token for the endpoint + api_key = self.token + + headers = { + 'Content-Type': 
'application/json', + 'Authorization': f'Bearer {api_key}' + } + + resp = requests.post(url, data=body, headers=headers) + result = resp.json() + + message_content = result['choices'][0]['message']['content'] + + return message_content + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + + # Sender-produced ID + + id = msg.properties()["id"] + + print(f"Handling prompt {id}...", flush=True) + + prompt = self.build_prompt( + "You are a helpful chatbot", + v.prompt + ) + + response = self.call_llm(prompt) + + print("Send response...", flush=True) + r = TextCompletionResponse(response=response) + self.producer.send(r, properties={"id": id}) + + print("Done.", flush=True) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + except Exception as e: + + print("Exception:", e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + prog='llm-ollama-text', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'llm-complete-text' + default_output_queue = 'llm-complete-text-response' + default_subscriber = 'llm-ollama-text' + + parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + 
choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + parser.add_argument( + '-e', '--endpoint', + help=f'LLM model endpoint' + ) + + parser.add_argument( + '-k', '--token', + help=f'LLM model token' + ) + + args = parser.parse_args() + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + subscriber=args.subscriber, + log_level=args.log_level, + endpoint=args.endpoint, + token=args.token, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + + diff --git a/trustgraph/llm/claude_text/__init__.py b/trustgraph/llm/claude_text/__init__.py new file mode 100644 index 00000000..f2017af8 --- /dev/null +++ b/trustgraph/llm/claude_text/__init__.py @@ -0,0 +1,3 @@ + +from . llm import * + diff --git a/trustgraph/llm/claude_text/__main__.py b/trustgraph/llm/claude_text/__main__.py new file mode 100755 index 00000000..91342d2d --- /dev/null +++ b/trustgraph/llm/claude_text/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . llm import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/llm/claude_text/llm.py b/trustgraph/llm/claude_text/llm.py new file mode 100755 index 00000000..85ffc9c7 --- /dev/null +++ b/trustgraph/llm/claude_text/llm.py @@ -0,0 +1,190 @@ + +""" +Simple LLM service, performs text prompt completion using Claude. +Input is prompt, output is response. +""" + +import pulsar +from pulsar.schema import JsonSchema +import tempfile +import base64 +import os +import argparse +import anthropic +import time + +from ... schema import TextCompletionRequest, TextCompletionResponse +from ... 
log_level import LogLevel + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + subscriber, + log_level, + model, + api_key, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(TextCompletionRequest), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(TextCompletionResponse), + ) + + self.model = model + + self.claude = anthropic.Anthropic(api_key=api_key) + + print("Initialised", flush=True) + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + + # Sender-produced ID + + id = msg.properties()["id"] + + print(f"Handling prompt {id}...", flush=True) + + prompt = v.prompt + response = message = self.claude.messages.create( + model=self.model, + max_tokens=1000, + temperature=0.1, + system = "You are a helpful chatbot.", + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": prompt + } + ] + } + ] + ) + + resp = response.content[0].text + print(resp, flush=True) + + print("Send response...", flush=True) + r = TextCompletionResponse(response=resp) + self.producer.send(r, properties={"id": id}) + + print("Done.", flush=True) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + except Exception as e: + + print("Exception:", e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + prog='llm-ollama-text', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'llm-complete-text' + default_output_queue = 'llm-complete-text-response' + default_subscriber = 'llm-claude-text' + + parser.add_argument( + '-p', '--pulsar-host', + 
default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + parser.add_argument( + '-m', '--model', + default="claude-3-5-sonnet-20240620", + help=f'LLM model (default: claude-3-5-sonnet-20240620)' + ) + + parser.add_argument( + '-k', '--api-key', + help=f'Claude API key' + ) + + args = parser.parse_args() + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + subscriber=args.subscriber, + log_level=args.log_level, + model=args.model, + api_key=args.api_key, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + + diff --git a/trustgraph/llm/ollama_text/__init__.py b/trustgraph/llm/ollama_text/__init__.py new file mode 100644 index 00000000..f2017af8 --- /dev/null +++ b/trustgraph/llm/ollama_text/__init__.py @@ -0,0 +1,3 @@ + +from . llm import * + diff --git a/trustgraph/llm/ollama_text/__main__.py b/trustgraph/llm/ollama_text/__main__.py new file mode 100755 index 00000000..91342d2d --- /dev/null +++ b/trustgraph/llm/ollama_text/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . 
llm import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/llm/ollama_text/llm.py b/trustgraph/llm/ollama_text/llm.py new file mode 100755 index 00000000..9d9c7dad --- /dev/null +++ b/trustgraph/llm/ollama_text/llm.py @@ -0,0 +1,169 @@ + +""" +Simple LLM service, performs text prompt completion using an Ollama service. +Input is prompt, output is response. +""" + +import pulsar +from pulsar.schema import JsonSchema +import tempfile +import base64 +import os +import argparse +from langchain_community.llms import Ollama +import time + +from ... schema import TextCompletionRequest, TextCompletionResponse +from ... log_level import LogLevel + +class Processor: + + def __init__( + self, + pulsar_host, + input_queue, + output_queue, + subscriber, + log_level, + model, + ollama, + ): + + self.client = pulsar.Client( + pulsar_host, + logger=pulsar.ConsoleLogger(log_level.to_pulsar()) + ) + + self.consumer = self.client.subscribe( + input_queue, subscriber, + schema=JsonSchema(TextCompletionRequest), + ) + + self.producer = self.client.create_producer( + topic=output_queue, + schema=JsonSchema(TextCompletionResponse), + ) + + self.llm = Ollama(base_url=ollama, model=model) + + def run(self): + + while True: + + msg = self.consumer.receive() + + try: + + v = msg.value() + + # Sender-produced ID + + id = msg.properties()["id"] + + print(f"Handling prompt {id}...", flush=True) + + prompt = v.prompt + response = self.llm.invoke(prompt) + + print("Send response...", flush=True) + r = TextCompletionResponse(response=response) + self.producer.send(r, properties={"id": id}) + + print("Done.", flush=True) + + # Acknowledge successful processing of the message + self.consumer.acknowledge(msg) + + except Exception as e: + + print("Exception:", e, flush=True) + + # Message failed to be processed + self.consumer.negative_acknowledge(msg) + + def __del__(self): + print("Closing") + self.client.close() + +def run(): + + parser = argparse.ArgumentParser( + 
prog='llm-ollama-text', + description=__doc__, + ) + + default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650') + default_input_queue = 'llm-complete-text' + default_output_queue = 'llm-complete-text-response' + default_subscriber = 'llm-ollama-text' + + parser.add_argument( + '-p', '--pulsar-host', + default=default_pulsar_host, + help=f'Pulsar host (default: {default_pulsar_host})', + ) + + parser.add_argument( + '-i', '--input-queue', + default=default_input_queue, + help=f'Input queue (default: {default_input_queue})' + ) + + parser.add_argument( + '-s', '--subscriber', + default=default_subscriber, + help=f'Queue subscriber name (default: {default_subscriber})' + ) + + parser.add_argument( + '-o', '--output-queue', + default=default_output_queue, + help=f'Output queue (default: {default_output_queue})' + ) + + parser.add_argument( + '-l', '--log-level', + type=LogLevel, + default=LogLevel.INFO, + choices=list(LogLevel), + help=f'Output queue (default: info)' + ) + + parser.add_argument( + '-m', '--model', + default="gemma2", + help=f'LLM model (default: gemma2)' + ) + + parser.add_argument( + '-r', '--ollama', + default="http://localhost:11434", + help=f'ollama (default: http://localhost:11434)' + ) + + args = parser.parse_args() + + + while True: + + try: + + p = Processor( + pulsar_host=args.pulsar_host, + input_queue=args.input_queue, + output_queue=args.output_queue, + subscriber=args.subscriber, + log_level=args.log_level, + model=args.model, + ollama=args.ollama, + ) + + p.run() + + except Exception as e: + + print("Exception:", e, flush=True) + print("Will retry...", flush=True) + + time.sleep(10) + diff --git a/trustgraph/llm/vertexai_text/__init__.py b/trustgraph/llm/vertexai_text/__init__.py new file mode 100644 index 00000000..f2017af8 --- /dev/null +++ b/trustgraph/llm/vertexai_text/__init__.py @@ -0,0 +1,3 @@ + +from . 
llm import * + diff --git a/trustgraph/llm/vertexai_text/__main__.py b/trustgraph/llm/vertexai_text/__main__.py new file mode 100755 index 00000000..91342d2d --- /dev/null +++ b/trustgraph/llm/vertexai_text/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +from . llm import run + +if __name__ == '__main__': + run() + diff --git a/trustgraph/llm/vertexai_text/llm.py b/trustgraph/llm/vertexai_text/llm.py new file mode 100755 index 00000000..44b55e62 --- /dev/null +++ b/trustgraph/llm/vertexai_text/llm.py @@ -0,0 +1,254 @@ + +""" +Simple LLM service, performs text prompt completion using an Ollama service. +Input is prompt, output is response. +""" + +import pulsar +from pulsar.schema import JsonSchema +import tempfile +import base64 +import os +import argparse +import vertexai +import time + +from google.oauth2 import service_account +import google + +from vertexai.preview.generative_models import ( + Content, + FunctionDeclaration, + GenerativeModel, + GenerationConfig, + HarmCategory, + HarmBlockThreshold, + Part, + Tool, +) + +from ... schema import TextCompletionRequest, TextCompletionResponse +from ... 
class Processor:
    """Pulsar service exposing a VertexAI (Gemini) text-completion model.

    Consumes TextCompletionRequest messages from the input queue, runs the
    prompt through the model, and publishes a TextCompletionResponse on the
    output queue.  Request and response are correlated through the
    sender-supplied "id" message property.
    """

    def __init__(
        self,
        pulsar_host,
        input_queue,
        output_queue,
        subscriber,
        log_level,
        credentials,
        region,
        model,
    ):

        self.client = pulsar.Client(
            pulsar_host,
            logger=pulsar.ConsoleLogger(log_level.to_pulsar())
        )

        self.consumer = self.client.subscribe(
            input_queue, subscriber,
            schema=JsonSchema(TextCompletionRequest),
        )

        self.producer = self.client.create_producer(
            topic=output_queue,
            schema=JsonSchema(TextCompletionResponse),
        )

        # NOTE(review): this dict is never read in this module and partly
        # disagrees with generation_config below (top_k 32 vs 10, max tokens
        # 8192 vs 8191).  Kept for backward compatibility, but it looks like
        # dead code -- confirm and remove.
        self.parameters = {
            "temperature": 0.2,
            "top_p": 1.0,
            "top_k": 32,
            "candidate_count": 1,
            "max_output_tokens": 8192,
        }

        self.generation_config = GenerationConfig(
            temperature=0.2,
            top_p=1.0,
            top_k=10,
            candidate_count=1,
            max_output_tokens=8191,
        )

        # BLOCK_NONE doesn't seem to be honoured by the API, so use the
        # lowest threshold that works.
        block_level = HarmBlockThreshold.BLOCK_ONLY_HIGH
        # block_level = HarmBlockThreshold.BLOCK_NONE

        self.safety_settings = {
            HarmCategory.HARM_CATEGORY_HARASSMENT: block_level,
            HarmCategory.HARM_CATEGORY_HATE_SPEECH: block_level,
            HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: block_level,
            HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: block_level,
        }

        print("Initialise VertexAI...", flush=True)

        if credentials:
            vertexai.init(
                location=region,
                credentials=credentials,
                project=credentials.project_id,
            )
        else:
            vertexai.init(
                location=region
            )

        print(f"Initialise model {model}", flush=True)
        self.llm = GenerativeModel(model)

        print("Initialisation complete", flush=True)

    def run(self):
        """Consume and answer requests forever.

        Failed messages are negatively acknowledged so Pulsar redelivers
        them; quota errors (HTTP 429) additionally back off for 15 seconds.
        """

        while True:

            msg = self.consumer.receive()

            try:

                v = msg.value()

                # Sender-produced correlation ID
                id = msg.properties()["id"]

                print(f"Handling prompt {id}...", flush=True)

                prompt = v.prompt

                resp = self.llm.generate_content(
                    prompt, generation_config=self.generation_config,
                    safety_settings=self.safety_settings
                )

                resp = resp.text

                # Strip markdown code fences the model sometimes wraps
                # around structured output.
                resp = resp.replace("```json", "")
                resp = resp.replace("```", "")

                print("Send response...", flush=True)
                r = TextCompletionResponse(response=resp)
                self.producer.send(r, properties={"id": id})

                print("Done.", flush=True)

                # Acknowledge successful processing of the message
                self.consumer.acknowledge(msg)

            except google.api_core.exceptions.ResourceExhausted:

                print("429, resource busy, sleeping", flush=True)
                time.sleep(15)
                self.consumer.negative_acknowledge(msg)

            except Exception as e:

                print("Exception:", e, flush=True)

                # Message failed to be processed
                self.consumer.negative_acknowledge(msg)

    def __del__(self):
        self.client.close()


def run():
    """CLI entry point: parse arguments and run the service, restarting
    after a 10-second pause on any error."""

    parser = argparse.ArgumentParser(
        # Fixed: was 'llm-ollama-text', a copy/paste leftover from the
        # Ollama variant of this service.
        prog='llm-vertexai-text',
        description=__doc__,
    )

    default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650')
    default_input_queue = 'llm-complete-text'
    default_output_queue = 'llm-complete-text-response'
    default_subscriber = 'llm-vertexai-text'

    parser.add_argument(
        '-p', '--pulsar-host',
        default=default_pulsar_host,
        help=f'Pulsar host (default: {default_pulsar_host})',
    )

    parser.add_argument(
        '-i', '--input-queue',
        default=default_input_queue,
        help=f'Input queue (default: {default_input_queue})'
    )

    parser.add_argument(
        '-s', '--subscriber',
        default=default_subscriber,
        help=f'Queue subscriber name (default: {default_subscriber})'
    )

    parser.add_argument(
        '-o', '--output-queue',
        default=default_output_queue,
        help=f'Output queue (default: {default_output_queue})'
    )

    parser.add_argument(
        '-l', '--log-level',
        type=LogLevel,
        default=LogLevel.INFO,
        choices=list(LogLevel),
        # Fixed: help text previously said 'Output queue'.
        help=f'Log level (default: info)'
    )

    parser.add_argument(
        '-m', '--model',
        default="gemini-1.0-pro-001",
        help=f'LLM model (default: gemini-1.0-pro-001)'
    )
    # Also: text-bison-32k

    parser.add_argument(
        '-k', '--private-key',
        help=f'Google Cloud private JSON file'
    )

    parser.add_argument(
        '-r', '--region',
        default='us-west1',
        help=f'Google Cloud region (default: us-west1)',
    )

    args = parser.parse_args()

    if args.private_key:
        credentials = service_account.Credentials.from_service_account_file(
            args.private_key
        )
    else:
        credentials = None

    while True:

        try:

            p = Processor(
                pulsar_host=args.pulsar_host,
                input_queue=args.input_queue,
                output_queue=args.output_queue,
                subscriber=args.subscriber,
                log_level=args.log_level,
                credentials=credentials,
                region=args.region,
                model=args.model,
            )

            p.run()

        except Exception as e:

            print("Exception:", e, flush=True)
            print("Will retry...", flush=True)

            time.sleep(10)


# Ugly: re-export the native logger levels so callers don't need _pulsar.
ERROR = _pulsar.LoggerLevel.Error
WARN = _pulsar.LoggerLevel.Warn
INFO = _pulsar.LoggerLevel.Info
DEBUG = _pulsar.LoggerLevel.Debug


class LlmClient:
    """Synchronous request/response client for the text-completion queues.

    Sends TextCompletionRequest messages to 'llm-complete-text' and waits
    for the matching TextCompletionResponse on
    'llm-complete-text-response', correlating by the "id" property.
    """

    def __init__(
        self, log_level=ERROR, client_id=None,
        pulsar_host="pulsar://pulsar:6650",
    ):

        # Fixed: identity comparison for None (was '== None').
        if client_id is None:
            client_id = str(uuid.uuid4())

        self.client = pulsar.Client(
            pulsar_host,
            logger=pulsar.ConsoleLogger(log_level),
        )

        self.producer = self.client.create_producer(
            topic='llm-complete-text',
            schema=JsonSchema(TextCompletionRequest),
            chunking_enabled=True,
        )

        self.consumer = self.client.subscribe(
            'llm-complete-text-response', client_id,
            schema=JsonSchema(TextCompletionResponse),
        )

    def request(self, prompt, timeout=500):
        """Send *prompt* and block until the matching response arrives.

        *timeout* is in seconds.  NOTE(review): it applies per receive()
        call, not overall, so unrelated traffic on the response queue
        extends the effective deadline -- confirm whether a hard overall
        deadline is wanted.
        """

        id = str(uuid.uuid4())

        r = TextCompletionRequest(
            prompt=prompt
        )
        self.producer.send(r, properties={"id": id})

        while True:

            msg = self.consumer.receive(timeout_millis=timeout * 1000)

            mid = msg.properties()["id"]

            if mid == id:
                resp = msg.value().response
                self.consumer.acknowledge(msg)
                return resp

            # Not ours: acknowledge so it isn't redelivered, keep waiting.
            self.consumer.acknowledge(msg)

    def __del__(self):

        self.producer.close()
        self.consumer.close()
        self.client.close()
from enum import Enum


class LogLevel(Enum):
    """Command-line log level, mapped onto the Pulsar client logger levels."""

    DEBUG = 'debug'
    INFO = 'info'
    WARN = 'warn'
    ERROR = 'error'

    def __str__(self):
        # argparse renders choices/defaults via str().
        return self.value

    def to_pulsar(self):
        """Return the equivalent _pulsar.LoggerLevel value."""
        # Imported lazily so this enum can be imported (e.g. alongside the
        # pure-text prompt helpers) without the pulsar native extension.
        import _pulsar
        if self == LogLevel.DEBUG: return _pulsar.LoggerLevel.Debug
        if self == LogLevel.INFO: return _pulsar.LoggerLevel.Info
        if self == LogLevel.WARN: return _pulsar.LoggerLevel.Warn
        if self == LogLevel.ERROR: return _pulsar.LoggerLevel.Error
        raise RuntimeError("Log level mismatch")


def turtle_extract(text):
    """Build a prompt asking the LLM to extract knowledge from *text*
    as Turtle RDF."""

    prompt = f"""
Study the following text and extract knowledge as
information in Turtle RDF format.
When declaring any new URIs, use prefix,
and declare appropriate namespace tags.



{text}



Do not use placeholders for information you do not know.
You will respond only with raw Turtle RDF data. Do not provide
explanations. Do not use special characters in the abstract text. The
abstract must be written as plain text. Do not add markdown formatting.
"""

    return prompt


def scholar(text):
    """Build a prompt asking the LLM for a scholarly abstract of *text*,
    returned as a JSON object (title, abstract, keywords, people)."""

    # Build the prompt for Article style extraction
    jsonexample = """{
    "title": "Article title here",
    "abstract": "Abstract text here",
    "keywords": ["keyword1", "keyword2", "keyword3"],
    "people": ["person1", "person2", "person3"]
}"""

    promptscholar = f"""Your task is to read the provided text and write a scholarly abstract to fully explain all of the concepts described in the provided text. The abstract must include all conceptual details.

{text}



- Structure: For the provided text, write a title, abstract, keywords,
  and people for the concepts found in the provided text. Ignore
  document formatting in the provided text such as table of contents,
  headers, footers, section metadata, and URLs.
- Focus on Concepts The abstract must focus on concepts found in the
  provided text. The abstract must be factually accurate. Do not
  write any concepts not found in the provided text. Do not
  speculate. Do not omit any conceptual details.
- Completeness: The abstract must capture all topics the reader will
  need to understand the concepts found in the provided text. Describe
  all terms, definitions, entities, people, events, concepts,
  conceptual relationships, and any other topics necessary for the
  reader to understand the concepts of the provided text.

- Format: Respond in the form of a valid JSON object.


{jsonexample}


You will respond only with the JSON object. Do not provide
explanations. Do not use special characters in the abstract text. The
abstract must be written as plain text.
"""

    return promptscholar


def to_json_ld(text):
    """Build a prompt asking the LLM to express facts from *text* as
    JSON-LD using schema.org vocabulary."""

    prompt = f"""
Study the following text and output any facts you discover in
well-structured JSON-LD format.
Use any schema you understand from schema.org to describe the facts.



{text}



You will respond only with raw JSON-LD data in JSON format. Do not provide
explanations. Do not use special characters in the abstract text. The
abstract must be written as plain text. Do not add markdown formatting
or headers or prefixes. Do not use information which is not present in
the input text.
"""

    return prompt


def to_relationships(text):
    """Build a prompt asking the LLM to derive subject/predicate/object
    relationships from *text* as a JSON array."""

    # Fixed: "arary" -> "array" in the instruction text sent to the model.
    prompt = f"""
Study the following text and derive entity relationships. For each
relationship, derive the subject, predicate and object of the relationship.
Output relationships in JSON format as an array of objects with fields:
- subject: the subject of the relationship
- predicate: the predicate
- object: the object of the relationship
- object-entity: false if the object is a simple data type: name, value or date. true if it is an entity.



{text}



You will respond only with raw JSON format data. Do not provide
explanations. Do not use special characters in the abstract text. The
abstract must be written as plain text. Do not add markdown formatting
or headers or prefixes.
"""

    return prompt


def to_definitions(text):
    """Build a prompt asking the LLM to derive entity definitions from
    *text* as a JSON array."""

    # Fixed: "arary" -> "array" in the instruction text sent to the model.
    prompt = f"""
Study the following text and derive definitions for any discovered entities.
Do not provide definitions for entities whose definitions are incomplete
or unknown.
Output relationships in JSON format as an array of objects with fields:
- entity: the name of the entity
- definition: English text which defines the entity



{text}



You will respond only with raw JSON format data. Do not provide
explanations. Do not use special characters in the abstract text. The
abstract will be written as plain text. Do not add markdown formatting
or headers or prefixes. Do not include null or unknown definitions.
"""

    return prompt
class Processor:
    """Pulsar service answering GraphRagQuery messages using Graph RAG.

    Consumes queries from the input queue, answers them through a GraphRag
    instance (graph store + vector store + LLM), and publishes
    GraphRagResponse messages, correlated by the "id" message property.
    """

    def __init__(
        self,
        pulsar_host,
        input_queue,
        output_queue,
        subscriber,
        log_level,
        graph_hosts,
        vector_store,
    ):

        self.client = pulsar.Client(
            pulsar_host,
            logger=pulsar.ConsoleLogger(log_level.to_pulsar())
        )

        self.consumer = self.client.subscribe(
            input_queue, subscriber,
            schema=JsonSchema(GraphRagQuery),
        )

        self.producer = self.client.create_producer(
            topic=output_queue,
            schema=JsonSchema(GraphRagResponse),
        )

        self.rag = GraphRag(
            pulsar_host=pulsar_host,
            graph_hosts=graph_hosts,
            vector_store=vector_store,
            verbose=True,
        )

    def run(self):
        """Consume queries forever; nack failures so they are redelivered."""

        while True:

            msg = self.consumer.receive()

            try:

                v = msg.value()

                # Sender-produced correlation ID
                id = msg.properties()["id"]

                print(f"Handling input {id}...", flush=True)

                response = self.rag.query(v.query)

                print("Send response...", flush=True)
                r = GraphRagResponse(response = response)
                self.producer.send(r, properties={"id": id})

                print("Done.", flush=True)

                # Acknowledge successful processing of the message
                self.consumer.acknowledge(msg)

            except Exception as e:

                print("Exception:", e, flush=True)

                # Message failed to be processed
                self.consumer.negative_acknowledge(msg)

    def __del__(self):
        print("Closing", flush=True)
        self.client.close()


def run():
    """CLI entry point: parse arguments and run the service, restarting
    after a 10-second pause on any error."""

    parser = argparse.ArgumentParser(
        # Fixed: was 'llm-ollama-text', a copy/paste leftover from the
        # Ollama text-completion service.
        prog='graph-rag',
        description=__doc__,
    )

    default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650')
    default_input_queue = 'graph-rag-query'
    default_output_queue = 'graph-rag-response'
    default_subscriber = 'graph-rag'

    parser.add_argument(
        '-p', '--pulsar-host',
        default=default_pulsar_host,
        help=f'Pulsar host (default: {default_pulsar_host})',
    )

    parser.add_argument(
        '-i', '--input-queue',
        default=default_input_queue,
        help=f'Input queue (default: {default_input_queue})'
    )

    parser.add_argument(
        '-s', '--subscriber',
        default=default_subscriber,
        help=f'Queue subscriber name (default: {default_subscriber})'
    )

    parser.add_argument(
        '-o', '--output-queue',
        default=default_output_queue,
        help=f'Output queue (default: {default_output_queue})'
    )

    parser.add_argument(
        '-l', '--log-level',
        type=LogLevel,
        default=LogLevel.INFO,
        choices=list(LogLevel),
        # Fixed: help text previously said 'Output queue'.
        help=f'Log level (default: info)'
    )

    parser.add_argument(
        '-g', '--graph-hosts',
        default='cassandra',
        help=f'Graph hosts, comma separated (default: cassandra)'
    )

    parser.add_argument(
        '-v', '--vector-store',
        default='http://milvus:19530',
        help=f'Vector host (default: http://milvus:19530)'
    )

    args = parser.parse_args()

    while True:

        try:

            p = Processor(
                pulsar_host=args.pulsar_host,
                input_queue=args.input_queue,
                output_queue=args.output_queue,
                subscriber=args.subscriber,
                log_level=args.log_level,
                graph_hosts=args.graph_hosts.split(","),
                vector_store=args.vector_store,
            )

            p.run()

        except Exception as e:

            print("Exception:", e, flush=True)
            print("Will retry...", flush=True)

            time.sleep(10)


# Well-known RDF predicate URIs used across the pipeline.
RDF_LABEL = "http://www.w3.org/2000/01/rdf-schema#label"
DEFINITION = "http://www.w3.org/2004/02/skos/core#definition"

# Namespace under which TrustGraph mints its own entity URIs.
TRUSTGRAPH_ENTITIES = "http://trustgraph.ai/e/"
from pulsar.schema import Record, Bytes, String, Boolean, Integer, Array, Double


# Message schemas shared by all queue producers/consumers.

class Value(Record):
    # A graph term: literal or URI, with an optional type tag.
    value = String()
    is_uri = Boolean()
    type = String()

class Source(Record):
    # Provenance of a document/chunk flowing through the pipeline.
    source = String()
    id = String()
    title = String()

class Document(Record):
    # Raw (binary) document payload.
    source = Source()
    data = Bytes()

class TextDocument(Record):
    # Decoded text payload.
    source = Source()
    text = Bytes()

class Chunk(Record):
    # One chunk of a document after splitting.
    source = Source()
    chunk = Bytes()

class VectorsChunk(Record):
    # A chunk together with its embedding vectors.
    source = Source()
    vectors = Array(Array(Double()))
    chunk = Bytes()

class VectorsAssociation(Record):
    # Embedding vectors associated with a graph entity.
    source = Source()
    vectors = Array(Array(Double()))
    entity = Value()

class Triple(Record):
    # A subject/predicate/object graph edge with provenance.
    source = Source()
    s = Value()
    p = Value()
    o = Value()

class TextCompletionRequest(Record):
    prompt = String()

class TextCompletionResponse(Record):
    response = String()

class EmbeddingsRequest(Record):
    text = String()

class EmbeddingsResponse(Record):
    vectors = Array(Array(Double()))

class GraphRagQuery(Record):
    query = String()

class GraphRagResponse(Record):
    response = String()


class TrustGraph:
    """Thin wrapper over a Cassandra keyspace storing RDF triples.

    Schema: table triples(s, p, o) with primary key (s, p) and secondary
    indexes on p and o.
    """

    def __init__(self, hosts=None):
        # Default to a local single-node cluster.
        if hosts is None:
            hosts = ["localhost"]

        self.cluster = Cluster(hosts)
        self.session = self.cluster.connect()

        self.init()

    def clear(self):
        """Drop the whole keyspace and recreate the empty schema."""
        self.session.execute("""
            drop keyspace if exists trustgraph;
        """)
        self.init()

    def init(self):
        """Create keyspace, table and indexes if they don't exist."""
        self.session.execute("""
            create keyspace if not exists trustgraph
                with replication = {
                    'class' : 'SimpleStrategy',
                    'replication_factor' : 1
                };
        """)

        self.session.set_keyspace('trustgraph')

        self.session.execute("""
            create table if not exists triples (
                s text,
                p text,
                o text,
                PRIMARY KEY (s, p)
            );
        """)

        self.session.execute("""
            create index if not exists triples_p ON triples (p);
        """)

        self.session.execute("""
            create index if not exists triples_o ON triples (o);
        """)

    def insert(self, s, p, o):
        """Upsert one triple."""
        self.session.execute(
            "insert into triples (s, p, o) values (%s, %s, %s)",
            (s, p, o)
        )

    def get_all(self, limit=50):
        """All triples, up to *limit* rows."""
        return self.session.execute(
            f"select s, p, o from triples limit {limit}"
        )

    def get_s(self, s, limit=10):
        """(p, o) pairs for subject *s*."""
        # Fixed: the limit parameter was accepted but never applied,
        # unlike every sibling accessor.
        return self.session.execute(
            f"select p, o from triples where s = %s limit {limit}",
            (s,)
        )

    def get_p(self, p, limit=10):
        """(s, o) pairs for predicate *p* (uses the triples_p index)."""
        return self.session.execute(
            f"select s, o from triples where p = %s limit {limit}",
            (p,)
        )

    def get_o(self, o, limit=10):
        """(s, p) pairs for object *o* (uses the triples_o index)."""
        return self.session.execute(
            f"select s, p from triples where o = %s limit {limit}",
            (o,)
        )

    def get_sp(self, s, p, limit=10):
        """Objects for a (subject, predicate) key lookup."""
        return self.session.execute(
            f"select o from triples where s = %s and p = %s limit {limit}",
            (s, p)
        )

    def get_po(self, p, o, limit=10):
        """Subjects matching (predicate, object)."""
        # Fixed: CQL requires ALLOW FILTERING to come after LIMIT; the
        # original "... allow filtering limit N" is a CQL syntax error.
        return self.session.execute(
            f"select s from triples where p = %s and o = %s limit {limit} allow filtering",
            (p, o)
        )

    def get_os(self, o, s, limit=10):
        """Subjects matching (object, subject)."""
        # NOTE(review): restricting the partition key s plus indexed o --
        # confirm against the running cluster that this does not also need
        # ALLOW FILTERING.
        return self.session.execute(
            f"select s from triples where o = %s and s = %s limit {limit}",
            (o, s)
        )

    def get_spo(self, s, p, o, limit=10):
        """Existence-style lookup of a full (s, p, o) triple."""
        # NOTE(review): o is a regular column here even though indexed;
        # confirm whether this query needs ALLOW FILTERING on the target
        # Cassandra version.
        return self.session.execute(
            f"""select s as x from triples where s = %s and p = %s and o = %s limit {limit}""",
            (s, p, o)
        )
"""
Vector writer service: accepts VectorsAssociation messages on the input
queue and writes each (vector, entity) pair into a Milvus vector store.
"""
# Fixed: the module docstring previously described the PDF decoder (a
# copy/paste leftover); it also feeds argparse's description below, so
# the CLI help text was wrong.

import pulsar
from pulsar.schema import JsonSchema
import os
import argparse
import time
# Removed unused copy/paste imports: PyPDFLoader (langchain), tempfile,
# base64 -- none are referenced in this module, and the langchain one
# dragged in a heavyweight dependency for nothing.

from ... schema import VectorsAssociation
from ... log_level import LogLevel
from ... edge_map import VectorStore


class Processor:
    """Pulsar consumer that persists entity embeddings into Milvus."""

    def __init__(
        self,
        pulsar_host,
        input_queue,
        subscriber,
        store_uri,
        log_level,
    ):

        self.client = pulsar.Client(
            pulsar_host,
            logger=pulsar.ConsoleLogger(log_level.to_pulsar())
        )

        self.consumer = self.client.subscribe(
            input_queue, subscriber,
            schema=JsonSchema(VectorsAssociation),
        )

        self.vecstore = VectorStore(store_uri)

    def run(self):
        """Consume forever; nack failures so they are redelivered."""

        while True:

            msg = self.consumer.receive()

            try:

                v = msg.value()

                # Skip associations with an empty entity value; insert one
                # row per vector otherwise.
                if v.entity.value != "":
                    for vec in v.vectors:
                        self.vecstore.insert(vec, v.entity.value)

                # Acknowledge successful processing of the message
                self.consumer.acknowledge(msg)

            except Exception as e:

                print("Exception:", e, flush=True)

                # Message failed to be processed
                self.consumer.negative_acknowledge(msg)

    def __del__(self):
        self.client.close()


def run():
    """CLI entry point: parse arguments and run the service, restarting
    after a 10-second pause on any error."""

    parser = argparse.ArgumentParser(
        # Fixed: was 'pdf-decoder', a copy/paste leftover.
        prog='vector-write-milvus',
        description=__doc__,
    )

    default_pulsar_host = os.getenv("PULSAR_HOST", 'pulsar://pulsar:6650')
    default_input_queue = 'vectors-load'
    default_subscriber = 'vector-write-milvus'

    parser.add_argument(
        '-p', '--pulsar-host',
        default=default_pulsar_host,
        help=f'Pulsar host (default: {default_pulsar_host})',
    )

    parser.add_argument(
        '-i', '--input-queue',
        default=default_input_queue,
        help=f'Input queue (default: {default_input_queue})'
    )

    parser.add_argument(
        '-s', '--subscriber',
        default=default_subscriber,
        help=f'Queue subscriber name (default: {default_subscriber})'
    )

    parser.add_argument(
        '-l', '--log-level',
        type=LogLevel,
        default=LogLevel.INFO,
        choices=list(LogLevel),
        # Fixed: help text previously said 'Output queue'.
        help=f'Log level (default: info)'
    )

    parser.add_argument(
        '-t', '--store-uri',
        default="http://localhost:19530",
        help=f'Milvus store URI (default: http://localhost:19530)'
    )

    args = parser.parse_args()

    while True:

        try:

            p = Processor(
                pulsar_host=args.pulsar_host,
                input_queue=args.input_queue,
                subscriber=args.subscriber,
                store_uri=args.store_uri,
                log_level=args.log_level,
            )

            p.run()

        except Exception as e:

            print("Exception:", e, flush=True)
            print("Will retry...", flush=True)

            time.sleep(10)