mirror of
https://github.com/trustgraph-ai/trustgraph.git
synced 2026-04-26 08:56:21 +02:00
* Bump setup.py versions for 1.1 * PoC MCP server (#419) * Very initial MCP server PoC for TrustGraph * Put service on port 8000 * Add MCP container and packages to buildout * Update docs for API/CLI changes in 1.0 (#421) * Update some API basics for the 0.23/1.0 API change * Add MCP container push (#425) * Add command args to the MCP server (#426) * Host and port parameters * Added websocket arg * More docs * MCP client support (#427) - MCP client service - Tool request/response schema - API gateway support for mcp-tool - Message translation for tool request & response - Make mcp-tool using configuration service for information about where the MCP services are. * Feature/react call mcp (#428) Key Features - MCP Tool Integration: Added core MCP tool support with ToolClientSpec and ToolClient classes - API Enhancement: New mcp_tool method for flow-specific tool invocation - CLI Tooling: New tg-invoke-mcp-tool command for testing MCP integration - React Agent Enhancement: Fixed and improved multi-tool invocation capabilities - Tool Management: Enhanced CLI for tool configuration and management Changes - Added MCP tool invocation to API with flow-specific integration - Implemented ToolClientSpec and ToolClient for tool call handling - Updated agent-manager-react to invoke MCP tools with configurable types - Enhanced CLI with new commands and improved help text - Added comprehensive documentation for new CLI commands - Improved tool configuration management Testing - Added tg-invoke-mcp-tool CLI command for isolated MCP integration testing - Enhanced agent capability to invoke multiple tools simultaneously * Test suite executed from CI pipeline (#433) * Test strategy & test cases * Unit tests * Integration tests * Extending test coverage (#434) * Contract tests * Testing embeedings * Agent unit tests * Knowledge pipeline tests * Turn on contract tests * Increase storage test coverage (#435) * Fixing storage and adding tests * PR pipeline only runs quick tests * Empty 
configuration is returned as empty list, previously was not in response (#436) * Update config util to take files as well as command-line text (#437) * Updated CLI invocation and config model for tools and mcp (#438) * Updated CLI invocation and config model for tools and mcp * CLI anomalies * Tweaked the MCP tool implementation for new model * Update agent implementation to match the new model * Fix agent tools, now all tested * Fixed integration tests * Fix MCP delete tool params * Update Python deps to 1.2 * Update to enable knowledge extraction using the agent framework (#439) * Implement KG extraction agent (kg-extract-agent) * Using ReAct framework (agent-manager-react) * ReAct manager had an issue when emitting JSON, which conflicts with ReAct manager's own JSON messages, so refactored ReAct manager to use traditional ReAct messages, non-JSON structure. * Minor refactor to take the prompt template client out of prompt-template so it can be more readily used by other modules. kg-extract-agent uses this framework.
* Migrate from setup.py to pyproject.toml (#440) * Converted setup.py to pyproject.toml * Modern package infrastructure as recommended by py docs * Install missing build deps (#441) * Install missing build deps (#442) * Implement logging strategy (#444) * Logging strategy and convert all prints() to logging invocations * Fix/startup failure (#445) * Fix logging startup problems * Fix logging startup problems (#446) * Fix logging startup problems (#447) * Fixed Mistral OCR to use current API (#448) * Fixed Mistral OCR to use current API * Added PDF decoder tests * Fix Mistral OCR ident to be standard pdf-decoder (#450) * Fix Mistral OCR ident to be standard pdf-decoder * Correct test * Schema structure refactor (#451) * Write schema refactor spec * Implemented schema refactor spec * Structure data mvp (#452) * Structured data tech spec * Architecture principles * New schemas * Updated schemas and specs * Object extractor * Add .coveragerc * New tests * Cassandra object storage * Trying to get object extraction working, issues exist * Validate librarian collection (#453) * Fix token chunker, broken API invocation (#454) * Fix token chunker, broken API invocation (#455) * Knowledge load utility CLI (#456) * Knowledge loader * More tests
499 lines
13 KiB
Python
499 lines
13 KiB
Python
|
|
from .. schema import KnowledgeResponse, Triple, Triples, EntityEmbeddings
|
|
from .. schema import Metadata, Value, GraphEmbeddings
|
|
|
|
from cassandra.cluster import Cluster
|
|
from cassandra.auth import PlainTextAuthProvider
|
|
from ssl import SSLContext, PROTOCOL_TLSv1_2
|
|
|
|
import uuid
|
|
import time
|
|
import asyncio
|
|
import logging
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
class KnowledgeTableStore:
|
|
|
|
def __init__(
    self,
    cassandra_host, cassandra_user, cassandra_password, keyspace,
):
    """Connect to Cassandra and initialise the store.

    Connects (with TLS + password auth when credentials are given),
    ensures the schema exists, and prepares all CQL statements.

    :param cassandra_host: contact point(s) passed to Cluster()
    :param cassandra_user: username, or falsy for anonymous access
    :param cassandra_password: password, or falsy for anonymous access
    :param keyspace: keyspace holding the knowledge tables
    """

    self.keyspace = keyspace

    logger.info("Connecting to Cassandra...")

    if cassandra_user and cassandra_password:
        # Credentials supplied: authenticate over TLS 1.2.
        self.cluster = Cluster(
            cassandra_host,
            auth_provider=PlainTextAuthProvider(
                username=cassandra_user,
                password=cassandra_password,
            ),
            ssl_context=SSLContext(PROTOCOL_TLSv1_2),
        )
    else:
        # No credentials: plain, unauthenticated connection.
        self.cluster = Cluster(cassandra_host)

    self.cassandra = self.cluster.connect()

    logger.info("Connected.")

    self.ensure_cassandra_schema()
    self.prepare_statements()
|
|
def ensure_cassandra_schema(self):
    """Create the keyspace, tables and indexes used by the store.

    All DDL uses IF NOT EXISTS, so this is safe to run on every
    startup.  Also switches the session onto self.keyspace.
    """

    logger.debug("Ensure Cassandra schema...")

    logger.debug("Keyspace...")

    # FIXME: Replication factor should be configurable
    self.cassandra.execute(f"""
        create keyspace if not exists {self.keyspace}
        with replication = {{
            'class' : 'SimpleStrategy',
            'replication_factor' : 1
        }};
    """)

    self.cassandra.set_keyspace(self.keyspace)

    logger.debug("triples table...")

    # Rows are partitioned per (user, document_id); metadata and
    # triples are flattened 6-tuples:
    # (s, s_is_uri, p, p_is_uri, o, o_is_uri).
    self.cassandra.execute("""
        CREATE TABLE IF NOT EXISTS triples (
            user text,
            document_id text,
            id uuid,
            time timestamp,
            metadata list<tuple<
                text, boolean, text, boolean, text, boolean
            >>,
            triples list<tuple<
                text, boolean, text, boolean, text, boolean
            >>,
            PRIMARY KEY ((user, document_id), id)
        );
    """)

    logger.debug("graph_embeddings table...")

    # entity_embeddings holds ((value, is_uri), [vector, ...]) per
    # entity.
    self.cassandra.execute("""
        create table if not exists graph_embeddings (
            user text,
            document_id text,
            id uuid,
            time timestamp,
            metadata list<tuple<
                text, boolean, text, boolean, text, boolean
            >>,
            entity_embeddings list<
                tuple<
                    tuple<text, boolean>,
                    list<list<double>>
                >
            >,
            PRIMARY KEY ((user, document_id), id)
        );
    """)

    # Secondary index so list_kg_cores can restrict on user alone.
    self.cassandra.execute("""
        CREATE INDEX IF NOT EXISTS graph_embeddings_user ON
        graph_embeddings ( user );
    """)

    logger.debug("document_embeddings table...")

    # chunks holds (chunk blob, [vector, ...]) per document chunk.
    self.cassandra.execute("""
        create table if not exists document_embeddings (
            user text,
            document_id text,
            id uuid,
            time timestamp,
            metadata list<tuple<
                text, boolean, text, boolean, text, boolean
            >>,
            chunks list<
                tuple<
                    blob,
                    list<list<double>>
                >
            >,
            PRIMARY KEY ((user, document_id), id)
        );
    """)

    self.cassandra.execute("""
        CREATE INDEX IF NOT EXISTS document_embeddings_user ON
        document_embeddings ( user );
    """)

    logger.info("Cassandra schema OK.")
|
|
def prepare_statements(self):
    """Pre-compile every CQL statement the store executes.

    Statements are held as attributes so the per-call execute paths
    never re-parse CQL.
    """

    # Bind the prepare call once; every statement below goes through it.
    prepare = self.cassandra.prepare

    # --- inserts -----------------------------------------------------

    self.insert_triples_stmt = prepare("""
        INSERT INTO triples
        (
            id, user, document_id,
            time, metadata, triples
        )
        VALUES (?, ?, ?, ?, ?, ?)
    """)

    self.insert_graph_embeddings_stmt = prepare("""
        INSERT INTO graph_embeddings
        (
            id, user, document_id, time, metadata, entity_embeddings
        )
        VALUES (?, ?, ?, ?, ?, ?)
    """)

    self.insert_document_embeddings_stmt = prepare("""
        INSERT INTO document_embeddings
        (
            id, user, document_id, time, metadata, chunks
        )
        VALUES (?, ?, ?, ?, ?, ?)
    """)

    # --- queries -----------------------------------------------------

    self.list_cores_stmt = prepare("""
        SELECT DISTINCT user, document_id FROM graph_embeddings
        WHERE user = ?
    """)

    self.get_triples_stmt = prepare("""
        SELECT id, time, metadata, triples
        FROM triples
        WHERE user = ? AND document_id = ?
    """)

    self.get_graph_embeddings_stmt = prepare("""
        SELECT id, time, metadata, entity_embeddings
        FROM graph_embeddings
        WHERE user = ? AND document_id = ?
    """)

    self.get_document_embeddings_stmt = prepare("""
        SELECT id, time, metadata, chunks
        FROM document_embeddings
        WHERE user = ? AND document_id = ?
    """)

    # --- deletes -----------------------------------------------------

    self.delete_triples_stmt = prepare("""
        DELETE FROM triples
        WHERE user = ? AND document_id = ?
    """)

    self.delete_graph_embeddings_stmt = prepare("""
        DELETE FROM graph_embeddings
        WHERE user = ? AND document_id = ?
    """)
|
|
|
|
async def add_triples(self, m):
    """Insert a triples message into the triples table.

    :param m: message carrying m.metadata (user, id, optional metadata
        triples) and m.triples (the triples to store)
    :raises Exception: re-raises any driver error after logging it
    """

    # Insertion time in milliseconds, the unit Cassandra timestamp
    # columns accept.
    when = int(time.time() * 1000)

    # Flatten metadata triples (if any) to the schema's 6-tuples:
    # (s, s_is_uri, p, p_is_uri, o, o_is_uri).
    metadata = [
        (
            v.s.value, v.s.is_uri, v.p.value, v.p.is_uri,
            v.o.value, v.o.is_uri
        )
        for v in (m.metadata.metadata or [])
    ]

    triples = [
        (
            v.s.value, v.s.is_uri, v.p.value, v.p.is_uri,
            v.o.value, v.o.is_uri
        )
        for v in m.triples
    ]

    try:
        self.cassandra.execute(
            self.insert_triples_stmt,
            (
                uuid.uuid4(), m.metadata.user,
                m.metadata.id, when,
                metadata, triples,
            )
        )
    except Exception:
        # Log with traceback, then propagate to the caller.
        logger.exception("Exception occurred")
        raise
|
|
|
|
async def add_graph_embeddings(self, m):
    """Insert a graph-embeddings message into the graph_embeddings table.

    :param m: message carrying m.metadata (user, id, optional metadata
        triples) and m.entities (entity/vectors pairs)
    :raises Exception: re-raises any driver error after logging it
    """

    # Insertion time in milliseconds, the unit Cassandra timestamp
    # columns accept.
    when = int(time.time() * 1000)

    # Flatten metadata triples (if any) to the schema's 6-tuples:
    # (s, s_is_uri, p, p_is_uri, o, o_is_uri).
    metadata = [
        (
            v.s.value, v.s.is_uri, v.p.value, v.p.is_uri,
            v.o.value, v.o.is_uri
        )
        for v in (m.metadata.metadata or [])
    ]

    # Schema shape: ((value, is_uri), [vector, ...]) per entity.
    entities = [
        (
            (v.entity.value, v.entity.is_uri),
            v.vectors
        )
        for v in m.entities
    ]

    try:
        self.cassandra.execute(
            self.insert_graph_embeddings_stmt,
            (
                uuid.uuid4(), m.metadata.user,
                m.metadata.id, when,
                metadata, entities,
            )
        )
    except Exception:
        # Log with traceback, then propagate to the caller.
        logger.exception("Exception occurred")
        raise
|
|
|
|
async def add_document_embeddings(self, m):
    """Insert a document-embeddings message into document_embeddings.

    :param m: message carrying m.metadata (user, id, optional metadata
        triples) and m.chunks (chunk/vectors pairs)
    :raises Exception: re-raises any driver error after logging it
    """

    # Insertion time in milliseconds, the unit Cassandra timestamp
    # columns accept.
    when = int(time.time() * 1000)

    # Flatten metadata triples (if any) to the schema's 6-tuples:
    # (s, s_is_uri, p, p_is_uri, o, o_is_uri).
    metadata = [
        (
            v.s.value, v.s.is_uri, v.p.value, v.p.is_uri,
            v.o.value, v.o.is_uri
        )
        for v in (m.metadata.metadata or [])
    ]

    # Schema shape: (chunk blob, [vector, ...]) per chunk.
    chunks = [
        (
            v.chunk,
            v.vectors,
        )
        for v in m.chunks
    ]

    try:
        self.cassandra.execute(
            self.insert_document_embeddings_stmt,
            (
                uuid.uuid4(), m.metadata.user,
                m.metadata.id, when,
                metadata, chunks,
            )
        )
    except Exception:
        # Log with traceback, then propagate to the caller.
        logger.exception("Exception occurred")
        raise
|
|
|
|
async def list_kg_cores(self, user):
    """Return the document IDs of knowledge cores stored for a user.

    Queries graph_embeddings via its user index, so only cores with
    graph embeddings are listed.

    :param user: user whose cores to list
    :return: list of document_id strings
    :raises Exception: re-raises any driver error after logging it
    """

    logger.debug("List kg cores...")

    try:
        resp = self.cassandra.execute(
            self.list_cores_stmt,
            (user,)
        )
    except Exception:
        # Log with traceback, then propagate to the caller.
        logger.exception("Exception occurred")
        raise

    # Each row is (user, document_id); collect the document IDs.
    lst = [
        row[1]
        for row in resp
    ]

    logger.debug("Done")

    return lst
|
|
|
|
async def delete_kg_core(self, user, document_id):
    """Delete a knowledge core's stored triples and graph embeddings.

    NOTE(review): document_embeddings rows for the core are NOT
    deleted here — confirm whether that is intentional.

    :param user: owning user
    :param document_id: core/document identifier
    :raises Exception: re-raises any driver error after logging it
    """

    logger.debug("Delete kg cores...")

    # Remove the core's partition from each table in turn.
    for stmt in (
        self.delete_triples_stmt,
        self.delete_graph_embeddings_stmt,
    ):
        try:
            self.cassandra.execute(
                stmt,
                (user, document_id)
            )
        except Exception:
            # Log with traceback, then propagate to the caller.
            logger.exception("Exception occurred")
            raise
|
|
|
|
async def get_triples(self, user, document_id, receiver):
    """Stream a core's stored triples to an async receiver.

    Each stored row is rebuilt into a Triples message and passed to
    receiver, one await per row.

    :param user: owning user
    :param document_id: core/document identifier
    :param receiver: async callable accepting a Triples message
    :raises Exception: re-raises any driver error after logging it
    """

    logger.debug("Get triples...")

    try:
        resp = self.cassandra.execute(
            self.get_triples_stmt,
            (user, document_id)
        )
    except Exception:
        # Log with traceback, then propagate to the caller.
        logger.exception("Exception occurred")
        raise

    for row in resp:

        # row = (id, time, metadata, triples); the metadata column may
        # be null/empty, so guard with `or []`.
        metadata = [
            Triple(
                s = Value(value = elt[0], is_uri = elt[1]),
                p = Value(value = elt[2], is_uri = elt[3]),
                o = Value(value = elt[4], is_uri = elt[5]),
            )
            for elt in (row[2] or [])
        ]

        triples = [
            Triple(
                s = Value(value = elt[0], is_uri = elt[1]),
                p = Value(value = elt[2], is_uri = elt[3]),
                o = Value(value = elt[4], is_uri = elt[5]),
            )
            for elt in row[3]
        ]

        await receiver(
            Triples(
                metadata = Metadata(
                    id = document_id,
                    user = user,
                    collection = "default", # FIXME: What to put here?
                    metadata = metadata,
                ),
                triples = triples
            )
        )

    logger.debug("Done")
|
|
|
|
async def get_graph_embeddings(self, user, document_id, receiver):
    """Stream a core's stored graph embeddings to an async receiver.

    Each stored row is rebuilt into a GraphEmbeddings message and
    passed to receiver, one await per row.

    :param user: owning user
    :param document_id: core/document identifier
    :param receiver: async callable accepting a GraphEmbeddings message
    :raises Exception: re-raises any driver error after logging it
    """

    logger.debug("Get GE...")

    try:
        resp = self.cassandra.execute(
            self.get_graph_embeddings_stmt,
            (user, document_id)
        )
    except Exception:
        # Log with traceback, then propagate to the caller.
        logger.exception("Exception occurred")
        raise

    for row in resp:

        # row = (id, time, metadata, entity_embeddings); the metadata
        # column may be null/empty, so guard with `or []`.
        metadata = [
            Triple(
                s = Value(value = elt[0], is_uri = elt[1]),
                p = Value(value = elt[2], is_uri = elt[3]),
                o = Value(value = elt[4], is_uri = elt[5]),
            )
            for elt in (row[2] or [])
        ]

        # Stored shape is ((value, is_uri), vectors) per entity.
        entities = [
            EntityEmbeddings(
                entity = Value(value = ent[0][0], is_uri = ent[0][1]),
                vectors = ent[1]
            )
            for ent in row[3]
        ]

        await receiver(
            GraphEmbeddings(
                metadata = Metadata(
                    id = document_id,
                    user = user,
                    collection = "default", # FIXME: What to put here?
                    metadata = metadata,
                ),
                entities = entities
            )
        )

    logger.debug("Done")
|
|
|