mirror of
https://github.com/VectifyAI/PageIndex.git
synced 2026-04-24 23:56:21 +02:00
Add PageIndexClient with agent-based retrieval via OpenAI Agents SDK (#125)
* Add PageIndexClient with retrieve, streaming support, and litellm integration. * Add an OpenAI Agents demo example. * Update the README with an example agent demo section. * Support a separate retrieve_model configuration for indexing and retrieval.
This commit is contained in:
parent
2403be8f27
commit
5d4491f3bf
9 changed files with 501 additions and 7 deletions
|
|
@ -1,6 +1,7 @@
|
|||
import litellm
|
||||
import logging
|
||||
import os
|
||||
import textwrap
|
||||
from datetime import datetime
|
||||
import time
|
||||
import json
|
||||
|
|
@ -29,6 +30,8 @@ def count_tokens(text, model=None):
|
|||
|
||||
|
||||
def llm_completion(model, prompt, chat_history=None, return_finish_reason=False):
|
||||
if model:
|
||||
model = model.removeprefix("litellm/")
|
||||
max_retries = 10
|
||||
messages = list(chat_history) + [{"role": "user", "content": prompt}] if chat_history else [{"role": "user", "content": prompt}]
|
||||
for i in range(max_retries):
|
||||
|
|
@ -57,6 +60,8 @@ def llm_completion(model, prompt, chat_history=None, return_finish_reason=False)
|
|||
|
||||
|
||||
async def llm_acompletion(model, prompt):
|
||||
if model:
|
||||
model = model.removeprefix("litellm/")
|
||||
max_retries = 10
|
||||
messages = [{"role": "user", "content": prompt}]
|
||||
for i in range(max_retries):
|
||||
|
|
@ -678,3 +683,28 @@ class ConfigLoader:
|
|||
self._validate_keys(user_dict)
|
||||
merged = {**self._default_dict, **user_dict}
|
||||
return config(**merged)
|
||||
|
||||
def create_node_mapping(tree):
    """Create a flat dict mapping node_id to node for quick lookup.

    Walks the tree in pre-order; if two nodes share a node_id, the one
    visited last wins, matching plain dict-assignment semantics.
    """
    def _walk(nodes):
        # Yield (node_id, node) pairs for every node that carries an id.
        for item in nodes:
            if item.get('node_id'):
                yield item['node_id'], item
            children = item.get('nodes')
            if children:
                yield from _walk(children)

    return dict(_walk(tree))
|
||||
|
||||
def print_tree(tree, indent=0):
    """Recursively print an indented outline of *tree*.

    Each line shows the node_id in brackets, the title, and — when a
    summary (or prefix_summary) exists — its first 60 characters.
    """
    for entry in tree:
        text = entry.get('summary') or entry.get('prefix_summary', '')
        # Truncated summary suffix; empty summaries produce no suffix at all.
        suffix = f" — {text[:60]}..." if text else ""
        label = f"[{entry.get('node_id', '?')}] {entry.get('title', '')}"
        print(' ' * indent + label + suffix)
        children = entry.get('nodes')
        if children:
            print_tree(children, indent + 1)
|
||||
|
||||
def print_wrapped(text, width=100):
|
||||
for line in text.splitlines():
|
||||
print(textwrap.fill(line, width=width))
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue