Update Python SDK: add stateless and stateful chat calls

This commit is contained in:
ramnique 2025-01-15 15:45:02 +05:30
parent 2aba775bcb
commit a22d54fd3c
7 changed files with 247 additions and 119 deletions

3
apps/python-sdk/.gitignore vendored Normal file
View file

@@ -0,0 +1,3 @@
__pycache__/
venv/
.venv/

171
apps/python-sdk/lib.py Normal file
View file

@@ -0,0 +1,171 @@
from typing import Dict, List, Optional, Any, Callable, Union, Tuple
import requests
import json
from schema import ApiRequest, ApiResponse, ApiMessage, ToolMessage, UserMessage, SystemMessage, AssistantMessage, AssistantMessageWithToolCalls
class Client:
    """Thin HTTP client for the stateless chat API.

    Wraps a single chat endpoint (`{host}/api/v1/{project_id}/chat`) and,
    when the assistant requests tool calls, executes the matching local
    tools and feeds the results back until an external reply is produced.
    """

    def __init__(self, host: str, project_id: str, project_secret: str) -> None:
        """Build the endpoint URL and bearer-auth headers."""
        self.base_url: str = f'{host}/api/v1/{project_id}/chat'
        self.headers: Dict[str, str] = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {project_secret}'
        }

    def _call_api(
        self,
        messages: List[ApiMessage],
        state: Optional[Dict[str, Any]] = None,
    ) -> ApiResponse:
        """POST one conversation turn and validate the response payload.

        Raises:
            ValueError: on a non-200 status, an empty message list, or when
                the final message is not an assistant message.
        """
        request = ApiRequest(messages=messages, state=state)
        response = requests.post(
            self.base_url, headers=self.headers, data=request.model_dump_json()
        )
        if response.status_code != 200:
            raise ValueError(f"Error: {response.status_code} - {response.text}")
        response_data = ApiResponse.model_validate(response.json())
        if not response_data.messages:
            raise ValueError("No response")
        last_message = response_data.messages[-1]
        if not isinstance(last_message, (AssistantMessage, AssistantMessageWithToolCalls)):
            raise ValueError("Last message was not an assistant message")
        return response_data

    def _process_tool_calls(
        self,
        tool_calls: List[Any],
        tools: Dict[str, Callable[..., str]]
    ) -> List[ToolMessage]:
        """Execute each requested tool and return the tool response messages.

        Raises:
            ValueError: when the assistant asks for a tool that is not in
                the provided registry.
        """
        tool_messages = []
        for tool_call in tool_calls:
            tool_name = tool_call.function.name
            # Arguments arrive as a JSON-encoded string.
            tool_arguments = json.loads(tool_call.function.arguments)
            if tool_name not in tools:
                raise ValueError(f'Missing tool: {tool_name}')
            tool_response = tools[tool_name](**tool_arguments)
            tool_messages.append(ToolMessage(
                role='tool',
                content=tool_response,
                tool_call_id=tool_call.id,
                tool_name=tool_name
            ))
        return tool_messages

    def chat(
        self,
        messages: List[ApiMessage],
        tools: Optional[Dict[str, Callable[..., str]]] = None,
        state: Optional[Dict[str, Any]] = None,
        max_turns: int = 3
    ) -> Tuple[List[ApiMessage], Optional[Dict[str, Any]]]:
        """Stateless chat turn, resolving up to ``max_turns`` tool-call rounds.

        The caller's ``messages`` list is NOT mutated.

        Returns:
            The messages of the final API response and the final state.

        Raises:
            ValueError: if ``max_turns`` is exhausted while tool calls remain
                pending, or the final reply is not an external message.
        """
        if max_turns < 1:
            raise ValueError("max_turns must be at least 1")
        # Copy so extending the working list does not mutate the caller's input.
        current_messages: List[ApiMessage] = list(messages)
        current_state = state
        # Tolerate tools=None: a requested tool then raises a clear ValueError
        # from _process_tool_calls instead of a TypeError on `in None`.
        available_tools: Dict[str, Callable[..., str]] = tools or {}
        turns = 0
        response_messages: List[ApiMessage] = []
        response_state: Optional[Dict[str, Any]] = None
        has_tool_calls = False
        while turns < max_turns:
            response_data = self._call_api(
                messages=current_messages,
                state=current_state
            )
            current_messages.extend(response_data.messages)
            current_state = response_data.state
            response_messages = response_data.messages
            response_state = response_data.state
            last_message = response_data.messages[-1]
            has_tool_calls = bool(getattr(last_message, 'tool_calls', None))
            # No tool calls -> the assistant produced its final reply.
            if not has_tool_calls:
                break
            # Run the requested tools locally and feed the results back.
            current_messages.extend(
                self._process_tool_calls(last_message.tool_calls, available_tools)
            )
            turns += 1
        if turns == max_turns and has_tool_calls:
            raise ValueError("Max turns reached")
        if last_message.agenticResponseType != 'external':
            raise ValueError("Last message was not an external message")
        return response_messages, response_state
class StatefulChat:
    """Maintains conversation history and server state across multiple turns."""

    def __init__(
        self,
        client: Client,
        tools: Optional[Dict[str, Callable[..., str]]] = None,
        system_prompt: Optional[str] = None,
    ) -> None:
        """Bind a Client, an optional tool registry, and an optional system prompt."""
        self.client = client
        self.tools = tools
        # Messages sent with the next turn; seeded with the system prompt if given.
        self.messages: List[ApiMessage] = []
        # Opaque server state threaded through successive API calls.
        self.state: Optional[Dict[str, Any]] = None
        if system_prompt:
            self.messages.append(SystemMessage(role='system', content=system_prompt))

    def run(self, message: str) -> str:
        """Handle a single user turn and return the assistant's reply text.

        NOTE(review): after the call, ``self.messages`` is replaced by only
        the messages of the last API response; earlier turns are presumably
        carried in ``self.state`` — verify against the server contract.
        """
        self.messages.append(UserMessage(role='user', content=message))
        new_messages, new_state = self.client.chat(
            messages=self.messages,
            tools=self.tools,
            state=self.state
        )
        self.messages = new_messages
        self.state = new_state
        # The final message is guaranteed by Client.chat to be an external
        # assistant message; surface only its text content.
        return new_messages[-1].content
def weather_lookup_tool(city_name: str) -> str:
    """Stub weather tool: return a canned 22°C report for *city_name*."""
    report = f"The weather in {city_name} is 22°C."
    return report
if __name__ == "__main__":
    # Demo entry point: one stateful turn using the stub weather tool.
    host: str = "<HOST>"
    project_id: str = "<PROJECT_ID>"
    project_secret: str = "<PROJECT_SECRET>"
    client = Client(host, project_id, project_secret)
    tools: Dict[str, Callable[..., str]] = {
        'weather_lookup': weather_lookup_tool,
    }
    session = StatefulChat(client, tools)
    print(session.run("whats the weather in london?"))

View file

@@ -0,0 +1,9 @@
annotated-types==0.7.0
certifi==2024.12.14
charset-normalizer==3.4.1
idna==3.10
pydantic==2.10.5
pydantic_core==2.27.2
requests==2.32.3
typing_extensions==4.12.2
urllib3==2.3.0

54
apps/python-sdk/schema.py Normal file
View file

@@ -0,0 +1,54 @@
from typing import List, Optional, Union, Any, Literal
from pydantic import BaseModel
class SystemMessage(BaseModel):
    """A system-role instruction message."""
    role: Literal['system']
    content: str
class UserMessage(BaseModel):
    """A message authored by the end user."""
    role: Literal['user']
    content: str
class AssistantMessage(BaseModel):
    """A plain assistant reply (no tool calls)."""
    role: Literal['assistant']
    content: str
    # Identifier of the agent that produced this message, when present.
    agenticSender: Optional[str] = None
    # 'external' messages are presumably the user-facing replies;
    # 'internal' ones are intermediate agent steps — confirm with the server.
    agenticResponseType: Literal['internal', 'external']
class FunctionCall(BaseModel):
    """The function name and JSON-encoded arguments of one tool invocation."""
    name: str
    # Arguments as a JSON string; callers decode with json.loads.
    arguments: str
class ToolCall(BaseModel):
    """A single tool invocation requested by the assistant."""
    id: str
    type: Literal['function']
    function: FunctionCall
class AssistantMessageWithToolCalls(BaseModel):
    """An assistant reply that requests one or more tool invocations."""
    role: Literal['assistant']
    # Content may be absent when the turn consists solely of tool calls.
    content: Optional[str] = None
    tool_calls: List[ToolCall]
    # Identifier of the agent that produced this message, when present.
    agenticSender: Optional[str] = None
    agenticResponseType: Literal['internal', 'external']
class ToolMessage(BaseModel):
    """The result of executing a tool, echoing the originating call's id."""
    role: Literal['tool']
    content: str
    tool_call_id: str
    tool_name: str
# Union of every message shape accepted and returned by the chat API.
# NOTE(review): pydantic disambiguates union members by their fields; the
# required `tool_calls` field is what separates AssistantMessageWithToolCalls
# from AssistantMessage — verify round-tripping picks the intended model.
ApiMessage = Union[
    SystemMessage,
    UserMessage,
    AssistantMessage,
    AssistantMessageWithToolCalls,
    ToolMessage
]
class ApiRequest(BaseModel):
    """Request body for the chat endpoint."""
    messages: List[ApiMessage]
    # Opaque server-defined state from a previous response; None on first call.
    state: Any
class ApiResponse(BaseModel):
    """Response body from the chat endpoint."""
    messages: List[ApiMessage]
    # Updated opaque state to thread into the next request.
    state: Any

View file

@@ -1,109 +0,0 @@
import requests
import json
class StatefulChatbotSDK:
    """Legacy stateful wrapper around the stateless chat API.

    Keeps the full conversation history and the server-returned state on the
    instance and replays both with every request.
    """

    def __init__(self, project_id, project_secret, tools=None):
        # NOTE(review): host is hard-coded to localhost:3000.
        self.base_url = f'http://localhost:3000/api/v1/{project_id}/chat'
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {project_secret}'
        }
        self.messages = []  # This holds the entire conversation history
        self.state = None
        self.tools = tools if tools else {}  # Default to empty if no tools provided

    def send_message(self, user_message):
        """Send one user message; returns the latest reply text or an error string."""
        # Add the user's message to the conversation history
        self.messages.append({
            'role': 'user',
            'content': user_message
        })
        # Prepare the payload for the stateless API, including all past messages
        payload = json.dumps({
            'messages': self.messages,
            'state': self.state if self.state else {}
        })
        # Send the request to the API
        response = requests.post(self.base_url, headers=self.headers, data=payload)
        if response.status_code == 200:
            response_data = response.json()
            # The response contains only the new messages generated in this turn
            new_messages = response_data.get('messages', [])
            if new_messages:
                # Append new messages to the conversation history
                for msg in new_messages:
                    self.messages.append(msg)
            # Extract the new state from the response and store it
            self.state = response_data.get('state', {})
            # NOTE(review): only the FIRST new message is inspected for
            # tool_calls; assistants typically attach them to the last
            # message — verify against the server's response shape.
            tool_calls = response_data.get('messages', [{}])[0].get('tool_calls', [])
            if tool_calls:
                for tool_call in tool_calls:
                    tool_name = tool_call.get('function', {}).get('name')
                    tool_arguments = json.loads(tool_call.get('function', {}).get('arguments', '{}'))
                    # Invoke the tool if it exists, otherwise raise an error
                    if tool_name in self.tools:
                        tool_response = self.tools[tool_name](**tool_arguments)
                        # NOTE(review): the tool message carries no
                        # tool_call_id, so the server cannot match it to
                        # the originating call — confirm the API tolerates this.
                        self.messages.append({
                            'role': 'tool',
                            'content': tool_response
                        })
                    else:
                        raise ValueError(f"Missing tool: '{tool_name}'")
            # Return the latest message from the assistant or tool
            return new_messages[-1]['content'] if new_messages else "No response"
        else:
            # NOTE(review): errors are returned as a string rather than
            # raised, so callers cannot distinguish replies from failures.
            return f"Error: {response.status_code} - {response.text}"

    def get_conversation_history(self):
        """Return the accumulated message history (list of dicts)."""
        return self.messages

    def reset_conversation(self):
        """Clear the history and the server state, starting a fresh conversation."""
        self.messages = []
        self.state = None
# Example tool functions
def weather_lookup_tool(location, units):
    """Stub tool: return a canned weather report for *location* in *units*."""
    report = f"The weather in {location} is 22°C with {units} units."
    return report
# Interactive conversation loop
# Interactive console chat loop: reads user input until "exit", relaying
# each message through the SDK and echoing the bot's replies.
if __name__ == "__main__":
    # Initialize the SDK with your project ID and secret
    project_id = "<PROJECT_ID>"
    project_secret = "<PROJECT_SECRET>"
    tools = {
        'weather_lookup_tool': weather_lookup_tool
    }
    chatbot = StatefulChatbotSDK(project_id, project_secret, tools)
    print("Welcome to the chatbot! Type 'exit' to end the conversation.")
    while True:
        user_message = input("You: ")
        # Check if the user wants to exit the conversation
        if user_message.lower() == "exit":
            print("Ending the conversation.")
            break
        # Send the user message to the chatbot and get the response
        response = chatbot.send_message(user_message)
        # Print the chatbot's response
        print(f"Bot: {response}")
    # Print the full transcript once the chat ends
    print("\nConversation History:")
    for msg in chatbot.get_conversation_history():
        print(f"{msg['role'].capitalize()}: {msg['content']}")

View file

@ -20,7 +20,7 @@ function UserMessage({ content }: { content: string }) {
</div>;
}
function InternalAssistantMessage({ content, sender, latency }: { content: string, sender: string | undefined, latency: number }) {
function InternalAssistantMessage({ content, sender, latency }: { content: string, sender: string | null | undefined, latency: number }) {
const [expanded, setExpanded] = useState(false);
// show a message icon with a + symbol to expand and show the content
@ -52,7 +52,7 @@ function InternalAssistantMessage({ content, sender, latency }: { content: strin
</div>;
}
function AssistantMessage({ content, sender, latency }: { content: string, sender: string | undefined, latency: number }) {
function AssistantMessage({ content, sender, latency }: { content: string, sender: string | null | undefined, latency: number }) {
return <div className="self-start mr-[30%] flex flex-col">
<div className="flex gap-2 justify-between items-center">
<div className="text-gray-500 text-sm pl-3">
@ -104,7 +104,7 @@ function ToolCalls({
handleResults: (results: z.infer<typeof apiV1.ToolMessage>[]) => void;
projectId: string;
messages: z.infer<typeof apiV1.ChatMessage>[];
sender: string | undefined;
sender: string | null | undefined;
workflow: z.infer<typeof Workflow>;
}) {
const resultsMap: Record<string, z.infer<typeof apiV1.ToolMessage>> = {};
@ -147,7 +147,7 @@ function ToolCall({
handleResult: (result: z.infer<typeof apiV1.ToolMessage>) => void;
projectId: string;
messages: z.infer<typeof apiV1.ChatMessage>[];
sender: string | undefined;
sender: string | null | undefined;
workflow: z.infer<typeof Workflow>;
}) {
let matchingWorkflowTool: z.infer<typeof WorkflowTool> | undefined;
@ -224,7 +224,7 @@ function GetInformationToolCall({
handleResult: (result: z.infer<typeof apiV1.ToolMessage>) => void;
projectId: string;
messages: z.infer<typeof apiV1.ChatMessage>[];
sender: string | undefined;
sender: string | null | undefined;
workflow: z.infer<typeof Workflow>;
}) {
const [result, setResult] = useState<z.infer<typeof apiV1.ToolMessage> | undefined>(availableResult);
@ -318,7 +318,7 @@ function RetrieveUrlInfoToolCall({
handleResult: (result: z.infer<typeof apiV1.ToolMessage>) => void;
projectId: string;
messages: z.infer<typeof apiV1.ChatMessage>[];
sender: string | undefined;
sender: string | null | undefined;
}) {
const [result, setResult] = useState<z.infer<typeof apiV1.ToolMessage> | undefined>(availableResult);
const args = JSON.parse(toolCall.function.arguments) as { url: string };
@ -409,7 +409,7 @@ function TransferToAgentToolCall({
handleResult: (result: z.infer<typeof apiV1.ToolMessage>) => void;
projectId: string;
messages: z.infer<typeof apiV1.ChatMessage>[];
sender: string | undefined;
sender: string | null | undefined;
}) {
const typedResult = availableResult ? JSON.parse(availableResult.content) as { assistant: string } : undefined;
if (!typedResult) {
@ -438,7 +438,7 @@ function ClientToolCall({
handleResult: (result: z.infer<typeof apiV1.ToolMessage>) => void;
projectId: string;
messages: z.infer<typeof apiV1.ChatMessage>[];
sender: string | undefined;
sender: string | null | undefined;
}) {
const [result, setResult] = useState<z.infer<typeof apiV1.ToolMessage> | undefined>(availableResult);
@ -515,7 +515,7 @@ function MockToolCall({
handleResult: (result: z.infer<typeof apiV1.ToolMessage>) => void;
projectId: string;
messages: z.infer<typeof apiV1.ChatMessage>[];
sender: string | undefined;
sender: string | null | undefined;
}) {
const [result, setResult] = useState<z.infer<typeof apiV1.ToolMessage> | undefined>(availableResult);
const [response, setResponse] = useState('');

View file

@ -13124,7 +13124,7 @@
},
"node_modules/rowboat-shared": {
"version": "1.0.0",
"resolved": "git+ssh://git@github.com/rowboatlabs/shared.git#211034b606f6894b77a316cca44170b10754d932",
"resolved": "git+ssh://git@github.com/rowboatlabs/shared.git#1c8e722b8c5d644672a2db92f4ba5f25d8560352",
"dependencies": {
"zod": "^3.23.8"
}