diff --git a/README.md b/README.md index de11c1e8..fad9223e 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,8 @@ Powered by OpenAI's Agents SDK, Rowboat is the fastest way to build multi-agents 3. Access the app at [http://localhost:3000](http://localhost:3000). +Note: See the [Using custom LLM providers](https://docs.rowboatlabs.com/setup/#using-custom-llm-providers) section of our docs for using custom providers like OpenRouter and LiteLLM. + ## Demo #### Create a multi-agent assistant with MCP tools by chatting with Rowboat diff --git a/apps/copilot/app.py b/apps/copilot/app.py index f064bcf7..40b29321 100644 --- a/apps/copilot/app.py +++ b/apps/copilot/app.py @@ -1,7 +1,7 @@ from flask import Flask, request, jsonify, Response, stream_with_context from pydantic import BaseModel, ValidationError from typing import List -from copilot import UserMessage, AssistantMessage, get_response, openai_client +from copilot import UserMessage, AssistantMessage, get_response from streaming import get_streaming_response from lib import AgentContext, PromptContext, ToolContext, ChatContext import os diff --git a/apps/copilot/client.py b/apps/copilot/client.py new file mode 100644 index 00000000..ac5d789e --- /dev/null +++ b/apps/copilot/client.py @@ -0,0 +1,24 @@ +import os +from openai import OpenAI +import dotenv +dotenv.load_dotenv() + +PROVIDER_BASE_URL = os.getenv('PROVIDER_BASE_URL', '') +PROVIDER_API_KEY = os.getenv('PROVIDER_API_KEY', os.getenv('OPENAI_API_KEY', '')) +PROVIDER_DEFAULT_MODEL = os.getenv('PROVIDER_DEFAULT_MODEL', 'gpt-4.1') + +if not PROVIDER_API_KEY: + raise ValueError("No LLM Provider API key found") + +completions_client = None +if PROVIDER_BASE_URL: + print(f"Using provider {PROVIDER_BASE_URL}, for completions") + completions_client = OpenAI( + base_url=PROVIDER_BASE_URL, + api_key=PROVIDER_API_KEY + ) +else: + print(f"Using OpenAI directly for completions") + completions_client = OpenAI( + api_key=PROVIDER_API_KEY + ) \ No newline at end of file 
diff --git a/apps/copilot/copilot.py b/apps/copilot/copilot.py index d6fe0b8f..4d5d31f9 100644 --- a/apps/copilot/copilot.py +++ b/apps/copilot/copilot.py @@ -4,9 +4,8 @@ from pydantic import BaseModel, ValidationError from typing import List, Dict, Any, Literal import json from lib import AgentContext, PromptContext, ToolContext, ChatContext - -openai_client = OpenAI() -MODEL_NAME = "gpt-4.1" # OpenAI model name +from client import PROVIDER_DEFAULT_MODEL +from client import completions_client class UserMessage(BaseModel): role: Literal["user"] @@ -75,8 +74,8 @@ User: {last_message.content} message.model_dump() for message in messages ] - response = openai_client.chat.completions.create( - model=MODEL_NAME, + response = completions_client.chat.completions.create( + model=PROVIDER_DEFAULT_MODEL, messages=updated_msgs, temperature=0.0, response_format={"type": "json_object"} diff --git a/apps/copilot/requirements.txt b/apps/copilot/requirements.txt index fe0e8c5d..f4e7d327 100644 --- a/apps/copilot/requirements.txt +++ b/apps/copilot/requirements.txt @@ -18,6 +18,7 @@ openai==1.61.0 packaging==24.2 pydantic==2.10.3 pydantic_core==2.27.1 +python-dotenv sniffio==1.3.1 tqdm==4.67.1 typing_extensions==4.12.2 diff --git a/apps/copilot/streaming.py b/apps/copilot/streaming.py index dfcc6c4c..2e7d65d5 100644 --- a/apps/copilot/streaming.py +++ b/apps/copilot/streaming.py @@ -4,9 +4,8 @@ from pydantic import BaseModel, ValidationError from typing import List, Dict, Any, Literal import json from lib import AgentContext, PromptContext, ToolContext, ChatContext - -openai_client = OpenAI() -MODEL_NAME = "gpt-4.1" # OpenAI model name +from client import PROVIDER_DEFAULT_MODEL +from client import completions_client class UserMessage(BaseModel): role: Literal["user"] @@ -90,8 +89,8 @@ User: {last_message.content} message.model_dump() for message in messages ] - return openai_client.chat.completions.create( - model=MODEL_NAME, + return completions_client.chat.completions.create( + 
model=PROVIDER_DEFAULT_MODEL, messages=updated_msgs, temperature=0.0, stream=True diff --git a/apps/docs/docs/setup.md b/apps/docs/docs/setup.md new file mode 100644 index 00000000..f9ed3ef7 --- /dev/null +++ b/apps/docs/docs/setup.md @@ -0,0 +1,127 @@ +## Getting started + +- ✨ **Start from an idea → Copilot builds your multi-agent workflows** + E.g. "Build me an assistant for a food delivery company to handle delivery status and missing items. Include the necessary tools." +- 🌐 **Connect MCP servers** + Add the MCP servers in Settings → import the tools into Rowboat. +- 📞 **Integrate into your app using the HTTP API or Python SDK** + Grab the Project ID and generated API Key from Settings and use the API. + +Powered by OpenAI's Agents SDK, Rowboat is the fastest way to build multi-agents! + +## Quick start + +Step 1. Set your OpenAI key: + +```bash +export OPENAI_API_KEY=your-openai-api-key +``` + +Step 2. Clone the repository and start Rowboat docker + +```bash +git clone git@github.com:rowboatlabs/rowboat.git +cd rowboat +docker-compose up --build +``` + +Step 3. Access the app at [http://localhost:3000](http://localhost:3000). + +Note: See the [Using custom LLM providers](#using-custom-llm-providers) section below for using custom providers like OpenRouter and LiteLLM. + +## Demo + +#### Create a multi-agent assistant with MCP tools by chatting with Rowboat +[![Screenshot 2025-04-23 at 00 25 31](https://github.com/user-attachments/assets/c8a41622-8e0e-459f-becb-767503489866)](https://youtu.be/YRTCw9UHRbU) + +## Integrate with Rowboat agents + +There are 2 ways to integrate with the agents you create in Rowboat + +**Option #1: HTTP API** + +You can use the API directly at [http://localhost:3000/api/v1/](http://localhost:3000/api/v1/). See [API Docs](https://docs.rowboatlabs.com/using_the_api/) for details. 
+ +```bash +curl --location 'http://localhost:3000/api/v1//chat' \ +--header 'Content-Type: application/json' \ +--header 'Authorization: Bearer ' \ +--data '{ + "messages": [ + { + "role": "user", + "content": "tell me the weather in london in metric units" + } + ], + "state": null +}' +``` + + +**Option #2: Python SDK** + +You can use the included Python SDK to interact with the Agents + +```python +from rowboat import Client, StatefulChat +from rowboat.schema import UserMessage, SystemMessage + +# Initialize the client +client = Client( + host="http://localhost:3000", + project_id="", + api_key="" +) + +# Create a stateful chat session (recommended) +chat = StatefulChat(client) +response = chat.run("What's the weather in London?") +print(response) + +# Or use the low-level client API +messages = [ + SystemMessage(role='system', content="You are a helpful assistant"), + UserMessage(role='user', content="Hello, how are you?") +] + +# Get response +response = client.chat(messages=messages) +print(response.messages[-1].content) +``` + +## Using custom LLM providers +By default, Rowboat uses OpenAI LLMs (gpt-4o, gpt-4.1, etc.) for both agents and copilot, when you export your OPENAI_API_KEY. + +However, you can also configure custom LLM providers (e.g. LiteLLM, OpenRouter) to use any of the hundreds of available LLMs beyond OpenAI, such as Claude, DeepSeek, Ollama LLMs and so on. + +**Step 1:** Set up your custom LLM provider using the variables below, for example (assuming LiteLLM): + +```bash +export PROVIDER_BASE_URL=http://host.docker.internal:4000/ +export PROVIDER_API_KEY=sk-1234 +``` + +Rowboat uses "gpt-4.1" as the default model for agents and copilot but this can be overridden as follows, for example (assuming LiteLLM): + +```bash +export PROVIDER_DEFAULT_MODEL=claude-3-7-sonnet-latest +export PROVIDER_COPILOT_MODEL=gpt-4o +``` + +**Notes:** + +- Copilot is optimized for gpt-4o/gpt-4.1. We strongly recommend using these models for best performance. 
+- You can specify different models for the copilot and each agent, but all of them must belong to the same provider (e.g. LiteLLM) +- The integration is provider-agnostic and should work with any service that implements the OpenAI messages format. +- OpenAI-specific tools (e.g., web_search) will not work with non-OpenAI providers. If you get an error, remove these tools. + +**Step 2 (No change):** Clone the repository and start Rowboat docker + +```bash +git clone git@github.com:rowboatlabs/rowboat.git +cd rowboat +docker-compose up --build +``` + +**Step 3 (No change):** Access the app at [http://localhost:3000](http://localhost:3000). + diff --git a/apps/docs/mkdocs.yml b/apps/docs/mkdocs.yml index e6e6ea50..39b0c22d 100644 --- a/apps/docs/mkdocs.yml +++ b/apps/docs/mkdocs.yml @@ -4,6 +4,7 @@ theme: name: material favicon: img/favicon.ico nav: + - Getting Started: setup.md - Overview: - Introduction: index.md - Open Source License: license.md diff --git a/apps/rowboat/app/lib/project_templates.ts b/apps/rowboat/app/lib/project_templates.ts index ffe6f1ae..da98d1ac 100644 --- a/apps/rowboat/app/lib/project_templates.ts +++ b/apps/rowboat/app/lib/project_templates.ts @@ -1,6 +1,8 @@ import { WorkflowTemplate } from "./types/workflow_types"; import { z } from 'zod'; +const DEFAULT_MODEL = process.env.PROVIDER_DEFAULT_MODEL || "gpt-4.1"; + export const templates: { [key: string]: z.infer } = { // Default template 'default': { @@ -37,7 +39,7 @@ You are an helpful customer support assistant ❌ Don'ts: - don't ask user any other detail than email`, - model: "gpt-4o", + model: DEFAULT_MODEL, toggleAble: true, ragReturnType: "chunks", ragK: 3, diff --git a/apps/rowboat/app/lib/types/workflow_types.ts b/apps/rowboat/app/lib/types/workflow_types.ts index 6930a8d0..38d58a18 100644 --- a/apps/rowboat/app/lib/types/workflow_types.ts +++ b/apps/rowboat/app/lib/types/workflow_types.ts @@ -10,12 +10,7 @@ export const WorkflowAgent = z.object({ disabled: 
z.boolean().default(false).optional(), instructions: z.string(), examples: z.string().optional(), - model: z.union([ - z.literal('gpt-4.1'), - z.literal('gpt-4o'), - z.literal('gpt-4.1-mini'), - z.literal('gpt-4o-mini'), - ]), + model: z.string(), locked: z.boolean().default(false).describe('Whether this agent is locked and cannot be deleted').optional(), toggleAble: z.boolean().default(true).describe('Whether this agent can be enabled or disabled').optional(), global: z.boolean().default(false).describe('Whether this agent is a global agent, in which case it cannot be connected to other agents').optional(), diff --git a/apps/rowboat/app/projects/[projectId]/entities/agent_config.tsx b/apps/rowboat/app/projects/[projectId]/entities/agent_config.tsx index 3dd317f4..432725b0 100644 --- a/apps/rowboat/app/projects/[projectId]/entities/agent_config.tsx +++ b/apps/rowboat/app/projects/[projectId]/entities/agent_config.tsx @@ -20,6 +20,8 @@ import { Button as CustomButton } from "@/components/ui/button"; import clsx from "clsx"; import { EditableField } from "@/app/lib/components/editable-field"; import { USE_TRANSFER_CONTROL_OPTIONS } from "@/app/lib/feature_flags"; +import { Input } from "@/components/ui/input"; +import { Info } from "lucide-react"; // Common section header styles const sectionHeaderStyles = "text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400"; @@ -405,20 +407,33 @@ export function AgentConfig({ )}
- - + +
+ +
+
Model Configuration
+ Set this according to the PROVIDER_BASE_URL you have set in your .env file (such as your LiteLLM gateway). +
+
+ E.g. LiteLLM's naming convention is like: 'claude-3-7-sonnet-latest', but you may have set alias model names or might be using a different provider like openrouter, openai etc. +
+
+ By default, the model is set to gpt-4.1, assuming your OpenAI API key is set in PROVIDER_API_KEY and PROVIDER_BASE_URL is not set. +
+
+
+
+ ({ - key: model.value, - label: model.value - }))} - onChange={(value) => handleUpdate({ + onChange={(e) => handleUpdate({ ...agent, - model: value as z.infer['model'] + model: e.target.value as z.infer['model'] })} - className="w-40" /> diff --git a/apps/rowboat/app/projects/[projectId]/playground/components/chat.tsx b/apps/rowboat/app/projects/[projectId]/playground/components/chat.tsx index 42c46dc2..b190b87e 100644 --- a/apps/rowboat/app/projects/[projectId]/playground/components/chat.tsx +++ b/apps/rowboat/app/projects/[projectId]/playground/components/chat.tsx @@ -202,6 +202,19 @@ export function Chat({ setLoadingAssistantResponse(false); }); + eventSource.addEventListener('stream_error', (event) => { + if (eventSource) { + eventSource.close(); + } + + console.error('SSE Error:', event); + if (!ignore) { + setLoadingAssistantResponse(false); + setFetchResponseError('Error: ' + JSON.parse(event.data).error); + setOptimisticMessages(messages); + } + }); + eventSource.onerror = (error) => { console.error('SSE Error:', error); if (!ignore) { diff --git a/apps/rowboat/app/projects/[projectId]/workflow/app.tsx b/apps/rowboat/app/projects/[projectId]/workflow/app.tsx index 20b94273..5d70fbe8 100644 --- a/apps/rowboat/app/projects/[projectId]/workflow/app.tsx +++ b/apps/rowboat/app/projects/[projectId]/workflow/app.tsx @@ -15,9 +15,11 @@ import { getProjectConfig } from "@/app/actions/project_actions"; export function App({ projectId, useRag, + defaultModel, }: { projectId: string; useRag: boolean; + defaultModel: string; }) { const [selectorKey, setSelectorKey] = useState(0); const [workflow, setWorkflow] = useState> | null>(null); @@ -118,6 +120,7 @@ export function App({ useRag={useRag} mcpServerUrls={mcpServerUrls} toolWebhookUrl={toolWebhookUrl} + defaultModel={defaultModel} />} } diff --git a/apps/rowboat/app/projects/[projectId]/workflow/page.tsx b/apps/rowboat/app/projects/[projectId]/workflow/page.tsx index 631545ea..8ee6f20f 100644 --- 
a/apps/rowboat/app/projects/[projectId]/workflow/page.tsx +++ b/apps/rowboat/app/projects/[projectId]/workflow/page.tsx @@ -3,6 +3,7 @@ import { App } from "./app"; import { USE_RAG } from "@/app/lib/feature_flags"; import { projectsCollection } from "@/app/lib/mongodb"; import { notFound } from "next/navigation"; +const DEFAULT_MODEL = process.env.PROVIDER_DEFAULT_MODEL || "gpt-4.1"; export const metadata: Metadata = { title: "Workflow" @@ -25,6 +26,7 @@ export default async function Page({ ); } diff --git a/apps/rowboat/app/projects/[projectId]/workflow/workflow_editor.tsx b/apps/rowboat/app/projects/[projectId]/workflow/workflow_editor.tsx index d0e58820..2f029ea3 100644 --- a/apps/rowboat/app/projects/[projectId]/workflow/workflow_editor.tsx +++ b/apps/rowboat/app/projects/[projectId]/workflow/workflow_editor.tsx @@ -263,7 +263,7 @@ function reducer(state: State, action: Action): State { description: "", disabled: false, instructions: "", - model: "gpt-4o", + model: "", locked: false, toggleAble: true, ragReturnType: "chunks", @@ -552,7 +552,6 @@ function reducer(state: State, action: Action): State { draft.currentIndex++; draft.present = nextState; }); - } } @@ -568,6 +567,7 @@ export function WorkflowEditor({ useRag, mcpServerUrls, toolWebhookUrl, + defaultModel, }: { dataSources: WithStringId>[]; workflow: WithStringId>; @@ -577,6 +577,7 @@ export function WorkflowEditor({ useRag: boolean; mcpServerUrls: Array>; toolWebhookUrl: string; + defaultModel: string; }) { const [state, dispatch] = useReducer>(reducer, { patches: [], @@ -659,7 +660,11 @@ export function WorkflowEditor({ } function handleAddAgent(agent: Partial> = {}) { - dispatch({ type: "add_agent", agent }); + const agentWithModel = { + ...agent, + model: agent.model || defaultModel || "gpt-4o" + }; + dispatch({ type: "add_agent", agent: agentWithModel }); } function handleAddTool(tool: Partial> = {}) { diff --git a/apps/rowboat_agents/poetry.lock b/apps/rowboat_agents/poetry.lock index 
db840fe2..343332a0 100644 --- a/apps/rowboat_agents/poetry.lock +++ b/apps/rowboat_agents/poetry.lock @@ -984,14 +984,14 @@ files = [ [[package]] name = "griffe" -version = "1.6.2" +version = "1.7.3" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "griffe-1.6.2-py3-none-any.whl", hash = "sha256:6399f7e663150e4278a312a8e8a14d2f3d7bd86e2ef2f8056a1058e38579c2ee"}, - {file = "griffe-1.6.2.tar.gz", hash = "sha256:3a46fa7bd83280909b63c12b9a975732a927dd97809efe5b7972290b606c5d91"}, + {file = "griffe-1.7.3-py3-none-any.whl", hash = "sha256:c6b3ee30c2f0f17f30bcdef5068d6ab7a2a4f1b8bf1a3e74b56fffd21e1c5f75"}, + {file = "griffe-1.7.3.tar.gz", hash = "sha256:52ee893c6a3a968b639ace8015bec9d36594961e156e23315c8e8e51401fa50b"}, ] [package.dependencies] @@ -1819,14 +1819,14 @@ files = [ [[package]] name = "mcp" -version = "1.5.0" +version = "1.6.0" description = "Model Context Protocol SDK" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "mcp-1.5.0-py3-none-any.whl", hash = "sha256:51c3f35ce93cb702f7513c12406bbea9665ef75a08db909200b07da9db641527"}, - {file = "mcp-1.5.0.tar.gz", hash = "sha256:5b2766c05e68e01a2034875e250139839498c61792163a7b221fc170c12f5aa9"}, + {file = "mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0"}, + {file = "mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723"}, ] [package.dependencies] @@ -2151,14 +2151,14 @@ files = [ [[package]] name = "openai" -version = "1.68.0" +version = "1.76.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "openai-1.68.0-py3-none-any.whl", hash = 
"sha256:20e279b0f3a78cb4a95f3eab2a180f3ee30c6a196aeebd6bf642a4f88ab85ee1"}, - {file = "openai-1.68.0.tar.gz", hash = "sha256:c570c06c9ba10f98b891ac30a3dd7b5c89ed48094c711c7a3f35fb5ade6c0757"}, + {file = "openai-1.76.0-py3-none-any.whl", hash = "sha256:a712b50e78cf78e6d7b2a8f69c4978243517c2c36999756673e07a14ce37dc0a"}, + {file = "openai-1.76.0.tar.gz", hash = "sha256:fd2bfaf4608f48102d6b74f9e11c5ecaa058b60dad9c36e409c12477dfd91fb2"}, ] [package.dependencies] @@ -2166,37 +2166,42 @@ anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" jiter = ">=0.4.0,<1" -numpy = ">=2.0.2" pydantic = ">=1.9.0,<3" sniffio = "*" -sounddevice = ">=0.5.1" tqdm = ">4" typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -realtime = ["websockets (>=13,<15)"] +realtime = ["websockets (>=13,<16)"] +voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"] [[package]] name = "openai-agents" -version = "0.0.4" +version = "0.0.13" description = "OpenAI Agents SDK" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "openai_agents-0.0.4-py3-none-any.whl", hash = "sha256:5577c3ee994fe0bd200d7283e4f7a614b3af19afeebcfb07b6ca6039a8a50a5c"}, - {file = "openai_agents-0.0.4.tar.gz", hash = "sha256:297e8d5faeca753e1b303d860b7ac94d03a7e10382be738163dc6a10a3b7cc1c"}, + {file = "openai_agents-0.0.13-py3-none-any.whl", hash = "sha256:e11910679e74803e8a4237ce52a21ee6f9ef0848d866e8198f5c4fb8c6310204"}, + {file = "openai_agents-0.0.13.tar.gz", hash = "sha256:6b80315e75c06b5302c5f2adba2f9ea3845f94615daed4706bfb871740f561a5"}, ] [package.dependencies] griffe = ">=1.5.6,<2" -openai = ">=1.66.2" +mcp = {version = ">=1.6.0,<2", markers = "python_version >= \"3.10\""} +openai = ">=1.76.0" pydantic = ">=2.10,<3" requests = ">=2.0,<3" types-requests = ">=2.0,<3" typing-extensions = ">=4.12.2,<5" +[package.extras] +litellm = ["litellm (>=1.65.0,<2)"] +viz = ["graphviz (>=0.17)"] +voice = ["numpy 
(>=2.2.0,<3) ; python_version >= \"3.10\"", "websockets (>=15.0,<16)"] + [[package]] name = "openpyxl" version = "3.1.5" @@ -3223,27 +3228,6 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] -[[package]] -name = "sounddevice" -version = "0.5.1" -description = "Play and Record Sound with Python" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "sounddevice-0.5.1-py3-none-any.whl", hash = "sha256:e2017f182888c3f3c280d9fbac92e5dbddac024a7e3442f6e6116bd79dab8a9c"}, - {file = "sounddevice-0.5.1-py3-none-macosx_10_6_x86_64.macosx_10_6_universal2.whl", hash = "sha256:d16cb23d92322526a86a9490c427bf8d49e273d9ccc0bd096feecd229cde6031"}, - {file = "sounddevice-0.5.1-py3-none-win32.whl", hash = "sha256:d84cc6231526e7a08e89beff229c37f762baefe5e0cc2747cbe8e3a565470055"}, - {file = "sounddevice-0.5.1-py3-none-win_amd64.whl", hash = "sha256:4313b63f2076552b23ac3e0abd3bcfc0c1c6a696fc356759a13bd113c9df90f1"}, - {file = "sounddevice-0.5.1.tar.gz", hash = "sha256:09ca991daeda8ce4be9ac91e15a9a81c8f81efa6b695a348c9171ea0c16cb041"}, -] - -[package.dependencies] -CFFI = ">=1.0" - -[package.extras] -numpy = ["NumPy"] - [[package]] name = "soupsieve" version = "2.6" @@ -3417,14 +3401,14 @@ files = [ [[package]] name = "types-requests" -version = "2.32.0.20250306" +version = "2.32.0.20250328" description = "Typing stubs for requests" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "types_requests-2.32.0.20250306-py3-none-any.whl", hash = "sha256:25f2cbb5c8710b2022f8bbee7b2b66f319ef14aeea2f35d80f18c9dbf3b60a0b"}, - {file = "types_requests-2.32.0.20250306.tar.gz", hash = "sha256:0962352694ec5b2f95fda877ee60a159abdf84a0fc6fdace599f20acb41a03d1"}, + {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"}, + {file = "types_requests-2.32.0.20250328.tar.gz", hash = 
"sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"}, ] [package.dependencies] @@ -4009,4 +3993,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<4.0" -content-hash = "4b3828121cfa1e7657d9f4ecd4635d0a952c81ad77c7ec2d71c8415c90506f2c" +content-hash = "9b132012b1e894f31b66796668c874f0c81ca3077c67e12878b00ccc3e8242ac" diff --git a/apps/rowboat_agents/pyproject.toml b/apps/rowboat_agents/pyproject.toml index 24924755..11edf8be 100644 --- a/apps/rowboat_agents/pyproject.toml +++ b/apps/rowboat_agents/pyproject.toml @@ -62,7 +62,6 @@ mypy-extensions = "^1.0.0" nest-asyncio = "^1.6.0" numpy = "^2.2.1" openai = "*" -openai-agents = "*" openpyxl = "^3.1.5" packaging = "^24.2" pandas = "^2.2.3" @@ -104,6 +103,7 @@ websockets = "^13.1" Werkzeug = "^3.1.3" wheel = "^0.44.0" xattr = "^1.1.4" +openai-agents = "^0.0.13" [build-system] requires = ["poetry-core"] diff --git a/apps/rowboat_agents/requirements.txt b/apps/rowboat_agents/requirements.txt index bd49cca9..29063f1e 100644 --- a/apps/rowboat_agents/requirements.txt +++ b/apps/rowboat_agents/requirements.txt @@ -6,6 +6,7 @@ annotated-types==0.7.0 anyio==4.8.0 asgiref==3.8.1 attrs==25.3.0 +babel==2.16.0 beautifulsoup4==4.12.3 blinker==1.9.0 build==1.2.2.post1 @@ -29,7 +30,9 @@ findpython==0.6.3 firecrawl==1.9.0 Flask==3.1.0 frozenlist==1.5.0 -griffe==1.6.2 +fsspec==2025.3.2 +ghp-import==2.1.0 +griffe==1.7.3 grpcio==1.71.0 grpcio-tools==1.71.0 gunicorn==23.0.0 @@ -39,9 +42,11 @@ hpack==4.1.0 httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 +huggingface-hub==0.30.2 Hypercorn==0.17.3 hyperframe==6.1.0 idna==3.10 +importlib_metadata==8.6.1 installer==0.7.0 itsdangerous==2.2.0 jaraco.classes==3.4.0 @@ -50,11 +55,21 @@ jaraco.functools==4.1.0 Jinja2==3.1.5 jiter==0.6.1 jsonpath-python==1.0.6 +jsonschema==4.23.0 +jsonschema-specifications==2025.4.1 keyring==25.6.0 +litellm==1.67.2 lxml==5.3.0 +Markdown==3.7 markdownify==0.13.1 MarkupSafe==3.0.2 -mcp==1.5.0 +mcp==1.6.0 
+mergedeep==1.3.4 +mistralai==1.2.3 +mkdocs==1.6.1 +mkdocs-get-deps==0.2.0 +mkdocs-material==9.5.50 +mkdocs-material-extensions==1.3.1 more-itertools==10.6.0 motor==3.7.0 msgpack==1.1.0 @@ -62,11 +77,13 @@ multidict==6.2.0 mypy-extensions==1.0.0 nest-asyncio==1.6.0 numpy==2.2.1 -openai==1.68.0 -openai-agents==0.0.4 +openai==1.76.0 +openai-agents==0.0.13 openpyxl==3.1.5 packaging==24.2 +paginate==0.5.7 pandas==2.2.3 +pathspec==0.12.1 pbs-installer==2025.3.17 pkginfo==1.12.1.2 platformdirs==4.3.7 @@ -80,18 +97,26 @@ pycparser==2.22 pydantic==2.10.5 pydantic-settings==2.8.1 pydantic_core==2.27.2 +Pygments==2.19.1 PyJWT==2.10.1 +pymdown-extensions==10.14.1 pymongo==4.10.1 pyproject_hooks==1.2.0 python-dateutil==2.9.0.post0 python-docx==1.1.2 python-dotenv==1.0.1 pytz==2024.2 +PyYAML==6.0.2 +pyyaml_env_tag==0.1 qdrant-client==1.13.3 Quart==0.20.0 RapidFuzz==3.12.2 +redis==5.2.1 +referencing==0.36.2 +regex==2024.11.6 requests==2.32.3 requests-toolbelt==1.0.0 +rpds-py==0.24.0 setuptools==75.8.0 shellingham==1.5.4 six==1.17.0 @@ -99,23 +124,30 @@ sniffio==1.3.1 sounddevice==0.5.1 soupsieve==2.6 sse-starlette==2.2.1 +sseclient==0.0.27 +sseclient-py==1.8.0 starlette==0.46.1 tabulate==0.9.0 +tiktoken==0.9.0 +tokenizers==0.21.1 tomlkit==0.13.2 tqdm==4.67.1 trove-classifiers==2025.3.19.19 -types-requests==2.32.0.20250306 +types-requests==2.32.0.20250328 typing-inspect==0.9.0 +typing-inspection==0.4.0 typing_extensions==4.12.2 tzdata==2024.2 urllib3==2.3.0 uvicorn==0.34.0 virtualenv==20.29.3 waitress==2.1.2 +watchdog==6.0.0 websockets==13.1 Werkzeug==3.1.3 wheel==0.44.0 wsproto==1.2.0 xattr==1.1.4 yarl==1.18.3 +zipp==3.21.0 zstandard==0.23.0 diff --git a/apps/rowboat_agents/src/app/main.py b/apps/rowboat_agents/src/app/main.py index 6f18a4cb..04bb7301 100644 --- a/apps/rowboat_agents/src/app/main.py +++ b/apps/rowboat_agents/src/app/main.py @@ -91,6 +91,7 @@ async def chat(): start_agent_name=data.get("startAgent", ""), agent_configs=data.get("agents", []), 
tool_configs=data.get("tools", []), + prompt_configs=data.get("prompts", []), start_turn_with_start_agent=config.get("start_turn_with_start_agent", False), state=data.get("state", {}), additional_tool_configs=[RAG_TOOL, CLOSE_CHAT_TOOL], @@ -157,6 +158,7 @@ async def chat_stream(): start_agent_name=request_data.get("startAgent", ""), agent_configs=request_data.get("agents", []), tool_configs=request_data.get("tools", []), + prompt_configs=request_data.get("prompts", []), start_turn_with_start_agent=config.get("start_turn_with_start_agent", False), state=request_data.get("state", {}), additional_tool_configs=[RAG_TOOL, CLOSE_CHAT_TOOL], @@ -168,6 +170,9 @@ async def chat_stream(): elif event_type == 'done': print("Yielding done:") yield format_sse(event_data, "done") + elif event_type == 'error': + print("Yielding error:") + yield format_sse(event_data, "stream_error") except Exception as e: logger.error(f"Streaming error: {str(e)}") diff --git a/apps/rowboat_agents/src/graph/core.py b/apps/rowboat_agents/src/graph/core.py index 638497f1..480d33be 100644 --- a/apps/rowboat_agents/src/graph/core.py +++ b/apps/rowboat_agents/src/graph/core.py @@ -7,6 +7,7 @@ import logging from .helpers.access import ( get_agent_by_name, get_external_tools, + get_prompt_by_type ) from .helpers.state import ( construct_state_from_response @@ -14,7 +15,8 @@ from .helpers.state import ( from .helpers.control import get_latest_assistant_msg, get_latest_non_assistant_messages, get_last_agent_name from .swarm_wrapper import run as swarm_run, run_streamed as swarm_run_streamed, create_response, get_agents from src.utils.common import common_logger as logger -import asyncio + +from .types import PromptType # Create a dedicated logger for swarm wrapper logger.setLevel(logging.INFO) @@ -43,6 +45,26 @@ def order_messages(messages): ordered_messages.append(ordered) return ordered_messages +def set_sys_message(messages): + """ + If the system message is empty, set it to the default message: "You 
are a helpful assistant." + """ + if not any(msg.get("role") == "system" for msg in messages): + messages.insert(0, { + "role": "system", + "content": "You are a helpful assistant." + }) + print("Inserted system message: ", messages[0]) + logger.info("Inserted system message: ", messages[0]) + + elif messages[0].get("role") == "system" and messages[0].get("content") == "": + messages[0]["content"] = "You are a helpful assistant." + print("Updated system message: ", messages[0]) + logger.info("Updated system message: ", messages[0]) + print("Messages: ", messages) + # logger.info("Messages: ", messages) + + return messages def clean_up_history(agent_data): """ @@ -197,7 +219,6 @@ async def run_turn( logger.info(f"Completed run of agent: {last_new_agent.name}") print(f"Completed run of agent: {last_new_agent.name}") - # Otherwise, duplicate the last response as external logger.info("No post-processing agent found. Duplicating last response and setting to external.") print("No post-processing agent found. Duplicating last response and setting to external.") @@ -236,13 +257,41 @@ async def run_turn_streamed( start_agent_name, agent_configs, tool_configs, + prompt_configs, start_turn_with_start_agent, state={}, additional_tool_configs=[], complete_request={} ): + messages = set_sys_message(messages) + is_greeting_turn = not any(msg.get("role") != "system" for msg in messages) final_state = None # Initialize outside try block try: + greeting_prompt = get_prompt_by_type(prompt_configs, PromptType.GREETING) + if is_greeting_turn: + if not greeting_prompt: + greeting_prompt = "How can I help you today?" + print("Greeting prompt not found. 
Using default: ", greeting_prompt) + message = { + 'content': greeting_prompt, + 'role': 'assistant', + 'sender': start_agent_name, + 'tool_calls': None, + 'tool_call_id': None, + 'tool_name': None, + 'response_type': 'external' + } + print("Yielding greeting message: ", message) + yield ('message', message) + + final_state = { + "last_agent_name": start_agent_name if start_agent_name else None, + "tokens": {"total": 0, "prompt": 0, "completion": 0} + } + print("Yielding done message") + yield ('done', {'state': final_state}) + return + # Initialize agents and get external tools new_agents = get_agents(agent_configs=agent_configs, tool_configs=tool_configs, complete_request=complete_request) last_agent_name = get_last_agent_name( @@ -274,7 +323,7 @@ async def run_turn_streamed( # Handle raw response events and accumulate tokens if event.type == "raw_response_event": - if hasattr(event.data, 'type') and event.data.type == "response.completed": + if hasattr(event.data, 'type') and event.data.type == "response.completed" and event.data.response.usage: if hasattr(event.data.response, 'usage'): tokens_used["total"] += event.data.response.usage.total_tokens tokens_used["prompt"] += event.data.response.usage.input_tokens @@ -616,4 +665,5 @@ async def run_turn_streamed( except Exception as e: print(traceback.format_exc()) print(f"Error in stream processing: {str(e)}") + print("Yielding error event:", {'error': str(e), 'state': final_state}) yield ('error', {'error': str(e), 'state': final_state}) # Include final_state in error response \ No newline at end of file diff --git a/apps/rowboat_agents/src/graph/swarm_wrapper.py b/apps/rowboat_agents/src/graph/swarm_wrapper.py index 8a2a9203..deb77f56 100644 --- a/apps/rowboat_agents/src/graph/swarm_wrapper.py +++ b/apps/rowboat_agents/src/graph/swarm_wrapper.py @@ -3,6 +3,7 @@ import json import aiohttp import jwt import hashlib +from agents import OpenAIChatCompletionsModel # Import helper functions needed for get_agents from 
.helpers.access import ( @@ -31,6 +32,8 @@ MONGO_URI = os.environ.get("MONGODB_URI", "mongodb://localhost:27017/rowboat").s mongo_client = MongoClient(MONGO_URI) db = mongo_client["rowboat"] +from src.utils.client import client, PROVIDER_DEFAULT_MODEL + class NewResponse(BaseModel): messages: List[Dict] agent: Optional[Any] = None @@ -47,7 +50,9 @@ async def mock_tool(tool_name: str, args: str, description: str, mock_instructio ] print(f"Generating simulated response for tool: {tool_name}") - response_content = generate_openai_output(messages, output_type='text', model="gpt-4o") + response_content = None + response_content = generate_openai_output(messages, output_type='text', model=PROVIDER_DEFAULT_MODEL) + print("Custom provider client not found, using default model: gpt-4o") return response_content except Exception as e: logger.error(f"Error in mock_tool: {str(e)}") @@ -173,8 +178,6 @@ def get_rag_tool(config: dict, complete_request: dict) -> FunctionTool: else: return None - - def get_agents(agent_configs, tool_configs, complete_request): """ Creates and initializes Agent objects based on their configurations and connections. 
@@ -246,12 +249,15 @@ def get_agents(agent_configs, tool_configs, complete_request): # add the name and description to the agent instructions agent_instructions = f"## Your Name\n{agent_config['name']}\n\n## Description\n{agent_config['description']}\n\n## Instructions\n{agent_config['instructions']}" try: + model_name = agent_config["model"] if agent_config["model"] else PROVIDER_DEFAULT_MODEL + print(f"Using model: {model_name}") + model=OpenAIChatCompletionsModel(model=model_name, openai_client=client) if client else agent_config["model"] new_agent = NewAgent( name=agent_config["name"], instructions=agent_instructions, handoff_description=agent_config["description"], tools=new_tools, - model=agent_config["model"], + model = model, model_settings=ModelSettings(temperature=0.0) ) diff --git a/apps/rowboat_agents/src/utils/client.py b/apps/rowboat_agents/src/utils/client.py new file mode 100644 index 00000000..06de9088 --- /dev/null +++ b/apps/rowboat_agents/src/utils/client.py @@ -0,0 +1,32 @@ +import os +import logging +from openai import AsyncOpenAI, OpenAI +import dotenv +dotenv.load_dotenv() + +PROVIDER_BASE_URL = os.getenv('PROVIDER_BASE_URL', '') +PROVIDER_API_KEY = os.getenv('PROVIDER_API_KEY', os.getenv('OPENAI_API_KEY', '')) +PROVIDER_DEFAULT_MODEL = os.getenv('PROVIDER_DEFAULT_MODEL', 'gpt-4.1') + +client = None +if not PROVIDER_API_KEY: + raise ValueError("No LLM Provider API key found") + +if PROVIDER_BASE_URL: + print(f"Using provider {PROVIDER_BASE_URL} with API key {PROVIDER_API_KEY}") + client = AsyncOpenAI(base_url=PROVIDER_BASE_URL, api_key=PROVIDER_API_KEY) +else: + print("No provider base URL configured, using OpenAI directly") + +completions_client = None +if PROVIDER_BASE_URL: + print(f"Using provider {PROVIDER_BASE_URL} for completions") + completions_client = OpenAI( + base_url=PROVIDER_BASE_URL, + api_key=PROVIDER_API_KEY + ) +else: + print(f"Using OpenAI directly for completions") + completions_client = OpenAI( + api_key=PROVIDER_API_KEY 
+ ) \ No newline at end of file diff --git a/apps/rowboat_agents/src/utils/common.py b/apps/rowboat_agents/src/utils/common.py index eab6e558..0fb9791b 100644 --- a/apps/rowboat_agents/src/utils/common.py +++ b/apps/rowboat_agents/src/utils/common.py @@ -7,6 +7,7 @@ import time from dotenv import load_dotenv from openai import OpenAI +from src.utils.client import completions_client load_dotenv() def setup_logger(name, log_file='./run.log', level=logging.INFO, log_to_file=False): @@ -53,31 +54,28 @@ def get_api_key(key_name): raise ValueError(f"{key_name} not found. Did you set it in the .env file?") return api_key -openai_client = OpenAI( - api_key=get_api_key("OPENAI_API_KEY") -) - def generate_gpt4o_output_from_multi_turn_conv(messages, output_type='json', model="gpt-4o"): return generate_openai_output(messages, output_type, model) def generate_openai_output(messages, output_type='not_json', model="gpt-4o", return_completion=False): + print(f"In generate_openai_output, using client: {completions_client} and model: {model}") try: if output_type == 'json': - chat_completion = openai_client.chat.completions.create( - messages=messages, + chat_completion = completions_client.chat.completions.create( model=model, + messages=messages, response_format={"type": "json_object"} ) else: - chat_completion = openai_client.chat.completions.create( - messages=messages, + chat_completion = completions_client.chat.completions.create( model=model, + messages=messages, ) if return_completion: return chat_completion return chat_completion.choices[0].message.content - + except Exception as e: logger.error(e) return None diff --git a/docker-compose.yml b/docker-compose.yml index f0384d74..44558a99 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -37,6 +37,7 @@ services: - MAX_QUERIES_PER_MINUTE=${MAX_QUERIES_PER_MINUTE} - MAX_PROJECTS_PER_USER=${MAX_PROJECTS_PER_USER} - VOICE_API_URL=${VOICE_API_URL} + - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} restart: 
unless-stopped rowboat_agents: @@ -52,6 +53,9 @@ services: - MONGODB_URI=mongodb://mongo:27017/rowboat - QDRANT_URL=${QDRANT_URL} - QDRANT_API_KEY=${QDRANT_API_KEY} + - PROVIDER_BASE_URL=${PROVIDER_BASE_URL} + - PROVIDER_API_KEY=${PROVIDER_API_KEY} + - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} restart: unless-stopped copilot: @@ -63,6 +67,9 @@ services: environment: - OPENAI_API_KEY=${OPENAI_API_KEY} - API_KEY=${COPILOT_API_KEY} + - PROVIDER_BASE_URL=${PROVIDER_BASE_URL} + - PROVIDER_API_KEY=${PROVIDER_API_KEY} + - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} restart: unless-stopped # tools_webhook: @@ -150,6 +157,7 @@ services: - MONGODB_CONNECTION_STRING=mongodb://mongo:27017/rowboat - QDRANT_URL=${QDRANT_URL} - QDRANT_API_KEY=${QDRANT_API_KEY} + - REDIS_URL=redis://redis:6379 restart: unless-stopped # chat_widget: