Merge pull request #306 from rowboatlabs/dev

Dev
This commit is contained in:
Ramnique Singh 2025-11-20 16:49:26 +05:30 committed by GitHub
commit e15faf6696
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 358 additions and 67 deletions

View file

@ -1,7 +1,7 @@
#!/usr/bin/env node
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import { app } from '../dist/app.js';
import { app, modelConfig } from '../dist/app.js';
yargs(hideBin(process.argv))
@ -36,6 +36,14 @@ yargs(hideBin(process.argv))
});
}
)
.command(
"model-config",
"Select model",
(y) => y,
(argv) => {
modelConfig();
}
)
.command(
"update-state <agent> <run_id>",
"Update state for a run",

View file

@ -1,18 +1,21 @@
{
"name": "@rowboatlabs/rowboatx",
"version": "0.9.0",
"version": "0.10.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@rowboatlabs/rowboatx",
"version": "0.9.0",
"version": "0.10.0",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/google": "^2.0.25",
"@ai-sdk/openai": "^2.0.53",
"@ai-sdk/openai-compatible": "^1.0.27",
"@ai-sdk/provider": "^2.0.0",
"@modelcontextprotocol/sdk": "^1.20.2",
"@openrouter/ai-sdk-provider": "^1.2.3",
"ai": "^5.0.78",
"json-schema-to-zod": "^2.6.1",
"nanoid": "^5.1.6",
@ -128,6 +131,39 @@
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/openai-compatible": {
"version": "1.0.27",
"resolved": "https://registry.npmjs.org/@ai-sdk/openai-compatible/-/openai-compatible-1.0.27.tgz",
"integrity": "sha512-bpYruxVLhrTbVH6CCq48zMJNeHu6FmHtEedl9FXckEgcIEAi036idFhJlcRwC1jNCwlacbzb8dPD7OAH1EKJaQ==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "3.0.17"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/openai-compatible/node_modules/@ai-sdk/provider-utils": {
"version": "3.0.17",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.17.tgz",
"integrity": "sha512-TR3Gs4I3Tym4Ll+EPdzRdvo/rc8Js6c4nVhFLuvGLX/Y4V9ZcQMa/HTiYsHEgmYrf1zVi6Q145UEZUfleOwOjw==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@standard-schema/spec": "^1.0.0",
"eventsource-parser": "^3.0.6"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/provider": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz",
@ -239,6 +275,47 @@
"zod": "^3.24.1"
}
},
"node_modules/@openrouter/ai-sdk-provider": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-1.2.3.tgz",
"integrity": "sha512-a6Nc8dPRHakRH9966YJ/HZJhLOds7DuPTscNZDoAr+Aw+tEFUlacSJMvb/b3gukn74mgbuaJRji9YOn62ipfVg==",
"license": "Apache-2.0",
"dependencies": {
"@openrouter/sdk": "^0.1.8"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"ai": "^5.0.0",
"zod": "^3.24.1 || ^v4"
}
},
"node_modules/@openrouter/sdk": {
"version": "0.1.17",
"resolved": "https://registry.npmjs.org/@openrouter/sdk/-/sdk-0.1.17.tgz",
"integrity": "sha512-RFN0sfe83G85MirfpkZSuoX8hLLucemnwqrTr53vlrJmBJZ244CCnuZ33vpVUI8rLg+hP1i/smW6IExzYRDGDg==",
"license": "Apache-2.0",
"dependencies": {
"zod": "^3.25.0 || ^4.0.0"
},
"peerDependencies": {
"@tanstack/react-query": "^5",
"react": "^18 || ^19",
"react-dom": "^18 || ^19"
},
"peerDependenciesMeta": {
"@tanstack/react-query": {
"optional": true
},
"react": {
"optional": true
},
"react-dom": {
"optional": true
}
}
},
"node_modules/@opentelemetry/api": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",

View file

@ -1,6 +1,6 @@
{
"name": "@rowboatlabs/rowboatx",
"version": "0.10.0",
"version": "0.11.0",
"main": "index.js",
"type": "module",
"scripts": {
@ -28,7 +28,10 @@
"@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/google": "^2.0.25",
"@ai-sdk/openai": "^2.0.53",
"@ai-sdk/openai-compatible": "^1.0.27",
"@ai-sdk/provider": "^2.0.0",
"@modelcontextprotocol/sdk": "^1.20.2",
"@openrouter/ai-sdk-provider": "^1.2.3",
"ai": "^5.0.78",
"json-schema-to-zod": "^2.6.1",
"nanoid": "^5.1.6",

View file

@ -3,7 +3,7 @@ import { StreamRenderer } from "./application/lib/stream-renderer.js";
import { stdin as input, stdout as output } from "node:process";
import fs from "fs";
import path from "path";
import { WorkDir } from "./application/config/config.js";
import { WorkDir, getModelConfig, updateModelConfig } from "./application/config/config.js";
import { RunEvent } from "./application/entities/run-events.js";
import { createInterface, Interface } from "node:readline/promises";
import { ToolCallPart } from "./application/entities/message.js";
@ -54,6 +54,12 @@ export async function app(opts: {
input?: string;
noInteractive?: boolean;
}) {
// check if model config is required
const c = await getModelConfig();
if (!c) {
await modelConfig();
}
const renderer = new StreamRenderer();
const state = new AgentState(opts.agent, opts.runId);
@ -202,4 +208,177 @@ async function getUserInput(
process.exit(0);
}
return input;
}
/**
 * Interactive CLI wizard that lets the user pick a provider "flavor",
 * reuse or create a provider alias, and choose a default model.
 * The resulting config is persisted via updateModelConfig() and becomes
 * the default provider/model used by later runs.
 *
 * On any invalid selection the function logs an error and returns without
 * writing anything. The readline interface is always closed on exit.
 */
export async function modelConfig() {
// load existing model config
const config = await getModelConfig();
const rl = createInterface({ input, output });
try {
// Supported provider flavors; presumably this list must stay in sync with
// the Provider.flavor enum in entities/models — verify when editing.
const flavors = [
"openai",
"anthropic",
"google",
"ollama",
"openai-compatible",
"openrouter",
] as const;
// Default base URLs offered in the prompts, keyed by flavor.
const defaultBaseUrls: Record<(typeof flavors)[number], string> = {
openai: "https://api.openai.com/v1",
anthropic: "https://api.anthropic.com/v1",
google: "https://generativelanguage.googleapis.com/v1beta",
ollama: "http://localhost:11434",
"openai-compatible": "http://localhost:8080/v1",
openrouter: "https://openrouter.ai/api/v1",
};
// Suggested default model per flavor (the user may type a different one).
// NOTE(review): Anthropic model IDs are usually dash-separated
// (e.g. "claude-3-5-sonnet-…"), not "claude-3.5-sonnet" — confirm.
const defaultModels: Record<(typeof flavors)[number], string> = {
openai: "gpt-5.1",
anthropic: "claude-3.5-sonnet",
google: "gemini-1.5-pro",
ollama: "llama3.1",
"openai-compatible": "gpt-4o",
openrouter: "openrouter/auto",
};
// Show the currently-configured provider/model, if any, before prompting.
const currentProvider = config?.defaults?.provider;
const currentModel = config?.defaults?.model;
const currentProviderConfig = currentProvider ? config?.providers?.[currentProvider] : undefined;
if (config) {
console.log("Currently using:");
console.log(`- provider: ${currentProvider || "none"}${currentProviderConfig?.flavor ? ` (${currentProviderConfig.flavor})` : ""}`);
console.log(`- model: ${currentModel || "none"}`);
console.log("");
}
// Prompt 1: pick a flavor. Accepts a 1-based number, an exact flavor
// name, or Enter to keep the current provider (only when its alias is
// itself one of the known flavor names).
const flavorPromptLines = flavors
.map((f, idx) => ` ${idx + 1}. ${f}`)
.join("\n");
const flavorAnswer = await rl.question(
`Select a provider type:\n${flavorPromptLines}\nEnter number or name` +
(currentProvider ? ` [${currentProvider}]` : "") +
": ",
);
let selectedFlavorRaw = flavorAnswer.trim();
let selectedFlavor: (typeof flavors)[number] | null = null;
if (selectedFlavorRaw === "" && currentProvider && (flavors as readonly string[]).includes(currentProvider)) {
selectedFlavor = currentProvider as (typeof flavors)[number];
} else if (/^\d+$/.test(selectedFlavorRaw)) {
// Numeric answer: convert from 1-based menu index.
const idx = parseInt(selectedFlavorRaw, 10) - 1;
if (idx >= 0 && idx < flavors.length) {
selectedFlavor = flavors[idx];
}
} else if ((flavors as readonly string[]).includes(selectedFlavorRaw)) {
selectedFlavor = selectedFlavorRaw as (typeof flavors)[number];
}
if (!selectedFlavor) {
console.error("Invalid selection. Exiting.");
return;
}
// Prompt 2: if aliases of this flavor already exist in the config, let
// the user reuse one ("existing") or add a new alias ("add", the default).
const existingAliases = Object.keys(config?.providers || {}).filter(
(name) => config?.providers?.[name]?.flavor === selectedFlavor,
);
let providerName: string | null = null;
let chooseMode: "existing" | "add" = "add";
if (existingAliases.length > 0) {
const listLines = existingAliases
.map((alias, idx) => ` ${idx + 1}. use existing: ${alias}`)
.join("\n");
// "add new" is listed one past the last existing alias.
const addIndex = existingAliases.length + 1;
const providerSelect = await rl.question(
`Found existing providers for ${selectedFlavor}:\n${listLines}\n ${addIndex}. add new\nEnter number or name/alias [${addIndex}]: `,
);
const sel = providerSelect.trim();
if (sel === "" || sel.toLowerCase() === "add" || sel.toLowerCase() === "new") {
chooseMode = "add";
} else if (/^\d+$/.test(sel)) {
const idx = parseInt(sel, 10) - 1;
if (idx >= 0 && idx < existingAliases.length) {
providerName = existingAliases[idx];
chooseMode = "existing";
} else if (idx === existingAliases.length) {
chooseMode = "add";
} else {
console.error("Invalid selection. Exiting.");
return;
}
} else if (existingAliases.includes(sel)) {
providerName = sel;
chooseMode = "existing";
} else {
console.error("Invalid selection. Exiting.");
return;
}
}
// Defensive: should be unreachable given the branches above.
if (chooseMode === "existing" && !providerName) {
console.error("No provider selected. Exiting.");
return;
}
// Reusing an existing alias: only the default model needs choosing; the
// provider entry itself (apiKey/baseURL) is left untouched.
if (chooseMode === "existing") {
const modelDefault =
currentProvider === providerName && currentModel
? currentModel
: defaultModels[selectedFlavor];
const modelAns = await rl.question(
`Specify model for ${selectedFlavor} [${modelDefault}]: `,
);
const model = modelAns.trim() || modelDefault;
const newConfig = {
providers: { ...(config?.providers || {}) },
defaults: {
provider: providerName!,
model,
},
};
await updateModelConfig(newConfig as any);
console.log(`Model configuration updated. Provider set to '${providerName}'.`);
return;
}
// Adding a new alias: collect alias, baseURL, API key and model, then
// merge into the existing providers map (overwriting a same-named alias).
const providerNameAns = await rl.question(
`Enter a name/alias for this provider [${selectedFlavor}]: `,
);
providerName = providerNameAns.trim() || selectedFlavor;
const baseUrlDefault = defaultBaseUrls[selectedFlavor] || "";
const baseUrlAns = await rl.question(
`Enter baseURL for ${selectedFlavor} [${baseUrlDefault}]: `,
);
const baseURL = (baseUrlAns.trim() || baseUrlDefault) || undefined;
const apiKeyAns = await rl.question(
`Enter API key for ${selectedFlavor} (leave blank to skip): `,
);
// Blank answers are stored as "absent" rather than empty strings, so the
// spread below omits the keys entirely.
const apiKey = apiKeyAns.trim() || undefined;
const modelDefault = defaultModels[selectedFlavor];
const modelAns = await rl.question(
`Specify model for ${selectedFlavor} [${modelDefault}]: `,
);
const model = modelAns.trim() || modelDefault;
const mergedProviders = {
...(config?.providers || {}),
[providerName]: {
flavor: selectedFlavor,
...(apiKey ? { apiKey } : {}),
...(baseURL ? { baseURL } : {}),
},
};
const newConfig = {
providers: mergedProviders,
defaults: {
provider: providerName,
model,
},
};
// NOTE(review): `as any` sidesteps the ModelConfig zod type — consider
// validating with ModelConfig.parse() before persisting.
await updateModelConfig(newConfig as any);
console.log(`Model configuration updated. Provider '${providerName}' ${config?.providers?.[providerName] ? "overwritten" : "added"}.`);
} finally {
// Always release the readline interface, even on early return.
rl.close();
}
}

View file

@ -1,13 +1,15 @@
import path from "path";
import fs from "fs";
import { McpServerConfig } from "../entities/mcp.js";
import { ModelConfig as ModelConfigT } from "../entities/models.js";
import { ModelConfig } from "../entities/models.js";
import { z } from "zod";
import { homedir } from "os";
// Resolve app root relative to compiled file location (dist/...)
export const WorkDir = path.join(homedir(), ".rowboat");
let modelConfig: z.infer<typeof ModelConfig> | null = null;
const baseMcpConfig: z.infer<typeof McpServerConfig> = {
mcpServers: {
firecrawl: {
@ -26,27 +28,6 @@ const baseMcpConfig: z.infer<typeof McpServerConfig> = {
}
};
const baseModelConfig: z.infer<typeof ModelConfigT> = {
providers: {
openai: {
flavor: "openai",
},
anthropic: {
flavor: "anthropic",
},
google: {
flavor: "google",
},
ollama: {
flavor: "ollama",
}
},
defaults: {
provider: "openai",
model: "gpt-5.1",
}
};
function ensureMcpConfig() {
const configPath = path.join(WorkDir, "config", "mcp.json");
if (!fs.existsSync(configPath)) {
@ -54,24 +35,14 @@ function ensureMcpConfig() {
}
}
function ensureModelConfig() {
const configPath = path.join(WorkDir, "config", "models.json");
if (!fs.existsSync(configPath)) {
fs.writeFileSync(configPath, JSON.stringify(baseModelConfig, null, 2));
}
}
function ensureDirs() {
const ensure = (p: string) => { if (!fs.existsSync(p)) fs.mkdirSync(p, { recursive: true }); };
ensure(WorkDir);
ensure(path.join(WorkDir, "agents"));
ensure(path.join(WorkDir, "config"));
ensureMcpConfig();
ensureModelConfig();
}
ensureDirs();
function loadMcpServerConfig(): z.infer<typeof McpServerConfig> {
const configPath = path.join(WorkDir, "config", "mcp.json");
if (!fs.existsSync(configPath)) return { mcpServers: {} };
@ -79,13 +50,27 @@ function loadMcpServerConfig(): z.infer<typeof McpServerConfig> {
return McpServerConfig.parse(JSON.parse(config));
}
function loadModelConfig(): z.infer<typeof ModelConfigT> {
export async function getModelConfig(): Promise<z.infer<typeof ModelConfig> | null> {
if (modelConfig) {
return modelConfig;
}
const configPath = path.join(WorkDir, "config", "models.json");
if (!fs.existsSync(configPath)) return baseModelConfig;
const config = fs.readFileSync(configPath, "utf8");
return ModelConfigT.parse(JSON.parse(config));
try {
const config = await fs.promises.readFile(configPath, "utf8");
modelConfig = ModelConfig.parse(JSON.parse(config));
return modelConfig;
} catch (error) {
console.error(`Warning! model config not found!`);
return null;
}
}
/**
 * Persist a new model configuration to disk and refresh the in-memory cache.
 *
 * @param config validated model configuration to store as models.json
 */
export async function updateModelConfig(config: z.infer<typeof ModelConfig>) {
  // Update the module-level cache first so any subsequent getModelConfig()
  // call observes the new value even before the write settles.
  modelConfig = config;
  const serialized = JSON.stringify(config, null, 2);
  await fs.promises.writeFile(
    path.join(WorkDir, "config", "models.json"),
    serialized,
  );
}
ensureDirs();
const { mcpServers } = loadMcpServerConfig();
export const McpServers = mcpServers;
export const ModelConfig = loadModelConfig();
export const McpServers = mcpServers;

View file

@ -1,7 +1,14 @@
import z from "zod";
export const Provider = z.object({
flavor: z.enum(["openai", "anthropic", "google", "ollama"]),
flavor: z.enum([
"anthropic",
"google",
"ollama",
"openai",
"openai-compatible",
"openrouter",
]),
apiKey: z.string().optional(),
baseURL: z.string().optional(),
headers: z.record(z.string(), z.string()).optional(),

View file

@ -1,7 +1,7 @@
import { jsonSchema, ModelMessage } from "ai";
import fs from "fs";
import path from "path";
import { ModelConfig, WorkDir } from "../config/config.js";
import { getModelConfig, WorkDir } from "../config/config.js";
import { Agent, ToolAttachment } from "../entities/agent.js";
import { AssistantContentPart, AssistantMessage, Message, MessageList, ToolCallPart, ToolMessage, UserMessage } from "../entities/message.js";
import { runIdGenerator } from "./run-id-gen.js";
@ -405,6 +405,12 @@ export class AgentState {
}
export async function* streamAgent(state: AgentState): AsyncGenerator<z.infer<typeof RunEvent>, void, unknown> {
// get model config
const modelConfig = await getModelConfig();
if (!modelConfig) {
throw new Error("Model config not found");
}
// set up agent
const agent = await loadAgent(state.agentName);
@ -412,8 +418,8 @@ export async function* streamAgent(state: AgentState): AsyncGenerator<z.infer<ty
const tools = await buildTools(agent);
// set up provider + model
const provider = getProvider(agent.provider);
const model = provider(agent.model || ModelConfig.defaults.model);
const provider = await getProvider(agent.provider);
const model = provider.languageModel(agent.model || modelConfig.defaults.model);
let loopCounter = 0;
while (true) {

View file

@ -1,50 +1,76 @@
import { createOpenAI, OpenAIProvider } from "@ai-sdk/openai";
import { createGoogleGenerativeAI, GoogleGenerativeAIProvider } from "@ai-sdk/google";
import { AnthropicProvider, createAnthropic } from "@ai-sdk/anthropic";
import { OllamaProvider, createOllama } from "ollama-ai-provider-v2";
import { ModelConfig } from "../config/config.js";
import { ProviderV2 } from "@ai-sdk/provider";
import { createOpenAI } from "@ai-sdk/openai";
import { createGoogleGenerativeAI } from "@ai-sdk/google";
import { createAnthropic } from "@ai-sdk/anthropic";
import { createOllama } from "ollama-ai-provider-v2";
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
import { getModelConfig } from "../config/config.js";
const providerMap: Record<string, OpenAIProvider | GoogleGenerativeAIProvider | AnthropicProvider | OllamaProvider> = {};
const providerMap: Record<string, ProviderV2> = {};
export function getProvider(name: string = "") {
export async function getProvider(name: string = ""): Promise<ProviderV2> {
// get model conf
const modelConfig = await getModelConfig();
if (!modelConfig) {
throw new Error("Model config not found");
}
if (!name) {
name = ModelConfig.defaults.provider;
name = modelConfig.defaults.provider;
}
if (providerMap[name]) {
return providerMap[name];
}
const providerConfig = ModelConfig.providers[name];
const providerConfig = modelConfig.providers[name];
if (!providerConfig) {
throw new Error(`Provider ${name} not found`);
}
const { apiKey, baseURL, headers } = providerConfig;
switch (providerConfig.flavor) {
case "openai":
providerMap[name] = createOpenAI({
apiKey: providerConfig.apiKey,
baseURL: providerConfig.baseURL,
headers: providerConfig.headers,
apiKey,
baseURL,
headers,
});
break;
case "anthropic":
providerMap[name] = createAnthropic({
apiKey: providerConfig.apiKey,
baseURL: providerConfig.baseURL,
headers: providerConfig.headers,
apiKey,
baseURL,
headers
});
break;
case "google":
providerMap[name] = createGoogleGenerativeAI({
apiKey: providerConfig.apiKey,
baseURL: providerConfig.baseURL,
headers: providerConfig.headers,
apiKey,
baseURL,
headers
});
break;
case "ollama":
providerMap[name] = createOllama({
baseURL: providerConfig.baseURL,
headers: providerConfig.headers,
baseURL,
headers
});
break;
case "openai-compatible":
providerMap[name] = createOpenAICompatible({
name,
apiKey,
baseURL : baseURL || "",
headers
});
break;
case "openrouter":
providerMap[name] = createOpenRouter({
apiKey,
baseURL,
headers
});
break;
default:
throw new Error(`Provider ${name} not found`);
}
return providerMap[name];
}