mirror of
https://github.com/rowboatlabs/rowboat.git
synced 2026-04-27 01:16:23 +02:00
added the grep tool
This commit is contained in:
parent
274a351bc1
commit
6abb3afc36
3 changed files with 116 additions and 1 deletions
|
|
@ -94,7 +94,7 @@ When a user asks for ANY task that might require external capabilities (web sear
|
|||
|
||||
**IMPORTANT**: Rowboat provides builtin tools that are internal and do NOT require security allowlist entries:
|
||||
- \`workspace-readFile\`, \`workspace-writeFile\`, \`workspace-edit\`, \`workspace-remove\` - File operations
|
||||
- \`workspace-readdir\`, \`workspace-exists\`, \`workspace-stat\`, \`workspace-glob\` - Directory exploration and file search
|
||||
- \`workspace-readdir\`, \`workspace-exists\`, \`workspace-stat\`, \`workspace-glob\`, \`workspace-grep\` - Directory exploration and file search
|
||||
- \`workspace-mkdir\`, \`workspace-rename\`, \`workspace-copy\` - File/directory management
|
||||
- \`analyzeAgent\` - Agent analysis
|
||||
- \`addMcpServer\`, \`listMcpServers\`, \`listMcpTools\`, \`executeMcpTool\` - MCP server management and execution
|
||||
|
|
|
|||
|
|
@ -177,6 +177,7 @@ The Rowboat copilot has access to special builtin tools that regular agents don'
|
|||
- \`workspace-copy\` - Copy files
|
||||
- \`workspace-getRoot\` - Get workspace root directory path
|
||||
- \`workspace-glob\` - Find files matching a glob pattern (e.g., "**/*.ts", "agents/*.md")
|
||||
- \`workspace-grep\` - Search file contents using regex, returns matching files and lines
|
||||
|
||||
#### Agent Operations
|
||||
- \`analyzeAgent\` - Read and analyze an agent file structure
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import { z, ZodType } from "zod";
|
||||
import * as path from "path";
|
||||
import { execSync } from "child_process";
|
||||
import { glob } from "glob";
|
||||
import { executeCommand } from "./command-executor.js";
|
||||
import { resolveSkill, availableSkills } from "../assistant/skills/index.js";
|
||||
|
|
@ -346,6 +347,119 @@ export const BuiltinTools: z.infer<typeof BuiltinToolsSchema> = {
|
|||
},
|
||||
},
|
||||
|
||||
'workspace-grep': {
  description: 'Search file contents using regex. Returns matching files and lines. Uses ripgrep if available, falls back to grep.',
  inputSchema: z.object({
    pattern: z.string().describe('Regex pattern to search for'),
    searchPath: z.string().optional().describe('Directory or file to search, relative to workspace root (default: workspace root)'),
    fileGlob: z.string().optional().describe('File pattern filter (e.g., "*.ts", "*.md")'),
    contextLines: z.number().optional().describe('Lines of context around matches (default: 0)'),
    maxResults: z.number().optional().describe('Maximum results to return (default: 100)'),
  }),
  /**
   * Search file contents under the workspace root.
   *
   * Tries ripgrep first (structured `--json` output); on ANY rg failure —
   * binary not installed, or simply no matches, since rg exits non-zero for
   * both — falls back to plain grep. Returns `{ matches, count, tool }` on
   * success or `{ error }` on failure.
   */
  execute: async ({
    pattern,
    searchPath,
    fileGlob,
    contextLines = 0,
    maxResults = 100,
  }: {
    pattern: string;
    searchPath?: string;
    fileGlob?: string;
    contextLines?: number;
    maxResults?: number;
  }) => {
    // POSIX single-quote shell escaping. NOTE: the previous implementation
    // used JSON.stringify, which is NOT shell quoting — inside double quotes
    // /bin/sh still expands `$(...)`, backticks and `$VAR`, so a crafted
    // pattern could execute arbitrary commands. Single quotes disable all
    // expansion; an embedded single quote becomes '\'' (close, escaped
    // quote, reopen).
    const shq = (s: string): string => `'${s.replace(/'/g, `'\\''`)}'`;
    try {
      const workRoot = path.resolve(WorkDir);
      const targetPath = searchPath ? path.join(WorkDir, searchPath) : WorkDir;
      const resolvedTargetPath = path.resolve(targetPath);

      // Segment-aware containment check. A bare startsWith(WorkDir) would
      // also accept sibling directories such as "<WorkDir>-other".
      if (resolvedTargetPath !== workRoot && !resolvedTargetPath.startsWith(workRoot + path.sep)) {
        return { error: 'Search path must be within workspace' };
      }

      // Clamp numeric inputs so they cannot mangle the command line.
      const maxN = Math.max(1, Math.floor(maxResults));
      const ctxN = Math.max(0, Math.floor(contextLines));

      // Try ripgrep first.
      try {
        const rgArgs = [
          '--json',
          '-e', shq(pattern),
          ctxN > 0 ? `-C ${ctxN}` : '',
          fileGlob ? `--glob ${shq(fileGlob)}` : '',
          // --max-count is a PER-FILE cap in ripgrep; the overall limit
          // promised by the schema is enforced with .slice() below.
          `--max-count ${maxN}`,
          '--ignore-case',
          shq(resolvedTargetPath),
        ].filter(Boolean).join(' ');

        const output = execSync(`rg ${rgArgs}`, {
          encoding: 'utf8',
          maxBuffer: 10 * 1024 * 1024,
          cwd: WorkDir,
        });

        // rg --json emits one JSON object per line; keep only "match"
        // records and tolerate any non-JSON noise on stdout.
        const matches = output.trim().split('\n')
          .filter(Boolean)
          .map(line => {
            try {
              return JSON.parse(line);
            } catch {
              return null;
            }
          })
          .filter(m => m && m.type === 'match')
          .slice(0, maxN); // enforce the documented overall result limit

        return {
          matches: matches.map(m => ({
            file: path.relative(WorkDir, m.data.path.text),
            line: m.data.line_number,
            content: m.data.lines.text.trim(),
          })),
          count: matches.length,
          tool: 'ripgrep',
        };
      } catch {
        // Fallback to basic grep if ripgrep is unavailable (or matched
        // nothing). -i and -E mirror rg's case-insensitive, extended-regex
        // behavior so the two paths agree; contextLines is not supported
        // here because grep's -C separator lines would break the
        // `file:line:content` parsing below.
        const grepArgs = [
          '-rniE',
          fileGlob ? `--include=${shq(fileGlob)}` : '',
          shq(pattern),
          shq(resolvedTargetPath),
          `| head -n ${maxN}`,
        ].filter(Boolean).join(' ');

        try {
          const output = execSync(`grep ${grepArgs}`, {
            encoding: 'utf8',
            maxBuffer: 10 * 1024 * 1024,
            shell: '/bin/sh',
          });

          const lines = output.trim().split('\n').filter(Boolean);
          return {
            matches: lines.map(line => {
              // grep -rn output shape: <path>:<line>:<content>
              const m = line.match(/^(.+?):(\d+):(.*)$/);
              if (m) {
                return {
                  file: path.relative(WorkDir, m[1]),
                  line: parseInt(m[2], 10),
                  content: m[3].trim(),
                };
              }
              // Unparseable line (e.g. binary-file notice): pass it through.
              return { file: '', line: 0, content: line };
            }),
            count: lines.length,
            tool: 'grep',
          };
        } catch {
          // grep exits non-zero when there are no matches — not an error.
          return { matches: [], count: 0, tool: 'grep' };
        }
      }
    } catch (error) {
      return { error: error instanceof Error ? error.message : 'Unknown error' };
    }
  },
},
|
||||
|
||||
analyzeAgent: {
|
||||
description: 'Read and analyze an agent file to understand its structure, tools, and configuration',
|
||||
inputSchema: z.object({
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue