diff --git a/cli/planoai/consts.py b/cli/planoai/consts.py index e51466f3..47c24ec0 100644 --- a/cli/planoai/consts.py +++ b/cli/planoai/consts.py @@ -1,5 +1,8 @@ import os +# Brand color - Plano purple +PLANO_COLOR = "#969FF4" + SERVICE_NAME_ARCHGW = "plano" PLANO_DOCKER_NAME = "plano" PLANO_DOCKER_IMAGE = os.getenv("PLANO_DOCKER_IMAGE", "katanemo/plano:0.4.4") diff --git a/cli/planoai/core.py b/cli/planoai/core.py index 57409565..670f2e0d 100644 --- a/cli/planoai/core.py +++ b/cli/planoai/core.py @@ -1,8 +1,8 @@ import json import subprocess import os -import time import sys +import time import yaml from planoai.utils import convert_legacy_listeners, getLogger diff --git a/cli/planoai/docker_cli.py b/cli/planoai/docker_cli.py index 599d57a1..0e0bc2d7 100644 --- a/cli/planoai/docker_cli.py +++ b/cli/planoai/docker_cli.py @@ -35,7 +35,7 @@ def docker_stop_container(container: str) -> str: def docker_remove_container(container: str) -> str: result = subprocess.run( - ["docker", "rm", container], capture_output=True, text=True, check=False + ["docker", "rm", "-f", container], capture_output=True, text=True, check=False ) return result.returncode @@ -48,7 +48,7 @@ def docker_start_plano_detached( env_args = [item for key, value in env.items() for item in ["-e", f"{key}={value}"]] port_mappings = [ - f"{12001}:{12001}", + "12001:12001", "19901:9901", ] diff --git a/cli/planoai/init_cmd.py b/cli/planoai/init_cmd.py new file mode 100644 index 00000000..66cb5222 --- /dev/null +++ b/cli/planoai/init_cmd.py @@ -0,0 +1,303 @@ +import os +from importlib import resources +from dataclasses import dataclass +from pathlib import Path + +import rich_click as click +from rich.console import Console +from rich.panel import Panel + +from planoai.consts import PLANO_COLOR + + +@dataclass(frozen=True) +class Template: + """ + A Plano config template. 
+ + - id: stable identifier used by --template + - title/description: UI strings + - yaml_text: embedded template contents (works in PyPI installs) + """ + + id: str + title: str + description: str + yaml_text: str + + +_TEMPLATE_PACKAGE = "planoai.templates" + + +def _load_template_yaml(filename: str) -> str: + return resources.files(_TEMPLATE_PACKAGE).joinpath(filename).read_text("utf-8") + + +BUILTIN_TEMPLATES: list[Template] = [ + Template( + id="sub_agent_orchestration", + title="Sub Agent Orchestration", + description="multi-agent routing across specialized agents", + yaml_text=_load_template_yaml("sub_agent_orchestration.yaml"), + ), + Template( + id="coding_agent_routing", + title="Coding Agent Routing", + description="routing preferences + model aliases for coding tasks", + yaml_text=_load_template_yaml("coding_agent_routing.yaml"), + ), + Template( + id="preference_aware_routing", + title="Preference-aware LLM routing", + description="automatic LLM routing based on preferences", + yaml_text=_load_template_yaml("preference_aware_routing.yaml"), + ), + Template( + id="filter_chain_guardrails", + title="Guardrails via Filter Chains", + description="input guards, query rewrite, and context building", + yaml_text=_load_template_yaml("filter_chain_guardrails.yaml"), + ), + Template( + id="conversational_state_v1_responses", + title="Conversational State via v1/responses", + description="stateful responses with memory-backed storage", + yaml_text=_load_template_yaml("conversational_state_v1_responses.yaml"), + ), +] + + +def _get_templates() -> list[Template]: + return list(BUILTIN_TEMPLATES) + + +def _resolve_template(template_id: str | None) -> Template | None: + if not template_id: + return None + + templates = _get_templates() + for t in templates: + if t.id == template_id: + return t + + return None + + +def _ensure_parent_dir(path: Path) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + + +def _write_clean_config(path: Path, force: bool) -> None: 
+ _ensure_parent_dir(path) + if path.exists() and not force: + raise FileExistsError(str(path)) + # user asked for NOTHING in it: empty file, with just a newline for POSIX friendliness + path.write_text("\n", encoding="utf-8") + + +def _write_template_config(path: Path, template: Template, force: bool) -> str: + _ensure_parent_dir(path) + if path.exists() and not force: + raise FileExistsError(str(path)) + + path.write_text(template.yaml_text, encoding="utf-8") + return "builtin" + + +def _print_config_preview(console: Console, text: str, max_lines: int = 28) -> None: + lines = text.strip("\n").splitlines() + preview_lines = lines[:max_lines] + if len(lines) > max_lines: + preview_lines.append("... (truncated)") + preview = "\n".join(preview_lines).strip("\n") + if not preview: + preview = "(empty)" + console.print( + Panel( + preview, + title="Config preview", + border_style="dim", + title_align="left", + ) + ) + + +def _questionary_style(): + # prompt_toolkit style string format + from prompt_toolkit.styles import Style + + return Style.from_dict( + { + "qmark": f"fg:{PLANO_COLOR} bold", + "question": "bold", + "answer": f"fg:{PLANO_COLOR} bold", + "pointer": f"fg:{PLANO_COLOR} bold", + "highlighted": f"fg:{PLANO_COLOR} bold", + "selected": f"fg:{PLANO_COLOR}", + "instruction": "fg:#888888", + "text": "", + "disabled": "fg:#666666", + } + ) + + +def _force_truecolor_for_prompt_toolkit() -> None: + """ + Ensure prompt_toolkit uses truecolor so our brand hex (#969FF4) renders correctly. + Without this, some terminals or environments downgrade to 8-bit and the color + can look like a generic blue. + """ + # Only set if user hasn't explicitly chosen a depth. 
+ os.environ.setdefault("PROMPT_TOOLKIT_COLOR_DEPTH", "DEPTH_24_BIT") + + +@click.command() +@click.option( + "--template", + "template_id_or_path", + default=None, + help="Create config.yaml from a built-in template id.", +) +@click.option( + "--clean", + is_flag=True, + help="Create an empty config.yaml with no contents.", +) +@click.option( + "--output", + "-o", + "output_path", + default="config.yaml", + show_default=True, + help="Where to write the generated config.", +) +@click.option( + "--force", + is_flag=True, + help="Overwrite existing config file if it already exists.", +) +@click.option( + "--list-templates", + is_flag=True, + help="List available template ids and exit.", +) +@click.pass_context +def init(ctx, template_id_or_path, clean, output_path, force, list_templates): + """Initialize a Plano config quickly (arrow-key interactive wizard by default).""" + import sys + + console = Console() + + if clean and template_id_or_path: + raise click.UsageError("Use either --clean or --template, not both.") + + templates = _get_templates() + + if list_templates: + console.print(f"[bold {PLANO_COLOR}]Available templates[/bold {PLANO_COLOR}]\n") + for t in templates: + console.print(f" [bold]{t.id}[/bold] - {t.description}") + return + + out_path = Path(output_path).expanduser() + + # Non-interactive fast paths + if clean or template_id_or_path: + if clean: + try: + _write_clean_config(out_path, force=force) + except FileExistsError: + raise click.ClickException( + f"Refusing to overwrite existing file: {out_path} (use --force)" + ) + console.print(f"[green]✓[/green] Wrote [bold]{out_path}[/bold]") + _print_config_preview(console, out_path.read_text(encoding="utf-8")) + return + + template = _resolve_template(template_id_or_path) + if not template: + raise click.ClickException( + f"Unknown template: {template_id_or_path}\n" + f"Run: planoai init --list-templates" + ) + try: + _write_template_config(out_path, template, force=force) + except FileExistsError: + 
raise click.ClickException( + f"Refusing to overwrite existing file: {out_path} (use --force)" + ) + console.print( + f"[green]✓[/green] Wrote [bold]{out_path}[/bold] [dim]({template.id})[/dim]" + ) + _print_config_preview(console, template.yaml_text) + return + + # Interactive wizard + if not (sys.stdin.isatty() and sys.stdout.isatty()): + raise click.ClickException( + "Interactive mode requires a TTY.\n" + "Use one of:\n" + " planoai init --template \n" + " planoai init --clean\n" + " planoai init --list-templates" + ) + + _force_truecolor_for_prompt_toolkit() + + # Lazy import so non-interactive users don't pay the import/compat cost + import questionary + from questionary import Choice + + # Step 1: choose template (or clean) + template_choices: list[Choice] = [ + Choice("Create a clean config.yaml (empty)", value="clean"), + ] + for t in templates: + label = f"{t.title} — {t.description}" + template_choices.append(Choice(label, value=t)) + + selected = questionary.select( + "Choose a template", + choices=template_choices, + style=_questionary_style(), + pointer="❯", + use_indicator=True, + ).ask() + if not selected: + console.print("[dim]Cancelled.[/dim]") + return + + # Step 2: output path (default: config.yaml) + out_answer = questionary.text( + "Where should I write the config?", + default=str(out_path), + style=_questionary_style(), + ).ask() + if not out_answer: + console.print("[dim]Cancelled.[/dim]") + return + out_path = Path(out_answer).expanduser() + + if out_path.exists() and not force: + overwrite = questionary.confirm( + f"{out_path} already exists. 
Overwrite?", + default=False, + style=_questionary_style(), + ).ask() + if not overwrite: + console.print("[dim]Cancelled.[/dim]") + return + force = True + + if selected == "clean": + _write_clean_config(out_path, force=True) + console.print(f"[green]✓[/green] Wrote [bold]{out_path}[/bold]") + _print_config_preview(console, out_path.read_text(encoding="utf-8")) + return + + template = selected + _write_template_config(out_path, template, force=True) + console.print( + f"[green]✓[/green] Wrote [bold]{out_path}[/bold] [dim]({template.id})[/dim]" + ) + _print_config_preview(console, template.yaml_text) diff --git a/cli/planoai/main.py b/cli/planoai/main.py index 3ba28d52..ac0fb019 100644 --- a/cli/planoai/main.py +++ b/cli/planoai/main.py @@ -1,11 +1,12 @@ -import click import os -import sys -import subprocess import multiprocessing -import importlib.metadata -import json +import subprocess +import sys +import rich_click as click from planoai import targets + +# Brand color - Plano purple +PLANO_COLOR = "#969FF4" from planoai.docker_cli import ( docker_validate_plano_schema, stream_gateway_logs, @@ -14,7 +15,6 @@ from planoai.docker_cli import ( from planoai.utils import ( getLogger, get_llm_provider_access_keys, - has_ingress_listener, load_env_file_to_dict, set_log_level, stream_access_logs, @@ -26,60 +26,106 @@ from planoai.core import ( stop_docker_container, start_cli_agent, ) +from planoai.init_cmd import init as init_cmd +from planoai.trace_cmd import trace as trace_cmd, start_trace_listener_background from planoai.consts import ( DEFAULT_OTEL_TRACING_GRPC_ENDPOINT, PLANO_DOCKER_IMAGE, PLANO_DOCKER_NAME, - SERVICE_NAME_ARCHGW, ) +from planoai.rich_click_config import configure_rich_click +from planoai.versioning import check_version_status, get_latest_version, get_version log = getLogger(__name__) -# ref https://patorjk.com/software/taag/#p=display&f=Doom&t=Plano&x=none&v=4&h=4&w=80&we=false -logo = r""" -______ _ -| ___ \ | -| |_/ / | __ _ _ __ ___ -| __/| |/ 
_` | '_ \ / _ \ -| | | | (_| | | | | (_) | -\_| |_|\__,_|_| |_|\___/ -""" +def _is_port_in_use(port: int) -> bool: + """Check if a TCP port is already bound on localhost.""" + import socket -# Command to build plano Docker images -ARCHGW_DOCKERFILE = "./Dockerfile" - - -def get_version(): - try: - # First try to get version from package metadata (for installed packages) - version = importlib.metadata.version("planoai") - return version - except importlib.metadata.PackageNotFoundError: - # Fallback to version defined in __init__.py (for development) + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: try: - from planoai import __version__ + s.bind(("0.0.0.0", port)) + return False + except OSError: + return True - return __version__ - except ImportError: - return "version not found" + +# ref https://patorjk.com/software/taag/#p=display&f=Doom&t=Plano&x=none&v=4&h=4&w=80&we=false +LOGO = f"""[bold {PLANO_COLOR}] + ______ _ + | ___ \\ | + | |_/ / | __ _ _ __ ___ + | __/| |/ _` | '_ \\ / _ \\ + | | | | (_| | | | | (_) | + \\_| |_|\\__,_|_| |_|\\___/ +[/bold {PLANO_COLOR}]""" + + +def _console(): + from rich.console import Console + + return Console() + + +def _print_cli_header(console) -> None: + console.print( + f"\n[bold {PLANO_COLOR}]Plano CLI[/bold {PLANO_COLOR}] [dim]v{get_version()}[/dim]\n" + ) + + +def _print_missing_keys(console, missing_keys: list[str]) -> None: + console.print(f"\n[red]✗[/red] [red]Missing API keys![/red]\n") + for key in missing_keys: + console.print(f" [red]•[/red] [bold]{key}[/bold] not found") + console.print(f"\n[dim]Set the environment variable(s):[/dim]") + for key in missing_keys: + console.print(f' [cyan]export {key}="your-api-key"[/cyan]') + console.print(f"\n[dim]Or create a .env file in the config directory.[/dim]\n") + + +def _print_version(console, current_version: str) -> None: + console.print( + f"[bold {PLANO_COLOR}]plano[/bold {PLANO_COLOR}] version [cyan]{current_version}[/cyan]" + ) + + +def 
_maybe_check_updates(console, current_version: str) -> None: + if os.environ.get("PLANO_SKIP_VERSION_CHECK"): + return + latest_version = get_latest_version() + status = check_version_status(current_version, latest_version) + + if status["is_outdated"]: + console.print( + f"\n[yellow]⚠ Update available:[/yellow] [bold]{status['latest']}[/bold]" + ) + console.print("[dim]Run: uv pip install --upgrade planoai[/dim]") + elif latest_version: + console.print(f"[dim]✓ You're up to date[/dim]") + + +configure_rich_click(PLANO_COLOR) @click.group(invoke_without_command=True) -@click.option("--version", is_flag=True, help="Show the plano cli version and exit.") +@click.option("--version", is_flag=True, help="Show the Plano CLI version and exit.") @click.pass_context def main(ctx, version): # Set log level from LOG_LEVEL env var only set_log_level(os.environ.get("LOG_LEVEL", "info")) + console = _console() + if version: - click.echo(f"plano cli version: {get_version()}") + current_version = get_version() + _print_version(console, current_version) + _maybe_check_updates(console, current_version) + ctx.exit() - log.info(f"Starting plano cli version: {get_version()}") - if ctx.invoked_subcommand is None: - click.echo("""Plano (AI-native proxy and dataplane for agentic apps) CLI""") - click.echo(logo) + console.print(LOGO) + console.print("[dim]The Delivery Infrastructure for Agentic Apps[/dim]\n") click.echo(ctx.get_help()) @@ -133,81 +179,156 @@ def build(): help="Run Plano in the foreground. 
Default is False", is_flag=True, ) -def up(file, path, foreground): +@click.option( + "--with-tracing", + default=False, + help="Start a local OTLP trace collector on port 4317.", + is_flag=True, +) +@click.option( + "--tracing-port", + default=4317, + type=int, + help="Port for the OTLP trace collector (default: 4317).", + show_default=True, +) +def up(file, path, foreground, with_tracing, tracing_port): """Starts Plano.""" + from rich.status import Status + + console = _console() + _print_cli_header(console) + # Use the utility function to find config file arch_config_file = find_config_file(path, file) # Check if the file exists if not os.path.exists(arch_config_file): - log.info(f"Error: {arch_config_file} does not exist.") - return - - log.info(f"Validating {arch_config_file}") - ( - validation_return_code, - validation_stdout, - validation_stderr, - ) = docker_validate_plano_schema(arch_config_file) - if validation_return_code != 0: - log.info(f"Error: Validation failed. Exiting") - log.info(f"Validation stdout: {validation_stdout}") - log.info(f"Validation stderr: {validation_stderr}") + console.print( + f"[red]✗[/red] Config file not found: [dim]{arch_config_file}[/dim]" + ) sys.exit(1) - # Set the ARCH_CONFIG_FILE environment variable + with Status( + "[dim]Validating configuration[/dim]", spinner="dots", spinner_style="dim" + ): + ( + validation_return_code, + _, + validation_stderr, + ) = docker_validate_plano_schema(arch_config_file) + + if validation_return_code != 0: + console.print(f"[red]✗[/red] Validation failed") + if validation_stderr: + console.print(f" [dim]{validation_stderr.strip()}[/dim]") + sys.exit(1) + + console.print(f"[green]✓[/green] Configuration valid") + + # Set up environment env_stage = { "OTEL_TRACING_GRPC_ENDPOINT": DEFAULT_OTEL_TRACING_GRPC_ENDPOINT, } env = os.environ.copy() - # Remove PATH variable if present env.pop("PATH", None) - # check if access_keys are preesnt in the config file - access_keys = 
get_llm_provider_access_keys(arch_config_file=arch_config_file) - # remove duplicates + # Check access keys + access_keys = get_llm_provider_access_keys(arch_config_file=arch_config_file) access_keys = set(access_keys) - # remove the $ from the access_keys access_keys = [item[1:] if item.startswith("$") else item for item in access_keys] + missing_keys = [] if access_keys: if file: - app_env_file = os.path.join( - os.path.dirname(os.path.abspath(file)), ".env" - ) # check the .env file in the path + app_env_file = os.path.join(os.path.dirname(os.path.abspath(file)), ".env") else: app_env_file = os.path.abspath(os.path.join(path, ".env")) - if not os.path.exists( - app_env_file - ): # check to see if the environment variables in the current environment or not + if not os.path.exists(app_env_file): for access_key in access_keys: if env.get(access_key) is None: - log.info(f"Access Key: {access_key} not found. Exiting Start") - sys.exit(1) + missing_keys.append(access_key) else: env_stage[access_key] = env.get(access_key) - else: # .env file exists, use that to send parameters to Arch + else: env_file_dict = load_env_file_to_dict(app_env_file) for access_key in access_keys: if env_file_dict.get(access_key) is None: - log.info(f"Access Key: {access_key} not found. Exiting Start") - sys.exit(1) + missing_keys.append(access_key) else: env_stage[access_key] = env_file_dict[access_key] + if missing_keys: + _print_missing_keys(console, missing_keys) + sys.exit(1) + # Pass log level to the Docker container — supervisord uses LOG_LEVEL # to set RUST_LOG (brightstaff) and envoy component log levels env_stage["LOG_LEVEL"] = os.environ.get("LOG_LEVEL", "info") + # Start the local OTLP trace collector if --with-tracing is set + trace_server = None + if with_tracing: + if _is_port_in_use(tracing_port): + # A listener is already running (e.g. 
`planoai trace listen`) + console.print( + f"[green]✓[/green] Trace collector already running on port [cyan]{tracing_port}[/cyan]" + ) + else: + try: + trace_server = start_trace_listener_background(grpc_port=tracing_port) + console.print( + f"[green]✓[/green] Trace collector listening on [cyan]0.0.0.0:{tracing_port}[/cyan]" + ) + except Exception as e: + console.print( + f"[red]✗[/red] Failed to start trace collector on port {tracing_port}: {e}" + ) + console.print( + f"\n[dim]Check if another process is using port {tracing_port}:[/dim]" + ) + console.print(f" [cyan]lsof -i :{tracing_port}[/cyan]") + console.print(f"\n[dim]Or use a different port:[/dim]") + console.print( + f" [cyan]planoai up --with-tracing --tracing-port 4318[/cyan]\n" + ) + sys.exit(1) + + # Update the OTEL endpoint so the gateway sends traces to the right port + env_stage[ + "OTEL_TRACING_GRPC_ENDPOINT" + ] = f"http://host.docker.internal:{tracing_port}" + env.update(env_stage) - start_arch(arch_config_file, env, foreground=foreground) + try: + start_arch(arch_config_file, env, foreground=foreground) + + # When tracing is enabled but --foreground is not, keep the process + # alive so the OTLP collector continues to receive spans. + if trace_server is not None and not foreground: + console.print( + f"[dim]Plano is running. Trace collector active on port {tracing_port}. 
Press Ctrl+C to stop.[/dim]" + ) + trace_server.wait_for_termination() + except KeyboardInterrupt: + if trace_server is not None: + console.print(f"\n[dim]Stopping trace collector...[/dim]") + finally: + if trace_server is not None: + trace_server.stop(grace=2) @click.command() def down(): - """Stops Arch.""" - stop_docker_container() + """Stops Plano.""" + console = _console() + _print_cli_header(console) + + with console.status( + f"[{PLANO_COLOR}]Shutting down Plano...[/{PLANO_COLOR}]", spinner="dots" + ): + stop_docker_container() @click.command() @@ -306,12 +427,15 @@ def cli_agent(type, file, path, settings): sys.exit(1) +# add commands to the main group main.add_command(up) main.add_command(down) main.add_command(build) main.add_command(logs) main.add_command(cli_agent) main.add_command(generate_prompt_targets) +main.add_command(init_cmd, name="init") +main.add_command(trace_cmd, name="trace") if __name__ == "__main__": main() diff --git a/cli/planoai/rich_click_config.py b/cli/planoai/rich_click_config.py new file mode 100644 index 00000000..ba75bc23 --- /dev/null +++ b/cli/planoai/rich_click_config.py @@ -0,0 +1,71 @@ +import rich_click as click + + +def configure_rich_click(plano_color: str) -> None: + click.rich_click.USE_RICH_MARKUP = True + click.rich_click.USE_MARKDOWN = False + click.rich_click.SHOW_ARGUMENTS = True + click.rich_click.GROUP_ARGUMENTS_OPTIONS = True + click.rich_click.STYLE_ERRORS_SUGGESTION = "dim italic" + click.rich_click.ERRORS_SUGGESTION = ( + "Try running the '--help' flag for more information." + ) + click.rich_click.ERRORS_EPILOGUE = "" + + # Custom colors matching Plano brand. 
+ click.rich_click.STYLE_OPTION = f"dim {plano_color}" + click.rich_click.STYLE_ARGUMENT = f"dim {plano_color}" + click.rich_click.STYLE_COMMAND = f"bold {plano_color}" + click.rich_click.STYLE_SWITCH = "bold green" + click.rich_click.STYLE_METAVAR = "bold yellow" + click.rich_click.STYLE_USAGE = "bold" + click.rich_click.STYLE_USAGE_COMMAND = f"bold dim {plano_color}" + click.rich_click.STYLE_HELPTEXT_FIRST_LINE = "white italic" + click.rich_click.STYLE_HELPTEXT = "" + click.rich_click.STYLE_HEADER_TEXT = "bold" + click.rich_click.STYLE_FOOTER_TEXT = "dim" + click.rich_click.STYLE_OPTIONS_PANEL_BORDER = "dim" + click.rich_click.ALIGN_OPTIONS_PANEL = "left" + click.rich_click.MAX_WIDTH = 100 + + # Option groups for better organization. + click.rich_click.OPTION_GROUPS = { + "planoai up": [ + { + "name": "Configuration", + "options": ["--path", "file"], + }, + { + "name": "Runtime Options", + "options": ["--foreground", "--with-tracing", "--tracing-port"], + }, + ], + "planoai logs": [ + { + "name": "Log Options", + "options": ["--debug", "--follow"], + }, + ], + } + + # Command groups for main help. 
+ click.rich_click.COMMAND_GROUPS = { + "planoai": [ + { + "name": "Gateway Commands", + "commands": ["up", "down", "build", "logs"], + }, + { + "name": "Agent Commands", + "commands": ["cli-agent"], + }, + { + "name": "Observability", + "commands": ["trace"], + }, + { + "name": "Utilities", + "commands": ["generate-prompt-targets"], + }, + ], + } diff --git a/cli/planoai/templates/coding_agent_routing.yaml b/cli/planoai/templates/coding_agent_routing.yaml new file mode 100644 index 00000000..e41db0c0 --- /dev/null +++ b/cli/planoai/templates/coding_agent_routing.yaml @@ -0,0 +1,43 @@ +version: v0.1 + +listeners: + egress_traffic: + address: 0.0.0.0 + port: 12000 + message_format: openai + timeout: 30s + +llm_providers: + # OpenAI Models + - model: openai/gpt-5-2025-08-07 + access_key: $OPENAI_API_KEY + routing_preferences: + - name: code generation + description: generating new code snippets, functions, or boilerplate based on user prompts or requirements + + - model: openai/gpt-4.1-2025-04-14 + access_key: $OPENAI_API_KEY + routing_preferences: + - name: code understanding + description: understand and explain existing code snippets, functions, or libraries + # Anthropic Models + - model: anthropic/claude-sonnet-4-5 + default: true + access_key: $ANTHROPIC_API_KEY + + - model: anthropic/claude-haiku-4-5 + access_key: $ANTHROPIC_API_KEY + + # Ollama Models + - model: ollama/llama3.1 + base_url: http://host.docker.internal:11434 + + +# Model aliases - friendly names that map to actual provider names +model_aliases: + # Alias for a small faster Claude model + arch.claude.code.small.fast: + target: claude-haiku-4-5 + +tracing: + random_sampling: 100 diff --git a/cli/planoai/templates/conversational_state_v1_responses.yaml b/cli/planoai/templates/conversational_state_v1_responses.yaml new file mode 100644 index 00000000..afc40910 --- /dev/null +++ b/cli/planoai/templates/conversational_state_v1_responses.yaml @@ -0,0 +1,25 @@ +version: v0.1 + +listeners: + 
egress_traffic: + address: 0.0.0.0 + port: 12000 + message_format: openai + timeout: 30s + +llm_providers: + + # OpenAI Models + - model: openai/gpt-5-mini-2025-08-07 + access_key: $OPENAI_API_KEY + default: true + + # Anthropic Models + - model: anthropic/claude-sonnet-4-20250514 + access_key: $ANTHROPIC_API_KEY + +# State storage configuration for v1/responses API +# Manages conversation state for multi-turn conversations +state_storage: + # Type: memory | postgres + type: memory diff --git a/cli/planoai/templates/filter_chain_guardrails.yaml b/cli/planoai/templates/filter_chain_guardrails.yaml new file mode 100644 index 00000000..117931e2 --- /dev/null +++ b/cli/planoai/templates/filter_chain_guardrails.yaml @@ -0,0 +1,50 @@ +version: v0.3.0 + +agents: + - id: rag_agent + url: http://rag-agents:10505 + +filters: + - id: input_guards + url: http://rag-agents:10500 + type: http + # type: mcp (default) + # transport: streamable-http (default) + # tool: input_guards (default - same as filter id) + - id: query_rewriter + url: http://rag-agents:10501 + type: http + # type: mcp (default) + # transport: streamable-http (default) + # tool: query_rewriter (default - same as filter id) + - id: context_builder + url: http://rag-agents:10502 + type: http + +model_providers: + - model: openai/gpt-4o-mini + access_key: $OPENAI_API_KEY + default: true + - model: openai/gpt-4o + access_key: $OPENAI_API_KEY + +model_aliases: + fast-llm: + target: gpt-4o-mini + smart-llm: + target: gpt-4o + +listeners: + - type: agent + name: agent_1 + port: 8001 + router: plano_orchestrator_v1 + agents: + - id: rag_agent + description: virtual assistant for retrieval augmented generation tasks + filter_chain: + - input_guards + - query_rewriter + - context_builder +tracing: + random_sampling: 100 diff --git a/cli/planoai/templates/preference_aware_routing.yaml b/cli/planoai/templates/preference_aware_routing.yaml new file mode 100644 index 00000000..cb9f685a --- /dev/null +++ 
b/cli/planoai/templates/preference_aware_routing.yaml @@ -0,0 +1,29 @@ +version: v0.1.0 + +listeners: + egress_traffic: + address: 0.0.0.0 + port: 12000 + message_format: openai + timeout: 30s + +llm_providers: + + - model: openai/gpt-4o-mini + access_key: $OPENAI_API_KEY + default: true + + - model: openai/gpt-4o + access_key: $OPENAI_API_KEY + routing_preferences: + - name: code understanding + description: understand and explain existing code snippets, functions, or libraries + + - model: anthropic/claude-sonnet-4-20250514 + access_key: $ANTHROPIC_API_KEY + routing_preferences: + - name: code generation + description: generating new code snippets, functions, or boilerplate based on user prompts or requirements + +tracing: + random_sampling: 100 diff --git a/cli/planoai/templates/sub_agent_orchestration.yaml b/cli/planoai/templates/sub_agent_orchestration.yaml new file mode 100644 index 00000000..b3a204f3 --- /dev/null +++ b/cli/planoai/templates/sub_agent_orchestration.yaml @@ -0,0 +1,57 @@ +version: v0.3.0 + +agents: + - id: weather_agent + url: http://langchain-weather-agent:10510 + - id: flight_agent + url: http://crewai-flight-agent:10520 + +model_providers: + - model: openai/gpt-4o + access_key: $OPENAI_API_KEY + default: true + - model: openai/gpt-4o-mini + access_key: $OPENAI_API_KEY # smaller, faster, cheaper model for extracting entities like location + +listeners: + - type: agent + name: travel_booking_service + port: 8001 + router: plano_orchestrator_v1 + agents: + - id: weather_agent + description: | + + WeatherAgent is a specialized AI assistant for real-time weather information and forecasts. It provides accurate weather data for any city worldwide using the Open-Meteo API, helping travelers plan their trips with up-to-date weather conditions. 
+ + Capabilities: + * Get real-time weather conditions and multi-day forecasts for any city worldwide using Open-Meteo API (free, no API key needed) + * Provides current temperature + * Provides multi-day forecasts + * Provides weather conditions + * Provides sunrise/sunset times + * Provides detailed weather information + * Understands conversation context to resolve location references from previous messages + * Handles weather-related questions including "What's the weather in [city]?", "What's the forecast for [city]?", "How's the weather in [city]?" + * When queries include both weather and other travel questions (e.g., flights, currency), this agent answers ONLY the weather part + + - id: flight_agent + description: | + + FlightAgent is an AI-powered tool specialized in providing live flight information between airports. It leverages the FlightAware AeroAPI to deliver real-time flight status, gate information, and delay updates. + + Capabilities: + * Get live flight information between airports using FlightAware AeroAPI + * Shows real-time flight status + * Shows scheduled/estimated/actual departure and arrival times + * Shows gate and terminal information + * Shows delays + * Shows aircraft type + * Shows flight status + * Automatically resolves city names to airport codes (IATA/ICAO) + * Understands conversation context to infer origin/destination from follow-up questions + * Handles flight-related questions including "What flights go from [city] to [city]?", "Do flights go to [city]?", "Are there direct flights from [city]?" 
+ * When queries include both flight and other travel questions (e.g., weather, currency), this agent answers ONLY the flight part + +tracing: + random_sampling: 100 diff --git a/cli/planoai/trace_cmd.py b/cli/planoai/trace_cmd.py new file mode 100644 index 00000000..dd8194ef --- /dev/null +++ b/cli/planoai/trace_cmd.py @@ -0,0 +1,971 @@ +import json +import os +import re +import string +import threading +import time +from collections import OrderedDict +from concurrent import futures +from dataclasses import dataclass +from datetime import datetime, timezone +from fnmatch import fnmatch +from typing import Any + +import grpc +import rich_click as click +from opentelemetry.proto.collector.trace.v1 import ( + trace_service_pb2, + trace_service_pb2_grpc, +) +from rich.console import Console +from rich.text import Text +from rich.tree import Tree + +from planoai.consts import PLANO_COLOR + +DEFAULT_GRPC_PORT = 4317 +MAX_TRACES = 50 +MAX_SPANS_PER_TRACE = 500 + + +@dataclass +class TraceSummary: + trace_id: str + start_ns: int + end_ns: int + + @property + def total_ms(self) -> float: + return max(0, (self.end_ns - self.start_ns) / 1_000_000) + + @property + def timestamp(self) -> str: + if self.start_ns <= 0: + return "unknown" + dt = datetime.fromtimestamp(self.start_ns / 1_000_000_000, tz=timezone.utc) + return dt.astimezone().strftime("%Y-%m-%d %H:%M:%S") + + +def _parse_filter_patterns(filter_patterns: tuple[str, ...]) -> list[str]: + parts: list[str] = [] + for raw in filter_patterns: + for token in raw.split(","): + part = token.strip() + if not part: + raise ValueError("Filter contains empty tokens.") + parts.append(part) + return parts + + +def _is_hex(value: str, length: int) -> bool: + if len(value) != length: + return False + return all(char in string.hexdigits for char in value) + + +def _parse_where_filters(where_filters: tuple[str, ...]) -> list[tuple[str, str]]: + parsed: list[tuple[str, str]] = [] + invalid: list[str] = [] + key_pattern = 
re.compile(r"^[A-Za-z0-9_.:-]+$") + for raw in where_filters: + if raw.count("=") != 1: + invalid.append(raw) + continue + key, value = raw.split("=", 1) + key = key.strip() + value = value.strip() + if not key or not value or not key_pattern.match(key): + invalid.append(raw) + continue + parsed.append((key, value)) + if invalid: + invalid_list = ", ".join(invalid) + raise click.ClickException( + f"Invalid --where filter(s): {invalid_list}. Use key=value." + ) + return parsed + + +def _collect_attr_keys(traces: list[dict[str, Any]]) -> set[str]: + keys: set[str] = set() + for trace in traces: + for span in trace.get("spans", []): + for item in span.get("attributes", []): + key = item.get("key") + if key: + keys.add(str(key)) + return keys + + +def _fetch_traces_raw() -> list[dict[str, Any]]: + port = os.environ.get("PLANO_TRACE_PORT", str(DEFAULT_GRPC_PORT)) + target = f"127.0.0.1:{port}" + try: + channel = grpc.insecure_channel(target) + stub = channel.unary_unary( + "/plano.TraceQuery/GetTraces", + request_serializer=lambda x: x, + response_deserializer=lambda x: x, + ) + response = stub(b"", timeout=3) + channel.close() + data = json.loads(response) + traces = data.get("traces", []) + if isinstance(traces, list): + return traces + except Exception: + pass + return [] + + +def _attrs(span: dict[str, Any]) -> dict[str, str]: + attrs = {} + for item in span.get("attributes", []): + key = item.get("key") + value_obj = item.get("value", {}) + value = value_obj.get("stringValue") + if value is None and "intValue" in value_obj: + value = value_obj.get("intValue") + if value is None and "doubleValue" in value_obj: + value = value_obj.get("doubleValue") + if value is None and "boolValue" in value_obj: + value = value_obj.get("boolValue") + if key is not None and value is not None: + attrs[str(key)] = str(value) + return attrs + + +def _safe_int(value: Any, default: int = 0) -> int: + try: + return int(value) + except (TypeError, ValueError): + return default + + +def 
_parse_since_seconds(value: str | None) -> int | None: + if not value: + return None + value = value.strip() + if not value: + return None + if len(value) < 2: + return None + number, unit = value[:-1], value[-1] + try: + qty = int(number) + except ValueError: + return None + multiplier = {"m": 60, "h": 60 * 60, "d": 60 * 60 * 24}.get(unit) + if multiplier is None: + return None + return qty * multiplier + + +def _matches_pattern(value: str, pattern: str) -> bool: + if pattern == "*": + return True + if "*" not in pattern: + return value == pattern + parts = [part for part in pattern.split("*") if part] + if not parts: + return True + remaining = value + for idx, part in enumerate(parts): + pos = remaining.find(part) + if pos == -1: + return False + if idx == 0 and not pattern.startswith("*") and pos != 0: + return False + remaining = remaining[pos + len(part) :] + if not pattern.endswith("*") and remaining: + return False + return True + + +def _attribute_map(span: dict[str, Any]) -> dict[str, str]: + attrs = {} + for item in span.get("attributes", []): + key = item.get("key") + value_obj = item.get("value", {}) + value = value_obj.get("stringValue") + if value is None and "intValue" in value_obj: + value = value_obj.get("intValue") + if value is None and "doubleValue" in value_obj: + value = value_obj.get("doubleValue") + if value is None and "boolValue" in value_obj: + value = value_obj.get("boolValue") + if key is not None and value is not None: + attrs[str(key)] = str(value) + return attrs + + +def _filter_attributes(span: dict[str, Any], patterns: list[str]) -> dict[str, Any]: + if not patterns: + return span + attributes = span.get("attributes", []) + filtered = [ + item + for item in attributes + if any( + _matches_pattern(str(item.get("key", "")), pattern) for pattern in patterns + ) + ] + cloned = dict(span) + cloned["attributes"] = filtered + return cloned + + +def _filter_traces( + traces: list[dict[str, Any]], + filter_patterns: list[str], + 
where_filters: list[tuple[str, str]],
+    since_seconds: int | None,
+) -> tuple[list[dict[str, Any]], list[str]]:
+    now_nanos = int(time.time() * 1_000_000_000)
+    since_nanos = now_nanos - (since_seconds * 1_000_000_000) if since_seconds else None
+
+    filtered_traces: list[dict[str, Any]] = []
+    for trace in traces:
+        spans = trace.get("spans", []) or []
+        if since_nanos is not None:
+            spans = [
+                span
+                for span in spans
+                if _safe_int(span.get("startTimeUnixNano", 0)) >= since_nanos
+            ]
+        if filter_patterns:
+            spans = [_filter_attributes(span, filter_patterns) for span in spans]
+        if not spans:
+            continue
+
+        candidate = dict(trace)
+        candidate["spans"] = spans
+        filtered_traces.append(candidate)
+
+    if where_filters:
+
+        def matches_where(trace: dict[str, Any]) -> bool:
+            for key, value in where_filters:
+                if not any(
+                    _attribute_map(span).get(key) == value
+                    for span in trace.get("spans", [])
+                ):
+                    return False
+            return True
+
+        filtered_traces = [trace for trace in filtered_traces if matches_where(trace)]
+
+    trace_ids = [trace.get("trace_id", "") for trace in filtered_traces]
+    return filtered_traces, trace_ids
+
+
+class _TraceStore:
+    """Thread-safe in-memory store of logical traces with LRU-style eviction.
+
+    Spans may arrive with **different** ``traceId`` values but are
+    linked via ``parentSpanId``. This store groups them into logical
+    traces by following parent-child span relationships, so all
+    connected spans end up under a single trace group regardless of
+    the ``traceId`` they were emitted with.
+ """ + + def __init__(self, max_traces: int = MAX_TRACES) -> None: + self._traces: OrderedDict[str, dict[str, Any]] = OrderedDict() + self._seen_spans: dict[str, set[str]] = {} + # span_id → group key (the trace_id used as the dict key) + self._span_to_group: dict[str, str] = {} + # parent_span_id → group key for spans whose parent arrived first + self._parent_to_group: dict[str, str] = {} + self._max_traces = max_traces + self._lock = threading.Lock() + + def _evict_oldest(self) -> None: + """Remove the oldest trace group (caller must hold *_lock*).""" + if not self._traces: + return + oldest_id, oldest = self._traces.popitem(last=False) + self._seen_spans.pop(oldest_id, None) + for span in oldest.get("spans", []): + sid = span.get("spanId", "") + self._span_to_group.pop(sid, None) + self._parent_to_group.pop(sid, None) + + def _merge_groups(self, src_key: str, dst_key: str) -> None: + """Move all spans from *src_key* group into *dst_key* (caller holds lock).""" + if src_key == dst_key or src_key not in self._traces: + return + src = self._traces.pop(src_key) + dst = self._traces[dst_key] + dst_seen = self._seen_spans[dst_key] + src_seen = self._seen_spans.pop(src_key, set()) + for span in src.get("spans", []): + sid = span.get("spanId", "") + if sid and sid not in dst_seen: + dst["spans"].append(span) + dst_seen.add(sid) + self._span_to_group[sid] = dst_key + for sid in src_seen: + self._span_to_group[sid] = dst_key + # Update parent→group mappings that pointed to src. + for pid, gid in list(self._parent_to_group.items()): + if gid == src_key: + self._parent_to_group[pid] = dst_key + + def merge_spans(self, trace_id: str, spans: list[dict[str, Any]]) -> None: + """Merge *spans* into the correct trace group. + + The group is determined by following ``parentSpanId`` / + ``spanId`` links, falling back to *trace_id* when no link + exists. 
+ """ + with self._lock: + for span in spans: + span_id = span.get("spanId", "") + parent_id = span.get("parentSpanId", "") + + # Determine which group this span belongs to. + group_key: str | None = None + + # 1. Does the parent already live in a group? + if parent_id and parent_id in self._span_to_group: + group_key = self._span_to_group[parent_id] + + # 2. Is this span already known as a parent of another group? + if group_key is None and span_id and span_id in self._parent_to_group: + group_key = self._parent_to_group.pop(span_id) + + # 3. Fall back to the wire trace_id. + if group_key is None: + group_key = trace_id + + # Create the group if needed. + if group_key not in self._traces: + if len(self._traces) >= self._max_traces: + self._evict_oldest() + self._traces[group_key] = {"trace_id": group_key, "spans": []} + self._seen_spans[group_key] = set() + else: + self._traces.move_to_end(group_key) + + # Insert span (deduplicate). + seen = self._seen_spans[group_key] + if span_id and span_id in seen: + continue + if span_id: + seen.add(span_id) + self._span_to_group[span_id] = group_key + if len(self._traces[group_key]["spans"]) < MAX_SPANS_PER_TRACE: + self._traces[group_key]["spans"].append(span) + + # Record parent link so future spans can find this group. + if parent_id and parent_id not in self._span_to_group: + self._parent_to_group[parent_id] = group_key + + # If this span's span_id is the parent of an existing + # *different* group, merge that group into this one. 
+ if span_id and span_id in self._parent_to_group: + other = self._parent_to_group.pop(span_id) + if other != group_key and other in self._traces: + self._merge_groups(other, group_key) + + def snapshot(self) -> list[dict[str, Any]]: + """Return traces ordered newest-first.""" + with self._lock: + traces = list(self._traces.values()) + traces.reverse() + return traces + + +_TRACE_STORE = _TraceStore() + + +def _anyvalue_to_python(value_obj: Any) -> Any: + """Convert an opentelemetry AnyValue protobuf to a Python primitive.""" + if hasattr(value_obj, "string_value") and value_obj.HasField("value"): + kind = value_obj.WhichOneof("value") + if kind == "string_value": + return value_obj.string_value + if kind == "int_value": + return value_obj.int_value + if kind == "double_value": + return value_obj.double_value + if kind == "bool_value": + return value_obj.bool_value + return None + + +def _proto_span_to_dict(span: Any, service_name: str) -> dict[str, Any]: + """Convert a protobuf Span message to the dict format used internally.""" + span_dict: dict[str, Any] = { + "traceId": span.trace_id.hex(), + "spanId": span.span_id.hex(), + "parentSpanId": span.parent_span_id.hex() if span.parent_span_id else "", + "name": span.name, + "startTimeUnixNano": str(span.start_time_unix_nano), + "endTimeUnixNano": str(span.end_time_unix_nano), + "service": service_name, + "attributes": [], + } + for kv in span.attributes: + py_val = _anyvalue_to_python(kv.value) + if py_val is not None: + value_dict: dict[str, Any] = {} + if isinstance(py_val, str): + value_dict["stringValue"] = py_val + elif isinstance(py_val, bool): + value_dict["boolValue"] = py_val + elif isinstance(py_val, int): + value_dict["intValue"] = str(py_val) + elif isinstance(py_val, float): + value_dict["doubleValue"] = py_val + span_dict["attributes"].append({"key": kv.key, "value": value_dict}) + return span_dict + + +class _OTLPTraceServicer(trace_service_pb2_grpc.TraceServiceServicer): + """gRPC servicer that 
receives OTLP ExportTraceServiceRequest and + merges incoming spans into the global _TRACE_STORE by trace_id.""" + + _console = Console(stderr=True) + + def Export(self, request, context): # noqa: N802 + for resource_spans in request.resource_spans: + service_name = "unknown" + for attr in resource_spans.resource.attributes: + if attr.key == "service.name": + val = _anyvalue_to_python(attr.value) + if val is not None: + service_name = str(val) + break + + for scope_spans in resource_spans.scope_spans: + for span in scope_spans.spans: + trace_id = span.trace_id.hex() + if not trace_id: + continue + span_dict = _proto_span_to_dict(span, service_name) + _TRACE_STORE.merge_spans(trace_id, [span_dict]) + short_id = trace_id[:8] + short_span = span.span_id.hex()[:8] + span_start = ( + datetime.fromtimestamp( + span.start_time_unix_nano / 1_000_000_000, tz=timezone.utc + ) + .astimezone() + .strftime("%H:%M:%S.%f")[:-3] + ) + dur_ns = span.end_time_unix_nano - span.start_time_unix_nano + dur_s = dur_ns / 1_000_000_000 + dur_str = f"{dur_s:.3f}".rstrip("0").rstrip(".") + dur_str = f"{dur_str}s" + self._console.print( + f"[dim]{span_start}[/dim], " + f"trace=[yellow]{short_id}[/yellow], " + f"span=[yellow]{short_span}[/yellow], " + f"[bold {_service_color(service_name)}]{service_name}[/bold {_service_color(service_name)}] " + f"[cyan]{span.name}[/cyan] " + f"[dim]({dur_str})[/dim]" + ) + + return trace_service_pb2.ExportTraceServiceResponse() + + +class _TraceQueryHandler(grpc.GenericRpcHandler): + """gRPC handler that serves stored traces to the CLI show command.""" + + def service(self, handler_call_details): + if handler_call_details.method == "/plano.TraceQuery/GetTraces": + return grpc.unary_unary_rpc_method_handler( + self._get_traces, + request_deserializer=lambda x: x, + response_serializer=lambda x: x, + ) + return None + + @staticmethod + def _get_traces(_request, _context): + traces = _TRACE_STORE.snapshot() + return json.dumps({"traces": traces}, 
separators=(",", ":")).encode("utf-8") + + +def _create_trace_server(host: str, grpc_port: int) -> grpc.Server: + """Create, bind, and start an OTLP/gRPC trace-collection server. + + Returns the running ``grpc.Server``. The caller is responsible + for calling ``server.stop()`` when done. + """ + grpc_server = grpc.server( + futures.ThreadPoolExecutor(max_workers=4), + handlers=[_TraceQueryHandler()], + ) + trace_service_pb2_grpc.add_TraceServiceServicer_to_server( + _OTLPTraceServicer(), grpc_server + ) + grpc_server.add_insecure_port(f"{host}:{grpc_port}") + grpc_server.start() + return grpc_server + + +def _start_trace_listener(host: str, grpc_port: int) -> None: + """Start the OTLP/gRPC listener and block until interrupted.""" + console = Console() + grpc_server = _create_trace_server(host, grpc_port) + + console.print() + console.print(f"[bold {PLANO_COLOR}]Listening for traces...[/bold {PLANO_COLOR}]") + console.print( + f"[green]●[/green] gRPC (OTLP receiver) on [cyan]{host}:{grpc_port}[/cyan]" + ) + console.print("[dim]Press Ctrl+C to stop.[/dim]") + console.print() + try: + grpc_server.wait_for_termination() + except KeyboardInterrupt: + pass + finally: + grpc_server.stop(grace=2) + + +def start_trace_listener_background( + host: str = "0.0.0.0", grpc_port: int = DEFAULT_GRPC_PORT +) -> grpc.Server: + """Start the trace listener in the background (non-blocking). + + Returns the running ``grpc.Server`` so the caller can call + ``server.stop()`` later. 
+ """ + return _create_trace_server(host, grpc_port) + + +def _span_time_ns(span: dict[str, Any], key: str) -> int: + try: + return int(span.get(key, 0)) + except (TypeError, ValueError): + return 0 + + +def _trace_id_short(trace_id: str) -> str: + return trace_id[:8] if trace_id else "unknown" + + +def _trace_summary(trace: dict[str, Any]) -> TraceSummary: + spans = trace.get("spans", []) + start_ns = min((_span_time_ns(s, "startTimeUnixNano") for s in spans), default=0) + end_ns = max((_span_time_ns(s, "endTimeUnixNano") for s in spans), default=0) + return TraceSummary( + trace_id=trace.get("trace_id", "unknown"), + start_ns=start_ns, + end_ns=end_ns, + ) + + +def _service_color(service: str) -> str: + service = service.lower() + if "inbound" in service: + return "white" + if "outbound" in service: + return "white" + if "orchestrator" in service: + return PLANO_COLOR + if "routing" in service: + return "magenta" + if "agent" in service: + return "cyan" + if "llm" in service: + return "green" + return "white" + + +# Attributes to show for inbound/outbound spans when not verbose (trimmed view). 
+_INBOUND_OUTBOUND_ATTR_KEYS = ( + "http.method", + "http.target", + "http.status_code", + "url.scheme", + "guid:x-request-id", + "request_size", + "response_size", +) + + +def _trim_attrs_for_display( + attrs: dict[str, str], service: str, verbose: bool +) -> dict[str, str]: + if verbose: + return attrs + if "inbound" in service.lower() or "outbound" in service.lower(): + attrs = {k: v for k, v in attrs.items() if k in _INBOUND_OUTBOUND_ATTR_KEYS} + return {k: v for k, v in attrs.items() if k != "service.name.override"} + + +def _sorted_attr_items(attrs: dict[str, str]) -> list[tuple[str, str]]: + priority = [ + "http.method", + "http.target", + "http.status_code", + "guid:x-request-id", + "request_size", + "response_size", + "routing.determination_ms", + "route.selected_model", + "selection.agents", + "selection.agent_count", + "agent.name", + "agent.sequence", + "duration_ms", + "llm.model", + "llm.is_streaming", + "llm.time_to_first_token", + "llm.duration_ms", + "llm.response_bytes", + ] + prioritized = [(k, attrs[k]) for k in priority if k in attrs] + prioritized_keys = {k for k, _ in prioritized} + remaining = [(k, v) for k, v in attrs.items() if k not in prioritized_keys] + remaining.sort(key=lambda item: item[0]) + return prioritized + remaining + + +def _display_attr_value(key: str, value: str) -> str: + if key == "http.status_code" and value != "200": + return f"{value} ⚠️" + return value + + +def _build_tree(trace: dict[str, Any], console: Console, verbose: bool = False) -> None: + spans = trace.get("spans", []) + if not spans: + console.print("[yellow]No spans found for this trace.[/yellow]") + return + + start_ns = min((_span_time_ns(s, "startTimeUnixNano") for s in spans), default=0) + end_ns = max((_span_time_ns(s, "endTimeUnixNano") for s in spans), default=0) + total_ms = max(0, (end_ns - start_ns) / 1_000_000) + + trace_id = trace.get("trace_id", "unknown") + console.print( + f"\n[bold]Trace:[/bold] {trace_id} [dim]({total_ms:.0f}ms 
total)[/dim]\n" + ) + + spans.sort(key=lambda s: _span_time_ns(s, "startTimeUnixNano")) + tree = Tree("", guide_style="dim") + + for span in spans: + service = span.get("service", "plano(unknown)") + name = span.get("name", "") + offset_ms = max( + 0, (_span_time_ns(span, "startTimeUnixNano") - start_ns) / 1_000_000 + ) + color = _service_color(service) + label = Text(f"{offset_ms:.0f}ms ", style="yellow") + label.append(service, style=f"bold {color}") + if name: + label.append(f" {name}", style="dim white") + + node = tree.add(label) + attrs = _trim_attrs_for_display(_attrs(span), service, verbose) + sorted_items = list(_sorted_attr_items(attrs)) + for idx, (key, value) in enumerate(sorted_items): + attr_line = Text() + attr_line.append(f"{key}: ", style="white") + attr_line.append( + _display_attr_value(key, str(value)), + style=f"{PLANO_COLOR}", + ) + if idx == len(sorted_items) - 1: + attr_line.append("\n") + node.add(attr_line) + + console.print(tree) + console.print() + + +def _select_request( + console: Console, traces: list[dict[str, Any]] +) -> dict[str, Any] | None: + try: + import questionary + from questionary import Choice + from prompt_toolkit.styles import Style + except ImportError as exc: + raise click.ClickException( + "Interactive selection requires 'questionary'. " + "Install it or rerun with --json." 
+ ) from exc + + if not traces: + return None + + style = Style.from_dict( + { + "qmark": f"fg:{PLANO_COLOR} bold", + "question": "bold", + "answer": f"fg:{PLANO_COLOR} bold", + "pointer": f"fg:{PLANO_COLOR} bold", + "highlighted": f"fg:{PLANO_COLOR} bold", + "selected": f"fg:{PLANO_COLOR}", + "instruction": "fg:#888888", + "text": "", + "disabled": "fg:#666666", + } + ) + + choices = [] + for trace in traces: + summary = _trace_summary(trace) + label = f"{_trace_id_short(summary.trace_id)} ({summary.total_ms:.0f}ms total • {summary.timestamp})" + choices.append(Choice(label, value=trace)) + + selected = questionary.select( + "Select a trace to view:", + choices=choices, + style=style, + pointer="❯", + ).ask() + + if not selected: + console.print("[dim]Cancelled.[/dim]") + return None + return selected + + +@click.argument("target", required=False) +@click.option( + "--filter", + "filter_patterns", + multiple=True, + help=( + "Limit displayed attributes to matching keys " + "(wildcards supported). Repeatable." + ), +) +@click.option( + "--where", + "where_filters", + multiple=True, + help="Match traces that contain key=value. Repeatable (AND semantics).", +) +@click.option("--list", "list_only", is_flag=True, help="List trace IDs only.") +@click.option( + "--no-interactive", + is_flag=True, + help="Disable interactive prompts and selections.", +) +@click.option("--limit", type=int, default=None, help="Limit results.") +@click.option("--since", default=None, help="Look back window (e.g. 
5m, 2h, 1d).") +@click.option("--json", "json_out", is_flag=True, help="Output raw JSON.") +@click.option( + "--verbose", + "-v", + is_flag=True, + help="Show all span attributes; default trims inbound/outbound to a few keys.", +) +def _run_trace_show( + target, + filter_patterns, + where_filters, + list_only, + no_interactive, + limit, + since, + json_out, + verbose, +): + """Trace requests from the local OTLP listener.""" + console = Console() + + try: + patterns = _parse_filter_patterns(filter_patterns) + except ValueError as exc: + raise click.ClickException(str(exc)) from exc + + parsed_where = _parse_where_filters(where_filters) + if limit is not None and limit < 0: + raise click.ClickException("Limit must be greater than or equal to 0.") + since_seconds = _parse_since_seconds(since) + + if target is None: + target = "any" if list_only or since or limit else "last" + + if list_only and target not in (None, "last", "any"): + raise click.ClickException("Target and --list cannot be used together.") + + short_target = None + if isinstance(target, str) and target not in ("last", "any"): + target_lower = target.lower() + if len(target_lower) == 8: + if not _is_hex(target_lower, 8) or target_lower == "00000000": + raise click.ClickException("Short trace ID must be 8 hex characters.") + short_target = target_lower + elif len(target_lower) == 32: + if not _is_hex(target_lower, 32) or target_lower == "0" * 32: + raise click.ClickException("Trace ID must be 32 hex characters.") + else: + raise click.ClickException("Trace ID must be 8 or 32 hex characters.") + + traces_raw = _fetch_traces_raw() + if traces_raw: + available_keys = _collect_attr_keys(traces_raw) + if parsed_where: + missing_keys = [key for key, _ in parsed_where if key not in available_keys] + if missing_keys: + missing_list = ", ".join(missing_keys) + raise click.ClickException(f"Unknown --where key(s): {missing_list}") + if patterns: + unmatched = [ + pattern + for pattern in patterns + if not 
any(fnmatch(key, pattern) for key in available_keys) + ] + if unmatched: + unmatched_list = ", ".join(unmatched) + console.print( + f"[yellow]Warning:[/yellow] Filter key(s) not found: {unmatched_list}. " + "Returning unfiltered traces." + ) + + traces, trace_ids = _filter_traces( + traces_raw, patterns, parsed_where, since_seconds + ) + + if target == "last": + traces = traces[:1] + trace_ids = trace_ids[:1] + elif target not in (None, "any") and short_target is None: + traces = [trace for trace in traces if trace.get("trace_id") == target] + trace_ids = [trace.get("trace_id") for trace in traces] + if short_target: + traces = [ + trace + for trace in traces + if trace.get("trace_id", "").lower().startswith(short_target) + ] + trace_ids = [trace.get("trace_id") for trace in traces] + + if limit is not None: + if list_only: + trace_ids = trace_ids[:limit] + else: + traces = traces[:limit] + + if json_out: + if list_only: + console.print_json(data={"trace_ids": trace_ids}) + else: + console.print_json(data={"traces": traces}) + return + + if list_only: + if traces and console.is_terminal and not no_interactive: + selected = _select_request(console, traces) + if selected: + _build_tree(selected, console, verbose=verbose) + return + + if traces: + trace_ids = [_trace_id_short(_trace_summary(t).trace_id) for t in traces] + + if not trace_ids: + console.print("[yellow]No trace IDs found.[/yellow]") + return + + console.print("\n[bold]Trace IDs:[/bold]") + for trace_id in trace_ids: + console.print(f" [dim]-[/dim] {trace_id}") + return + + if not traces: + console.print("[yellow]No traces found.[/yellow]") + return + + trace_obj = traces[0] + _build_tree(trace_obj, console, verbose=verbose) + + +@click.group(invoke_without_command=True) +@click.argument("target", required=False) +@click.option( + "--filter", + "filter_patterns", + multiple=True, + help=( + "Limit displayed attributes to matching keys " + "(wildcards supported). Repeatable." 
+ ), +) +@click.option( + "--where", + "where_filters", + multiple=True, + help="Match traces that contain key=value. Repeatable (AND semantics).", +) +@click.option("--list", "list_only", is_flag=True, help="List trace IDs only.") +@click.option( + "--no-interactive", + is_flag=True, + help="Disable interactive prompts and selections.", +) +@click.option("--limit", type=int, default=None, help="Limit results.") +@click.option("--since", default=None, help="Look back window (e.g. 5m, 2h, 1d).") +@click.option("--json", "json_out", is_flag=True, help="Output raw JSON.") +@click.option( + "--verbose", + "-v", + is_flag=True, + help="Show all span attributes; default trims inbound/outbound to a few keys.", +) +@click.pass_context +def trace( + ctx, + target, + filter_patterns, + where_filters, + list_only, + no_interactive, + limit, + since, + json_out, + verbose, +): + """Trace requests from the local OTLP listener.""" + if ctx.invoked_subcommand: + return + if target == "listen" and not any( + [ + filter_patterns, + where_filters, + list_only, + no_interactive, + limit, + since, + json_out, + verbose, + ] + ): + _start_trace_listener("0.0.0.0", DEFAULT_GRPC_PORT) + return + _run_trace_show( + target, + filter_patterns, + where_filters, + list_only, + no_interactive, + limit, + since, + json_out, + verbose, + ) + + +@trace.command("listen") +@click.option("--host", default="0.0.0.0", show_default=True) +@click.option( + "--port", + type=int, + default=DEFAULT_GRPC_PORT, + show_default=True, + help="gRPC port for receiving OTLP traces.", +) +def trace_listen(host: str, port: int) -> None: + """Listen for OTLP/gRPC traces.""" + _start_trace_listener(host, port) diff --git a/cli/planoai/versioning.py b/cli/planoai/versioning.py new file mode 100644 index 00000000..465932c1 --- /dev/null +++ b/cli/planoai/versioning.py @@ -0,0 +1,70 @@ +import importlib.metadata +import re + +PYPI_PACKAGE_NAME = "planoai" +PYPI_URL = f"https://pypi.org/pypi/{PYPI_PACKAGE_NAME}/json" + + 
+def get_version() -> str: + try: + # First try package metadata (installed package). + return importlib.metadata.version(PYPI_PACKAGE_NAME) + except importlib.metadata.PackageNotFoundError: + # Fallback to local development version. + try: + from planoai import __version__ + + return __version__ + except ImportError: + return "version not found" + + +def get_latest_version(timeout: float = 2.0) -> str | None: + """Fetch the latest version from PyPI.""" + import requests + + try: + response = requests.get(PYPI_URL, timeout=timeout) + if response.status_code == 200: + data = response.json() + return data.get("info", {}).get("version") + except (requests.RequestException, ValueError): + # Network error or invalid JSON - fail silently. + return None + return None + + +def parse_version(version_str: str) -> tuple[int, ...]: + """Parse version string into a comparable tuple.""" + clean_version = re.split(r"[a-zA-Z]", version_str)[0] + parts = clean_version.split(".") + return tuple(int(p) for p in parts if p.isdigit()) + + +def check_version_status( + current: str, latest: str | None +) -> dict[str, str | bool | None]: + """Compare current version with latest and return status metadata.""" + if latest is None: + return { + "is_outdated": False, + "current": current, + "latest": None, + "message": None, + } + + try: + is_outdated = parse_version(current) < parse_version(latest) + return { + "is_outdated": is_outdated, + "current": current, + "latest": latest, + "message": f"Update available: {latest}" if is_outdated else None, + } + except (ValueError, TypeError): + return { + "is_outdated": False, + "current": current, + "latest": latest, + "message": None, + } diff --git a/cli/pyproject.toml b/cli/pyproject.toml index 7cf17e56..be3fffee 100644 --- a/cli/pyproject.toml +++ b/cli/pyproject.toml @@ -7,10 +7,15 @@ readme = "README.md" requires-python = ">=3.10" dependencies = [ "click>=8.1.7,<9.0.0", + "grpcio>=1.60.0", "jinja2>=3.1.4,<4.0.0", "jsonschema>=4.23.0,<5.0.0", 
+ "opentelemetry-proto>=1.20.0", + "questionary>=2.1.1,<3.0.0", "pyyaml>=6.0.2,<7.0.0", "requests>=2.31.0,<3.0.0", + "rich>=14.2.0", + "rich-click>=1.9.5", ] [project.optional-dependencies] @@ -30,6 +35,10 @@ path = "planoai/__init__.py" [tool.hatch.build.targets.wheel] packages = ["planoai"] +include = ["planoai/templates/*.yaml"] + +[tool.hatch.build.targets.sdist] +include = ["planoai/templates/*.yaml"] [tool.pytest.ini_options] addopts = ["-v"] diff --git a/cli/test/test_init.py b/cli/test/test_init.py new file mode 100644 index 00000000..b9665a2a --- /dev/null +++ b/cli/test/test_init.py @@ -0,0 +1,51 @@ +from click.testing import CliRunner + +from planoai.init_cmd import init + + +def test_init_clean_writes_empty_config(tmp_path, monkeypatch): + monkeypatch.chdir(tmp_path) + + runner = CliRunner() + result = runner.invoke(init, ["--clean"]) + + assert result.exit_code == 0, result.output + config_path = tmp_path / "config.yaml" + assert config_path.exists() + assert config_path.read_text(encoding="utf-8") == "\n" + + +def test_init_template_builtin_writes_config(tmp_path, monkeypatch): + monkeypatch.chdir(tmp_path) + + runner = CliRunner() + result = runner.invoke(init, ["--template", "coding_agent_routing"]) + + assert result.exit_code == 0, result.output + + config_path = tmp_path / "config.yaml" + assert config_path.exists() + config_text = config_path.read_text(encoding="utf-8") + assert "llm_providers:" in config_text + + +def test_init_refuses_overwrite_without_force(tmp_path, monkeypatch): + monkeypatch.chdir(tmp_path) + (tmp_path / "config.yaml").write_text("hello", encoding="utf-8") + + runner = CliRunner() + result = runner.invoke(init, ["--clean"]) + + assert result.exit_code != 0 + assert "Refusing to overwrite" in result.output + + +def test_init_force_overwrites(tmp_path, monkeypatch): + monkeypatch.chdir(tmp_path) + (tmp_path / "config.yaml").write_text("hello", encoding="utf-8") + + runner = CliRunner() + result = runner.invoke(init, 
["--clean", "--force"]) + + assert result.exit_code == 0, result.output + assert (tmp_path / "config.yaml").read_text(encoding="utf-8") == "\n" diff --git a/cli/test/test_version_check.py b/cli/test/test_version_check.py new file mode 100644 index 00000000..a00fba46 --- /dev/null +++ b/cli/test/test_version_check.py @@ -0,0 +1,163 @@ +import pytest +from unittest import mock +from planoai.versioning import ( + get_version, + get_latest_version, + parse_version, + check_version_status, + PYPI_URL, +) + + +class TestParseVersion: + """Tests for version string parsing.""" + + def test_parse_simple_version(self): + assert parse_version("1.0.0") == (1, 0, 0) + assert parse_version("0.4.1") == (0, 4, 1) + assert parse_version("10.20.30") == (10, 20, 30) + + def test_parse_two_part_version(self): + assert parse_version("1.0") == (1, 0) + assert parse_version("2.5") == (2, 5) + + def test_parse_version_with_prerelease(self): + # Pre-release suffixes should be stripped + assert parse_version("0.4.1a1") == (0, 4, 1) + assert parse_version("1.0.0beta2") == (1, 0, 0) + assert parse_version("2.0.0rc1") == (2, 0, 0) + + +class TestCheckVersionStatus: + """Tests for version comparison logic.""" + + def test_current_equals_latest(self): + status = check_version_status("0.4.1", "0.4.1") + assert status["is_outdated"] is False + assert status["current"] == "0.4.1" + assert status["latest"] == "0.4.1" + assert status["message"] is None + + def test_current_is_outdated(self): + status = check_version_status("0.4.1", "0.5.0") + assert status["is_outdated"] is True + assert status["current"] == "0.4.1" + assert status["latest"] == "0.5.0" + assert "Update available" in status["message"] + assert "0.5.0" in status["message"] + + def test_current_is_newer(self): + # Dev version might be newer than PyPI + status = check_version_status("0.5.0", "0.4.1") + assert status["is_outdated"] is False + assert status["message"] is None + + def test_major_version_outdated(self): + status = 
check_version_status("0.4.1", "1.0.0") + assert status["is_outdated"] is True + + def test_minor_version_outdated(self): + status = check_version_status("0.4.1", "0.5.0") + assert status["is_outdated"] is True + + def test_patch_version_outdated(self): + status = check_version_status("0.4.1", "0.4.2") + assert status["is_outdated"] is True + + def test_latest_is_none(self): + # When PyPI check fails + status = check_version_status("0.4.1", None) + assert status["is_outdated"] is False + assert status["latest"] is None + assert status["message"] is None + + +class TestGetLatestVersion: + """Tests for PyPI version fetching.""" + + def test_successful_fetch(self): + mock_response = mock.Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"info": {"version": "0.5.0"}} + + with mock.patch("requests.get", return_value=mock_response): + version = get_latest_version() + assert version == "0.5.0" + + def test_network_error(self): + import requests + + with mock.patch( + "requests.get", side_effect=requests.RequestException("Network error") + ): + version = get_latest_version() + assert version is None + + def test_timeout(self): + import requests + + with mock.patch("requests.get", side_effect=requests.Timeout("Timeout")): + version = get_latest_version() + assert version is None + + def test_invalid_json(self): + mock_response = mock.Mock() + mock_response.status_code = 200 + mock_response.json.side_effect = ValueError("Invalid JSON") + + with mock.patch("requests.get", return_value=mock_response): + version = get_latest_version() + assert version is None + + def test_404_response(self): + mock_response = mock.Mock() + mock_response.status_code = 404 + + with mock.patch("requests.get", return_value=mock_response): + version = get_latest_version() + assert version is None + + +class TestVersionCheckIntegration: + """Integration tests simulating version check scenarios.""" + + def test_outdated_version_message(self, capsys): + """Simulate an 
outdated version scenario.""" + from rich.console import Console + + console = Console(force_terminal=True) + current_version = "0.4.1" + + # Mock PyPI returning a newer version + mock_response = mock.Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"info": {"version": "0.5.0"}} + + with mock.patch("requests.get", return_value=mock_response): + latest = get_latest_version() + status = check_version_status(current_version, latest) + + assert status["is_outdated"] is True + assert status["latest"] == "0.5.0" + + def test_up_to_date_version(self): + """Simulate an up-to-date version scenario.""" + current_version = "0.4.1" + + mock_response = mock.Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"info": {"version": "0.4.1"}} + + with mock.patch("requests.get", return_value=mock_response): + latest = get_latest_version() + status = check_version_status(current_version, latest) + + assert status["is_outdated"] is False + + def test_skip_version_check_env_var(self, monkeypatch): + """Test that PLANO_SKIP_VERSION_CHECK skips the check.""" + monkeypatch.setenv("PLANO_SKIP_VERSION_CHECK", "1") + + import os + + assert os.environ.get("PLANO_SKIP_VERSION_CHECK") == "1" diff --git a/cli/uv.lock b/cli/uv.lock index 09ca98d8..d7e6b3a0 100644 --- a/cli/uv.lock +++ b/cli/uv.lock @@ -117,6 +117,67 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, ] +[[package]] +name = "grpcio" +version = "1.78.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", hash = 
"sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/a8/690a085b4d1fe066130de97a87de32c45062cf2ecd218df9675add895550/grpcio-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:7cc47943d524ee0096f973e1081cb8f4f17a4615f2116882a5f1416e4cfe92b5", size = 5946986, upload-time = "2026-02-06T09:54:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/c7/1b/e5213c5c0ced9d2d92778d30529ad5bb2dcfb6c48c4e2d01b1f302d33d64/grpcio-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c3f293fdc675ccba4db5a561048cca627b5e7bd1c8a6973ffedabe7d116e22e2", size = 11816533, upload-time = "2026-02-06T09:54:37.04Z" }, + { url = "https://files.pythonhosted.org/packages/18/37/1ba32dccf0a324cc5ace744c44331e300b000a924bf14840f948c559ede7/grpcio-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10a9a644b5dd5aec3b82b5b0b90d41c0fa94c85ef42cb42cf78a23291ddb5e7d", size = 6519964, upload-time = "2026-02-06T09:54:40.268Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f5/c0e178721b818072f2e8b6fde13faaba942406c634009caf065121ce246b/grpcio-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4c5533d03a6cbd7f56acfc9cfb44ea64f63d29091e40e44010d34178d392d7eb", size = 7198058, upload-time = "2026-02-06T09:54:42.389Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b2/40d43c91ae9cd667edc960135f9f08e58faa1576dc95af29f66ec912985f/grpcio-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ff870aebe9a93a85283837801d35cd5f8814fe2ad01e606861a7fb47c762a2b7", size = 6727212, upload-time = "2026-02-06T09:54:44.91Z" }, + { url = "https://files.pythonhosted.org/packages/ed/88/9da42eed498f0efcfcd9156e48ae63c0cde3bea398a16c99fb5198c885b6/grpcio-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:391e93548644e6b2726f1bb84ed60048d4bcc424ce5e4af0843d28ca0b754fec", size = 7300845, upload-time = "2026-02-06T09:54:47.562Z" }, + { url = "https://files.pythonhosted.org/packages/23/3f/1c66b7b1b19a8828890e37868411a6e6925df5a9030bfa87ab318f34095d/grpcio-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:df2c8f3141f7cbd112a6ebbd760290b5849cda01884554f7c67acc14e7b1758a", size = 8284605, upload-time = "2026-02-06T09:54:50.475Z" }, + { url = "https://files.pythonhosted.org/packages/94/c4/ca1bd87394f7b033e88525384b4d1e269e8424ab441ea2fba1a0c5b50986/grpcio-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd8cb8026e5f5b50498a3c4f196f57f9db344dad829ffae16b82e4fdbaea2813", size = 7726672, upload-time = "2026-02-06T09:54:53.11Z" }, + { url = "https://files.pythonhosted.org/packages/41/09/f16e487d4cc65ccaf670f6ebdd1a17566b965c74fc3d93999d3b2821e052/grpcio-1.78.0-cp310-cp310-win32.whl", hash = "sha256:f8dff3d9777e5d2703a962ee5c286c239bf0ba173877cc68dc02c17d042e29de", size = 4076715, upload-time = "2026-02-06T09:54:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/2a/32/4ce60d94e242725fd3bcc5673c04502c82a8e87b21ea411a63992dc39f8f/grpcio-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:94f95cf5d532d0e717eed4fc1810e8e6eded04621342ec54c89a7c2f14b581bf", size = 4799157, upload-time = "2026-02-06T09:54:59.838Z" }, + { url = "https://files.pythonhosted.org/packages/86/c7/d0b780a29b0837bf4ca9580904dfb275c1fc321ded7897d620af7047ec57/grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6", size = 5951525, upload-time = "2026-02-06T09:55:01.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/96920bf2ee61df85a9503cb6f733fe711c0ff321a5a697d791b075673281/grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e", size = 11830418, upload-time = "2026-02-06T09:55:04.462Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/0c/7c1528f098aeb75a97de2bae18c530f56959fb7ad6c882db45d9884d6edc/grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911", size = 6524477, upload-time = "2026-02-06T09:55:07.111Z" }, + { url = "https://files.pythonhosted.org/packages/8d/52/e7c1f3688f949058e19a011c4e0dec973da3d0ae5e033909677f967ae1f4/grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e", size = 7198266, upload-time = "2026-02-06T09:55:10.016Z" }, + { url = "https://files.pythonhosted.org/packages/e5/61/8ac32517c1e856677282c34f2e7812d6c328fa02b8f4067ab80e77fdc9c9/grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303", size = 6730552, upload-time = "2026-02-06T09:55:12.207Z" }, + { url = "https://files.pythonhosted.org/packages/bd/98/b8ee0158199250220734f620b12e4a345955ac7329cfd908d0bf0fda77f0/grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04", size = 7304296, upload-time = "2026-02-06T09:55:15.044Z" }, + { url = "https://files.pythonhosted.org/packages/bd/0f/7b72762e0d8840b58032a56fdbd02b78fc645b9fa993d71abf04edbc54f4/grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec", size = 8288298, upload-time = "2026-02-06T09:55:17.276Z" }, + { url = "https://files.pythonhosted.org/packages/24/ae/ae4ce56bc5bb5caa3a486d60f5f6083ac3469228faa734362487176c15c5/grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074", size = 7730953, upload-time = "2026-02-06T09:55:19.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/6e/8052e3a28eb6a820c372b2eb4b5e32d195c661e137d3eca94d534a4cfd8a/grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856", size = 4076503, upload-time = "2026-02-06T09:55:21.521Z" }, + { url = "https://files.pythonhosted.org/packages/08/62/f22c98c5265dfad327251fa2f840b591b1df5f5e15d88b19c18c86965b27/grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558", size = 4799767, upload-time = "2026-02-06T09:55:24.107Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f4/7384ed0178203d6074446b3c4f46c90a22ddf7ae0b3aee521627f54cfc2a/grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97", size = 5913985, upload-time = "2026-02-06T09:55:26.832Z" }, + { url = "https://files.pythonhosted.org/packages/81/ed/be1caa25f06594463f685b3790b320f18aea49b33166f4141bfdc2bfb236/grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e", size = 11811853, upload-time = "2026-02-06T09:55:29.224Z" }, + { url = "https://files.pythonhosted.org/packages/24/a7/f06d151afc4e64b7e3cc3e872d331d011c279aaab02831e40a81c691fb65/grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996", size = 6475766, upload-time = "2026-02-06T09:55:31.825Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a8/4482922da832ec0082d0f2cc3a10976d84a7424707f25780b82814aafc0a/grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7", size = 7170027, upload-time = "2026-02-06T09:55:34.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/bf/f4a3b9693e35d25b24b0b39fa46d7d8a3c439e0a3036c3451764678fec20/grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9", size = 6690766, upload-time = "2026-02-06T09:55:36.902Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/521875265cc99fe5ad4c5a17010018085cae2810a928bf15ebe7d8bcd9cc/grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383", size = 7266161, upload-time = "2026-02-06T09:55:39.824Z" }, + { url = "https://files.pythonhosted.org/packages/05/86/296a82844fd40a4ad4a95f100b55044b4f817dece732bf686aea1a284147/grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6", size = 8253303, upload-time = "2026-02-06T09:55:42.353Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ea3c0caf5468537f27ad5aab92b681ed7cc0ef5f8c9196d3fd42c8c2286b/grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce", size = 7698222, upload-time = "2026-02-06T09:55:44.629Z" }, + { url = "https://files.pythonhosted.org/packages/d7/47/7f05f81e4bb6b831e93271fb12fd52ba7b319b5402cbc101d588f435df00/grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68", size = 4066123, upload-time = "2026-02-06T09:55:47.644Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e7/d6914822c88aa2974dbbd10903d801a28a19ce9cd8bad7e694cbbcf61528/grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e", size = 4797657, upload-time = "2026-02-06T09:55:49.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/a9/8f75894993895f361ed8636cd9237f4ab39ef87fd30db17467235ed1c045/grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b", size = 5920143, upload-time = "2026-02-06T09:55:52.035Z" }, + { url = "https://files.pythonhosted.org/packages/55/06/0b78408e938ac424100100fd081189451b472236e8a3a1f6500390dc4954/grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a", size = 11803926, upload-time = "2026-02-06T09:55:55.494Z" }, + { url = "https://files.pythonhosted.org/packages/88/93/b59fe7832ff6ae3c78b813ea43dac60e295fa03606d14d89d2e0ec29f4f3/grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84", size = 6478628, upload-time = "2026-02-06T09:55:58.533Z" }, + { url = "https://files.pythonhosted.org/packages/ed/df/e67e3734527f9926b7d9c0dde6cd998d1d26850c3ed8eeec81297967ac67/grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb", size = 7173574, upload-time = "2026-02-06T09:56:01.786Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/cc03fffb07bfba982a9ec097b164e8835546980aec25ecfa5f9c1a47e022/grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5", size = 6692639, upload-time = "2026-02-06T09:56:04.529Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/289c32e301b85bdb67d7ec68b752155e674ee3ba2173a1858f118e399ef3/grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9", size = 7268838, upload-time = "2026-02-06T09:56:08.397Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/79/1be93f32add280461fa4773880196572563e9c8510861ac2da0ea0f892b6/grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702", size = 8251878, upload-time = "2026-02-06T09:56:10.914Z" }, + { url = "https://files.pythonhosted.org/packages/65/65/793f8e95296ab92e4164593674ae6291b204bb5f67f9d4a711489cd30ffa/grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20", size = 7695412, upload-time = "2026-02-06T09:56:13.593Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/1e233fe697ecc82845942c2822ed06bb522e70d6771c28d5528e4c50f6a4/grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670", size = 4064899, upload-time = "2026-02-06T09:56:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/4d/27/d86b89e36de8a951501fb06a0f38df19853210f341d0b28f83f4aa0ffa08/grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4", size = 4797393, upload-time = "2026-02-06T09:56:17.882Z" }, + { url = "https://files.pythonhosted.org/packages/29/f2/b56e43e3c968bfe822fa6ce5bca10d5c723aa40875b48791ce1029bb78c7/grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e", size = 5920591, upload-time = "2026-02-06T09:56:20.758Z" }, + { url = "https://files.pythonhosted.org/packages/5d/81/1f3b65bd30c334167bfa8b0d23300a44e2725ce39bba5b76a2460d85f745/grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f", size = 11813685, upload-time = "2026-02-06T09:56:24.315Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/1c/bbe2f8216a5bd3036119c544d63c2e592bdf4a8ec6e4a1867592f4586b26/grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724", size = 6487803, upload-time = "2026-02-06T09:56:27.367Z" }, + { url = "https://files.pythonhosted.org/packages/16/5c/a6b2419723ea7ddce6308259a55e8e7593d88464ce8db9f4aa857aba96fa/grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b", size = 7173206, upload-time = "2026-02-06T09:56:29.876Z" }, + { url = "https://files.pythonhosted.org/packages/df/1e/b8801345629a415ea7e26c83d75eb5dbe91b07ffe5210cc517348a8d4218/grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7", size = 6693826, upload-time = "2026-02-06T09:56:32.305Z" }, + { url = "https://files.pythonhosted.org/packages/34/84/0de28eac0377742679a510784f049738a80424b17287739fc47d63c2439e/grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452", size = 7277897, upload-time = "2026-02-06T09:56:34.915Z" }, + { url = "https://files.pythonhosted.org/packages/ca/9c/ad8685cfe20559a9edb66f735afdcb2b7d3de69b13666fdfc542e1916ebd/grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127", size = 8252404, upload-time = "2026-02-06T09:56:37.553Z" }, + { url = "https://files.pythonhosted.org/packages/3c/05/33a7a4985586f27e1de4803887c417ec7ced145ebd069bc38a9607059e2b/grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65", size = 7696837, upload-time = "2026-02-06T09:56:40.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/77/7382241caf88729b106e49e7d18e3116216c778e6a7e833826eb96de22f7/grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c", size = 4142439, upload-time = "2026-02-06T09:56:43.258Z" }, + { url = "https://files.pythonhosted.org/packages/48/b2/b096ccce418882fbfda4f7496f9357aaa9a5af1896a9a7f60d9f2b275a06/grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb", size = 4929852, upload-time = "2026-02-06T09:56:45.885Z" }, +] + [[package]] name = "idna" version = "3.10" @@ -174,6 +235,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + [[package]] name = "markupsafe" version = "3.0.2" @@ -232,6 +305,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = 
"sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, ] +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/1d/f25d76d8260c156c40c97c9ed4511ec0f9ce353f8108ca6e7561f82a06b2/opentelemetry_proto-1.39.1.tar.gz", hash = "sha256:6c8e05144fc0d3ed4d22c2289c6b126e03bcd0e6a7da0f16cedd2e1c2772e2c8", size = 46152, upload-time = "2025-12-11T13:32:48.681Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/95/b40c96a7b5203005a0b03d8ce8cd212ff23f1793d5ba289c87a097571b18/opentelemetry_proto-1.39.1-py3-none-any.whl", hash = "sha256:22cdc78efd3b3765d09e68bfbd010d4fc254c9818afd0b6b423387d9dee46007", size = 72535, upload-time = "2025-12-11T13:32:33.866Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -243,14 +337,19 @@ wheels = [ [[package]] name = "planoai" -version = "0.4.3" +version = "0.4.4" source = { editable = "." 
} dependencies = [ { name = "click" }, + { name = "grpcio" }, { name = "jinja2" }, { name = "jsonschema" }, + { name = "opentelemetry-proto" }, { name = "pyyaml" }, + { name = "questionary" }, { name = "requests" }, + { name = "rich" }, + { name = "rich-click" }, ] [package.optional-dependencies] @@ -266,11 +365,16 @@ dev = [ [package.metadata] requires-dist = [ { name = "click", specifier = ">=8.1.7,<9.0.0" }, + { name = "grpcio", specifier = ">=1.60.0" }, { name = "jinja2", specifier = ">=3.1.4,<4.0.0" }, { name = "jsonschema", specifier = ">=4.23.0,<5.0.0" }, + { name = "opentelemetry-proto", specifier = ">=1.20.0" }, { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.4.1,<9.0.0" }, { name = "pyyaml", specifier = ">=6.0.2,<7.0.0" }, + { name = "questionary", specifier = ">=2.1.1,<3.0.0" }, { name = "requests", specifier = ">=2.31.0,<3.0.0" }, + { name = "rich", specifier = ">=14.2.0" }, + { name = "rich-click", specifier = ">=1.9.5" }, ] provides-extras = ["dev"] @@ -286,6 +390,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = 
"sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, +] + +[[package]] +name = "protobuf" +version = "6.33.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = 
"sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -357,6 +488,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, ] +[[package]] +name = "questionary" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "prompt-toolkit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/45/eafb0bba0f9988f6a2520f9ca2df2c82ddfa8d67c95d6625452e97b204a5/questionary-2.1.1.tar.gz", hash = "sha256:3d7e980292bb0107abaa79c68dd3eee3c561b83a0f89ae482860b181c8bd412d", size = 25845, upload-time = "2025-08-28T19:00:20.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/26/1062c7ec1b053db9e499b4d2d5bc231743201b74051c973dadeac80a8f43/questionary-2.1.1-py3-none-any.whl", hash = "sha256:a51af13f345f1cdea62347589fbb6df3b290306ab8930713bfae4d475a7d4a59", size = 36753, upload-time = "2025-08-28T19:00:19.56Z" }, +] + [[package]] name = "referencing" version = "0.36.2" @@ -386,6 +529,34 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, +] + +[[package]] +name = "rich-click" +version = "1.9.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "rich" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6b/d1/b60ca6a8745e76800b50c7ee246fd73f08a3be5d8e0b551fc93c19fa1203/rich_click-1.9.5.tar.gz", hash = "sha256:48120531493f1533828da80e13e839d471979ec8d7d0ca7b35f86a1379cc74b6", size = 73927, upload-time = "2025-12-21T14:49:44.167Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/0a/d865895e1e5d88a60baee0fc3703eb111c502ee10c8c107516bc7623abf8/rich_click-1.9.5-py3-none-any.whl", hash = "sha256:9b195721a773b1acf0e16ff9ec68cef1e7d237e53471e6e3f7ade462f86c403a", size = 70580, upload-time = "2025-12-21T14:49:42.905Z" }, +] + [[package]] name = "rpds-py" version = 
"0.27.1" @@ -577,3 +748,12 @@ sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599 wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] + +[[package]] +name = "wcwidth" +version = "0.2.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, +] diff --git a/crates/brightstaff/src/handlers/llm.rs b/crates/brightstaff/src/handlers/llm.rs index b9fe0ba3..13ed8419 100644 --- a/crates/brightstaff/src/handlers/llm.rs +++ b/crates/brightstaff/src/handlers/llm.rs @@ -28,7 +28,7 @@ use crate::state::response_state_processor::ResponsesStateProcessor; use crate::state::{ extract_input_items, retrieve_and_combine_input, StateStorage, StateStorageError, }; -use crate::tracing::{operation_component, set_service_name}; +use crate::tracing::{llm as tracing_llm, operation_component, set_service_name}; fn full>(chunk: T) -> BoxBody { Full::new(chunk.into()) @@ -62,6 +62,10 @@ pub async fn llm_chat( request_id = %request_id, http.method = %request.method(), http.path = %request_path, + llm.model = tracing::field::Empty, + llm.tools = tracing::field::Empty, + llm.user_message_preview = tracing::field::Empty, + llm.temperature = 
tracing::field::Empty, ); // Execute the rest of the handler inside the span @@ -149,7 +153,7 @@ async fn llm_chat_inner( // Model alias resolution: update model field in client_request immediately // This ensures all downstream objects use the resolved model let model_from_request = client_request.model().to_string(); - let _temperature = client_request.get_temperature(); + let temperature = client_request.get_temperature(); let is_streaming_request = client_request.is_streaming(); let alias_resolved_model = resolve_model_alias(&model_from_request, &model_aliases); @@ -180,10 +184,25 @@ async fn llm_chat_inner( }; // Extract tool names and user message preview for span attributes - let _tool_names = client_request.get_tool_names(); - let _user_message_preview = client_request + let tool_names = client_request.get_tool_names(); + let user_message_preview = client_request .get_recent_user_message() .map(|msg| truncate_message(&msg, 50)); + let span = tracing::Span::current(); + if let Some(temp) = temperature { + span.record(tracing_llm::TEMPERATURE, tracing::field::display(temp)); + } + if let Some(tools) = &tool_names { + let formatted_tools = tools + .iter() + .map(|name| format!("{}(...)", name)) + .collect::>() + .join("\n"); + span.record(tracing_llm::TOOLS, formatted_tools.as_str()); + } + if let Some(preview) = &user_message_preview { + span.record(tracing_llm::USER_MESSAGE_PREVIEW, preview.as_str()); + } // Extract messages for signal analysis (clone before moving client_request) let messages_for_signals = Some(client_request.get_messages()); @@ -321,6 +340,7 @@ async fn llm_chat_inner( // Router returned "none" sentinel, use validated resolved_model from request alias_resolved_model.clone() }; + tracing::Span::current().record(tracing_llm::MODEL_NAME, resolved_model.as_str()); let span_name = if model_from_request == resolved_model { format!("POST {} {}", request_path, resolved_model) diff --git 
a/demos/use_cases/multi_agent_with_crewai_langchain/config.yaml b/demos/use_cases/multi_agent_with_crewai_langchain/config.yaml index 8b63ccea..b3a204f3 100644 --- a/demos/use_cases/multi_agent_with_crewai_langchain/config.yaml +++ b/demos/use_cases/multi_agent_with_crewai_langchain/config.yaml @@ -55,4 +55,3 @@ listeners: tracing: random_sampling: 100 - opentracing_grpc_endpoint: http://jaeger:4317 diff --git a/demos/use_cases/multi_agent_with_crewai_langchain/docker-compose.yaml b/demos/use_cases/multi_agent_with_crewai_langchain/docker-compose.yaml index c233699b..8f7bdb00 100644 --- a/demos/use_cases/multi_agent_with_crewai_langchain/docker-compose.yaml +++ b/demos/use_cases/multi_agent_with_crewai_langchain/docker-compose.yaml @@ -6,6 +6,7 @@ services: dockerfile: Dockerfile ports: - "8001:8001" + - "12000:12000" environment: - ARCH_CONFIG_PATH=/app/arch_config.yaml - OPENAI_API_KEY=${OPENAI_API_KEY:?OPENAI_API_KEY environment variable is required but not set} diff --git a/demos/use_cases/travel_agents/tracing.png b/demos/use_cases/travel_agents/tracing.png index 14adbbaf..c3a78a7a 100644 Binary files a/demos/use_cases/travel_agents/tracing.png and b/demos/use_cases/travel_agents/tracing.png differ diff --git a/demos/use_cases/travel_agents/uv.lock b/demos/use_cases/travel_agents/uv.lock index 3f577a83..985c19e7 100644 --- a/demos/use_cases/travel_agents/uv.lock +++ b/demos/use_cases/travel_agents/uv.lock @@ -146,6 +146,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + [[package]] name = "jiter" version = "0.12.0" @@ -262,6 +274,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bb/d5/eb52edff49d3d5ea116e225538c118699ddeb7c29fa17ec28af14bc10033/openai-2.13.0-py3-none-any.whl", hash = "sha256:746521065fed68df2f9c2d85613bb50844343ea81f60009b60e6a600c9352c79", size = 1066837, upload-time = "2025-12-16T18:19:43.124Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, +] + [[package]] name = "pydantic" version = "2.12.5" @@ -438,6 +463,7 @@ dependencies = [ { name = "fastapi" }, { name = "httpx" }, { name = "openai" }, + { name = "opentelemetry-api" }, { name = "pydantic" }, { 
name = "uvicorn" }, ] @@ -445,11 +471,12 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "click", specifier = ">=8.2.1" }, - { name = "fastapi", specifier = ">=0.104.1" }, + { name = "fastapi", specifier = ">=0.115.0" }, { name = "httpx", specifier = ">=0.24.0" }, - { name = "openai", specifier = ">=2.13.0" }, + { name = "openai", specifier = ">=1.0.0" }, + { name = "opentelemetry-api", specifier = ">=1.20.0" }, { name = "pydantic", specifier = ">=2.11.7" }, - { name = "uvicorn", specifier = ">=0.24.0" }, + { name = "uvicorn", specifier = ">=0.30.0" }, ] [[package]] @@ -486,3 +513,12 @@ sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef468 wheels = [ { url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" }, ] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] diff --git a/docs/source/guides/observability/tracing.rst b/docs/source/guides/observability/tracing.rst index aab3b069..4b6d8090 100644 --- a/docs/source/guides/observability/tracing.rst +++ b/docs/source/guides/observability/tracing.rst @@ -168,6 +168,94 @@ How to Initiate A Trace You can adjust this value from 
0-100. +Tracing with the CLI +-------------------- + +The Plano CLI ships with a local OTLP/gRPC listener and a trace viewer so you can inspect spans without wiring a full observability backend. This is ideal for development, debugging, and quick QA. + +Quick Start +~~~~~~~~~~~ + +You can enable tracing in either of these ways: + +1. Start the local listener explicitly: + +.. code-block:: console + + $ planoai trace listen + +2. Or start Plano with tracing enabled (auto-starts the local OTLP listener): + +.. code-block:: console + + $ planoai up --with-tracing + + # Optional: choose a different listener port + $ planoai up --with-tracing --tracing-port 4318 + +3. Send requests through Plano as usual. The listener accepts OTLP/gRPC on: + + - ``0.0.0.0:4317`` (default) + +4. View the most recent trace: + +.. code-block:: console + + $ planoai trace + +Inspect and Filter Traces +~~~~~~~~~~~~~~~~~~~~~~~~~ + +List available trace IDs: + +.. code-block:: console + + $ planoai trace --list + +Open a specific trace (full or short trace ID): + +.. code-block:: console + + $ planoai trace 7f4e9a1c + $ planoai trace 7f4e9a1c0d9d4a0bb9bf5a8a7d13f62a + +Filter by attributes and time window: + +.. code-block:: console + + $ planoai trace --where llm.model=gpt-4o-mini --since 30m + $ planoai trace --filter "http.*" --limit 5 + +Return JSON for automation: + +.. code-block:: console + + $ planoai trace --json + $ planoai trace --list --json + +Show full span attributes (disable default compact view): + +.. code-block:: console + + $ planoai trace --verbose + $ planoai trace -v + +Point the CLI at a different local listener port: + +.. code-block:: console + + $ export PLANO_TRACE_PORT=50051 + $ planoai trace --list + +Notes +~~~~~ + +- ``--where`` accepts repeatable ``key=value`` filters and uses AND semantics. +- ``--filter`` supports wildcards (``*``) to limit displayed attributes. +- ``--no-interactive`` disables prompts when listing traces. 
+- By default, inbound/outbound spans use a compact attribute view. + + Trace Propagation ----------------- @@ -409,6 +497,59 @@ tools like AWS X-Ray and Datadog, enhancing observability and facilitating faste Additional Resources -------------------- +CLI Reference +~~~~~~~~~~~~~ + +``planoai trace`` + Trace requests captured by the local OTLP listener. + + **Synopsis** + + .. code-block:: console + + $ planoai trace [TARGET] [OPTIONS] + + **Targets** + + - ``last`` (default): show the most recent trace. + - ``any``: allow interactive selection when available. + - ````: full 32-hex trace ID. + - ````: first 8 hex characters. + + **Options** + + - ``--filter ``: limit displayed attributes to matching keys (supports ``*``). + - ``--where ``: match traces containing a specific attribute (repeatable, AND). + - ``--list``: list trace IDs only. + - ``--no-interactive``: disable interactive prompts/selections. + - ``--limit ``: limit the number of traces returned. + - ``--since ``: look back window (``5m``, ``2h``, ``1d``). + - ``--json``: output raw JSON instead of formatted output. + - ``--verbose, -v``: show all span attributes. By default, inbound/outbound + spans are displayed in a compact view. + + **Environment** + + - ``PLANO_TRACE_PORT``: gRPC port used by ``planoai trace`` to query traces + (defaults to ``4317``). + +``planoai trace listen`` + Start a local OTLP/gRPC listener. + + **Synopsis** + + .. code-block:: console + + $ planoai trace listen [OPTIONS] + + **Options** + + - ``--host ``: bind address (default: ``0.0.0.0``). + - ``--port ``: gRPC listener port (default: ``4317``). + +External References +~~~~~~~~~~~~~~~~~~~ + - `OpenTelemetry Documentation `_ - `W3C Trace Context Specification `_ - `AWS X-Ray Exporter `_