diff --git a/README.md b/README.md index c0725a44f..ee2848ced 100644 --- a/README.md +++ b/README.md @@ -38,19 +38,24 @@ https://github.com/user-attachments/assets/a0a16566-6967-4374-ac51-9b3e07fbecd7 ## Key Features ### 💡 **Idea**: -Have your own highly customizable private NotebookLM and Perplexity integrated with external sources. +- Have your own highly customizable private NotebookLM and Perplexity integrated with external sources. ### 📁 **Multiple File Format Uploading Support** -Save content from your own personal files *(Documents, images, videos and supports **50+ file extensions**)* to your own personal knowledge base . +- Save content from your own personal files *(Documents, images, videos and supports **50+ file extensions**)* to your own personal knowledge base . ### 🔍 **Powerful Search** -Quickly research or find anything in your saved content . +- Quickly research or find anything in your saved content . ### 💬 **Chat with your Saved Content** - Interact in Natural Language and get cited answers. +- Interact in Natural Language and get cited answers. ### 📄 **Cited Answers** -Get Cited answers just like Perplexity. +- Get Cited answers just like Perplexity. ### 🔔 **Privacy & Local LLM Support** -Works Flawlessly with Ollama local LLMs. +- Works Flawlessly with Ollama local LLMs. ### 🏠 **Self Hostable** -Open source and easy to deploy locally. +- Open source and easy to deploy locally. +### 👥 **Team Collaboration with RBAC** +- Role-Based Access Control for Search Spaces +- Invite team members with customizable roles (Owner, Admin, Editor, Viewer) +- Granular permissions for documents, chats, connectors, and settings +- Share knowledge bases securely within your organization ### 🎙️ Podcasts - Blazingly fast podcast generation agent. (Creates a 3-minute podcast in under 20 seconds.) 
- Convert your chat conversations into engaging audio content diff --git a/README.zh-CN.md b/README.zh-CN.md index 84bf8a133..464242a4d 100644 --- a/README.zh-CN.md +++ b/README.zh-CN.md @@ -39,25 +39,31 @@ https://github.com/user-attachments/assets/a0a16566-6967-4374-ac51-9b3e07fbecd7 ## 核心功能 ### 💡 **理念**: -拥有您自己的高度可定制的私有 NotebookLM 和 Perplexity,并与外部数据源集成。 +- 拥有您自己的高度可定制的私有 NotebookLM 和 Perplexity,并与外部数据源集成。 ### 📁 **支持多种文件格式上传** -将您个人文件中的内容(文档、图像、视频,支持 **50+ 种文件扩展名**)保存到您自己的个人知识库。 +- 将您个人文件中的内容(文档、图像、视频,支持 **50+ 种文件扩展名**)保存到您自己的个人知识库。 ### 🔍 **强大的搜索功能** -快速研究或查找已保存内容中的任何信息。 +- 快速研究或查找已保存内容中的任何信息。 ### 💬 **与已保存内容对话** -使用自然语言交互并获得引用答案。 +- 使用自然语言交互并获得引用答案。 ### 📄 **引用答案** -像 Perplexity 一样获得带引用的答案。 +- 像 Perplexity 一样获得带引用的答案。 ### 🔔 **隐私保护与本地 LLM 支持** -完美支持 Ollama 本地大语言模型。 +- 完美支持 Ollama 本地大语言模型。 ### 🏠 **可自托管** -开源且易于本地部署。 +- 开源且易于本地部署。 + +### 👥 **团队协作与 RBAC** +- 搜索空间的基于角色的访问控制 +- 使用可自定义的角色(所有者、管理员、编辑者、查看者)邀请团队成员 +- 对文档、聊天、连接器和设置的细粒度权限控制 +- 在组织内安全共享知识库 ### 🎙️ **播客功能** - 超快速播客生成代理(在 20 秒内创建 3 分钟播客) diff --git a/surfsense_backend/alembic/versions/36_remove_fk_constraints_for_global_llm_configs.py b/surfsense_backend/alembic/versions/36_remove_fk_constraints_for_global_llm_configs.py index fa4c929ce..1faebf2ed 100644 --- a/surfsense_backend/alembic/versions/36_remove_fk_constraints_for_global_llm_configs.py +++ b/surfsense_backend/alembic/versions/36_remove_fk_constraints_for_global_llm_configs.py @@ -8,6 +8,8 @@ Create Date: 2025-11-13 23:20:12.912741 from collections.abc import Sequence +from sqlalchemy import text + from alembic import op # revision identifiers, used by Alembic. 
@@ -17,6 +19,20 @@ branch_labels: str | Sequence[str] | None = None depends_on: str | Sequence[str] | None = None +def constraint_exists(connection, table_name: str, constraint_name: str) -> bool: + """Check if a constraint exists on the given table.""" + result = connection.execute( + text( + """ + SELECT 1 FROM information_schema.table_constraints + WHERE table_name = :table_name AND constraint_name = :constraint_name + """ + ), + {"table_name": table_name, "constraint_name": constraint_name}, + ) + return result.fetchone() is not None + + def upgrade() -> None: """ Remove foreign key constraints on LLM preference columns to allow global configs (negative IDs). @@ -24,50 +40,55 @@ def upgrade() -> None: Global LLM configs use negative IDs and don't exist in the llm_configs table, so we need to remove the foreign key constraints that were preventing their use. """ - # Drop the foreign key constraints - op.drop_constraint( + connection = op.get_bind() + + # Drop the foreign key constraints if they exist + constraints_to_drop = [ "user_search_space_preferences_long_context_llm_id_fkey", - "user_search_space_preferences", - type_="foreignkey", - ) - op.drop_constraint( "user_search_space_preferences_fast_llm_id_fkey", - "user_search_space_preferences", - type_="foreignkey", - ) - op.drop_constraint( "user_search_space_preferences_strategic_llm_id_fkey", - "user_search_space_preferences", - type_="foreignkey", - ) + ] + + for constraint_name in constraints_to_drop: + if constraint_exists( + connection, "user_search_space_preferences", constraint_name + ): + op.drop_constraint( + constraint_name, + "user_search_space_preferences", + type_="foreignkey", + ) + else: + print(f"Constraint '{constraint_name}' does not exist. Skipping.") def downgrade() -> None: """ Re-add foreign key constraints (will fail if any negative IDs exist in the table). 
""" - # Re-add the foreign key constraints - op.create_foreign_key( - "user_search_space_preferences_long_context_llm_id_fkey", - "user_search_space_preferences", - "llm_configs", - ["long_context_llm_id"], - ["id"], - ondelete="SET NULL", - ) - op.create_foreign_key( - "user_search_space_preferences_fast_llm_id_fkey", - "user_search_space_preferences", - "llm_configs", - ["fast_llm_id"], - ["id"], - ondelete="SET NULL", - ) - op.create_foreign_key( - "user_search_space_preferences_strategic_llm_id_fkey", - "user_search_space_preferences", - "llm_configs", - ["strategic_llm_id"], - ["id"], - ondelete="SET NULL", - ) + connection = op.get_bind() + + # Re-add the foreign key constraints if they don't exist + constraints_to_create = [ + ( + "user_search_space_preferences_long_context_llm_id_fkey", + "long_context_llm_id", + ), + ("user_search_space_preferences_fast_llm_id_fkey", "fast_llm_id"), + ("user_search_space_preferences_strategic_llm_id_fkey", "strategic_llm_id"), + ] + + for constraint_name, column_name in constraints_to_create: + if not constraint_exists( + connection, "user_search_space_preferences", constraint_name + ): + op.create_foreign_key( + constraint_name, + "user_search_space_preferences", + "llm_configs", + [column_name], + ["id"], + ondelete="SET NULL", + ) + else: + print(f"Constraint '{constraint_name}' already exists. 
Skipping.") diff --git a/surfsense_backend/alembic/versions/37_add_system_prompts_to_searchspaces.py b/surfsense_backend/alembic/versions/37_add_system_prompts_to_searchspaces.py index afdee4942..6c82b4bad 100644 --- a/surfsense_backend/alembic/versions/37_add_system_prompts_to_searchspaces.py +++ b/surfsense_backend/alembic/versions/37_add_system_prompts_to_searchspaces.py @@ -9,6 +9,7 @@ Create Date: 2025-11-19 00:00:00.000000 from collections.abc import Sequence import sqlalchemy as sa +from sqlalchemy import text from alembic import op @@ -19,24 +20,55 @@ branch_labels: str | Sequence[str] | None = None depends_on: str | Sequence[str] | None = None +def column_exists(connection, table_name: str, column_name: str) -> bool: + """Check if a column exists on the given table.""" + result = connection.execute( + text( + """ + SELECT 1 FROM information_schema.columns + WHERE table_name = :table_name AND column_name = :column_name + """ + ), + {"table_name": table_name, "column_name": column_name}, + ) + return result.fetchone() is not None + + def upgrade() -> None: """Add QnA configuration columns to searchspaces table.""" + connection = op.get_bind() + # Add citations_enabled boolean (default True) - op.add_column( - "searchspaces", - sa.Column( - "citations_enabled", sa.Boolean(), nullable=False, server_default="true" - ), - ) + if not column_exists(connection, "searchspaces", "citations_enabled"): + op.add_column( + "searchspaces", + sa.Column( + "citations_enabled", sa.Boolean(), nullable=False, server_default="true" + ), + ) + else: + print("Column 'citations_enabled' already exists. 
Skipping.") # Add custom instructions text field (nullable, defaults to empty) - op.add_column( - "searchspaces", - sa.Column("qna_custom_instructions", sa.Text(), nullable=True), - ) + if not column_exists(connection, "searchspaces", "qna_custom_instructions"): + op.add_column( + "searchspaces", + sa.Column("qna_custom_instructions", sa.Text(), nullable=True), + ) + else: + print("Column 'qna_custom_instructions' already exists. Skipping.") def downgrade() -> None: """Remove QnA configuration columns from searchspaces table.""" - op.drop_column("searchspaces", "qna_custom_instructions") - op.drop_column("searchspaces", "citations_enabled") + connection = op.get_bind() + + if column_exists(connection, "searchspaces", "qna_custom_instructions"): + op.drop_column("searchspaces", "qna_custom_instructions") + else: + print("Column 'qna_custom_instructions' does not exist. Skipping.") + + if column_exists(connection, "searchspaces", "citations_enabled"): + op.drop_column("searchspaces", "citations_enabled") + else: + print("Column 'citations_enabled' does not exist. 
Skipping.") diff --git a/surfsense_backend/alembic/versions/38_add_webcrawler_connector_enum.py b/surfsense_backend/alembic/versions/38_add_webcrawler_connector_enum.py new file mode 100644 index 000000000..1b33c31b3 --- /dev/null +++ b/surfsense_backend/alembic/versions/38_add_webcrawler_connector_enum.py @@ -0,0 +1,59 @@ +"""Add Webcrawler connector enums + +Revision ID: 38 +Revises: 37 +Create Date: 2025-11-17 17:00:00.000000 + +""" + +from collections.abc import Sequence + +from alembic import op + +revision: str = "38" +down_revision: str | None = "37" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + """Safely add 'WEBCRAWLER_CONNECTOR' to enum types if missing.""" + + # Add to searchsourceconnectortype enum + op.execute( + """ + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_type t + JOIN pg_enum e ON t.oid = e.enumtypid + WHERE t.typname = 'searchsourceconnectortype' AND e.enumlabel = 'WEBCRAWLER_CONNECTOR' + ) THEN + ALTER TYPE searchsourceconnectortype ADD VALUE 'WEBCRAWLER_CONNECTOR'; + END IF; + END + $$; + """ + ) + + # Add to documenttype enum + op.execute( + """ + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_type t + JOIN pg_enum e ON t.oid = e.enumtypid + WHERE t.typname = 'documenttype' AND e.enumlabel = 'CRAWLED_URL' + ) THEN + ALTER TYPE documenttype ADD VALUE 'CRAWLED_URL'; + END IF; + END + $$; + """ + ) + + +def downgrade() -> None: + """Remove 'WEBCRAWLER_CONNECTOR' from enum types.""" + pass diff --git a/surfsense_backend/alembic/versions/39_add_rbac_tables.py b/surfsense_backend/alembic/versions/39_add_rbac_tables.py new file mode 100644 index 000000000..ac2df0df2 --- /dev/null +++ b/surfsense_backend/alembic/versions/39_add_rbac_tables.py @@ -0,0 +1,179 @@ +"""Add RBAC tables for search space access control + +Revision ID: 39 +Revises: 38 +Create Date: 2025-11-27 00:00:00.000000 + +This migration adds: +- Permission enum for granular access control +- 
search_space_roles table for custom roles per search space +- search_space_memberships table for user-searchspace-role relationships +- search_space_invites table for invite links +""" + +from collections.abc import Sequence + +from sqlalchemy import inspect + +from alembic import op + +revision: str = "39" +down_revision: str | None = "38" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + """Upgrade schema - add RBAC tables for search space access control.""" + + # Create search_space_roles table + op.execute( + """ + CREATE TABLE IF NOT EXISTS search_space_roles ( + id SERIAL PRIMARY KEY, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + name VARCHAR(100) NOT NULL, + description VARCHAR(500), + permissions TEXT[] NOT NULL DEFAULT '{}', + is_default BOOLEAN NOT NULL DEFAULT FALSE, + is_system_role BOOLEAN NOT NULL DEFAULT FALSE, + search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE, + CONSTRAINT uq_searchspace_role_name UNIQUE (search_space_id, name) + ); + """ + ) + + # Create search_space_invites table (needs to be created before memberships due to FK) + op.execute( + """ + CREATE TABLE IF NOT EXISTS search_space_invites ( + id SERIAL PRIMARY KEY, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + invite_code VARCHAR(64) NOT NULL UNIQUE, + search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE, + role_id INTEGER REFERENCES search_space_roles(id) ON DELETE SET NULL, + created_by_id UUID REFERENCES "user"(id) ON DELETE SET NULL, + expires_at TIMESTAMPTZ, + max_uses INTEGER, + uses_count INTEGER NOT NULL DEFAULT 0, + is_active BOOLEAN NOT NULL DEFAULT TRUE, + name VARCHAR(100) + ); + """ + ) + + # Create search_space_memberships table + op.execute( + """ + CREATE TABLE IF NOT EXISTS search_space_memberships ( + id SERIAL PRIMARY KEY, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + user_id UUID NOT NULL REFERENCES "user"(id) ON DELETE 
CASCADE, + search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE, + role_id INTEGER REFERENCES search_space_roles(id) ON DELETE SET NULL, + is_owner BOOLEAN NOT NULL DEFAULT FALSE, + joined_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + invited_by_invite_id INTEGER REFERENCES search_space_invites(id) ON DELETE SET NULL, + CONSTRAINT uq_user_searchspace_membership UNIQUE (user_id, search_space_id) + ); + """ + ) + + # Get connection and inspector for checking existing indexes + conn = op.get_bind() + inspector = inspect(conn) + + # Create indexes for search_space_roles + existing_indexes = [ + idx["name"] for idx in inspector.get_indexes("search_space_roles") + ] + if "ix_search_space_roles_id" not in existing_indexes: + op.create_index("ix_search_space_roles_id", "search_space_roles", ["id"]) + if "ix_search_space_roles_created_at" not in existing_indexes: + op.create_index( + "ix_search_space_roles_created_at", "search_space_roles", ["created_at"] + ) + if "ix_search_space_roles_name" not in existing_indexes: + op.create_index("ix_search_space_roles_name", "search_space_roles", ["name"]) + + # Create indexes for search_space_memberships + existing_indexes = [ + idx["name"] for idx in inspector.get_indexes("search_space_memberships") + ] + if "ix_search_space_memberships_id" not in existing_indexes: + op.create_index( + "ix_search_space_memberships_id", "search_space_memberships", ["id"] + ) + if "ix_search_space_memberships_created_at" not in existing_indexes: + op.create_index( + "ix_search_space_memberships_created_at", + "search_space_memberships", + ["created_at"], + ) + if "ix_search_space_memberships_user_id" not in existing_indexes: + op.create_index( + "ix_search_space_memberships_user_id", + "search_space_memberships", + ["user_id"], + ) + if "ix_search_space_memberships_search_space_id" not in existing_indexes: + op.create_index( + "ix_search_space_memberships_search_space_id", + "search_space_memberships", + ["search_space_id"], + ) + 
+ # Create indexes for search_space_invites + existing_indexes = [ + idx["name"] for idx in inspector.get_indexes("search_space_invites") + ] + if "ix_search_space_invites_id" not in existing_indexes: + op.create_index("ix_search_space_invites_id", "search_space_invites", ["id"]) + if "ix_search_space_invites_created_at" not in existing_indexes: + op.create_index( + "ix_search_space_invites_created_at", "search_space_invites", ["created_at"] + ) + if "ix_search_space_invites_invite_code" not in existing_indexes: + op.create_index( + "ix_search_space_invites_invite_code", + "search_space_invites", + ["invite_code"], + ) + + +def downgrade() -> None: + """Downgrade schema - remove RBAC tables.""" + + # Drop indexes for search_space_memberships + op.drop_index( + "ix_search_space_memberships_search_space_id", + table_name="search_space_memberships", + ) + op.drop_index( + "ix_search_space_memberships_user_id", table_name="search_space_memberships" + ) + op.drop_index( + "ix_search_space_memberships_created_at", table_name="search_space_memberships" + ) + op.drop_index( + "ix_search_space_memberships_id", table_name="search_space_memberships" + ) + + # Drop indexes for search_space_invites + op.drop_index( + "ix_search_space_invites_invite_code", table_name="search_space_invites" + ) + op.drop_index( + "ix_search_space_invites_created_at", table_name="search_space_invites" + ) + op.drop_index("ix_search_space_invites_id", table_name="search_space_invites") + + # Drop indexes for search_space_roles + op.drop_index("ix_search_space_roles_name", table_name="search_space_roles") + op.drop_index("ix_search_space_roles_created_at", table_name="search_space_roles") + op.drop_index("ix_search_space_roles_id", table_name="search_space_roles") + + # Drop tables in correct order (respecting foreign key constraints) + op.drop_table("search_space_memberships") + op.drop_table("search_space_invites") + op.drop_table("search_space_roles") diff --git 
a/surfsense_backend/alembic/versions/40_move_llm_preferences_to_searchspace.py b/surfsense_backend/alembic/versions/40_move_llm_preferences_to_searchspace.py new file mode 100644 index 000000000..1067cffcc --- /dev/null +++ b/surfsense_backend/alembic/versions/40_move_llm_preferences_to_searchspace.py @@ -0,0 +1,63 @@ +"""Move LLM preferences from user-level to search space level + +Revision ID: 40 +Revises: 39 +Create Date: 2025-11-27 + +This migration moves LLM preferences (long_context_llm_id, fast_llm_id, strategic_llm_id) +from the user_search_space_preferences table to the searchspaces table itself. + +This change supports the RBAC model where LLM preferences are shared by all members +of a search space, rather than being per-user. +""" + +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "40" +down_revision = "39" +branch_labels = None +depends_on = None + + +def upgrade(): + # Add LLM preference columns to searchspaces table + op.add_column( + "searchspaces", + sa.Column("long_context_llm_id", sa.Integer(), nullable=True), + ) + op.add_column( + "searchspaces", + sa.Column("fast_llm_id", sa.Integer(), nullable=True), + ) + op.add_column( + "searchspaces", + sa.Column("strategic_llm_id", sa.Integer(), nullable=True), + ) + + # Migrate existing preferences from user_search_space_preferences to searchspaces + # We take the owner's preferences (the user who created the search space) + connection = op.get_bind() + + # Get all search spaces and their owner's preferences + connection.execute( + sa.text(""" + UPDATE searchspaces ss + SET + long_context_llm_id = usp.long_context_llm_id, + fast_llm_id = usp.fast_llm_id, + strategic_llm_id = usp.strategic_llm_id + FROM user_search_space_preferences usp + WHERE ss.id = usp.search_space_id + AND ss.user_id = usp.user_id + """) + ) + + +def downgrade(): + # Remove LLM preference columns from searchspaces table + op.drop_column("searchspaces", "strategic_llm_id") + 
op.drop_column("searchspaces", "fast_llm_id") + op.drop_column("searchspaces", "long_context_llm_id") diff --git a/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py b/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py new file mode 100644 index 000000000..3a9b3e698 --- /dev/null +++ b/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py @@ -0,0 +1,212 @@ +"""Backfill RBAC data for existing search spaces + +Revision ID: 41 +Revises: 40 +Create Date: 2025-11-28 + +This migration creates default roles and owner memberships for all existing +search spaces that were created before the RBAC system was implemented. +""" + +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "41" +down_revision = "40" +branch_labels = None +depends_on = None + +# Default role permissions (must match DEFAULT_ROLE_PERMISSIONS in db.py) +DEFAULT_ROLES = [ + { + "name": "Owner", + "description": "Full access to all resources", + "permissions": ["*"], + "is_system_role": True, + "is_default": False, + }, + { + "name": "Admin", + "description": "Can manage members, roles, and all content", + "permissions": [ + "documents:create", + "documents:read", + "documents:update", + "documents:delete", + "chats:create", + "chats:read", + "chats:update", + "chats:delete", + "llm_configs:create", + "llm_configs:read", + "llm_configs:update", + "llm_configs:delete", + "logs:read", + "logs:delete", + "podcasts:create", + "podcasts:read", + "podcasts:update", + "podcasts:delete", + "connectors:create", + "connectors:read", + "connectors:update", + "connectors:delete", + "members:read", + "members:update", + "members:delete", + "roles:create", + "roles:read", + "roles:update", + "roles:delete", + "invites:create", + "invites:read", + "invites:delete", + "settings:read", + "settings:update", + ], + "is_system_role": True, + "is_default": False, + }, + { + "name": 
"Editor", + "description": "Can create and edit content", + "permissions": [ + "documents:create", + "documents:read", + "documents:update", + "chats:create", + "chats:read", + "chats:update", + "llm_configs:read", + "logs:read", + "podcasts:create", + "podcasts:read", + "podcasts:update", + "connectors:create", + "connectors:read", + "connectors:update", + "members:read", + "roles:read", + ], + "is_system_role": True, + "is_default": True, + }, + { + "name": "Viewer", + "description": "Read-only access to content", + "permissions": [ + "documents:read", + "chats:read", + "llm_configs:read", + "logs:read", + "podcasts:read", + "connectors:read", + "members:read", + "roles:read", + ], + "is_system_role": True, + "is_default": False, + }, +] + + +def upgrade(): + connection = op.get_bind() + + # Get all existing search spaces that don't have roles yet + search_spaces = connection.execute( + sa.text(""" + SELECT ss.id, ss.user_id + FROM searchspaces ss + WHERE NOT EXISTS ( + SELECT 1 FROM search_space_roles ssr + WHERE ssr.search_space_id = ss.id + ) + """) + ).fetchall() + + for ss_id, owner_user_id in search_spaces: + owner_role_id = None + + # Create default roles for each search space + for role in DEFAULT_ROLES: + # Convert permissions list to PostgreSQL array literal format for raw SQL + perms_literal = ( + "ARRAY[" + ",".join(f"'{p}'" for p in role["permissions"]) + "]::TEXT[]" + ) + + result = connection.execute( + sa.text(f""" + INSERT INTO search_space_roles + (name, description, permissions, is_default, is_system_role, search_space_id) + VALUES (:name, :description, {perms_literal}, :is_default, :is_system_role, :search_space_id) + RETURNING id + """), + { + "name": role["name"], + "description": role["description"], + "is_default": role["is_default"], + "is_system_role": role["is_system_role"], + "search_space_id": ss_id, + }, + ) + role_id = result.fetchone()[0] + + # Keep track of Owner role ID + if role["name"] == "Owner": + owner_role_id = role_id + + 
# Create owner membership for the search space creator + if owner_user_id and owner_role_id: + # Check if membership already exists + existing = connection.execute( + sa.text(""" + SELECT 1 FROM search_space_memberships + WHERE user_id = :user_id AND search_space_id = :search_space_id + """), + {"user_id": owner_user_id, "search_space_id": ss_id}, + ).fetchone() + + if not existing: + connection.execute( + sa.text(""" + INSERT INTO search_space_memberships + (user_id, search_space_id, role_id, is_owner) + VALUES (:user_id, :search_space_id, :role_id, TRUE) + """), + { + "user_id": owner_user_id, + "search_space_id": ss_id, + "role_id": owner_role_id, + }, + ) + + +def downgrade(): + # This migration only adds data, not schema changes + # Downgrade would remove all roles and memberships created by this migration + # However, this is destructive and may affect manually created data + # So we only remove system roles and owner memberships that were auto-created + connection = op.get_bind() + + # Remove memberships where user is owner and role is system Owner role + connection.execute( + sa.text(""" + DELETE FROM search_space_memberships ssm + USING search_space_roles ssr + WHERE ssm.role_id = ssr.id + AND ssm.is_owner = TRUE + AND ssr.is_system_role = TRUE + AND ssr.name = 'Owner' + """) + ) + + # Remove system roles + connection.execute( + sa.text(""" + DELETE FROM search_space_roles + WHERE is_system_role = TRUE + """) + ) diff --git a/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py b/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py new file mode 100644 index 000000000..9144421d8 --- /dev/null +++ b/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py @@ -0,0 +1,52 @@ +"""Drop user_search_space_preferences table + +Revision ID: 42 +Revises: 41 +Create Date: 2025-11-28 + +This table is no longer needed after RBAC implementation: +- LLM preferences are now stored on SearchSpace directly +- 
User-SearchSpace relationships are handled by SearchSpaceMembership +""" + +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "42" +down_revision = "41" +branch_labels = None +depends_on = None + + +def upgrade(): + # Drop the user_search_space_preferences table + op.drop_table("user_search_space_preferences") + + +def downgrade(): + # Recreate the table if rolling back + op.create_table( + "user_search_space_preferences", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column( + "created_at", sa.DateTime(timezone=True), server_default=sa.func.now() + ), + sa.Column( + "user_id", + sa.UUID(), + sa.ForeignKey("user.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "search_space_id", + sa.Integer(), + sa.ForeignKey("searchspaces.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column("long_context_llm_id", sa.Integer(), nullable=True), + sa.Column("fast_llm_id", sa.Integer(), nullable=True), + sa.Column("strategic_llm_id", sa.Integer(), nullable=True), + sa.UniqueConstraint("user_id", "search_space_id", name="uq_user_searchspace"), + ) diff --git a/surfsense_backend/app/agents/researcher/nodes.py b/surfsense_backend/app/agents/researcher/nodes.py index 7b0e18a11..c53e3348f 100644 --- a/surfsense_backend/app/agents/researcher/nodes.py +++ b/surfsense_backend/app/agents/researcher/nodes.py @@ -11,7 +11,7 @@ from sqlalchemy.ext.asyncio import AsyncSession # Additional imports for document fetching from sqlalchemy.future import select -from app.db import Document, SearchSpace +from app.db import Document from app.services.connector_service import ConnectorService from app.services.query_service import QueryService @@ -92,19 +92,18 @@ def extract_sources_from_documents( async def fetch_documents_by_ids( - document_ids: list[int], user_id: str, db_session: AsyncSession + document_ids: list[int], search_space_id: int, db_session: AsyncSession ) -> tuple[list[dict[str, Any]], list[dict[str, 
Any]]]: """ - Fetch documents by their IDs with ownership check using DOCUMENTS mode approach. + Fetch documents by their IDs within a search space. - This function ensures that only documents belonging to the user are fetched, - providing security by checking ownership through SearchSpace association. + This function ensures that only documents belonging to the search space are fetched. Similar to SearchMode.DOCUMENTS, it fetches full documents and concatenates their chunks. Also creates source objects for UI display, grouped by document type. Args: document_ids: List of document IDs to fetch - user_id: The user ID to check ownership + search_space_id: The search space ID to filter by db_session: The database session Returns: @@ -114,11 +113,12 @@ async def fetch_documents_by_ids( return [], [] try: - # Query documents with ownership check + # Query documents filtered by search space result = await db_session.execute( - select(Document) - .join(SearchSpace) - .filter(Document.id.in_(document_ids), SearchSpace.user_id == user_id) + select(Document).filter( + Document.id.in_(document_ids), + Document.search_space_id == search_space_id, + ) ) documents = result.scalars().all() @@ -515,7 +515,6 @@ async def fetch_documents_by_ids( async def fetch_relevant_documents( research_questions: list[str], - user_id: str, search_space_id: int, db_session: AsyncSession, connectors_to_search: list[str], @@ -536,7 +535,6 @@ async def fetch_relevant_documents( Args: research_questions: List of research questions to find documents for - user_id: The user ID search_space_id: The search space ID db_session: The database session connectors_to_search: List of connectors to search @@ -619,7 +617,6 @@ async def fetch_relevant_documents( youtube_chunks, ) = await connector_service.search_youtube( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -646,7 +643,6 @@ async def fetch_relevant_documents( extension_chunks, ) 
= await connector_service.search_extension( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -673,7 +669,6 @@ async def fetch_relevant_documents( crawled_urls_chunks, ) = await connector_service.search_crawled_urls( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -689,7 +684,7 @@ async def fetch_relevant_documents( writer( { "yield_value": streaming_service.format_terminal_info_delta( - f"🌐 Found {len(crawled_urls_chunks)} Web Pages chunks related to your query" + f"🌐 Found {len(crawled_urls_chunks)} Web Page chunks related to your query" ) } ) @@ -697,7 +692,6 @@ async def fetch_relevant_documents( elif connector == "FILE": source_object, files_chunks = await connector_service.search_files( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -721,7 +715,6 @@ async def fetch_relevant_documents( elif connector == "SLACK_CONNECTOR": source_object, slack_chunks = await connector_service.search_slack( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -748,7 +741,6 @@ async def fetch_relevant_documents( notion_chunks, ) = await connector_service.search_notion( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -775,7 +767,6 @@ async def fetch_relevant_documents( github_chunks, ) = await connector_service.search_github( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -802,7 +793,6 @@ async def fetch_relevant_documents( linear_chunks, ) = await connector_service.search_linear( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -829,7 +819,6 @@ async def 
fetch_relevant_documents( tavily_chunks, ) = await connector_service.search_tavily( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, ) @@ -855,7 +844,6 @@ async def fetch_relevant_documents( searx_chunks, ) = await connector_service.search_searxng( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, ) @@ -881,7 +869,6 @@ async def fetch_relevant_documents( linkup_chunks, ) = await connector_service.search_linkup( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, mode=linkup_mode, ) @@ -907,7 +894,6 @@ async def fetch_relevant_documents( baidu_chunks, ) = await connector_service.search_baidu( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, ) @@ -933,7 +919,6 @@ async def fetch_relevant_documents( discord_chunks, ) = await connector_service.search_discord( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -955,7 +940,6 @@ async def fetch_relevant_documents( elif connector == "JIRA_CONNECTOR": source_object, jira_chunks = await connector_service.search_jira( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -981,7 +965,6 @@ async def fetch_relevant_documents( calendar_chunks, ) = await connector_service.search_google_calendar( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -1007,7 +990,6 @@ async def fetch_relevant_documents( airtable_chunks, ) = await connector_service.search_airtable( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -1033,7 +1015,6 @@ async def fetch_relevant_documents( gmail_chunks, ) = await connector_service.search_google_gmail( user_query=reformulated_query, - user_id=user_id, 
search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -1059,7 +1040,6 @@ async def fetch_relevant_documents( confluence_chunks, ) = await connector_service.search_confluence( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -1085,7 +1065,6 @@ async def fetch_relevant_documents( clickup_chunks, ) = await connector_service.search_clickup( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -1112,7 +1091,6 @@ async def fetch_relevant_documents( luma_chunks, ) = await connector_service.search_luma( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -1139,7 +1117,6 @@ async def fetch_relevant_documents( elasticsearch_chunks, ) = await connector_service.search_elasticsearch( user_query=reformulated_query, - user_id=user_id, search_space_id=search_space_id, top_k=top_k, search_mode=search_mode, @@ -1315,7 +1292,6 @@ async def reformulate_user_query( reformulated_query = await QueryService.reformulate_query_with_chat_history( user_query=user_query, session=state.db_session, - user_id=configuration.user_id, search_space_id=configuration.search_space_id, chat_history_str=chat_history_str, ) @@ -1389,7 +1365,7 @@ async def handle_qna_workflow( user_selected_documents, ) = await fetch_documents_by_ids( document_ids=configuration.document_ids_to_add_in_context, - user_id=configuration.user_id, + search_space_id=configuration.search_space_id, db_session=state.db_session, ) @@ -1404,7 +1380,7 @@ async def handle_qna_workflow( # Create connector service using state db_session connector_service = ConnectorService( - state.db_session, user_id=configuration.user_id + state.db_session, search_space_id=configuration.search_space_id ) await connector_service.initialize_counter() @@ -1413,7 +1389,6 @@ async def handle_qna_workflow( relevant_documents = 
await fetch_relevant_documents( research_questions=research_questions, - user_id=configuration.user_id, search_space_id=configuration.search_space_id, db_session=state.db_session, connectors_to_search=configuration.connectors_to_search, @@ -1459,7 +1434,6 @@ async def handle_qna_workflow( "user_query": user_query, # Use the reformulated query "reformulated_query": reformulated_query, "relevant_documents": all_documents, # Use combined documents - "user_id": configuration.user_id, "search_space_id": configuration.search_space_id, "language": configuration.language, } @@ -1551,12 +1525,11 @@ async def generate_further_questions( Returns: Dict containing the further questions in the "further_questions" key for state update. """ - from app.services.llm_service import get_user_fast_llm + from app.services.llm_service import get_fast_llm # Get configuration and state data configuration = Configuration.from_runnable_config(config) chat_history = state.chat_history - user_id = configuration.user_id search_space_id = configuration.search_space_id streaming_service = state.streaming_service @@ -1571,10 +1544,10 @@ async def generate_further_questions( } ) - # Get user's fast LLM - llm = await get_user_fast_llm(state.db_session, user_id, search_space_id) + # Get search space's fast LLM + llm = await get_fast_llm(state.db_session, search_space_id) if not llm: - error_message = f"No fast LLM configured for user {user_id} in search space {search_space_id}" + error_message = f"No fast LLM configured for search space {search_space_id}" print(error_message) writer({"yield_value": streaming_service.format_error(error_message)}) diff --git a/surfsense_backend/app/agents/researcher/qna_agent/configuration.py b/surfsense_backend/app/agents/researcher/qna_agent/configuration.py index ea107a575..e7dd9175e 100644 --- a/surfsense_backend/app/agents/researcher/qna_agent/configuration.py +++ b/surfsense_backend/app/agents/researcher/qna_agent/configuration.py @@ -18,7 +18,6 @@ class 
Configuration: relevant_documents: list[ Any ] # Documents provided directly to the agent for answering - user_id: str # User identifier search_space_id: int # Search space identifier language: str | None = None # Language for responses diff --git a/surfsense_backend/app/agents/researcher/qna_agent/default_prompts.py b/surfsense_backend/app/agents/researcher/qna_agent/default_prompts.py index 18ad16682..7b5d251fe 100644 --- a/surfsense_backend/app/agents/researcher/qna_agent/default_prompts.py +++ b/surfsense_backend/app/agents/researcher/qna_agent/default_prompts.py @@ -17,7 +17,6 @@ You are SurfSense, an advanced AI research assistant that provides detailed, wel {chat_history_section} - EXTENSION: "Web content saved via SurfSense browser extension" (personal browsing history) -- CRAWLED_URL: "Webpages indexed by SurfSense web crawler" (personally selected websites) - FILE: "User-uploaded documents (PDFs, Word, etc.)" (personal files) - SLACK_CONNECTOR: "Slack conversations and shared content" (personal workspace communications) - NOTION_CONNECTOR: "Notion workspace pages and databases" (personal knowledge management) @@ -35,6 +34,7 @@ You are SurfSense, an advanced AI research assistant that provides detailed, wel - TAVILY_API: "Tavily search API results" (personalized search results) - LINKUP_API: "Linkup search API results" (personalized search results) - LUMA_CONNECTOR: "Luma events" +- WEBCRAWLER_CONNECTOR: "Webpages indexed by SurfSense" (personally selected websites) diff --git a/surfsense_backend/app/agents/researcher/qna_agent/nodes.py b/surfsense_backend/app/agents/researcher/qna_agent/nodes.py index 3112a581a..37bdbc362 100644 --- a/surfsense_backend/app/agents/researcher/qna_agent/nodes.py +++ b/surfsense_backend/app/agents/researcher/qna_agent/nodes.py @@ -142,13 +142,12 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any Returns: Dict containing the final answer in the "final_answer" key. 
""" - from app.services.llm_service import get_user_fast_llm + from app.services.llm_service import get_fast_llm # Get configuration and relevant documents from configuration configuration = Configuration.from_runnable_config(config) documents = state.reranked_documents user_query = configuration.user_query - user_id = configuration.user_id search_space_id = configuration.search_space_id language = configuration.language @@ -178,10 +177,10 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any else "" ) - # Get user's fast LLM - llm = await get_user_fast_llm(state.db_session, user_id, search_space_id) + # Get search space's fast LLM + llm = await get_fast_llm(state.db_session, search_space_id) if not llm: - error_message = f"No fast LLM configured for user {user_id} in search space {search_space_id}" + error_message = f"No fast LLM configured for search space {search_space_id}" print(error_message) raise RuntimeError(error_message) diff --git a/surfsense_backend/app/agents/researcher/utils.py b/surfsense_backend/app/agents/researcher/utils.py index a2c211f28..41d5a1f55 100644 --- a/surfsense_backend/app/agents/researcher/utils.py +++ b/surfsense_backend/app/agents/researcher/utils.py @@ -19,7 +19,6 @@ def get_connector_emoji(connector_name: str) -> str: connector_emojis = { "YOUTUBE_VIDEO": "📹", "EXTENSION": "🧩", - "CRAWLED_URL": "🌐", "FILE": "📄", "SLACK_CONNECTOR": "💬", "NOTION_CONNECTOR": "📘", @@ -34,6 +33,7 @@ def get_connector_emoji(connector_name: str) -> str: "AIRTABLE_CONNECTOR": "🗃️", "LUMA_CONNECTOR": "✨", "ELASTICSEARCH_CONNECTOR": "⚡", + "WEBCRAWLER_CONNECTOR": "🌐", } return connector_emojis.get(connector_name, "🔎") @@ -43,7 +43,6 @@ def get_connector_friendly_name(connector_name: str) -> str: connector_friendly_names = { "YOUTUBE_VIDEO": "YouTube", "EXTENSION": "Browser Extension", - "CRAWLED_URL": "Web Pages", "FILE": "Files", "SLACK_CONNECTOR": "Slack", "NOTION_CONNECTOR": "Notion", @@ -59,6 +58,7 @@ def 
get_connector_friendly_name(connector_name: str) -> str: "AIRTABLE_CONNECTOR": "Airtable", "LUMA_CONNECTOR": "Luma", "ELASTICSEARCH_CONNECTOR": "Elasticsearch", + "WEBCRAWLER_CONNECTOR": "Web Pages", } return connector_friendly_names.get(connector_name, connector_name) diff --git a/surfsense_backend/app/config/__init__.py b/surfsense_backend/app/config/__init__.py index 7d06643e1..efa51d2db 100644 --- a/surfsense_backend/app/config/__init__.py +++ b/surfsense_backend/app/config/__init__.py @@ -208,9 +208,6 @@ class Config: # LlamaCloud API Key LLAMA_CLOUD_API_KEY = os.getenv("LLAMA_CLOUD_API_KEY") - # Firecrawl API Key - FIRECRAWL_API_KEY = os.getenv("FIRECRAWL_API_KEY", None) - # Litellm TTS Configuration TTS_SERVICE = os.getenv("TTS_SERVICE") TTS_SERVICE_API_BASE = os.getenv("TTS_SERVICE_API_BASE") diff --git a/surfsense_backend/app/connectors/webcrawler_connector.py b/surfsense_backend/app/connectors/webcrawler_connector.py new file mode 100644 index 000000000..edd7f8800 --- /dev/null +++ b/surfsense_backend/app/connectors/webcrawler_connector.py @@ -0,0 +1,188 @@ +""" +WebCrawler Connector Module + +A module for crawling web pages and extracting content using Firecrawl or AsyncChromiumLoader. +Provides a unified interface for web scraping. +""" + +from typing import Any + +import validators +from firecrawl import AsyncFirecrawlApp +from langchain_community.document_loaders import AsyncChromiumLoader + + +class WebCrawlerConnector: + """Class for crawling web pages and extracting content.""" + + def __init__(self, firecrawl_api_key: str | None = None): + """ + Initialize the WebCrawlerConnector class. + + Args: + firecrawl_api_key: Firecrawl API key (optional, will use AsyncChromiumLoader if not provided) + """ + self.firecrawl_api_key = firecrawl_api_key + self.use_firecrawl = bool(firecrawl_api_key) + + def set_api_key(self, api_key: str) -> None: + """ + Set the Firecrawl API key and enable Firecrawl usage. 
+ + Args: + api_key: Firecrawl API key + """ + self.firecrawl_api_key = api_key + self.use_firecrawl = True + + async def crawl_url( + self, url: str, formats: list[str] | None = None + ) -> tuple[dict[str, Any] | None, str | None]: + """ + Crawl a single URL and extract its content. + + Args: + url: URL to crawl + formats: List of formats to extract (e.g., ["markdown", "html"]) - only for Firecrawl + + Returns: + Tuple containing (crawl result dict, error message or None) + Result dict contains: + - content: Extracted content (markdown or HTML) + - metadata: Page metadata (title, description, etc.) + - source: Original URL + - crawler_type: Type of crawler used + """ + try: + # Validate URL + if not validators.url(url): + return None, f"Invalid URL: {url}" + + if self.use_firecrawl: + result = await self._crawl_with_firecrawl(url, formats) + else: + result = await self._crawl_with_chromium(url) + + return result, None + + except Exception as e: + return None, f"Error crawling URL {url}: {e!s}" + + async def _crawl_with_firecrawl( + self, url: str, formats: list[str] | None = None + ) -> dict[str, Any]: + """ + Crawl URL using Firecrawl. + + Args: + url: URL to crawl + formats: List of formats to extract + + Returns: + Dict containing crawled content and metadata + + Raises: + ValueError: If Firecrawl scraping fails + """ + if not self.firecrawl_api_key: + raise ValueError("Firecrawl API key not set. 
Call set_api_key() first.") + + firecrawl_app = AsyncFirecrawlApp(api_key=self.firecrawl_api_key) + + # Default to markdown format + if formats is None: + formats = ["markdown"] + + # v2 API returns Document directly and raises an exception on failure + scrape_result = await firecrawl_app.scrape(url, formats=formats) + + if not scrape_result: + raise ValueError("Firecrawl returned no result") + + # Extract content based on format + content = scrape_result.markdown or scrape_result.html or "" + + # Extract metadata - v2 returns DocumentMetadata object + metadata_obj = scrape_result.metadata + metadata = metadata_obj.model_dump() if metadata_obj else {} + + return { + "content": content, + "metadata": { + "source": url, + "title": metadata.get("title", url), + "description": metadata.get("description", ""), + "language": metadata.get("language", ""), + "sourceURL": metadata.get("source_url", url), + **metadata, + }, + "crawler_type": "firecrawl", + } + + async def _crawl_with_chromium(self, url: str) -> dict[str, Any]: + """ + Crawl URL using AsyncChromiumLoader. + + Args: + url: URL to crawl + + Returns: + Dict containing crawled content and metadata + + Raises: + Exception: If crawling fails + """ + crawl_loader = AsyncChromiumLoader(urls=[url], headless=True) + documents = await crawl_loader.aload() + + if not documents: + raise ValueError(f"Failed to load content from {url}") + + doc = documents[0] + + # Extract basic metadata from the document + metadata = doc.metadata if doc.metadata else {} + + return { + "content": doc.page_content, + "metadata": { + "source": url, + "title": metadata.get("title", url), + **metadata, + }, + "crawler_type": "chromium", + } + + def format_to_structured_document(self, crawl_result: dict[str, Any]) -> str: + """ + Format crawl result as a structured document. 
+ + Args: + crawl_result: Result from crawl_url method + + Returns: + Structured document string + """ + metadata = crawl_result["metadata"] + content = crawl_result["content"] + + document_parts = ["", ""] + + # Add all metadata fields + for key, value in metadata.items(): + document_parts.append(f"{key.upper()}: {value}") + + document_parts.extend( + [ + "", + "", + "FORMAT: markdown", + "TEXT_START", + content, + "TEXT_END", + "", + "", + ] + ) + + return "\n".join(document_parts) diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py index 890ea2473..20a4adc23 100644 --- a/surfsense_backend/app/db.py +++ b/surfsense_backend/app/db.py @@ -73,6 +73,7 @@ class SearchSourceConnectorType(str, Enum): AIRTABLE_CONNECTOR = "AIRTABLE_CONNECTOR" LUMA_CONNECTOR = "LUMA_CONNECTOR" ELASTICSEARCH_CONNECTOR = "ELASTICSEARCH_CONNECTOR" + WEBCRAWLER_CONNECTOR = "WEBCRAWLER_CONNECTOR" class ChatType(str, Enum): @@ -130,6 +131,169 @@ class LogStatus(str, Enum): FAILED = "FAILED" +class Permission(str, Enum): + """ + Granular permissions for search space resources. + Use '*' (FULL_ACCESS) to grant all permissions. 
+ """ + + # Documents + DOCUMENTS_CREATE = "documents:create" + DOCUMENTS_READ = "documents:read" + DOCUMENTS_UPDATE = "documents:update" + DOCUMENTS_DELETE = "documents:delete" + + # Chats + CHATS_CREATE = "chats:create" + CHATS_READ = "chats:read" + CHATS_UPDATE = "chats:update" + CHATS_DELETE = "chats:delete" + + # LLM Configs + LLM_CONFIGS_CREATE = "llm_configs:create" + LLM_CONFIGS_READ = "llm_configs:read" + LLM_CONFIGS_UPDATE = "llm_configs:update" + LLM_CONFIGS_DELETE = "llm_configs:delete" + + # Podcasts + PODCASTS_CREATE = "podcasts:create" + PODCASTS_READ = "podcasts:read" + PODCASTS_UPDATE = "podcasts:update" + PODCASTS_DELETE = "podcasts:delete" + + # Connectors + CONNECTORS_CREATE = "connectors:create" + CONNECTORS_READ = "connectors:read" + CONNECTORS_UPDATE = "connectors:update" + CONNECTORS_DELETE = "connectors:delete" + + # Logs + LOGS_READ = "logs:read" + LOGS_DELETE = "logs:delete" + + # Members + MEMBERS_INVITE = "members:invite" + MEMBERS_VIEW = "members:view" + MEMBERS_REMOVE = "members:remove" + MEMBERS_MANAGE_ROLES = "members:manage_roles" + + # Roles + ROLES_CREATE = "roles:create" + ROLES_READ = "roles:read" + ROLES_UPDATE = "roles:update" + ROLES_DELETE = "roles:delete" + + # Search Space Settings + SETTINGS_VIEW = "settings:view" + SETTINGS_UPDATE = "settings:update" + SETTINGS_DELETE = "settings:delete" # Delete the entire search space + + # Full access wildcard + FULL_ACCESS = "*" + + +# Predefined role permission sets for convenience +DEFAULT_ROLE_PERMISSIONS = { + "Owner": [Permission.FULL_ACCESS.value], + "Admin": [ + # Documents + Permission.DOCUMENTS_CREATE.value, + Permission.DOCUMENTS_READ.value, + Permission.DOCUMENTS_UPDATE.value, + Permission.DOCUMENTS_DELETE.value, + # Chats + Permission.CHATS_CREATE.value, + Permission.CHATS_READ.value, + Permission.CHATS_UPDATE.value, + Permission.CHATS_DELETE.value, + # LLM Configs + Permission.LLM_CONFIGS_CREATE.value, + Permission.LLM_CONFIGS_READ.value, + 
Permission.LLM_CONFIGS_UPDATE.value, + Permission.LLM_CONFIGS_DELETE.value, + # Podcasts + Permission.PODCASTS_CREATE.value, + Permission.PODCASTS_READ.value, + Permission.PODCASTS_UPDATE.value, + Permission.PODCASTS_DELETE.value, + # Connectors + Permission.CONNECTORS_CREATE.value, + Permission.CONNECTORS_READ.value, + Permission.CONNECTORS_UPDATE.value, + Permission.CONNECTORS_DELETE.value, + # Logs + Permission.LOGS_READ.value, + Permission.LOGS_DELETE.value, + # Members + Permission.MEMBERS_INVITE.value, + Permission.MEMBERS_VIEW.value, + Permission.MEMBERS_REMOVE.value, + Permission.MEMBERS_MANAGE_ROLES.value, + # Roles + Permission.ROLES_CREATE.value, + Permission.ROLES_READ.value, + Permission.ROLES_UPDATE.value, + Permission.ROLES_DELETE.value, + # Settings (no delete) + Permission.SETTINGS_VIEW.value, + Permission.SETTINGS_UPDATE.value, + ], + "Editor": [ + # Documents + Permission.DOCUMENTS_CREATE.value, + Permission.DOCUMENTS_READ.value, + Permission.DOCUMENTS_UPDATE.value, + Permission.DOCUMENTS_DELETE.value, + # Chats + Permission.CHATS_CREATE.value, + Permission.CHATS_READ.value, + Permission.CHATS_UPDATE.value, + Permission.CHATS_DELETE.value, + # LLM Configs (read only) + Permission.LLM_CONFIGS_READ.value, + Permission.LLM_CONFIGS_CREATE.value, + Permission.LLM_CONFIGS_UPDATE.value, + # Podcasts + Permission.PODCASTS_CREATE.value, + Permission.PODCASTS_READ.value, + Permission.PODCASTS_UPDATE.value, + Permission.PODCASTS_DELETE.value, + # Connectors (full access for editors) + Permission.CONNECTORS_CREATE.value, + Permission.CONNECTORS_READ.value, + Permission.CONNECTORS_UPDATE.value, + # Logs + Permission.LOGS_READ.value, + # Members (view only) + Permission.MEMBERS_VIEW.value, + # Roles (read only) + Permission.ROLES_READ.value, + # Settings (view only) + Permission.SETTINGS_VIEW.value, + ], + "Viewer": [ + # Documents (read only) + Permission.DOCUMENTS_READ.value, + # Chats (read only) + Permission.CHATS_READ.value, + # LLM Configs (read only) + 
Permission.LLM_CONFIGS_READ.value, + # Podcasts (read only) + Permission.PODCASTS_READ.value, + # Connectors (read only) + Permission.CONNECTORS_READ.value, + # Logs (read only) + Permission.LOGS_READ.value, + # Members (view only) + Permission.MEMBERS_VIEW.value, + # Roles (read only) + Permission.ROLES_READ.value, + # Settings (view only) + Permission.SETTINGS_VIEW.value, + ], +} + + class Base(DeclarativeBase): pass @@ -240,6 +404,13 @@ class SearchSpace(BaseModel, TimestampMixin): qna_custom_instructions = Column( Text, nullable=True, default="" ) # User's custom instructions + + # Search space-level LLM preferences (shared by all members) + # Note: These can be negative IDs for global configs (from YAML) or positive IDs for custom configs (from DB) + long_context_llm_id = Column(Integer, nullable=True) + fast_llm_id = Column(Integer, nullable=True) + strategic_llm_id = Column(Integer, nullable=True) + user_id = Column( UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False ) @@ -281,9 +452,24 @@ class SearchSpace(BaseModel, TimestampMixin): order_by="LLMConfig.id", cascade="all, delete-orphan", ) - user_preferences = relationship( - "UserSearchSpacePreference", + + # RBAC relationships + roles = relationship( + "SearchSpaceRole", back_populates="search_space", + order_by="SearchSpaceRole.id", + cascade="all, delete-orphan", + ) + memberships = relationship( + "SearchSpaceMembership", + back_populates="search_space", + order_by="SearchSpaceMembership.id", + cascade="all, delete-orphan", + ) + invites = relationship( + "SearchSpaceInvite", + back_populates="search_space", + order_by="SearchSpaceInvite.id", cascade="all, delete-orphan", ) @@ -347,45 +533,6 @@ class LLMConfig(BaseModel, TimestampMixin): search_space = relationship("SearchSpace", back_populates="llm_configs") -class UserSearchSpacePreference(BaseModel, TimestampMixin): - __tablename__ = "user_search_space_preferences" - __table_args__ = ( - UniqueConstraint( - "user_id", - 
"search_space_id", - name="uq_user_searchspace", - ), - ) - - user_id = Column( - UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False - ) - search_space_id = Column( - Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False - ) - - # User-specific LLM preferences for this search space - # Note: These can be negative IDs for global configs (from YAML) or positive IDs for custom configs (from DB) - # Foreign keys removed to support global configs with negative IDs - long_context_llm_id = Column(Integer, nullable=True) - fast_llm_id = Column(Integer, nullable=True) - strategic_llm_id = Column(Integer, nullable=True) - - # Future RBAC fields can be added here - # role = Column(String(50), nullable=True) # e.g., 'owner', 'editor', 'viewer' - # permissions = Column(JSON, nullable=True) - - user = relationship("User", back_populates="search_space_preferences") - search_space = relationship("SearchSpace", back_populates="user_preferences") - - # Note: Relationships removed because foreign keys no longer exist - # Global configs (negative IDs) don't exist in llm_configs table - # Application code manually fetches configs when needed - # long_context_llm = relationship("LLMConfig", foreign_keys=[long_context_llm_id], post_update=True) - # fast_llm = relationship("LLMConfig", foreign_keys=[fast_llm_id], post_update=True) - # strategic_llm = relationship("LLMConfig", foreign_keys=[strategic_llm_id], post_update=True) - - class Log(BaseModel, TimestampMixin): __tablename__ = "logs" @@ -403,6 +550,140 @@ class Log(BaseModel, TimestampMixin): search_space = relationship("SearchSpace", back_populates="logs") +class SearchSpaceRole(BaseModel, TimestampMixin): + """ + Custom roles that can be defined per search space. + Each search space can have multiple roles with different permission sets. 
+ """ + + __tablename__ = "search_space_roles" + __table_args__ = ( + UniqueConstraint( + "search_space_id", + "name", + name="uq_searchspace_role_name", + ), + ) + + name = Column(String(100), nullable=False, index=True) + description = Column(String(500), nullable=True) + # List of Permission enum values (e.g., ["documents:read", "chats:create"]) + permissions = Column(ARRAY(String), nullable=False, default=[]) + # Whether this role is assigned to new members by default when they join via invite + is_default = Column(Boolean, nullable=False, default=False) + # System roles (Owner, Admin, Editor, Viewer) cannot be deleted + is_system_role = Column(Boolean, nullable=False, default=False) + + search_space_id = Column( + Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False + ) + search_space = relationship("SearchSpace", back_populates="roles") + + memberships = relationship( + "SearchSpaceMembership", back_populates="role", passive_deletes=True + ) + invites = relationship( + "SearchSpaceInvite", back_populates="role", passive_deletes=True + ) + + +class SearchSpaceMembership(BaseModel, TimestampMixin): + """ + Tracks user membership in search spaces with their assigned role. + Each user can be a member of multiple search spaces with different roles. 
+ """ + + __tablename__ = "search_space_memberships" + __table_args__ = ( + UniqueConstraint( + "user_id", + "search_space_id", + name="uq_user_searchspace_membership", + ), + ) + + user_id = Column( + UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False + ) + search_space_id = Column( + Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False + ) + role_id = Column( + Integer, + ForeignKey("search_space_roles.id", ondelete="SET NULL"), + nullable=True, + ) + # Indicates if this user is the original creator/owner of the search space + is_owner = Column(Boolean, nullable=False, default=False) + # Timestamp when the user joined (via invite or as creator) + joined_at = Column( + TIMESTAMP(timezone=True), + nullable=False, + default=lambda: datetime.now(UTC), + ) + # Reference to the invite used to join (null if owner/creator) + invited_by_invite_id = Column( + Integer, + ForeignKey("search_space_invites.id", ondelete="SET NULL"), + nullable=True, + ) + + user = relationship("User", back_populates="search_space_memberships") + search_space = relationship("SearchSpace", back_populates="memberships") + role = relationship("SearchSpaceRole", back_populates="memberships") + invited_by_invite = relationship( + "SearchSpaceInvite", back_populates="used_by_memberships" + ) + + +class SearchSpaceInvite(BaseModel, TimestampMixin): + """ + Invite links for search spaces. + Users can create invite links with specific roles that others can use to join. 
+ """ + + __tablename__ = "search_space_invites" + + # Unique invite code (used in invite URLs) + invite_code = Column(String(64), nullable=False, unique=True, index=True) + + search_space_id = Column( + Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False + ) + # Role to assign when invite is used (null means use default role) + role_id = Column( + Integer, + ForeignKey("search_space_roles.id", ondelete="SET NULL"), + nullable=True, + ) + # User who created this invite + created_by_id = Column( + UUID(as_uuid=True), + ForeignKey("user.id", ondelete="SET NULL"), + nullable=True, + ) + + # Expiration timestamp (null means never expires) + expires_at = Column(TIMESTAMP(timezone=True), nullable=True) + # Maximum number of times this invite can be used (null means unlimited) + max_uses = Column(Integer, nullable=True) + # Number of times this invite has been used + uses_count = Column(Integer, nullable=False, default=0) + # Whether this invite is currently active + is_active = Column(Boolean, nullable=False, default=True) + # Optional custom name/label for the invite + name = Column(String(100), nullable=True) + + search_space = relationship("SearchSpace", back_populates="invites") + role = relationship("SearchSpaceRole", back_populates="invites") + created_by = relationship("User", back_populates="created_invites") + used_by_memberships = relationship( + "SearchSpaceMembership", + back_populates="invited_by_invite", + passive_deletes=True, + ) + + if config.AUTH_TYPE == "GOOGLE": class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base): @@ -413,11 +694,18 @@ if config.AUTH_TYPE == "GOOGLE": "OAuthAccount", lazy="joined" ) search_spaces = relationship("SearchSpace", back_populates="user") - search_space_preferences = relationship( - "UserSearchSpacePreference", + + # RBAC relationships + search_space_memberships = relationship( + "SearchSpaceMembership", back_populates="user", cascade="all, delete-orphan", ) + created_invites = relationship( 
+ "SearchSpaceInvite", + back_populates="created_by", + passive_deletes=True, + ) # Page usage tracking for ETL services pages_limit = Column(Integer, nullable=False, default=500, server_default="500") @@ -427,11 +715,18 @@ else: class User(SQLAlchemyBaseUserTableUUID, Base): search_spaces = relationship("SearchSpace", back_populates="user") - search_space_preferences = relationship( - "UserSearchSpacePreference", + + # RBAC relationships + search_space_memberships = relationship( + "SearchSpaceMembership", back_populates="user", cascade="all, delete-orphan", ) + created_invites = relationship( + "SearchSpaceInvite", + back_populates="created_by", + passive_deletes=True, + ) # Page usage tracking for ETL services pages_limit = Column(Integer, nullable=False, default=500, server_default="500") @@ -502,3 +797,109 @@ async def get_documents_hybrid_search_retriever( session: AsyncSession = Depends(get_async_session), ): return DocumentHybridSearchRetriever(session) + + +def has_permission(user_permissions: list[str], required_permission: str) -> bool: + """ + Check if the user has the required permission. + Supports wildcard (*) for full access. + + Args: + user_permissions: List of permission strings the user has + required_permission: The permission string to check for + + Returns: + True if user has the permission, False otherwise + """ + if not user_permissions: + return False + + # Full access wildcard grants all permissions + if Permission.FULL_ACCESS.value in user_permissions: + return True + + return required_permission in user_permissions + + +def has_any_permission( + user_permissions: list[str], required_permissions: list[str] +) -> bool: + """ + Check if the user has any of the required permissions. 
+ + Args: + user_permissions: List of permission strings the user has + required_permissions: List of permission strings to check for (any match) + + Returns: + True if user has at least one of the permissions, False otherwise + """ + if not user_permissions: + return False + + if Permission.FULL_ACCESS.value in user_permissions: + return True + + return any(perm in user_permissions for perm in required_permissions) + + +def has_all_permissions( + user_permissions: list[str], required_permissions: list[str] +) -> bool: + """ + Check if the user has all of the required permissions. + + Args: + user_permissions: List of permission strings the user has + required_permissions: List of permission strings to check for (all must match) + + Returns: + True if user has all of the permissions, False otherwise + """ + if not user_permissions: + return False + + if Permission.FULL_ACCESS.value in user_permissions: + return True + + return all(perm in user_permissions for perm in required_permissions) + + +def get_default_roles_config() -> list[dict]: + """ + Get the configuration for default system roles. + These roles are created automatically when a search space is created. 
+ + Returns: + List of role configurations with name, description, permissions, and flags + """ + return [ + { + "name": "Owner", + "description": "Full access to all search space resources and settings", + "permissions": DEFAULT_ROLE_PERMISSIONS["Owner"], + "is_default": False, + "is_system_role": True, + }, + { + "name": "Admin", + "description": "Can manage most resources except deleting the search space", + "permissions": DEFAULT_ROLE_PERMISSIONS["Admin"], + "is_default": False, + "is_system_role": True, + }, + { + "name": "Editor", + "description": "Can create and edit documents, chats, and podcasts", + "permissions": DEFAULT_ROLE_PERMISSIONS["Editor"], + "is_default": True, # Default role for new members via invite + "is_system_role": True, + }, + { + "name": "Viewer", + "description": "Read-only access to search space resources", + "permissions": DEFAULT_ROLE_PERMISSIONS["Viewer"], + "is_default": False, + "is_system_role": True, + }, + ] diff --git a/surfsense_backend/app/retriver/chunks_hybrid_search.py b/surfsense_backend/app/retriver/chunks_hybrid_search.py index cb96ac695..25a121ad7 100644 --- a/surfsense_backend/app/retriver/chunks_hybrid_search.py +++ b/surfsense_backend/app/retriver/chunks_hybrid_search.py @@ -12,8 +12,7 @@ class ChucksHybridSearchRetriever: self, query_text: str, top_k: int, - user_id: str, - search_space_id: int | None = None, + search_space_id: int, ) -> list: """ Perform vector similarity search on chunks. 
@@ -21,8 +20,7 @@ class ChucksHybridSearchRetriever: Args: query_text: The search query text top_k: Number of results to return - user_id: The ID of the user performing the search - search_space_id: Optional search space ID to filter results + search_space_id: The search space ID to search within Returns: List of chunks sorted by vector similarity @@ -31,25 +29,20 @@ class ChucksHybridSearchRetriever: from sqlalchemy.orm import joinedload from app.config import config - from app.db import Chunk, Document, SearchSpace + from app.db import Chunk, Document # Get embedding for the query embedding_model = config.embedding_model_instance query_embedding = embedding_model.embed(query_text) - # Build the base query with user ownership check + # Build the query filtered by search space query = ( select(Chunk) .options(joinedload(Chunk.document).joinedload(Document.search_space)) .join(Document, Chunk.document_id == Document.id) - .join(SearchSpace, Document.search_space_id == SearchSpace.id) - .where(SearchSpace.user_id == user_id) + .where(Document.search_space_id == search_space_id) ) - # Add search space filter if provided - if search_space_id is not None: - query = query.where(Document.search_space_id == search_space_id) - # Add vector similarity ordering query = query.order_by(Chunk.embedding.op("<=>")(query_embedding)).limit(top_k) @@ -63,8 +56,7 @@ class ChucksHybridSearchRetriever: self, query_text: str, top_k: int, - user_id: str, - search_space_id: int | None = None, + search_space_id: int, ) -> list: """ Perform full-text keyword search on chunks. 
@@ -72,8 +64,7 @@ class ChucksHybridSearchRetriever: Args: query_text: The search query text top_k: Number of results to return - user_id: The ID of the user performing the search - search_space_id: Optional search space ID to filter results + search_space_id: The search space ID to search within Returns: List of chunks sorted by text relevance @@ -81,28 +72,23 @@ class ChucksHybridSearchRetriever: from sqlalchemy import func, select from sqlalchemy.orm import joinedload - from app.db import Chunk, Document, SearchSpace + from app.db import Chunk, Document # Create tsvector and tsquery for PostgreSQL full-text search tsvector = func.to_tsvector("english", Chunk.content) tsquery = func.plainto_tsquery("english", query_text) - # Build the base query with user ownership check + # Build the query filtered by search space query = ( select(Chunk) .options(joinedload(Chunk.document).joinedload(Document.search_space)) .join(Document, Chunk.document_id == Document.id) - .join(SearchSpace, Document.search_space_id == SearchSpace.id) - .where(SearchSpace.user_id == user_id) + .where(Document.search_space_id == search_space_id) .where( tsvector.op("@@")(tsquery) ) # Only include results that match the query ) - # Add search space filter if provided - if search_space_id is not None: - query = query.where(Document.search_space_id == search_space_id) - # Add text search ranking query = query.order_by(func.ts_rank_cd(tsvector, tsquery).desc()).limit(top_k) @@ -116,8 +102,7 @@ class ChucksHybridSearchRetriever: self, query_text: str, top_k: int, - user_id: str, - search_space_id: int | None = None, + search_space_id: int, document_type: str | None = None, ) -> list: """ @@ -126,8 +111,7 @@ class ChucksHybridSearchRetriever: Args: query_text: The search query text top_k: Number of results to return - user_id: The ID of the user performing the search - search_space_id: Optional search space ID to filter results + search_space_id: The search space ID to search within document_type: 
Optional document type to filter results (e.g., "FILE", "CRAWLED_URL") Returns: @@ -137,7 +121,7 @@ class ChucksHybridSearchRetriever: from sqlalchemy.orm import joinedload from app.config import config - from app.db import Chunk, Document, DocumentType, SearchSpace + from app.db import Chunk, Document, DocumentType # Get embedding for the query embedding_model = config.embedding_model_instance @@ -151,12 +135,8 @@ class ChucksHybridSearchRetriever: tsvector = func.to_tsvector("english", Chunk.content) tsquery = func.plainto_tsquery("english", query_text) - # Base conditions for document filtering - base_conditions = [SearchSpace.user_id == user_id] - - # Add search space filter if provided - if search_space_id is not None: - base_conditions.append(Document.search_space_id == search_space_id) + # Base conditions for chunk filtering - search space is required + base_conditions = [Document.search_space_id == search_space_id] # Add document type filter if provided if document_type is not None: @@ -171,7 +151,7 @@ class ChucksHybridSearchRetriever: else: base_conditions.append(Document.document_type == document_type) - # CTE for semantic search with user ownership check + # CTE for semantic search filtered by search space semantic_search_cte = ( select( Chunk.id, @@ -180,7 +160,6 @@ class ChucksHybridSearchRetriever: .label("rank"), ) .join(Document, Chunk.document_id == Document.id) - .join(SearchSpace, Document.search_space_id == SearchSpace.id) .where(*base_conditions) ) @@ -190,7 +169,7 @@ class ChucksHybridSearchRetriever: .cte("semantic_search") ) - # CTE for keyword search with user ownership check + # CTE for keyword search filtered by search space keyword_search_cte = ( select( Chunk.id, @@ -199,7 +178,6 @@ class ChucksHybridSearchRetriever: .label("rank"), ) .join(Document, Chunk.document_id == Document.id) - .join(SearchSpace, Document.search_space_id == SearchSpace.id) .where(*base_conditions) .where(tsvector.op("@@")(tsquery)) ) diff --git 
a/surfsense_backend/app/retriver/documents_hybrid_search.py b/surfsense_backend/app/retriver/documents_hybrid_search.py index b4e826189..0c08ecc05 100644 --- a/surfsense_backend/app/retriver/documents_hybrid_search.py +++ b/surfsense_backend/app/retriver/documents_hybrid_search.py @@ -12,8 +12,7 @@ class DocumentHybridSearchRetriever: self, query_text: str, top_k: int, - user_id: str, - search_space_id: int | None = None, + search_space_id: int, ) -> list: """ Perform vector similarity search on documents. @@ -21,8 +20,7 @@ class DocumentHybridSearchRetriever: Args: query_text: The search query text top_k: Number of results to return - user_id: The ID of the user performing the search - search_space_id: Optional search space ID to filter results + search_space_id: The search space ID to search within Returns: List of documents sorted by vector similarity @@ -31,24 +29,19 @@ class DocumentHybridSearchRetriever: from sqlalchemy.orm import joinedload from app.config import config - from app.db import Document, SearchSpace + from app.db import Document # Get embedding for the query embedding_model = config.embedding_model_instance query_embedding = embedding_model.embed(query_text) - # Build the base query with user ownership check + # Build the query filtered by search space query = ( select(Document) .options(joinedload(Document.search_space)) - .join(SearchSpace, Document.search_space_id == SearchSpace.id) - .where(SearchSpace.user_id == user_id) + .where(Document.search_space_id == search_space_id) ) - # Add search space filter if provided - if search_space_id is not None: - query = query.where(Document.search_space_id == search_space_id) - # Add vector similarity ordering query = query.order_by(Document.embedding.op("<=>")(query_embedding)).limit( top_k @@ -64,8 +57,7 @@ class DocumentHybridSearchRetriever: self, query_text: str, top_k: int, - user_id: str, - search_space_id: int | None = None, + search_space_id: int, ) -> list: """ Perform full-text keyword 
search on documents. @@ -73,8 +65,7 @@ class DocumentHybridSearchRetriever: Args: query_text: The search query text top_k: Number of results to return - user_id: The ID of the user performing the search - search_space_id: Optional search space ID to filter results + search_space_id: The search space ID to search within Returns: List of documents sorted by text relevance @@ -82,27 +73,22 @@ class DocumentHybridSearchRetriever: from sqlalchemy import func, select from sqlalchemy.orm import joinedload - from app.db import Document, SearchSpace + from app.db import Document # Create tsvector and tsquery for PostgreSQL full-text search tsvector = func.to_tsvector("english", Document.content) tsquery = func.plainto_tsquery("english", query_text) - # Build the base query with user ownership check + # Build the query filtered by search space query = ( select(Document) .options(joinedload(Document.search_space)) - .join(SearchSpace, Document.search_space_id == SearchSpace.id) - .where(SearchSpace.user_id == user_id) + .where(Document.search_space_id == search_space_id) .where( tsvector.op("@@")(tsquery) ) # Only include results that match the query ) - # Add search space filter if provided - if search_space_id is not None: - query = query.where(Document.search_space_id == search_space_id) - # Add text search ranking query = query.order_by(func.ts_rank_cd(tsvector, tsquery).desc()).limit(top_k) @@ -116,8 +102,7 @@ class DocumentHybridSearchRetriever: self, query_text: str, top_k: int, - user_id: str, - search_space_id: int | None = None, + search_space_id: int, document_type: str | None = None, ) -> list: """ @@ -126,8 +111,7 @@ class DocumentHybridSearchRetriever: Args: query_text: The search query text top_k: Number of results to return - user_id: The ID of the user performing the search - search_space_id: Optional search space ID to filter results + search_space_id: The search space ID to search within document_type: Optional document type to filter results (e.g., "FILE", 
"CRAWLED_URL") """ @@ -135,7 +119,7 @@ class DocumentHybridSearchRetriever: from sqlalchemy.orm import joinedload from app.config import config - from app.db import Document, DocumentType, SearchSpace + from app.db import Document, DocumentType # Get embedding for the query embedding_model = config.embedding_model_instance @@ -149,12 +133,8 @@ class DocumentHybridSearchRetriever: tsvector = func.to_tsvector("english", Document.content) tsquery = func.plainto_tsquery("english", query_text) - # Base conditions for document filtering - base_conditions = [SearchSpace.user_id == user_id] - - # Add search space filter if provided - if search_space_id is not None: - base_conditions.append(Document.search_space_id == search_space_id) + # Base conditions for document filtering - search space is required + base_conditions = [Document.search_space_id == search_space_id] # Add document type filter if provided if document_type is not None: @@ -169,17 +149,13 @@ class DocumentHybridSearchRetriever: else: base_conditions.append(Document.document_type == document_type) - # CTE for semantic search with user ownership check - semantic_search_cte = ( - select( - Document.id, - func.rank() - .over(order_by=Document.embedding.op("<=>")(query_embedding)) - .label("rank"), - ) - .join(SearchSpace, Document.search_space_id == SearchSpace.id) - .where(*base_conditions) - ) + # CTE for semantic search filtered by search space + semantic_search_cte = select( + Document.id, + func.rank() + .over(order_by=Document.embedding.op("<=>")(query_embedding)) + .label("rank"), + ).where(*base_conditions) semantic_search_cte = ( semantic_search_cte.order_by(Document.embedding.op("<=>")(query_embedding)) @@ -187,7 +163,7 @@ class DocumentHybridSearchRetriever: .cte("semantic_search") ) - # CTE for keyword search with user ownership check + # CTE for keyword search filtered by search space keyword_search_cte = ( select( Document.id, @@ -195,7 +171,6 @@ class DocumentHybridSearchRetriever: 
.over(order_by=func.ts_rank_cd(tsvector, tsquery).desc()) .label("rank"), ) - .join(SearchSpace, Document.search_space_id == SearchSpace.id) .where(*base_conditions) .where(tsvector.op("@@")(tsquery)) ) diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index 3edcf8cf8..10ddefe14 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -16,13 +16,14 @@ from .llm_config_routes import router as llm_config_router from .logs_routes import router as logs_router from .luma_add_connector_route import router as luma_add_connector_router from .podcasts_routes import router as podcasts_router +from .rbac_routes import router as rbac_router from .search_source_connectors_routes import router as search_source_connectors_router from .search_spaces_routes import router as search_spaces_router router = APIRouter() router.include_router(search_spaces_router) -router.include_router(editor_router) +router.include_router(rbac_router) # RBAC routes for roles, members, invites router.include_router(documents_router) router.include_router(podcasts_router) router.include_router(chats_router) diff --git a/surfsense_backend/app/routes/chats_routes.py b/surfsense_backend/app/routes/chats_routes.py index 05360cee0..d7aff102b 100644 --- a/surfsense_backend/app/routes/chats_routes.py +++ b/surfsense_backend/app/routes/chats_routes.py @@ -6,7 +6,14 @@ from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from sqlalchemy.orm import selectinload -from app.db import Chat, SearchSpace, User, UserSearchSpacePreference, get_async_session +from app.db import ( + Chat, + Permission, + SearchSpace, + SearchSpaceMembership, + User, + get_async_session, +) from app.schemas import ( AISDKChatRequest, ChatCreate, @@ -16,7 +23,7 @@ from app.schemas import ( ) from app.tasks.stream_connector_search_results import stream_connector_search_results from app.users import current_active_user 
-from app.utils.check_ownership import check_ownership +from app.utils.rbac import check_permission from app.utils.validators import ( validate_connectors, validate_document_ids, @@ -59,45 +66,38 @@ async def handle_chat_data( # print("RESQUEST DATA:", request_data) # print("SELECTED CONNECTORS:", selected_connectors) - # Check if the search space belongs to the current user + # Check if the user has chat access to the search space try: - await check_ownership(session, SearchSpace, search_space_id, user) - language_result = await session.execute( - select(UserSearchSpacePreference) - .options( - selectinload(UserSearchSpacePreference.search_space).selectinload( - SearchSpace.llm_configs - ), - # Note: Removed selectinload for LLM relationships as they no longer exist - # Global configs (negative IDs) don't have foreign keys - # LLM configs are now fetched manually when needed - ) - .filter( - UserSearchSpacePreference.search_space_id == search_space_id, - UserSearchSpacePreference.user_id == user.id, - ) + await check_permission( + session, + user, + search_space_id, + Permission.CHATS_CREATE.value, + "You don't have permission to use chat in this search space", ) - user_preference = language_result.scalars().first() - # print("UserSearchSpacePreference:", user_preference) + + # Get search space with LLM configs (preferences are now stored at search space level) + search_space_result = await session.execute( + select(SearchSpace) + .options(selectinload(SearchSpace.llm_configs)) + .filter(SearchSpace.id == search_space_id) + ) + search_space = search_space_result.scalars().first() language = None llm_configs = [] # Initialize to empty list - if ( - user_preference - and user_preference.search_space - and user_preference.search_space.llm_configs - ): - llm_configs = user_preference.search_space.llm_configs + if search_space and search_space.llm_configs: + llm_configs = search_space.llm_configs - # Manually fetch LLM configs since relationships no longer exist - # 
Check fast_llm, long_context_llm, and strategic_llm IDs + # Get language from configured LLM preferences + # LLM preferences are now stored on the SearchSpace model from app.config import config as app_config for llm_id in [ - user_preference.fast_llm_id, - user_preference.long_context_llm_id, - user_preference.strategic_llm_id, + search_space.fast_llm_id, + search_space.long_context_llm_id, + search_space.strategic_llm_id, ]: if llm_id is not None: # Check if it's a global config (negative ID) @@ -161,8 +161,18 @@ async def create_chat( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Create a new chat. + Requires CHATS_CREATE permission. + """ try: - await check_ownership(session, SearchSpace, chat.search_space_id, user) + await check_permission( + session, + user, + chat.search_space_id, + Permission.CHATS_CREATE.value, + "You don't have permission to create chats in this search space", + ) db_chat = Chat(**chat.model_dump()) session.add(db_chat) await session.commit() @@ -197,6 +207,10 @@ async def read_chats( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + List chats the user has access to. + Requires CHATS_READ permission for the search space(s). 
+ """ # Validate pagination parameters if skip < 0: raise HTTPException( @@ -212,9 +226,17 @@ async def read_chats( status_code=400, detail="search_space_id must be a positive integer" ) try: - # Select specific fields excluding messages - query = ( - select( + if search_space_id is not None: + # Check permission for specific search space + await check_permission( + session, + user, + search_space_id, + Permission.CHATS_READ.value, + "You don't have permission to read chats in this search space", + ) + # Select specific fields excluding messages + query = select( Chat.id, Chat.type, Chat.title, @@ -222,17 +244,28 @@ async def read_chats( Chat.search_space_id, Chat.created_at, Chat.state_version, + ).filter(Chat.search_space_id == search_space_id) + else: + # Get chats from all search spaces user has membership in + query = ( + select( + Chat.id, + Chat.type, + Chat.title, + Chat.initial_connectors, + Chat.search_space_id, + Chat.created_at, + Chat.state_version, + ) + .join(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) ) - .join(SearchSpace) - .filter(SearchSpace.user_id == user.id) - ) - - # Filter by search_space_id if provided - if search_space_id is not None: - query = query.filter(Chat.search_space_id == search_space_id) result = await session.execute(query.offset(skip).limit(limit)) return result.all() + except HTTPException: + raise except OperationalError: raise HTTPException( status_code=503, detail="Database operation failed. Please try again later." @@ -249,19 +282,32 @@ async def read_chat( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Get a specific chat by ID. + Requires CHATS_READ permission for the search space. 
+ """ try: - result = await session.execute( - select(Chat) - .join(SearchSpace) - .filter(Chat.id == chat_id, SearchSpace.user_id == user.id) - ) + result = await session.execute(select(Chat).filter(Chat.id == chat_id)) chat = result.scalars().first() + if not chat: raise HTTPException( status_code=404, - detail="Chat not found or you don't have permission to access it", + detail="Chat not found", ) + + # Check permission for the search space + await check_permission( + session, + user, + chat.search_space_id, + Permission.CHATS_READ.value, + "You don't have permission to read chats in this search space", + ) + return chat + except HTTPException: + raise except OperationalError: raise HTTPException( status_code=503, detail="Database operation failed. Please try again later." @@ -280,8 +326,26 @@ async def update_chat( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Update a chat. + Requires CHATS_UPDATE permission for the search space. + """ try: - db_chat = await read_chat(chat_id, session, user) + result = await session.execute(select(Chat).filter(Chat.id == chat_id)) + db_chat = result.scalars().first() + + if not db_chat: + raise HTTPException(status_code=404, detail="Chat not found") + + # Check permission for the search space + await check_permission( + session, + user, + db_chat.search_space_id, + Permission.CHATS_UPDATE.value, + "You don't have permission to update chats in this search space", + ) + update_data = chat_update.model_dump(exclude_unset=True) for key, value in update_data.items(): if key == "messages": @@ -318,8 +382,26 @@ async def delete_chat( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Delete a chat. + Requires CHATS_DELETE permission for the search space. 
+ """ try: - db_chat = await read_chat(chat_id, session, user) + result = await session.execute(select(Chat).filter(Chat.id == chat_id)) + db_chat = result.scalars().first() + + if not db_chat: + raise HTTPException(status_code=404, detail="Chat not found") + + # Check permission for the search space + await check_permission( + session, + user, + db_chat.search_space_id, + Permission.CHATS_DELETE.value, + "You don't have permission to delete chats in this search space", + ) + await session.delete(db_chat) await session.commit() return {"message": "Chat deleted successfully"} diff --git a/surfsense_backend/app/routes/documents_routes.py b/surfsense_backend/app/routes/documents_routes.py index 344a2503d..67015243f 100644 --- a/surfsense_backend/app/routes/documents_routes.py +++ b/surfsense_backend/app/routes/documents_routes.py @@ -10,7 +10,9 @@ from app.db import ( Chunk, Document, DocumentType, + Permission, SearchSpace, + SearchSpaceMembership, User, get_async_session, ) @@ -22,7 +24,7 @@ from app.schemas import ( PaginatedResponse, ) from app.users import current_active_user -from app.utils.check_ownership import check_ownership +from app.utils.rbac import check_permission try: asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy()) @@ -44,9 +46,19 @@ async def create_documents( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Create new documents. + Requires DOCUMENTS_CREATE permission. 
+ """ try: - # Check if the user owns the search space - await check_ownership(session, SearchSpace, request.search_space_id, user) + # Check permission + await check_permission( + session, + user, + request.search_space_id, + Permission.DOCUMENTS_CREATE.value, + "You don't have permission to create documents in this search space", + ) if request.document_type == DocumentType.EXTENSION: from app.tasks.celery_tasks.document_tasks import ( @@ -65,13 +77,6 @@ async def create_documents( process_extension_document_task.delay( document_dict, request.search_space_id, str(user.id) ) - elif request.document_type == DocumentType.CRAWLED_URL: - from app.tasks.celery_tasks.document_tasks import process_crawled_url_task - - for url in request.content: - process_crawled_url_task.delay( - url, request.search_space_id, str(user.id) - ) elif request.document_type == DocumentType.YOUTUBE_VIDEO: from app.tasks.celery_tasks.document_tasks import process_youtube_video_task @@ -100,8 +105,19 @@ async def create_documents_file_upload( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Upload files as documents. + Requires DOCUMENTS_CREATE permission. + """ try: - await check_ownership(session, SearchSpace, search_space_id, user) + # Check permission + await check_permission( + session, + user, + search_space_id, + Permission.DOCUMENTS_CREATE.value, + "You don't have permission to create documents in this search space", + ) if not files: raise HTTPException(status_code=400, detail="No files provided") @@ -158,7 +174,8 @@ async def read_documents( user: User = Depends(current_active_user), ): """ - List documents owned by the current user, with optional filtering and pagination. + List documents the user has access to, with optional filtering and pagination. + Requires DOCUMENTS_READ permission for the search space(s). Args: skip: Absolute number of items to skip from the beginning. If provided, it takes precedence over 'page'. 
@@ -174,40 +191,49 @@ async def read_documents( Notes: - If both 'skip' and 'page' are provided, 'skip' is used. - - Results are scoped to documents owned by the current user. + - Results are scoped to documents in search spaces the user has membership in. """ try: from sqlalchemy import func - query = ( - select(Document).join(SearchSpace).filter(SearchSpace.user_id == user.id) - ) - - # Filter by search_space_id if provided + # If specific search_space_id, check permission if search_space_id is not None: - query = query.filter(Document.search_space_id == search_space_id) + await check_permission( + session, + user, + search_space_id, + Permission.DOCUMENTS_READ.value, + "You don't have permission to read documents in this search space", + ) + query = select(Document).filter(Document.search_space_id == search_space_id) + count_query = ( + select(func.count()) + .select_from(Document) + .filter(Document.search_space_id == search_space_id) + ) + else: + # Get documents from all search spaces user has membership in + query = ( + select(Document) + .join(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) + ) + count_query = ( + select(func.count()) + .select_from(Document) + .join(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) + ) # Filter by document_types if provided if document_types is not None and document_types.strip(): type_list = [t.strip() for t in document_types.split(",") if t.strip()] if type_list: query = query.filter(Document.document_type.in_(type_list)) - - # Get total count - count_query = ( - select(func.count()) - .select_from(Document) - .join(SearchSpace) - .filter(SearchSpace.user_id == user.id) - ) - if search_space_id is not None: - count_query = count_query.filter( - Document.search_space_id == search_space_id - ) - if document_types is not None and document_types.strip(): - type_list = [t.strip() for t in document_types.split(",") if t.strip()] - if 
type_list: count_query = count_query.filter(Document.document_type.in_(type_list)) + total_result = await session.execute(count_query) total = total_result.scalar() or 0 @@ -242,6 +268,8 @@ async def read_documents( ) return PaginatedResponse(items=api_documents, total=total) + except HTTPException: + raise except Exception as e: raise HTTPException( status_code=500, detail=f"Failed to fetch documents: {e!s}" @@ -261,6 +289,7 @@ async def search_documents( ): """ Search documents by title substring, optionally filtered by search_space_id and document_types. + Requires DOCUMENTS_READ permission for the search space(s). Args: title: Case-insensitive substring to match against document titles. Required. @@ -282,37 +311,48 @@ async def search_documents( try: from sqlalchemy import func - query = ( - select(Document).join(SearchSpace).filter(SearchSpace.user_id == user.id) - ) + # If specific search_space_id, check permission if search_space_id is not None: - query = query.filter(Document.search_space_id == search_space_id) + await check_permission( + session, + user, + search_space_id, + Permission.DOCUMENTS_READ.value, + "You don't have permission to read documents in this search space", + ) + query = select(Document).filter(Document.search_space_id == search_space_id) + count_query = ( + select(func.count()) + .select_from(Document) + .filter(Document.search_space_id == search_space_id) + ) + else: + # Get documents from all search spaces user has membership in + query = ( + select(Document) + .join(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) + ) + count_query = ( + select(func.count()) + .select_from(Document) + .join(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) + ) # Only search by title (case-insensitive) query = query.filter(Document.title.ilike(f"%{title}%")) + count_query = count_query.filter(Document.title.ilike(f"%{title}%")) # Filter by document_types if 
provided if document_types is not None and document_types.strip(): type_list = [t.strip() for t in document_types.split(",") if t.strip()] if type_list: query = query.filter(Document.document_type.in_(type_list)) - - # Get total count - count_query = ( - select(func.count()) - .select_from(Document) - .join(SearchSpace) - .filter(SearchSpace.user_id == user.id) - ) - if search_space_id is not None: - count_query = count_query.filter( - Document.search_space_id == search_space_id - ) - count_query = count_query.filter(Document.title.ilike(f"%{title}%")) - if document_types is not None and document_types.strip(): - type_list = [t.strip() for t in document_types.split(",") if t.strip()] - if type_list: count_query = count_query.filter(Document.document_type.in_(type_list)) + total_result = await session.execute(count_query) total = total_result.scalar() or 0 @@ -347,6 +387,8 @@ async def search_documents( ) return PaginatedResponse(items=api_documents, total=total) + except HTTPException: + raise except Exception as e: raise HTTPException( status_code=500, detail=f"Failed to search documents: {e!s}" @@ -360,7 +402,8 @@ async def get_document_type_counts( user: User = Depends(current_active_user), ): """ - Get counts of documents by type for the current user. + Get counts of documents by type for search spaces the user has access to. + Requires DOCUMENTS_READ permission for the search space(s). Args: search_space_id: If provided, restrict counts to a specific search space. 
@@ -373,20 +416,36 @@ async def get_document_type_counts( try: from sqlalchemy import func - query = ( - select(Document.document_type, func.count(Document.id)) - .join(SearchSpace) - .filter(SearchSpace.user_id == user.id) - .group_by(Document.document_type) - ) - if search_space_id is not None: - query = query.filter(Document.search_space_id == search_space_id) + # Check permission for specific search space + await check_permission( + session, + user, + search_space_id, + Permission.DOCUMENTS_READ.value, + "You don't have permission to read documents in this search space", + ) + query = ( + select(Document.document_type, func.count(Document.id)) + .filter(Document.search_space_id == search_space_id) + .group_by(Document.document_type) + ) + else: + # Get counts from all search spaces user has membership in + query = ( + select(Document.document_type, func.count(Document.id)) + .join(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) + .group_by(Document.document_type) + ) result = await session.execute(query) type_counts = dict(result.all()) return type_counts + except HTTPException: + raise except Exception as e: raise HTTPException( status_code=500, detail=f"Failed to fetch document type counts: {e!s}" @@ -401,6 +460,7 @@ async def get_document_by_chunk_id( ): """ Retrieves a document based on a chunk ID, including all its chunks ordered by creation time. + Requires DOCUMENTS_READ permission for the search space. The document's embedding and chunk embeddings are excluded from the response. 
""" try: @@ -413,21 +473,29 @@ async def get_document_by_chunk_id( status_code=404, detail=f"Chunk with id {chunk_id} not found" ) - # Get the associated document and verify ownership + # Get the associated document document_result = await session.execute( select(Document) .options(selectinload(Document.chunks)) - .join(SearchSpace) - .filter(Document.id == chunk.document_id, SearchSpace.user_id == user.id) + .filter(Document.id == chunk.document_id) ) document = document_result.scalars().first() if not document: raise HTTPException( status_code=404, - detail="Document not found or you don't have access to it", + detail="Document not found", ) + # Check permission for the search space + await check_permission( + session, + user, + document.search_space_id, + Permission.DOCUMENTS_READ.value, + "You don't have permission to read documents in this search space", + ) + # Sort chunks by creation time sorted_chunks = sorted(document.chunks, key=lambda x: x.created_at) @@ -456,11 +524,13 @@ async def read_document( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Get a specific document by ID. + Requires DOCUMENTS_READ permission for the search space. 
+ """ try: result = await session.execute( - select(Document) - .join(SearchSpace) - .filter(Document.id == document_id, SearchSpace.user_id == user.id) + select(Document).filter(Document.id == document_id) ) document = result.scalars().first() @@ -469,6 +539,15 @@ async def read_document( status_code=404, detail=f"Document with id {document_id} not found" ) + # Check permission for the search space + await check_permission( + session, + user, + document.search_space_id, + Permission.DOCUMENTS_READ.value, + "You don't have permission to read documents in this search space", + ) + # Convert database object to API-friendly format return DocumentRead( id=document.id, @@ -479,6 +558,8 @@ async def read_document( created_at=document.created_at, search_space_id=document.search_space_id, ) + except HTTPException: + raise except Exception as e: raise HTTPException( status_code=500, detail=f"Failed to fetch document: {e!s}" @@ -492,12 +573,13 @@ async def update_document( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Update a document. + Requires DOCUMENTS_UPDATE permission for the search space. 
+ """ try: - # Query the document directly instead of using read_document function result = await session.execute( - select(Document) - .join(SearchSpace) - .filter(Document.id == document_id, SearchSpace.user_id == user.id) + select(Document).filter(Document.id == document_id) ) db_document = result.scalars().first() @@ -506,6 +588,15 @@ async def update_document( status_code=404, detail=f"Document with id {document_id} not found" ) + # Check permission for the search space + await check_permission( + session, + user, + db_document.search_space_id, + Permission.DOCUMENTS_UPDATE.value, + "You don't have permission to update documents in this search space", + ) + update_data = document_update.model_dump(exclude_unset=True) for key, value in update_data.items(): setattr(db_document, key, value) @@ -537,12 +628,13 @@ async def delete_document( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Delete a document. + Requires DOCUMENTS_DELETE permission for the search space. 
+ """ try: - # Query the document directly instead of using read_document function result = await session.execute( - select(Document) - .join(SearchSpace) - .filter(Document.id == document_id, SearchSpace.user_id == user.id) + select(Document).filter(Document.id == document_id) ) document = result.scalars().first() @@ -551,6 +643,15 @@ async def delete_document( status_code=404, detail=f"Document with id {document_id} not found" ) + # Check permission for the search space + await check_permission( + session, + user, + document.search_space_id, + Permission.DOCUMENTS_DELETE.value, + "You don't have permission to delete documents in this search space", + ) + await session.delete(document) await session.commit() return {"message": "Document deleted successfully"} diff --git a/surfsense_backend/app/routes/llm_config_routes.py b/surfsense_backend/app/routes/llm_config_routes.py index 35c3ce574..31c7200f5 100644 --- a/surfsense_backend/app/routes/llm_config_routes.py +++ b/surfsense_backend/app/routes/llm_config_routes.py @@ -8,67 +8,22 @@ from sqlalchemy.future import select from app.config import config from app.db import ( LLMConfig, + Permission, SearchSpace, User, - UserSearchSpacePreference, get_async_session, ) from app.schemas import LLMConfigCreate, LLMConfigRead, LLMConfigUpdate from app.services.llm_service import validate_llm_config from app.users import current_active_user +from app.utils.rbac import check_permission router = APIRouter() logger = logging.getLogger(__name__) -# Helper function to check search space access -async def check_search_space_access( - session: AsyncSession, search_space_id: int, user: User -) -> SearchSpace: - """Verify that the user has access to the search space""" - result = await session.execute( - select(SearchSpace).filter( - SearchSpace.id == search_space_id, SearchSpace.user_id == user.id - ) - ) - search_space = result.scalars().first() - if not search_space: - raise HTTPException( - status_code=404, - detail="Search space 
not found or you don't have permission to access it", - ) - return search_space - - -# Helper function to get or create user search space preference -async def get_or_create_user_preference( - session: AsyncSession, user_id, search_space_id: int -) -> UserSearchSpacePreference: - """Get or create user preference for a search space""" - result = await session.execute( - select(UserSearchSpacePreference).filter( - UserSearchSpacePreference.user_id == user_id, - UserSearchSpacePreference.search_space_id == search_space_id, - ) - # Removed selectinload options since relationships no longer exist - ) - preference = result.scalars().first() - - if not preference: - # Create new preference entry - preference = UserSearchSpacePreference( - user_id=user_id, - search_space_id=search_space_id, - ) - session.add(preference) - await session.commit() - await session.refresh(preference) - - return preference - - class LLMPreferencesUpdate(BaseModel): - """Schema for updating user LLM preferences""" + """Schema for updating search space LLM preferences""" long_context_llm_id: int | None = None fast_llm_id: int | None = None @@ -76,7 +31,7 @@ class LLMPreferencesUpdate(BaseModel): class LLMPreferencesRead(BaseModel): - """Schema for reading user LLM preferences""" + """Schema for reading search space LLM preferences""" long_context_llm_id: int | None = None fast_llm_id: int | None = None @@ -144,10 +99,19 @@ async def create_llm_config( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Create a new LLM configuration for a search space""" + """ + Create a new LLM configuration for a search space. + Requires LLM_CONFIGS_CREATE permission. 
+ """ try: - # Verify user has access to the search space - await check_search_space_access(session, llm_config.search_space_id, user) + # Verify user has permission to create LLM configs + await check_permission( + session, + user, + llm_config.search_space_id, + Permission.LLM_CONFIGS_CREATE.value, + "You don't have permission to create LLM configurations in this search space", + ) # Validate the LLM configuration by making a test API call is_valid, error_message = await validate_llm_config( @@ -187,10 +151,19 @@ async def read_llm_configs( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Get all LLM configurations for a search space""" + """ + Get all LLM configurations for a search space. + Requires LLM_CONFIGS_READ permission. + """ try: - # Verify user has access to the search space - await check_search_space_access(session, search_space_id, user) + # Verify user has permission to read LLM configs + await check_permission( + session, + user, + search_space_id, + Permission.LLM_CONFIGS_READ.value, + "You don't have permission to view LLM configurations in this search space", + ) result = await session.execute( select(LLMConfig) @@ -213,7 +186,10 @@ async def read_llm_config( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Get a specific LLM configuration by ID""" + """ + Get a specific LLM configuration by ID. + Requires LLM_CONFIGS_READ permission. 
+ """ try: # Get the LLM config result = await session.execute( @@ -224,8 +200,14 @@ async def read_llm_config( if not llm_config: raise HTTPException(status_code=404, detail="LLM configuration not found") - # Verify user has access to the search space - await check_search_space_access(session, llm_config.search_space_id, user) + # Verify user has permission to read LLM configs + await check_permission( + session, + user, + llm_config.search_space_id, + Permission.LLM_CONFIGS_READ.value, + "You don't have permission to view LLM configurations in this search space", + ) return llm_config except HTTPException: @@ -243,7 +225,10 @@ async def update_llm_config( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Update an existing LLM configuration""" + """ + Update an existing LLM configuration. + Requires LLM_CONFIGS_UPDATE permission. + """ try: # Get the LLM config result = await session.execute( @@ -254,8 +239,14 @@ async def update_llm_config( if not db_llm_config: raise HTTPException(status_code=404, detail="LLM configuration not found") - # Verify user has access to the search space - await check_search_space_access(session, db_llm_config.search_space_id, user) + # Verify user has permission to update LLM configs + await check_permission( + session, + user, + db_llm_config.search_space_id, + Permission.LLM_CONFIGS_UPDATE.value, + "You don't have permission to update LLM configurations in this search space", + ) update_data = llm_config_update.model_dump(exclude_unset=True) @@ -311,7 +302,10 @@ async def delete_llm_config( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Delete an LLM configuration""" + """ + Delete an LLM configuration. + Requires LLM_CONFIGS_DELETE permission. 
+ """ try: # Get the LLM config result = await session.execute( @@ -322,8 +316,14 @@ async def delete_llm_config( if not db_llm_config: raise HTTPException(status_code=404, detail="LLM configuration not found") - # Verify user has access to the search space - await check_search_space_access(session, db_llm_config.search_space_id, user) + # Verify user has permission to delete LLM configs + await check_permission( + session, + user, + db_llm_config.search_space_id, + Permission.LLM_CONFIGS_DELETE.value, + "You don't have permission to delete LLM configurations in this search space", + ) await session.delete(db_llm_config) await session.commit() @@ -337,28 +337,42 @@ async def delete_llm_config( ) from e -# User LLM Preferences endpoints +# Search Space LLM Preferences endpoints @router.get( "/search-spaces/{search_space_id}/llm-preferences", response_model=LLMPreferencesRead, ) -async def get_user_llm_preferences( +async def get_llm_preferences( search_space_id: int, session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Get the current user's LLM preferences for a specific search space""" + """ + Get the LLM preferences for a specific search space. + LLM preferences are shared by all members of the search space. + Requires LLM_CONFIGS_READ permission. 
+ """ try: - # Verify user has access to the search space - await check_search_space_access(session, search_space_id, user) - - # Get or create user preference for this search space - preference = await get_or_create_user_preference( - session, user.id, search_space_id + # Verify user has permission to read LLM configs + await check_permission( + session, + user, + search_space_id, + Permission.LLM_CONFIGS_READ.value, + "You don't have permission to view LLM preferences in this search space", ) + # Get the search space + result = await session.execute( + select(SearchSpace).filter(SearchSpace.id == search_space_id) + ) + search_space = result.scalars().first() + + if not search_space: + raise HTTPException(status_code=404, detail="Search space not found") + # Helper function to get config (global or custom) async def get_config_for_id(config_id): if config_id is None: @@ -391,14 +405,14 @@ async def get_user_llm_preferences( return result.scalars().first() # Get the configs (from DB for custom, or constructed for global) - long_context_llm = await get_config_for_id(preference.long_context_llm_id) - fast_llm = await get_config_for_id(preference.fast_llm_id) - strategic_llm = await get_config_for_id(preference.strategic_llm_id) + long_context_llm = await get_config_for_id(search_space.long_context_llm_id) + fast_llm = await get_config_for_id(search_space.fast_llm_id) + strategic_llm = await get_config_for_id(search_space.strategic_llm_id) return { - "long_context_llm_id": preference.long_context_llm_id, - "fast_llm_id": preference.fast_llm_id, - "strategic_llm_id": preference.strategic_llm_id, + "long_context_llm_id": search_space.long_context_llm_id, + "fast_llm_id": search_space.fast_llm_id, + "strategic_llm_id": search_space.strategic_llm_id, "long_context_llm": long_context_llm, "fast_llm": fast_llm, "strategic_llm": strategic_llm, @@ -415,22 +429,37 @@ async def get_user_llm_preferences( "/search-spaces/{search_space_id}/llm-preferences", 
response_model=LLMPreferencesRead, ) -async def update_user_llm_preferences( +async def update_llm_preferences( search_space_id: int, preferences: LLMPreferencesUpdate, session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Update the current user's LLM preferences for a specific search space""" + """ + Update the LLM preferences for a specific search space. + LLM preferences are shared by all members of the search space. + Requires SETTINGS_UPDATE permission (only users with settings access can change). + """ try: - # Verify user has access to the search space - await check_search_space_access(session, search_space_id, user) - - # Get or create user preference for this search space - preference = await get_or_create_user_preference( - session, user.id, search_space_id + # Verify user has permission to update settings (not just LLM configs) + # This ensures only users with settings access can change shared LLM preferences + await check_permission( + session, + user, + search_space_id, + Permission.SETTINGS_UPDATE.value, + "You don't have permission to update LLM preferences in this search space", ) + # Get the search space + result = await session.execute( + select(SearchSpace).filter(SearchSpace.id == search_space_id) + ) + search_space = result.scalars().first() + + if not search_space: + raise HTTPException(status_code=404, detail="Search space not found") + # Validate that all provided LLM config IDs belong to the search space update_data = preferences.model_dump(exclude_unset=True) @@ -485,18 +514,13 @@ async def update_user_llm_preferences( f"Multiple languages detected in LLM selection for search_space {search_space_id}: {languages}. " "This may affect response quality." 
) - # Don't raise an exception - allow users to proceed - # raise HTTPException( - # status_code=400, - # detail="All selected LLM configurations must have the same language setting", - # ) - # Update user preferences + # Update search space LLM preferences for key, value in update_data.items(): - setattr(preference, key, value) + setattr(search_space, key, value) await session.commit() - await session.refresh(preference) + await session.refresh(search_space) # Helper function to get config (global or custom) async def get_config_for_id(config_id): @@ -530,15 +554,15 @@ async def update_user_llm_preferences( return result.scalars().first() # Get the configs (from DB for custom, or constructed for global) - long_context_llm = await get_config_for_id(preference.long_context_llm_id) - fast_llm = await get_config_for_id(preference.fast_llm_id) - strategic_llm = await get_config_for_id(preference.strategic_llm_id) + long_context_llm = await get_config_for_id(search_space.long_context_llm_id) + fast_llm = await get_config_for_id(search_space.fast_llm_id) + strategic_llm = await get_config_for_id(search_space.strategic_llm_id) # Return updated preferences return { - "long_context_llm_id": preference.long_context_llm_id, - "fast_llm_id": preference.fast_llm_id, - "strategic_llm_id": preference.strategic_llm_id, + "long_context_llm_id": search_space.long_context_llm_id, + "fast_llm_id": search_space.fast_llm_id, + "strategic_llm_id": search_space.strategic_llm_id, "long_context_llm": long_context_llm, "fast_llm": fast_llm, "strategic_llm": strategic_llm, diff --git a/surfsense_backend/app/routes/logs_routes.py b/surfsense_backend/app/routes/logs_routes.py index d9dd997ce..98fd9141e 100644 --- a/surfsense_backend/app/routes/logs_routes.py +++ b/surfsense_backend/app/routes/logs_routes.py @@ -5,10 +5,19 @@ from sqlalchemy import and_, desc from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select -from app.db import Log, LogLevel, LogStatus, 
SearchSpace, User, get_async_session +from app.db import ( + Log, + LogLevel, + LogStatus, + Permission, + SearchSpace, + SearchSpaceMembership, + User, + get_async_session, +) from app.schemas import LogCreate, LogRead, LogUpdate from app.users import current_active_user -from app.utils.check_ownership import check_ownership +from app.utils.rbac import check_permission router = APIRouter() @@ -19,10 +28,19 @@ async def create_log( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Create a new log entry.""" + """ + Create a new log entry. + Note: This is typically called internally. Requires LOGS_READ permission (since logs are usually system-generated). + """ try: - # Check if the user owns the search space - await check_ownership(session, SearchSpace, log.search_space_id, user) + # Check if the user has access to the search space + await check_permission( + session, + user, + log.search_space_id, + Permission.LOGS_READ.value, + "You don't have permission to access logs in this search space", + ) db_log = Log(**log.model_dump()) session.add(db_log) @@ -51,22 +69,38 @@ async def read_logs( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Get logs with optional filtering.""" + """ + Get logs with optional filtering. + Requires LOGS_READ permission for the search space(s). 
+ """ try: - # Build base query - only logs from user's search spaces - query = ( - select(Log) - .join(SearchSpace) - .filter(SearchSpace.user_id == user.id) - .order_by(desc(Log.created_at)) # Most recent first - ) - # Apply filters filters = [] if search_space_id is not None: - await check_ownership(session, SearchSpace, search_space_id, user) - filters.append(Log.search_space_id == search_space_id) + # Check permission for specific search space + await check_permission( + session, + user, + search_space_id, + Permission.LOGS_READ.value, + "You don't have permission to read logs in this search space", + ) + # Build query for specific search space + query = ( + select(Log) + .filter(Log.search_space_id == search_space_id) + .order_by(desc(Log.created_at)) + ) + else: + # Build base query - logs from search spaces user has membership in + query = ( + select(Log) + .join(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) + .order_by(desc(Log.created_at)) + ) if level is not None: filters.append(Log.level == level) @@ -104,19 +138,26 @@ async def read_log( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Get a specific log by ID.""" + """ + Get a specific log by ID. + Requires LOGS_READ permission for the search space. 
+ """ try: - # Get log and verify user owns the search space - result = await session.execute( - select(Log) - .join(SearchSpace) - .filter(Log.id == log_id, SearchSpace.user_id == user.id) - ) + result = await session.execute(select(Log).filter(Log.id == log_id)) log = result.scalars().first() if not log: raise HTTPException(status_code=404, detail="Log not found") + # Check permission for the search space + await check_permission( + session, + user, + log.search_space_id, + Permission.LOGS_READ.value, + "You don't have permission to read logs in this search space", + ) + return log except HTTPException: raise @@ -133,19 +174,26 @@ async def update_log( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Update a log entry.""" + """ + Update a log entry. + Requires LOGS_READ permission (logs are typically updated by system). + """ try: - # Get log and verify user owns the search space - result = await session.execute( - select(Log) - .join(SearchSpace) - .filter(Log.id == log_id, SearchSpace.user_id == user.id) - ) + result = await session.execute(select(Log).filter(Log.id == log_id)) db_log = result.scalars().first() if not db_log: raise HTTPException(status_code=404, detail="Log not found") + # Check permission for the search space + await check_permission( + session, + user, + db_log.search_space_id, + Permission.LOGS_READ.value, + "You don't have permission to access logs in this search space", + ) + # Update only provided fields update_data = log_update.model_dump(exclude_unset=True) for field, value in update_data.items(): @@ -169,19 +217,26 @@ async def delete_log( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Delete a log entry.""" + """ + Delete a log entry. + Requires LOGS_DELETE permission for the search space. 
+ """ try: - # Get log and verify user owns the search space - result = await session.execute( - select(Log) - .join(SearchSpace) - .filter(Log.id == log_id, SearchSpace.user_id == user.id) - ) + result = await session.execute(select(Log).filter(Log.id == log_id)) db_log = result.scalars().first() if not db_log: raise HTTPException(status_code=404, detail="Log not found") + # Check permission for the search space + await check_permission( + session, + user, + db_log.search_space_id, + Permission.LOGS_DELETE.value, + "You don't have permission to delete logs in this search space", + ) + await session.delete(db_log) await session.commit() return {"message": "Log deleted successfully"} @@ -201,10 +256,19 @@ async def get_logs_summary( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Get a summary of logs for a search space in the last X hours.""" + """ + Get a summary of logs for a search space in the last X hours. + Requires LOGS_READ permission for the search space. 
+ """ try: - # Check ownership - await check_ownership(session, SearchSpace, search_space_id, user) + # Check permission + await check_permission( + session, + user, + search_space_id, + Permission.LOGS_READ.value, + "You don't have permission to read logs in this search space", + ) # Calculate time window since = datetime.utcnow().replace(microsecond=0) - timedelta(hours=hours) diff --git a/surfsense_backend/app/routes/podcasts_routes.py b/surfsense_backend/app/routes/podcasts_routes.py index ae1fdaeef..deb9d9744 100644 --- a/surfsense_backend/app/routes/podcasts_routes.py +++ b/surfsense_backend/app/routes/podcasts_routes.py @@ -7,7 +7,15 @@ from sqlalchemy.exc import IntegrityError, SQLAlchemyError from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select -from app.db import Chat, Podcast, SearchSpace, User, get_async_session +from app.db import ( + Chat, + Permission, + Podcast, + SearchSpace, + SearchSpaceMembership, + User, + get_async_session, +) from app.schemas import ( PodcastCreate, PodcastGenerateRequest, @@ -16,7 +24,7 @@ from app.schemas import ( ) from app.tasks.podcast_tasks import generate_chat_podcast from app.users import current_active_user -from app.utils.check_ownership import check_ownership +from app.utils.rbac import check_permission router = APIRouter() @@ -27,8 +35,18 @@ async def create_podcast( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Create a new podcast. + Requires PODCASTS_CREATE permission. 
+ """ try: - await check_ownership(session, SearchSpace, podcast.search_space_id, user) + await check_permission( + session, + user, + podcast.search_space_id, + Permission.PODCASTS_CREATE.value, + "You don't have permission to create podcasts in this search space", + ) db_podcast = Podcast(**podcast.model_dump()) session.add(db_podcast) await session.commit() @@ -58,20 +76,45 @@ async def create_podcast( async def read_podcasts( skip: int = 0, limit: int = 100, + search_space_id: int | None = None, session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + List podcasts the user has access to. + Requires PODCASTS_READ permission for the search space(s). + """ if skip < 0 or limit < 1: raise HTTPException(status_code=400, detail="Invalid pagination parameters") try: - result = await session.execute( - select(Podcast) - .join(SearchSpace) - .filter(SearchSpace.user_id == user.id) - .offset(skip) - .limit(limit) - ) + if search_space_id is not None: + # Check permission for specific search space + await check_permission( + session, + user, + search_space_id, + Permission.PODCASTS_READ.value, + "You don't have permission to read podcasts in this search space", + ) + result = await session.execute( + select(Podcast) + .filter(Podcast.search_space_id == search_space_id) + .offset(skip) + .limit(limit) + ) + else: + # Get podcasts from all search spaces user has membership in + result = await session.execute( + select(Podcast) + .join(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) + .offset(skip) + .limit(limit) + ) return result.scalars().all() + except HTTPException: + raise except SQLAlchemyError: raise HTTPException( status_code=500, detail="Database error occurred while fetching podcasts" @@ -84,18 +127,29 @@ async def read_podcast( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Get a specific podcast by ID. 
+ Requires PODCASTS_READ permission for the search space. + """ try: - result = await session.execute( - select(Podcast) - .join(SearchSpace) - .filter(Podcast.id == podcast_id, SearchSpace.user_id == user.id) - ) + result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id)) podcast = result.scalars().first() + if not podcast: raise HTTPException( status_code=404, - detail="Podcast not found or you don't have permission to access it", + detail="Podcast not found", ) + + # Check permission for the search space + await check_permission( + session, + user, + podcast.search_space_id, + Permission.PODCASTS_READ.value, + "You don't have permission to read podcasts in this search space", + ) + return podcast except HTTPException as he: raise he @@ -112,8 +166,26 @@ async def update_podcast( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Update a podcast. + Requires PODCASTS_UPDATE permission for the search space. + """ try: - db_podcast = await read_podcast(podcast_id, session, user) + result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id)) + db_podcast = result.scalars().first() + + if not db_podcast: + raise HTTPException(status_code=404, detail="Podcast not found") + + # Check permission for the search space + await check_permission( + session, + user, + db_podcast.search_space_id, + Permission.PODCASTS_UPDATE.value, + "You don't have permission to update podcasts in this search space", + ) + update_data = podcast_update.model_dump(exclude_unset=True) for key, value in update_data.items(): setattr(db_podcast, key, value) @@ -140,8 +212,26 @@ async def delete_podcast( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Delete a podcast. + Requires PODCASTS_DELETE permission for the search space. 
+ """ try: - db_podcast = await read_podcast(podcast_id, session, user) + result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id)) + db_podcast = result.scalars().first() + + if not db_podcast: + raise HTTPException(status_code=404, detail="Podcast not found") + + # Check permission for the search space + await check_permission( + session, + user, + db_podcast.search_space_id, + Permission.PODCASTS_DELETE.value, + "You don't have permission to delete podcasts in this search space", + ) + await session.delete(db_podcast) await session.commit() return {"message": "Podcast deleted successfully"} @@ -181,9 +271,19 @@ async def generate_podcast( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Generate a podcast from a chat or document. + Requires PODCASTS_CREATE permission. + """ try: - # Check if the user owns the search space - await check_ownership(session, SearchSpace, request.search_space_id, user) + # Check if the user has permission to create podcasts + await check_permission( + session, + user, + request.search_space_id, + Permission.PODCASTS_CREATE.value, + "You don't have permission to create podcasts in this search space", + ) if request.type == "CHAT": # Verify that all chat IDs belong to this user and search space @@ -251,22 +351,29 @@ async def stream_podcast( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Stream a podcast audio file.""" + """ + Stream a podcast audio file. + Requires PODCASTS_READ permission for the search space. 
+ """ try: - # Get the podcast and check if user has access - result = await session.execute( - select(Podcast) - .join(SearchSpace) - .filter(Podcast.id == podcast_id, SearchSpace.user_id == user.id) - ) + result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id)) podcast = result.scalars().first() if not podcast: raise HTTPException( status_code=404, - detail="Podcast not found or you don't have permission to access it", + detail="Podcast not found", ) + # Check permission for the search space + await check_permission( + session, + user, + podcast.search_space_id, + Permission.PODCASTS_READ.value, + "You don't have permission to access podcasts in this search space", + ) + # Get the file path file_path = podcast.file_location @@ -303,12 +410,30 @@ async def get_podcast_by_chat_id( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Get a podcast by its associated chat ID. + Requires PODCASTS_READ permission for the search space. 
+ """ try: - # Get the podcast and check if user has access + # First get the chat to find its search space + chat_result = await session.execute(select(Chat).filter(Chat.id == chat_id)) + chat = chat_result.scalars().first() + + if not chat: + return None + + # Check permission for the search space + await check_permission( + session, + user, + chat.search_space_id, + Permission.PODCASTS_READ.value, + "You don't have permission to read podcasts in this search space", + ) + + # Get the podcast result = await session.execute( - select(Podcast) - .join(SearchSpace) - .filter(Podcast.chat_id == chat_id, SearchSpace.user_id == user.id) + select(Podcast).filter(Podcast.chat_id == chat_id) ) podcast = result.scalars().first() diff --git a/surfsense_backend/app/routes/rbac_routes.py b/surfsense_backend/app/routes/rbac_routes.py new file mode 100644 index 000000000..c5392f284 --- /dev/null +++ b/surfsense_backend/app/routes/rbac_routes.py @@ -0,0 +1,1084 @@ +""" +RBAC (Role-Based Access Control) routes for managing roles, memberships, and invites. 
+ +Endpoints: +- /searchspaces/{search_space_id}/roles - CRUD for roles +- /searchspaces/{search_space_id}/members - CRUD for memberships +- /searchspaces/{search_space_id}/invites - CRUD for invites +- /invites/{invite_code}/info - Get invite info (public) +- /invites/accept - Accept an invite +- /permissions - List all available permissions +""" + +import logging +from datetime import UTC, datetime + +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select +from sqlalchemy.orm import selectinload + +from app.db import ( + Permission, + SearchSpace, + SearchSpaceInvite, + SearchSpaceMembership, + SearchSpaceRole, + User, + get_async_session, +) +from app.schemas import ( + InviteAcceptRequest, + InviteAcceptResponse, + InviteCreate, + InviteInfoResponse, + InviteRead, + InviteUpdate, + MembershipRead, + MembershipUpdate, + PermissionInfo, + PermissionsListResponse, + RoleCreate, + RoleRead, + RoleUpdate, + UserSearchSpaceAccess, +) +from app.users import current_active_user +from app.utils.rbac import ( + check_permission, + check_search_space_access, + generate_invite_code, + get_default_role, + get_user_permissions, +) + +logger = logging.getLogger(__name__) + +router = APIRouter() + + +# ============ Permissions Endpoints ============ + + +@router.get("/permissions", response_model=PermissionsListResponse) +async def list_all_permissions( + user: User = Depends(current_active_user), +): + """ + List all available permissions that can be assigned to roles. 
+ """ + permissions = [] + for perm in Permission: + # Extract category from permission value (e.g., "documents:read" -> "documents") + category = perm.value.split(":")[0] if ":" in perm.value else "general" + + permissions.append( + PermissionInfo( + value=perm.value, + name=perm.name, + category=category, + ) + ) + + return PermissionsListResponse(permissions=permissions) + + +# ============ Role Endpoints ============ + + +@router.post( + "/searchspaces/{search_space_id}/roles", + response_model=RoleRead, +) +async def create_role( + search_space_id: int, + role_data: RoleCreate, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Create a new custom role in a search space. + Requires ROLES_CREATE permission. + """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.ROLES_CREATE.value, + "You don't have permission to create roles", + ) + + # Check if role with same name already exists + result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.search_space_id == search_space_id, + SearchSpaceRole.name == role_data.name, + ) + ) + if result.scalars().first(): + raise HTTPException( + status_code=409, + detail=f"A role with name '{role_data.name}' already exists in this search space", + ) + + # Validate permissions + valid_permissions = {p.value for p in Permission} + for perm in role_data.permissions: + if perm not in valid_permissions: + raise HTTPException( + status_code=400, + detail=f"Invalid permission: {perm}", + ) + + # If setting is_default to True, unset any existing default + if role_data.is_default: + await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.search_space_id == search_space_id, + SearchSpaceRole.is_default == True, # noqa: E712 + ) + ) + existing_defaults = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.search_space_id == search_space_id, + SearchSpaceRole.is_default == True, # 
noqa: E712 + ) + ) + for existing in existing_defaults.scalars().all(): + existing.is_default = False + + db_role = SearchSpaceRole( + **role_data.model_dump(), + search_space_id=search_space_id, + is_system_role=False, + ) + session.add(db_role) + await session.commit() + await session.refresh(db_role) + return db_role + + except HTTPException: + raise + except Exception as e: + await session.rollback() + logger.error(f"Failed to create role: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to create role: {e!s}" + ) from e + + +@router.get( + "/searchspaces/{search_space_id}/roles", + response_model=list[RoleRead], +) +async def list_roles( + search_space_id: int, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + List all roles in a search space. + Requires ROLES_READ permission. + """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.ROLES_READ.value, + "You don't have permission to view roles", + ) + + result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.search_space_id == search_space_id + ) + ) + return result.scalars().all() + + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Failed to fetch roles: {e!s}" + ) from e + + +@router.get( + "/searchspaces/{search_space_id}/roles/{role_id}", + response_model=RoleRead, +) +async def get_role( + search_space_id: int, + role_id: int, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Get a specific role by ID. + Requires ROLES_READ permission. 
+ """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.ROLES_READ.value, + "You don't have permission to view roles", + ) + + result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.id == role_id, + SearchSpaceRole.search_space_id == search_space_id, + ) + ) + role = result.scalars().first() + + if not role: + raise HTTPException(status_code=404, detail="Role not found") + + return role + + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Failed to fetch role: {e!s}" + ) from e + + +@router.put( + "/searchspaces/{search_space_id}/roles/{role_id}", + response_model=RoleRead, +) +async def update_role( + search_space_id: int, + role_id: int, + role_update: RoleUpdate, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Update a role. + Requires ROLES_UPDATE permission. + System roles can only have their permissions updated, not name/description. 
+ """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.ROLES_UPDATE.value, + "You don't have permission to update roles", + ) + + result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.id == role_id, + SearchSpaceRole.search_space_id == search_space_id, + ) + ) + db_role = result.scalars().first() + + if not db_role: + raise HTTPException(status_code=404, detail="Role not found") + + update_data = role_update.model_dump(exclude_unset=True) + + # System roles have restrictions on what can be updated + if db_role.is_system_role: + # Can only update permissions for system roles + restricted_fields = {"name", "description", "is_default"} + if any(field in update_data for field in restricted_fields): + raise HTTPException( + status_code=400, + detail="Cannot modify name, description, or default status of system roles", + ) + + # Check for name conflict if updating name + if "name" in update_data and update_data["name"] != db_role.name: + existing = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.search_space_id == search_space_id, + SearchSpaceRole.name == update_data["name"], + ) + ) + if existing.scalars().first(): + raise HTTPException( + status_code=409, + detail=f"A role with name '{update_data['name']}' already exists", + ) + + # Validate permissions if provided + if "permissions" in update_data: + valid_permissions = {p.value for p in Permission} + for perm in update_data["permissions"]: + if perm not in valid_permissions: + raise HTTPException( + status_code=400, + detail=f"Invalid permission: {perm}", + ) + + # Handle is_default change + if update_data.get("is_default") and not db_role.is_default: + # Unset existing default + existing_defaults = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.search_space_id == search_space_id, + SearchSpaceRole.is_default == True, # noqa: E712 + ) + ) + for existing in existing_defaults.scalars().all(): + 
existing.is_default = False + + for key, value in update_data.items(): + setattr(db_role, key, value) + + await session.commit() + await session.refresh(db_role) + return db_role + + except HTTPException: + raise + except Exception as e: + await session.rollback() + logger.error(f"Failed to update role: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to update role: {e!s}" + ) from e + + +@router.delete("/searchspaces/{search_space_id}/roles/{role_id}") +async def delete_role( + search_space_id: int, + role_id: int, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Delete a custom role. + Requires ROLES_DELETE permission. + System roles cannot be deleted. + """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.ROLES_DELETE.value, + "You don't have permission to delete roles", + ) + + result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.id == role_id, + SearchSpaceRole.search_space_id == search_space_id, + ) + ) + db_role = result.scalars().first() + + if not db_role: + raise HTTPException(status_code=404, detail="Role not found") + + if db_role.is_system_role: + raise HTTPException( + status_code=400, + detail="System roles cannot be deleted", + ) + + await session.delete(db_role) + await session.commit() + return {"message": "Role deleted successfully"} + + except HTTPException: + raise + except Exception as e: + await session.rollback() + logger.error(f"Failed to delete role: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to delete role: {e!s}" + ) from e + + +# ============ Membership Endpoints ============ + + +@router.get( + "/searchspaces/{search_space_id}/members", + response_model=list[MembershipRead], +) +async def list_members( + search_space_id: int, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + List 
all members of a search space. + Requires MEMBERS_VIEW permission. + """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.MEMBERS_VIEW.value, + "You don't have permission to view members", + ) + + result = await session.execute( + select(SearchSpaceMembership) + .options(selectinload(SearchSpaceMembership.role)) + .filter(SearchSpaceMembership.search_space_id == search_space_id) + ) + memberships = result.scalars().all() + + # Fetch user emails for each membership + response = [] + for membership in memberships: + user_result = await session.execute( + select(User).filter(User.id == membership.user_id) + ) + member_user = user_result.scalars().first() + + membership_dict = { + "id": membership.id, + "user_id": membership.user_id, + "search_space_id": membership.search_space_id, + "role_id": membership.role_id, + "is_owner": membership.is_owner, + "joined_at": membership.joined_at, + "created_at": membership.created_at, + "role": membership.role, + "user_email": member_user.email if member_user else None, + } + response.append(membership_dict) + + return response + + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Failed to fetch members: {e!s}" + ) from e + + +@router.put( + "/searchspaces/{search_space_id}/members/{membership_id}", + response_model=MembershipRead, +) +async def update_member_role( + search_space_id: int, + membership_id: int, + membership_update: MembershipUpdate, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Update a member's role. + Requires MEMBERS_MANAGE_ROLES permission. + Cannot change owner's role. 
+ """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.MEMBERS_MANAGE_ROLES.value, + "You don't have permission to manage member roles", + ) + + result = await session.execute( + select(SearchSpaceMembership) + .options(selectinload(SearchSpaceMembership.role)) + .filter( + SearchSpaceMembership.id == membership_id, + SearchSpaceMembership.search_space_id == search_space_id, + ) + ) + db_membership = result.scalars().first() + + if not db_membership: + raise HTTPException(status_code=404, detail="Membership not found") + + # Cannot change owner's role + if db_membership.is_owner: + raise HTTPException( + status_code=400, + detail="Cannot change the owner's role", + ) + + # Verify the new role exists in this search space + if membership_update.role_id: + role_result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.id == membership_update.role_id, + SearchSpaceRole.search_space_id == search_space_id, + ) + ) + if not role_result.scalars().first(): + raise HTTPException( + status_code=404, + detail="Role not found in this search space", + ) + + db_membership.role_id = membership_update.role_id + await session.commit() + await session.refresh(db_membership) + + # Fetch user email + user_result = await session.execute( + select(User).filter(User.id == db_membership.user_id) + ) + member_user = user_result.scalars().first() + + return { + "id": db_membership.id, + "user_id": db_membership.user_id, + "search_space_id": db_membership.search_space_id, + "role_id": db_membership.role_id, + "is_owner": db_membership.is_owner, + "joined_at": db_membership.joined_at, + "created_at": db_membership.created_at, + "role": db_membership.role, + "user_email": member_user.email if member_user else None, + } + + except HTTPException: + raise + except Exception as e: + await session.rollback() + logger.error(f"Failed to update member role: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to 
@router.delete("/searchspaces/{search_space_id}/members/me")
async def leave_search_space(
    search_space_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    Leave a search space (remove own membership).
    Owners cannot leave their search space.

    FIX: this fixed-path route MUST be registered before the parameterized
    DELETE /members/{membership_id} route below. FastAPI matches routes in
    declaration order, so if it came second, "me" would be parsed as the
    int-typed membership_id, producing a 422 and making this endpoint
    unreachable.
    """
    try:
        result = await session.execute(
            select(SearchSpaceMembership).filter(
                SearchSpaceMembership.user_id == user.id,
                SearchSpaceMembership.search_space_id == search_space_id,
            )
        )
        db_membership = result.scalars().first()

        if not db_membership:
            raise HTTPException(
                status_code=404,
                detail="You are not a member of this search space",
            )

        if db_membership.is_owner:
            raise HTTPException(
                status_code=400,
                detail="Owners cannot leave their search space. Transfer ownership first or delete the search space.",
            )

        await session.delete(db_membership)
        await session.commit()
        return {"message": "Successfully left the search space"}

    except HTTPException:
        raise
    except Exception as e:
        await session.rollback()
        logger.error(f"Failed to leave search space: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to leave search space: {e!s}"
        ) from e


@router.delete("/searchspaces/{search_space_id}/members/{membership_id}")
async def remove_member(
    search_space_id: int,
    membership_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    Remove a member from a search space.
    Requires MEMBERS_REMOVE permission.
    Cannot remove the owner.
    """
    try:
        await check_permission(
            session,
            user,
            search_space_id,
            Permission.MEMBERS_REMOVE.value,
            "You don't have permission to remove members",
        )

        result = await session.execute(
            select(SearchSpaceMembership).filter(
                SearchSpaceMembership.id == membership_id,
                SearchSpaceMembership.search_space_id == search_space_id,
            )
        )
        db_membership = result.scalars().first()

        if not db_membership:
            raise HTTPException(status_code=404, detail="Membership not found")

        if db_membership.is_owner:
            raise HTTPException(
                status_code=400,
                detail="Cannot remove the owner from the search space",
            )

        await session.delete(db_membership)
        await session.commit()
        return {"message": "Member removed successfully"}

    except HTTPException:
        raise
    except Exception as e:
        await session.rollback()
        logger.error(f"Failed to remove member: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to remove member: {e!s}"
        ) from e


# ============ Invite Endpoints ============
+ """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.MEMBERS_INVITE.value, + "You don't have permission to create invites", + ) + + # Verify role exists if specified + if invite_data.role_id: + role_result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.id == invite_data.role_id, + SearchSpaceRole.search_space_id == search_space_id, + ) + ) + if not role_result.scalars().first(): + raise HTTPException( + status_code=404, + detail="Role not found in this search space", + ) + + db_invite = SearchSpaceInvite( + **invite_data.model_dump(), + invite_code=generate_invite_code(), + search_space_id=search_space_id, + created_by_id=user.id, + ) + session.add(db_invite) + await session.commit() + + # Reload with role + result = await session.execute( + select(SearchSpaceInvite) + .options(selectinload(SearchSpaceInvite.role)) + .filter(SearchSpaceInvite.id == db_invite.id) + ) + db_invite = result.scalars().first() + + return db_invite + + except HTTPException: + raise + except Exception as e: + await session.rollback() + logger.error(f"Failed to create invite: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to create invite: {e!s}" + ) from e + + +@router.get( + "/searchspaces/{search_space_id}/invites", + response_model=list[InviteRead], +) +async def list_invites( + search_space_id: int, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + List all invites for a search space. + Requires MEMBERS_INVITE permission. 
+ """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.MEMBERS_INVITE.value, + "You don't have permission to view invites", + ) + + result = await session.execute( + select(SearchSpaceInvite) + .options(selectinload(SearchSpaceInvite.role)) + .filter(SearchSpaceInvite.search_space_id == search_space_id) + ) + return result.scalars().all() + + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Failed to fetch invites: {e!s}" + ) from e + + +@router.put( + "/searchspaces/{search_space_id}/invites/{invite_id}", + response_model=InviteRead, +) +async def update_invite( + search_space_id: int, + invite_id: int, + invite_update: InviteUpdate, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Update an invite. + Requires MEMBERS_INVITE permission. + """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.MEMBERS_INVITE.value, + "You don't have permission to update invites", + ) + + result = await session.execute( + select(SearchSpaceInvite) + .options(selectinload(SearchSpaceInvite.role)) + .filter( + SearchSpaceInvite.id == invite_id, + SearchSpaceInvite.search_space_id == search_space_id, + ) + ) + db_invite = result.scalars().first() + + if not db_invite: + raise HTTPException(status_code=404, detail="Invite not found") + + update_data = invite_update.model_dump(exclude_unset=True) + + # Verify role exists if updating role_id + if update_data.get("role_id"): + role_result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.id == update_data["role_id"], + SearchSpaceRole.search_space_id == search_space_id, + ) + ) + if not role_result.scalars().first(): + raise HTTPException( + status_code=404, + detail="Role not found in this search space", + ) + + for key, value in update_data.items(): + setattr(db_invite, key, value) + + await session.commit() + await 
session.refresh(db_invite) + return db_invite + + except HTTPException: + raise + except Exception as e: + await session.rollback() + logger.error(f"Failed to update invite: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to update invite: {e!s}" + ) from e + + +@router.delete("/searchspaces/{search_space_id}/invites/{invite_id}") +async def revoke_invite( + search_space_id: int, + invite_id: int, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Revoke (delete) an invite. + Requires MEMBERS_INVITE permission. + """ + try: + await check_permission( + session, + user, + search_space_id, + Permission.MEMBERS_INVITE.value, + "You don't have permission to revoke invites", + ) + + result = await session.execute( + select(SearchSpaceInvite).filter( + SearchSpaceInvite.id == invite_id, + SearchSpaceInvite.search_space_id == search_space_id, + ) + ) + db_invite = result.scalars().first() + + if not db_invite: + raise HTTPException(status_code=404, detail="Invite not found") + + await session.delete(db_invite) + await session.commit() + return {"message": "Invite revoked successfully"} + + except HTTPException: + raise + except Exception as e: + await session.rollback() + logger.error(f"Failed to revoke invite: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to revoke invite: {e!s}" + ) from e + + +# ============ Public Invite Endpoints ============ + + +@router.get("/invites/{invite_code}/info", response_model=InviteInfoResponse) +async def get_invite_info( + invite_code: str, + session: AsyncSession = Depends(get_async_session), +): + """ + Get information about an invite (public endpoint, no auth required). + Returns minimal info for displaying on invite acceptance page. 
+ """ + try: + result = await session.execute( + select(SearchSpaceInvite) + .options( + selectinload(SearchSpaceInvite.role), + selectinload(SearchSpaceInvite.search_space), + ) + .filter(SearchSpaceInvite.invite_code == invite_code) + ) + invite = result.scalars().first() + + if not invite: + return InviteInfoResponse( + search_space_name="", + role_name=None, + is_valid=False, + message="Invite not found", + ) + + # Check if invite is still valid + if not invite.is_active: + return InviteInfoResponse( + search_space_name=invite.search_space.name + if invite.search_space + else "", + role_name=invite.role.name if invite.role else None, + is_valid=False, + message="This invite is no longer active", + ) + + if invite.expires_at and invite.expires_at < datetime.now(UTC): + return InviteInfoResponse( + search_space_name=invite.search_space.name + if invite.search_space + else "", + role_name=invite.role.name if invite.role else None, + is_valid=False, + message="This invite has expired", + ) + + if invite.max_uses and invite.uses_count >= invite.max_uses: + return InviteInfoResponse( + search_space_name=invite.search_space.name + if invite.search_space + else "", + role_name=invite.role.name if invite.role else None, + is_valid=False, + message="This invite has reached its maximum uses", + ) + + return InviteInfoResponse( + search_space_name=invite.search_space.name if invite.search_space else "", + role_name=invite.role.name if invite.role else "Default", + is_valid=True, + ) + + except Exception as e: + logger.error(f"Failed to get invite info: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to get invite info: {e!s}" + ) from e + + +@router.post("/invites/accept", response_model=InviteAcceptResponse) +async def accept_invite( + request: InviteAcceptRequest, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Accept an invite and join a search space. 
+ """ + try: + result = await session.execute( + select(SearchSpaceInvite) + .options( + selectinload(SearchSpaceInvite.role), + selectinload(SearchSpaceInvite.search_space), + ) + .filter(SearchSpaceInvite.invite_code == request.invite_code) + ) + invite = result.scalars().first() + + if not invite: + raise HTTPException(status_code=404, detail="Invite not found") + + # Validate invite + if not invite.is_active: + raise HTTPException( + status_code=400, detail="This invite is no longer active" + ) + + if invite.expires_at and invite.expires_at < datetime.now(UTC): + raise HTTPException(status_code=400, detail="This invite has expired") + + if invite.max_uses and invite.uses_count >= invite.max_uses: + raise HTTPException( + status_code=400, detail="This invite has reached its maximum uses" + ) + + # Check if user is already a member + existing_membership = await session.execute( + select(SearchSpaceMembership).filter( + SearchSpaceMembership.user_id == user.id, + SearchSpaceMembership.search_space_id == invite.search_space_id, + ) + ) + if existing_membership.scalars().first(): + raise HTTPException( + status_code=400, + detail="You are already a member of this search space", + ) + + # Determine role to assign + role_id = invite.role_id + if not role_id: + # Use default role + default_role = await get_default_role(session, invite.search_space_id) + role_id = default_role.id if default_role else None + + # Create membership + membership = SearchSpaceMembership( + user_id=user.id, + search_space_id=invite.search_space_id, + role_id=role_id, + is_owner=False, + invited_by_invite_id=invite.id, + ) + session.add(membership) + + # Increment invite usage + invite.uses_count += 1 + + await session.commit() + + role_name = invite.role.name if invite.role else "Default" + search_space_name = invite.search_space.name if invite.search_space else "" + + return InviteAcceptResponse( + message="Successfully joined the search space", + search_space_id=invite.search_space_id, + 
search_space_name=search_space_name, + role_name=role_name, + ) + + except HTTPException: + raise + except Exception as e: + await session.rollback() + logger.error(f"Failed to accept invite: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to accept invite: {e!s}" + ) from e + + +# ============ User Access Info ============ + + +@router.get( + "/searchspaces/{search_space_id}/my-access", + response_model=UserSearchSpaceAccess, +) +async def get_my_access( + search_space_id: int, + session: AsyncSession = Depends(get_async_session), + user: User = Depends(current_active_user), +): + """ + Get the current user's access info for a search space. + """ + try: + membership = await check_search_space_access(session, user, search_space_id) + + # Get search space name + result = await session.execute( + select(SearchSpace).filter(SearchSpace.id == search_space_id) + ) + search_space = result.scalars().first() + + # Get permissions + permissions = await get_user_permissions(session, user.id, search_space_id) + + return UserSearchSpaceAccess( + search_space_id=search_space_id, + search_space_name=search_space.name if search_space else "", + is_owner=membership.is_owner, + role_name=membership.role.name if membership.role else None, + permissions=permissions, + ) + + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Failed to get access info: {e!s}" + ) from e diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index 4e62035ff..624353e19 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -22,9 +22,9 @@ from sqlalchemy.future import select from app.connectors.github_connector import GitHubConnector from app.db import ( + Permission, SearchSourceConnector, SearchSourceConnectorType, - SearchSpace, User, 
async_session_maker, get_async_session, @@ -39,6 +39,7 @@ from app.tasks.connector_indexers import ( index_airtable_records, index_clickup_tasks, index_confluence_pages, + index_crawled_urls, index_discord_messages, index_elasticsearch_documents, index_github_repos, @@ -51,12 +52,12 @@ from app.tasks.connector_indexers import ( index_slack_messages, ) from app.users import current_active_user -from app.utils.check_ownership import check_ownership from app.utils.periodic_scheduler import ( create_periodic_schedule, delete_periodic_schedule, update_periodic_schedule, ) +from app.utils.rbac import check_permission # Set up logging logger = logging.getLogger(__name__) @@ -107,19 +108,25 @@ async def create_search_source_connector( ): """ Create a new search source connector. + Requires CONNECTORS_CREATE permission. - Each search space can have only one connector of each type per user (based on search_space_id, user_id, and connector_type). + Each search space can have only one connector of each type (based on search_space_id and connector_type). The config must contain the appropriate keys for the connector type. 
""" try: - # Check if the search space belongs to the user - await check_ownership(session, SearchSpace, search_space_id, user) + # Check if user has permission to create connectors + await check_permission( + session, + user, + search_space_id, + Permission.CONNECTORS_CREATE.value, + "You don't have permission to create connectors in this search space", + ) - # Check if a connector with the same type already exists for this search space and user + # Check if a connector with the same type already exists for this search space result = await session.execute( select(SearchSourceConnector).filter( SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user.id, SearchSourceConnector.connector_type == connector.connector_type, ) ) @@ -127,7 +134,7 @@ async def create_search_source_connector( if existing_connector: raise HTTPException( status_code=409, - detail=f"A connector with type {connector.connector_type} already exists in this search space. Each search space can have only one connector of each type per user.", + detail=f"A connector with type {connector.connector_type} already exists in this search space.", ) # Prepare connector data @@ -197,22 +204,34 @@ async def read_search_source_connectors( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """List all search source connectors for the current user, optionally filtered by search space.""" + """ + List all search source connectors for a search space. + Requires CONNECTORS_READ permission. 
+ """ try: - query = select(SearchSourceConnector).filter( - SearchSourceConnector.user_id == user.id + if search_space_id is None: + raise HTTPException( + status_code=400, + detail="search_space_id is required", + ) + + # Check if user has permission to read connectors + await check_permission( + session, + user, + search_space_id, + Permission.CONNECTORS_READ.value, + "You don't have permission to view connectors in this search space", ) - # Filter by search_space_id if provided - if search_space_id is not None: - # Verify the search space belongs to the user - await check_ownership(session, SearchSpace, search_space_id, user) - query = query.filter( - SearchSourceConnector.search_space_id == search_space_id - ) + query = select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id + ) result = await session.execute(query.offset(skip).limit(limit)) return result.scalars().all() + except HTTPException: + raise except Exception as e: raise HTTPException( status_code=500, @@ -228,9 +247,32 @@ async def read_search_source_connector( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Get a specific search source connector by ID.""" + """ + Get a specific search source connector by ID. + Requires CONNECTORS_READ permission. 
+ """ try: - return await check_ownership(session, SearchSourceConnector, connector_id, user) + # Get the connector first + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise HTTPException(status_code=404, detail="Connector not found") + + # Check permission + await check_permission( + session, + user, + connector.search_space_id, + Permission.CONNECTORS_READ.value, + "You don't have permission to view this connector", + ) + + return connector except HTTPException: raise except Exception as e: @@ -250,10 +292,25 @@ async def update_search_source_connector( ): """ Update a search source connector. + Requires CONNECTORS_UPDATE permission. Handles partial updates, including merging changes into the 'config' field. """ - db_connector = await check_ownership( - session, SearchSourceConnector, connector_id, user + # Get the connector first + result = await session.execute( + select(SearchSourceConnector).filter(SearchSourceConnector.id == connector_id) + ) + db_connector = result.scalars().first() + + if not db_connector: + raise HTTPException(status_code=404, detail="Connector not found") + + # Check permission + await check_permission( + session, + user, + db_connector.search_space_id, + Permission.CONNECTORS_UPDATE.value, + "You don't have permission to update this connector", ) # Convert the sparse update data (only fields present in request) to a dict @@ -348,20 +405,19 @@ async def update_search_source_connector( for key, value in update_data.items(): # Prevent changing connector_type if it causes a duplicate (check moved here) if key == "connector_type" and value != db_connector.connector_type: - result = await session.execute( + check_result = await session.execute( select(SearchSourceConnector).filter( SearchSourceConnector.search_space_id == db_connector.search_space_id, - SearchSourceConnector.user_id == user.id, 
SearchSourceConnector.connector_type == value, SearchSourceConnector.id != connector_id, ) ) - existing_connector = result.scalars().first() + existing_connector = check_result.scalars().first() if existing_connector: raise HTTPException( status_code=409, - detail=f"A connector with type {value} already exists in this search space. Each search space can have only one connector of each type per user.", + detail=f"A connector with type {value} already exists in this search space.", ) setattr(db_connector, key, value) @@ -424,10 +480,29 @@ async def delete_search_source_connector( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): - """Delete a search source connector.""" + """ + Delete a search source connector. + Requires CONNECTORS_DELETE permission. + """ try: - db_connector = await check_ownership( - session, SearchSourceConnector, connector_id, user + # Get the connector first + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id + ) + ) + db_connector = result.scalars().first() + + if not db_connector: + raise HTTPException(status_code=404, detail="Connector not found") + + # Check permission + await check_permission( + session, + user, + db_connector.search_space_id, + Permission.CONNECTORS_DELETE.value, + "You don't have permission to delete this connector", ) # Delete any periodic schedule associated with this connector @@ -472,6 +547,7 @@ async def index_connector_content( ): """ Index content from a connector to a search space. + Requires CONNECTORS_UPDATE permission (to trigger indexing). 
Currently supports: - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels @@ -482,24 +558,34 @@ async def index_connector_content( - DISCORD_CONNECTOR: Indexes messages from all accessible Discord channels - LUMA_CONNECTOR: Indexes events from Luma - ELASTICSEARCH_CONNECTOR: Indexes documents from Elasticsearch + - WEBCRAWLER_CONNECTOR: Indexes web pages from crawled websites Args: connector_id: ID of the connector to use search_space_id: ID of the search space to store indexed content - background_tasks: FastAPI background tasks Returns: Dictionary with indexing status """ try: - # Check if the connector belongs to the user - connector = await check_ownership( - session, SearchSourceConnector, connector_id, user + # Get the connector first + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id + ) ) + connector = result.scalars().first() - # Check if the search space belongs to the user - _search_space = await check_ownership( - session, SearchSpace, search_space_id, user + if not connector: + raise HTTPException(status_code=404, detail="Connector not found") + + # Check if user has permission to update connectors (indexing is an update operation) + await check_permission( + session, + user, + search_space_id, + Permission.CONNECTORS_UPDATE.value, + "You don't have permission to index content in this search space", ) # Handle different connector types @@ -688,6 +774,17 @@ async def index_connector_content( ) response_message = "Elasticsearch indexing started in the background." 
+ elif connector.connector_type == SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: + from app.tasks.celery_tasks.connector_tasks import index_crawled_urls_task + + logger.info( + f"Triggering web pages indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" + ) + index_crawled_urls_task.delay( + connector_id, search_space_id, str(user.id), indexing_from, indexing_to + ) + response_message = "Web page indexing started in the background." + else: raise HTTPException( status_code=400, @@ -1523,3 +1620,64 @@ async def run_elasticsearch_indexing( f"Critical error in run_elasticsearch_indexing for connector {connector_id}: {e}", exc_info=True, ) + + +# Add new helper functions for crawled web page indexing +async def run_web_page_indexing_with_new_session( + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """ + Create a new session and run the Web page indexing task. + This prevents session leaks by creating a dedicated session for the background task. + """ + async with async_session_maker() as session: + await run_web_page_indexing( + session, connector_id, search_space_id, user_id, start_date, end_date + ) + + +async def run_web_page_indexing( + session: AsyncSession, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """ + Background task to run Web page indexing. 
+ Args: + session: Database session + connector_id: ID of the webcrawler connector + search_space_id: ID of the search space + user_id: ID of the user + start_date: Start date for indexing + end_date: End date for indexing + """ + try: + documents_processed, error_or_warning = await index_crawled_urls( + session=session, + connector_id=connector_id, + search_space_id=search_space_id, + user_id=user_id, + start_date=start_date, + end_date=end_date, + update_last_indexed=False, # Don't update timestamp in the indexing function + ) + + # Only update last_indexed_at if indexing was successful (either new docs or updated docs) + if documents_processed > 0: + await update_connector_last_indexed(session, connector_id) + logger.info( + f"Web page indexing completed successfully: {documents_processed} documents processed" + ) + else: + logger.error( + f"Web page indexing failed or no documents processed: {error_or_warning}" + ) + except Exception as e: + logger.error(f"Error in background Web page indexing task: {e!s}") diff --git a/surfsense_backend/app/routes/search_spaces_routes.py b/surfsense_backend/app/routes/search_spaces_routes.py index 7a01f2171..d04cf11ce 100644 --- a/surfsense_backend/app/routes/search_spaces_routes.py +++ b/surfsense_backend/app/routes/search_spaces_routes.py @@ -1,18 +1,77 @@ +import logging from pathlib import Path import yaml from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy import func from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select -from app.db import SearchSpace, User, get_async_session -from app.schemas import SearchSpaceCreate, SearchSpaceRead, SearchSpaceUpdate +from app.db import ( + Permission, + SearchSpace, + SearchSpaceMembership, + SearchSpaceRole, + User, + get_async_session, + get_default_roles_config, +) +from app.schemas import ( + SearchSpaceCreate, + SearchSpaceRead, + SearchSpaceUpdate, + SearchSpaceWithStats, +) from app.users import current_active_user -from 
app.utils.check_ownership import check_ownership +from app.utils.rbac import check_permission, check_search_space_access + +logger = logging.getLogger(__name__) router = APIRouter() +async def create_default_roles_and_membership( + session: AsyncSession, + search_space_id: int, + owner_user_id, +) -> None: + """ + Create default system roles for a search space and add the owner as a member. + + Args: + session: Database session + search_space_id: The ID of the newly created search space + owner_user_id: The UUID of the user who created the search space + """ + # Create default roles + default_roles = get_default_roles_config() + owner_role_id = None + + for role_config in default_roles: + db_role = SearchSpaceRole( + name=role_config["name"], + description=role_config["description"], + permissions=role_config["permissions"], + is_default=role_config["is_default"], + is_system_role=role_config["is_system_role"], + search_space_id=search_space_id, + ) + session.add(db_role) + await session.flush() # Get the ID + + if role_config["name"] == "Owner": + owner_role_id = db_role.id + + # Create owner membership + owner_membership = SearchSpaceMembership( + user_id=owner_user_id, + search_space_id=search_space_id, + role_id=owner_role_id, + is_owner=True, + ) + session.add(owner_membership) + + @router.post("/searchspaces", response_model=SearchSpaceRead) async def create_search_space( search_space: SearchSpaceCreate, @@ -27,6 +86,11 @@ async def create_search_space( db_search_space = SearchSpace(**search_space_data, user_id=user.id) session.add(db_search_space) + await session.flush() # Get the search space ID + + # Create default roles and owner membership + await create_default_roles_and_membership(session, db_search_space.id, user.id) + await session.commit() await session.refresh(db_search_space) return db_search_space @@ -34,26 +98,86 @@ async def create_search_space( raise except Exception as e: await session.rollback() + logger.error(f"Failed to create search 
space: {e!s}", exc_info=True) raise HTTPException( status_code=500, detail=f"Failed to create search space: {e!s}" ) from e -@router.get("/searchspaces", response_model=list[SearchSpaceRead]) +@router.get("/searchspaces", response_model=list[SearchSpaceWithStats]) async def read_search_spaces( skip: int = 0, limit: int = 200, + owned_only: bool = False, session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Get all search spaces the user has access to, with member count and ownership info. + + Args: + skip: Number of items to skip + limit: Maximum number of items to return + owned_only: If True, only return search spaces owned by the user. + If False (default), return all search spaces the user has access to. + """ try: - result = await session.execute( - select(SearchSpace) - .filter(SearchSpace.user_id == user.id) - .offset(skip) - .limit(limit) - ) - return result.scalars().all() + if owned_only: + # Return only search spaces where user is the original creator (user_id) + result = await session.execute( + select(SearchSpace) + .filter(SearchSpace.user_id == user.id) + .offset(skip) + .limit(limit) + ) + else: + # Return all search spaces the user has membership in + result = await session.execute( + select(SearchSpace) + .join(SearchSpaceMembership) + .filter(SearchSpaceMembership.user_id == user.id) + .offset(skip) + .limit(limit) + ) + + search_spaces = result.scalars().all() + + # Get member counts and ownership info for each search space + search_spaces_with_stats = [] + for space in search_spaces: + # Get member count + count_result = await session.execute( + select(func.count(SearchSpaceMembership.id)).filter( + SearchSpaceMembership.search_space_id == space.id + ) + ) + member_count = count_result.scalar() or 1 + + # Check if current user is owner + ownership_result = await session.execute( + select(SearchSpaceMembership).filter( + SearchSpaceMembership.search_space_id == space.id, + 
SearchSpaceMembership.user_id == user.id, + SearchSpaceMembership.is_owner == True, # noqa: E712 + ) + ) + is_owner = ownership_result.scalars().first() is not None + + search_spaces_with_stats.append( + SearchSpaceWithStats( + id=space.id, + name=space.name, + description=space.description, + created_at=space.created_at, + user_id=space.user_id, + citations_enabled=space.citations_enabled, + qna_custom_instructions=space.qna_custom_instructions, + member_count=member_count, + is_owner=is_owner, + ) + ) + + return search_spaces_with_stats except Exception as e: raise HTTPException( status_code=500, detail=f"Failed to fetch search spaces: {e!s}" @@ -97,10 +221,22 @@ async def read_search_space( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Get a specific search space by ID. + Requires SETTINGS_VIEW permission or membership. + """ try: - search_space = await check_ownership( - session, SearchSpace, search_space_id, user + # Check if user has access (is a member) + await check_search_space_access(session, user, search_space_id) + + result = await session.execute( + select(SearchSpace).filter(SearchSpace.id == search_space_id) ) + search_space = result.scalars().first() + + if not search_space: + raise HTTPException(status_code=404, detail="Search space not found") + return search_space except HTTPException: @@ -118,10 +254,28 @@ async def update_search_space( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Update a search space. + Requires SETTINGS_UPDATE permission. 
+ """ try: - db_search_space = await check_ownership( - session, SearchSpace, search_space_id, user + # Check permission + await check_permission( + session, + user, + search_space_id, + Permission.SETTINGS_UPDATE.value, + "You don't have permission to update this search space", ) + + result = await session.execute( + select(SearchSpace).filter(SearchSpace.id == search_space_id) + ) + db_search_space = result.scalars().first() + + if not db_search_space: + raise HTTPException(status_code=404, detail="Search space not found") + update_data = search_space_update.model_dump(exclude_unset=True) for key, value in update_data.items(): setattr(db_search_space, key, value) @@ -143,10 +297,28 @@ async def delete_search_space( session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): + """ + Delete a search space. + Requires SETTINGS_DELETE permission (only owners have this by default). + """ try: - db_search_space = await check_ownership( - session, SearchSpace, search_space_id, user + # Check permission - only those with SETTINGS_DELETE can delete + await check_permission( + session, + user, + search_space_id, + Permission.SETTINGS_DELETE.value, + "You don't have permission to delete this search space", ) + + result = await session.execute( + select(SearchSpace).filter(SearchSpace.id == search_space_id) + ) + db_search_space = result.scalars().first() + + if not db_search_space: + raise HTTPException(status_code=404, detail="Search space not found") + await session.delete(db_search_space) await session.commit() return {"message": "Search space deleted successfully"} diff --git a/surfsense_backend/app/schemas/__init__.py b/surfsense_backend/app/schemas/__init__.py index 41b2ce23c..d48d1b7f3 100644 --- a/surfsense_backend/app/schemas/__init__.py +++ b/surfsense_backend/app/schemas/__init__.py @@ -27,6 +27,23 @@ from .podcasts import ( PodcastRead, PodcastUpdate, ) +from .rbac_schemas import ( + InviteAcceptRequest, + 
InviteAcceptResponse, + InviteCreate, + InviteInfoResponse, + InviteRead, + InviteUpdate, + MembershipRead, + MembershipReadWithUser, + MembershipUpdate, + PermissionInfo, + PermissionsListResponse, + RoleCreate, + RoleRead, + RoleUpdate, + UserSearchSpaceAccess, +) from .search_source_connector import ( SearchSourceConnectorBase, SearchSourceConnectorCreate, @@ -38,6 +55,7 @@ from .search_space import ( SearchSpaceCreate, SearchSpaceRead, SearchSpaceUpdate, + SearchSpaceWithStats, ) from .users import UserCreate, UserRead, UserUpdate @@ -60,6 +78,13 @@ __all__ = [ "ExtensionDocumentContent", "ExtensionDocumentMetadata", "IDModel", + # RBAC schemas + "InviteAcceptRequest", + "InviteAcceptResponse", + "InviteCreate", + "InviteInfoResponse", + "InviteRead", + "InviteUpdate", "LLMConfigBase", "LLMConfigCreate", "LLMConfigRead", @@ -69,12 +94,20 @@ __all__ = [ "LogFilter", "LogRead", "LogUpdate", + "MembershipRead", + "MembershipReadWithUser", + "MembershipUpdate", "PaginatedResponse", + "PermissionInfo", + "PermissionsListResponse", "PodcastBase", "PodcastCreate", "PodcastGenerateRequest", "PodcastRead", "PodcastUpdate", + "RoleCreate", + "RoleRead", + "RoleUpdate", "SearchSourceConnectorBase", "SearchSourceConnectorCreate", "SearchSourceConnectorRead", @@ -83,8 +116,10 @@ __all__ = [ "SearchSpaceCreate", "SearchSpaceRead", "SearchSpaceUpdate", + "SearchSpaceWithStats", "TimestampModel", "UserCreate", "UserRead", + "UserSearchSpaceAccess", "UserUpdate", ] diff --git a/surfsense_backend/app/schemas/rbac_schemas.py b/surfsense_backend/app/schemas/rbac_schemas.py new file mode 100644 index 000000000..736d40807 --- /dev/null +++ b/surfsense_backend/app/schemas/rbac_schemas.py @@ -0,0 +1,186 @@ +""" +Pydantic schemas for RBAC (Role-Based Access Control) endpoints. 
+""" + +from datetime import datetime +from uuid import UUID + +from pydantic import BaseModel, Field + +# ============ Role Schemas ============ + + +class RoleBase(BaseModel): + """Base schema for roles.""" + + name: str = Field(..., min_length=1, max_length=100) + description: str | None = Field(None, max_length=500) + permissions: list[str] = Field(default_factory=list) + is_default: bool = False + + +class RoleCreate(RoleBase): + """Schema for creating a new role.""" + + pass + + +class RoleUpdate(BaseModel): + """Schema for updating a role (partial update).""" + + name: str | None = Field(None, min_length=1, max_length=100) + description: str | None = Field(None, max_length=500) + permissions: list[str] | None = None + is_default: bool | None = None + + +class RoleRead(RoleBase): + """Schema for reading a role.""" + + id: int + search_space_id: int + is_system_role: bool + created_at: datetime + + class Config: + from_attributes = True + + +# ============ Membership Schemas ============ + + +class MembershipBase(BaseModel): + """Base schema for memberships.""" + + pass + + +class MembershipUpdate(BaseModel): + """Schema for updating a membership (change role).""" + + role_id: int | None = None + + +class MembershipRead(BaseModel): + """Schema for reading a membership.""" + + id: int + user_id: UUID + search_space_id: int + role_id: int | None + is_owner: bool + joined_at: datetime + created_at: datetime + # Nested role info + role: RoleRead | None = None + # User email (populated separately) + user_email: str | None = None + + class Config: + from_attributes = True + + +class MembershipReadWithUser(MembershipRead): + """Schema for reading a membership with user details.""" + + user_email: str | None = None + user_is_active: bool | None = None + + +# ============ Invite Schemas ============ + + +class InviteBase(BaseModel): + """Base schema for invites.""" + + name: str | None = Field(None, max_length=100) + role_id: int | None = None + expires_at: datetime | 
None = None + max_uses: int | None = Field(None, ge=1) + + +class InviteCreate(InviteBase): + """Schema for creating a new invite.""" + + pass + + +class InviteUpdate(BaseModel): + """Schema for updating an invite (partial update).""" + + name: str | None = Field(None, max_length=100) + role_id: int | None = None + expires_at: datetime | None = None + max_uses: int | None = Field(None, ge=1) + is_active: bool | None = None + + +class InviteRead(InviteBase): + """Schema for reading an invite.""" + + id: int + invite_code: str + search_space_id: int + created_by_id: UUID | None + uses_count: int + is_active: bool + created_at: datetime + # Nested role info + role: RoleRead | None = None + + class Config: + from_attributes = True + + +class InviteAcceptRequest(BaseModel): + """Schema for accepting an invite.""" + + invite_code: str = Field(..., min_length=1) + + +class InviteAcceptResponse(BaseModel): + """Response schema for accepting an invite.""" + + message: str + search_space_id: int + search_space_name: str + role_name: str | None + + +class InviteInfoResponse(BaseModel): + """Response schema for getting invite info (public endpoint).""" + + search_space_name: str + role_name: str | None + is_valid: bool + message: str | None = None + + +# ============ Permission Schemas ============ + + +class PermissionInfo(BaseModel): + """Schema for permission information.""" + + value: str + name: str + category: str + + +class PermissionsListResponse(BaseModel): + """Response schema for listing all available permissions.""" + + permissions: list[PermissionInfo] + + +# ============ User Access Info ============ + + +class UserSearchSpaceAccess(BaseModel): + """Schema for user's access info in a search space.""" + + search_space_id: int + search_space_name: str + is_owner: bool + role_name: str | None + permissions: list[str] diff --git a/surfsense_backend/app/schemas/search_space.py b/surfsense_backend/app/schemas/search_space.py index 49cc0791f..729ff4e7d 100644 --- 
a/surfsense_backend/app/schemas/search_space.py +++ b/surfsense_backend/app/schemas/search_space.py @@ -34,3 +34,10 @@ class SearchSpaceRead(SearchSpaceBase, IDModel, TimestampModel): qna_custom_instructions: str | None = None model_config = ConfigDict(from_attributes=True) + + +class SearchSpaceWithStats(SearchSpaceRead): + """Extended search space info with member count and ownership status.""" + + member_count: int = 1 + is_owner: bool = False diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py index 28f70d285..20a9ffa32 100644 --- a/surfsense_backend/app/services/connector_service.py +++ b/surfsense_backend/app/services/connector_service.py @@ -15,18 +15,17 @@ from app.db import ( Document, SearchSourceConnector, SearchSourceConnectorType, - SearchSpace, ) from app.retriver.chunks_hybrid_search import ChucksHybridSearchRetriever from app.retriver.documents_hybrid_search import DocumentHybridSearchRetriever class ConnectorService: - def __init__(self, session: AsyncSession, user_id: str | None = None): + def __init__(self, session: AsyncSession, search_space_id: int | None = None): self.session = session self.chunk_retriever = ChucksHybridSearchRetriever(session) self.document_retriever = DocumentHybridSearchRetriever(session) - self.user_id = user_id + self.search_space_id = search_space_id self.source_id_counter = ( 100000 # High starting value to avoid collisions with existing IDs ) @@ -36,23 +35,22 @@ class ConnectorService: async def initialize_counter(self): """ - Initialize the source_id_counter based on the total number of chunks for the user. + Initialize the source_id_counter based on the total number of chunks for the search space. This ensures unique IDs across different sessions. 
""" - if self.user_id: + if self.search_space_id: try: - # Count total chunks for documents belonging to this user + # Count total chunks for documents belonging to this search space result = await self.session.execute( select(func.count(Chunk.id)) .join(Document) - .join(SearchSpace) - .filter(SearchSpace.user_id == self.user_id) + .filter(Document.search_space_id == self.search_space_id) ) chunk_count = result.scalar() or 0 self.source_id_counter = chunk_count + 1 print( - f"Initialized source_id_counter to {self.source_id_counter} for user {self.user_id}" + f"Initialized source_id_counter to {self.source_id_counter} for search space {self.search_space_id}" ) except Exception as e: print(f"Error initializing source_id_counter: {e!s}") @@ -62,7 +60,6 @@ class ConnectorService: async def search_crawled_urls( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -70,6 +67,12 @@ class ConnectorService: """ Search for crawled URLs and return both the source information and langchain documents + Args: + user_query: The user's query + search_space_id: The search space ID to search in + top_k: Maximum number of results to return + search_mode: Search mode (CHUNKS or DOCUMENTS) + Returns: tuple: (sources_info, langchain_documents) """ @@ -77,7 +80,6 @@ class ConnectorService: crawled_urls_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="CRAWLED_URL", ) @@ -85,7 +87,6 @@ class ConnectorService: crawled_urls_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="CRAWLED_URL", ) @@ -109,15 +110,43 @@ class ConnectorService: document = chunk.get("document", {}) metadata = document.get("metadata", {}) - # Create a source entry + # Extract webcrawler-specific metadata + url = metadata.get("source", 
metadata.get("url", "")) + title = document.get( + "title", metadata.get("title", "Untitled Document") + ) + description = metadata.get("description", "") + language = metadata.get("language", "") + last_crawled_at = metadata.get("last_crawled_at", "") + + # Build description with crawler info + content_preview = chunk.get("content", "") + if not description and content_preview: + # Use content preview if no description + description = content_preview[:200] + if len(content_preview) > 200: + description += "..." + + # Add crawler metadata to description if available + info_parts = [] + if language: + info_parts.append(f"Language: {language}") + if last_crawled_at: + info_parts.append(f"Last crawled: {last_crawled_at}") + + if info_parts: + if description: + description += f" | {' | '.join(info_parts)}" + else: + description = " | ".join(info_parts) + source = { "id": chunk.get("chunk_id", self.source_id_counter), - "title": document.get("title", "Untitled Document"), - "description": metadata.get( - "og:description", - metadata.get("ogDescription", chunk.get("content", "")), - ), - "url": metadata.get("url", ""), + "title": title, + "description": description, + "url": url, + "language": language, + "last_crawled_at": last_crawled_at, } self.source_id_counter += 1 @@ -136,7 +165,6 @@ class ConnectorService: async def search_files( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -151,7 +179,6 @@ class ConnectorService: files_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="FILE", ) @@ -159,7 +186,6 @@ class ConnectorService: files_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="FILE", ) @@ -239,43 +265,35 @@ class ConnectorService: async def get_connector_by_type( self, - 
user_id: str, connector_type: SearchSourceConnectorType, - search_space_id: int | None = None, + search_space_id: int, ) -> SearchSourceConnector | None: """ - Get a connector by type for a specific user and optionally a search space + Get a connector by type for a specific search space Args: - user_id: The user's ID connector_type: The connector type to retrieve - search_space_id: Optional search space ID to filter by + search_space_id: The search space ID to filter by Returns: Optional[SearchSourceConnector]: The connector if found, None otherwise """ query = select(SearchSourceConnector).filter( - SearchSourceConnector.user_id == user_id, + SearchSourceConnector.search_space_id == search_space_id, SearchSourceConnector.connector_type == connector_type, ) - if search_space_id is not None: - query = query.filter( - SearchSourceConnector.search_space_id == search_space_id - ) - result = await self.session.execute(query) return result.scalars().first() async def search_tavily( - self, user_query: str, user_id: str, search_space_id: int, top_k: int = 20 + self, user_query: str, search_space_id: int, top_k: int = 20 ) -> tuple: """ Search using Tavily API and return both the source information and documents Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID top_k: Maximum number of results to return @@ -284,7 +302,7 @@ class ConnectorService: """ # Get Tavily connector configuration tavily_connector = await self.get_connector_by_type( - user_id, SearchSourceConnectorType.TAVILY_API, search_space_id + SearchSourceConnectorType.TAVILY_API, search_space_id ) if not tavily_connector: @@ -377,7 +395,6 @@ class ConnectorService: async def search_searxng( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, ) -> tuple: @@ -385,7 +402,7 @@ class ConnectorService: Search using a configured SearxNG instance and return both sources and documents. 
""" searx_connector = await self.get_connector_by_type( - user_id, SearchSourceConnectorType.SEARXNG_API, search_space_id + SearchSourceConnectorType.SEARXNG_API, search_space_id ) if not searx_connector: @@ -563,7 +580,6 @@ class ConnectorService: async def search_baidu( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, ) -> tuple: @@ -575,7 +591,6 @@ class ConnectorService: Args: user_query: User's search query - user_id: User ID search_space_id: Search space ID top_k: Maximum number of results to return @@ -584,7 +599,7 @@ class ConnectorService: """ # Get Baidu connector configuration baidu_connector = await self.get_connector_by_type( - user_id, SearchSourceConnectorType.BAIDU_SEARCH_API, search_space_id + SearchSourceConnectorType.BAIDU_SEARCH_API, search_space_id ) if not baidu_connector: @@ -789,7 +804,6 @@ class ConnectorService: async def search_slack( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -804,7 +818,6 @@ class ConnectorService: slack_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="SLACK_CONNECTOR", ) @@ -812,7 +825,6 @@ class ConnectorService: slack_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="SLACK_CONNECTOR", ) @@ -877,7 +889,6 @@ class ConnectorService: async def search_notion( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -887,7 +898,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return @@ -898,7 +908,6 @@ class ConnectorService: notion_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, 
top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="NOTION_CONNECTOR", ) @@ -906,7 +915,6 @@ class ConnectorService: notion_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="NOTION_CONNECTOR", ) @@ -974,7 +982,6 @@ class ConnectorService: async def search_extension( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -984,7 +991,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return @@ -995,7 +1001,6 @@ class ConnectorService: extension_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="EXTENSION", ) @@ -1003,7 +1008,6 @@ class ConnectorService: extension_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="EXTENSION", ) @@ -1095,7 +1099,6 @@ class ConnectorService: async def search_youtube( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1105,7 +1108,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return @@ -1116,7 +1118,6 @@ class ConnectorService: youtube_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="YOUTUBE_VIDEO", ) @@ -1124,7 +1125,6 @@ class ConnectorService: youtube_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, 
document_type="YOUTUBE_VIDEO", ) @@ -1192,7 +1192,6 @@ class ConnectorService: async def search_github( self, user_query: str, - user_id: int, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1207,7 +1206,6 @@ class ConnectorService: github_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="GITHUB_CONNECTOR", ) @@ -1215,7 +1213,6 @@ class ConnectorService: github_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="GITHUB_CONNECTOR", ) @@ -1267,7 +1264,6 @@ class ConnectorService: async def search_linear( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1277,7 +1273,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return @@ -1288,7 +1283,6 @@ class ConnectorService: linear_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="LINEAR_CONNECTOR", ) @@ -1296,7 +1290,6 @@ class ConnectorService: linear_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="LINEAR_CONNECTOR", ) @@ -1376,7 +1369,6 @@ class ConnectorService: async def search_jira( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1386,7 +1378,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return search_mode: Search mode (CHUNKS or DOCUMENTS) @@ -1398,7 +1389,6 @@ 
class ConnectorService: jira_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="JIRA_CONNECTOR", ) @@ -1406,7 +1396,6 @@ class ConnectorService: jira_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="JIRA_CONNECTOR", ) @@ -1497,7 +1486,6 @@ class ConnectorService: async def search_google_calendar( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1507,7 +1495,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return search_mode: Search mode (CHUNKS or DOCUMENTS) @@ -1519,7 +1506,6 @@ class ConnectorService: calendar_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="GOOGLE_CALENDAR_CONNECTOR", ) @@ -1527,7 +1513,6 @@ class ConnectorService: calendar_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="GOOGLE_CALENDAR_CONNECTOR", ) @@ -1630,7 +1615,6 @@ class ConnectorService: async def search_airtable( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1640,7 +1624,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return search_mode: Search mode (CHUNKS or DOCUMENTS) @@ -1652,7 +1635,6 @@ class ConnectorService: airtable_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, 
document_type="AIRTABLE_CONNECTOR", ) @@ -1660,7 +1642,6 @@ class ConnectorService: airtable_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="AIRTABLE_CONNECTOR", ) @@ -1718,7 +1699,6 @@ class ConnectorService: async def search_google_gmail( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1728,7 +1708,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return search_mode: Search mode (CHUNKS or DOCUMENTS) @@ -1740,7 +1719,6 @@ class ConnectorService: gmail_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="GOOGLE_GMAIL_CONNECTOR", ) @@ -1748,7 +1726,6 @@ class ConnectorService: gmail_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="GOOGLE_GMAIL_CONNECTOR", ) @@ -1842,7 +1819,6 @@ class ConnectorService: async def search_confluence( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1852,7 +1828,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return search_mode: Search mode (CHUNKS or DOCUMENTS) @@ -1864,7 +1839,6 @@ class ConnectorService: confluence_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="CONFLUENCE_CONNECTOR", ) @@ -1872,7 +1846,6 @@ class ConnectorService: confluence_chunks = await self.document_retriever.hybrid_search( 
query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="CONFLUENCE_CONNECTOR", ) @@ -1937,7 +1910,6 @@ class ConnectorService: async def search_clickup( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -1947,7 +1919,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return search_mode: Search mode (CHUNKS or DOCUMENTS) @@ -1959,7 +1930,6 @@ class ConnectorService: clickup_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="CLICKUP_CONNECTOR", ) @@ -1967,7 +1937,6 @@ class ConnectorService: clickup_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="CLICKUP_CONNECTOR", ) @@ -2053,7 +2022,6 @@ class ConnectorService: async def search_linkup( self, user_query: str, - user_id: str, search_space_id: int, mode: str = "standard", ) -> tuple: @@ -2062,7 +2030,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID mode: Search depth mode, can be "standard" or "deep" @@ -2071,7 +2038,7 @@ class ConnectorService: """ # Get Linkup connector configuration linkup_connector = await self.get_connector_by_type( - user_id, SearchSourceConnectorType.LINKUP_API, search_space_id + SearchSourceConnectorType.LINKUP_API, search_space_id ) if not linkup_connector: @@ -2176,7 +2143,6 @@ class ConnectorService: async def search_discord( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -2186,7 +2152,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID 
search_space_id: The search space ID to search in top_k: Maximum number of results to return @@ -2197,7 +2162,6 @@ class ConnectorService: discord_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="DISCORD_CONNECTOR", ) @@ -2205,7 +2169,6 @@ class ConnectorService: discord_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="DISCORD_CONNECTOR", ) @@ -2273,7 +2236,6 @@ class ConnectorService: async def search_luma( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -2283,7 +2245,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return search_mode: Search mode (CHUNKS or DOCUMENTS) @@ -2295,7 +2256,6 @@ class ConnectorService: luma_chunks = await self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="LUMA_CONNECTOR", ) @@ -2303,7 +2263,6 @@ class ConnectorService: luma_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="LUMA_CONNECTOR", ) @@ -2431,7 +2390,6 @@ class ConnectorService: async def search_elasticsearch( self, user_query: str, - user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS, @@ -2441,7 +2399,6 @@ class ConnectorService: Args: user_query: The user's query - user_id: The user's ID search_space_id: The search space ID to search in top_k: Maximum number of results to return search_mode: Search mode (CHUNKS or DOCUMENTS) @@ -2453,7 +2410,6 @@ class ConnectorService: elasticsearch_chunks = await 
self.chunk_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="ELASTICSEARCH_CONNECTOR", ) @@ -2461,7 +2417,6 @@ class ConnectorService: elasticsearch_chunks = await self.document_retriever.hybrid_search( query_text=user_query, top_k=top_k, - user_id=user_id, search_space_id=search_space_id, document_type="ELASTICSEARCH_CONNECTOR", ) diff --git a/surfsense_backend/app/services/llm_service.py b/surfsense_backend/app/services/llm_service.py index ea9140f8e..c3270b59e 100644 --- a/surfsense_backend/app/services/llm_service.py +++ b/surfsense_backend/app/services/llm_service.py @@ -7,7 +7,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from app.config import config -from app.db import LLMConfig, UserSearchSpacePreference +from app.db import LLMConfig, SearchSpace # Configure litellm to automatically drop unsupported parameters litellm.drop_params = True @@ -144,15 +144,16 @@ async def validate_llm_config( return False, error_msg -async def get_user_llm_instance( - session: AsyncSession, user_id: str, search_space_id: int, role: str +async def get_search_space_llm_instance( + session: AsyncSession, search_space_id: int, role: str ) -> ChatLiteLLM | None: """ - Get a ChatLiteLLM instance for a specific user, search space, and role. + Get a ChatLiteLLM instance for a specific search space and role. + + LLM preferences are stored at the search space level and shared by all members. 
Args: session: Database session - user_id: User ID search_space_id: Search Space ID role: LLM role ('long_context', 'fast', or 'strategic') @@ -160,37 +161,30 @@ async def get_user_llm_instance( ChatLiteLLM instance or None if not found """ try: - # Get user's LLM preferences for this search space + # Get the search space with its LLM preferences result = await session.execute( - select(UserSearchSpacePreference).where( - UserSearchSpacePreference.user_id == user_id, - UserSearchSpacePreference.search_space_id == search_space_id, - ) + select(SearchSpace).where(SearchSpace.id == search_space_id) ) - preference = result.scalars().first() + search_space = result.scalars().first() - if not preference: - logger.error( - f"No LLM preferences found for user {user_id} in search space {search_space_id}" - ) + if not search_space: + logger.error(f"Search space {search_space_id} not found") return None # Get the appropriate LLM config ID based on role llm_config_id = None if role == LLMRole.LONG_CONTEXT: - llm_config_id = preference.long_context_llm_id + llm_config_id = search_space.long_context_llm_id elif role == LLMRole.FAST: - llm_config_id = preference.fast_llm_id + llm_config_id = search_space.fast_llm_id elif role == LLMRole.STRATEGIC: - llm_config_id = preference.strategic_llm_id + llm_config_id = search_space.strategic_llm_id else: logger.error(f"Invalid LLM role: {role}") return None if not llm_config_id: - logger.error( - f"No {role} LLM configured for user {user_id} in search space {search_space_id}" - ) + logger.error(f"No {role} LLM configured for search space {search_space_id}") return None # Check if this is a global config (negative ID) @@ -331,31 +325,63 @@ async def get_user_llm_instance( except Exception as e: logger.error( - f"Error getting LLM instance for user {user_id}, role {role}: {e!s}" + f"Error getting LLM instance for search space {search_space_id}, role {role}: {e!s}" ) return None +async def get_long_context_llm( + session: AsyncSession, 
search_space_id: int +) -> ChatLiteLLM | None: + """Get the search space's long context LLM instance.""" + return await get_search_space_llm_instance( + session, search_space_id, LLMRole.LONG_CONTEXT + ) + + +async def get_fast_llm( + session: AsyncSession, search_space_id: int +) -> ChatLiteLLM | None: + """Get the search space's fast LLM instance.""" + return await get_search_space_llm_instance(session, search_space_id, LLMRole.FAST) + + +async def get_strategic_llm( + session: AsyncSession, search_space_id: int +) -> ChatLiteLLM | None: + """Get the search space's strategic LLM instance.""" + return await get_search_space_llm_instance( + session, search_space_id, LLMRole.STRATEGIC + ) + + +# Backward-compatible aliases (deprecated - will be removed in future versions) +async def get_user_llm_instance( + session: AsyncSession, user_id: str, search_space_id: int, role: str +) -> ChatLiteLLM | None: + """ + Deprecated: Use get_search_space_llm_instance instead. + LLM preferences are now stored at the search space level, not per-user. 
+ """ + return await get_search_space_llm_instance(session, search_space_id, role) + + async def get_user_long_context_llm( session: AsyncSession, user_id: str, search_space_id: int ) -> ChatLiteLLM | None: - """Get user's long context LLM instance for a specific search space.""" - return await get_user_llm_instance( - session, user_id, search_space_id, LLMRole.LONG_CONTEXT - ) + """Deprecated: Use get_long_context_llm instead.""" + return await get_long_context_llm(session, search_space_id) async def get_user_fast_llm( session: AsyncSession, user_id: str, search_space_id: int ) -> ChatLiteLLM | None: - """Get user's fast LLM instance for a specific search space.""" - return await get_user_llm_instance(session, user_id, search_space_id, LLMRole.FAST) + """Deprecated: Use get_fast_llm instead.""" + return await get_fast_llm(session, search_space_id) async def get_user_strategic_llm( session: AsyncSession, user_id: str, search_space_id: int ) -> ChatLiteLLM | None: - """Get user's strategic LLM instance for a specific search space.""" - return await get_user_llm_instance( - session, user_id, search_space_id, LLMRole.STRATEGIC - ) + """Deprecated: Use get_strategic_llm instead.""" + return await get_strategic_llm(session, search_space_id) diff --git a/surfsense_backend/app/services/query_service.py b/surfsense_backend/app/services/query_service.py index d2759ab27..0521dc942 100644 --- a/surfsense_backend/app/services/query_service.py +++ b/surfsense_backend/app/services/query_service.py @@ -4,7 +4,7 @@ from typing import Any from langchain.schema import AIMessage, HumanMessage, SystemMessage from sqlalchemy.ext.asyncio import AsyncSession -from app.services.llm_service import get_user_strategic_llm +from app.services.llm_service import get_strategic_llm class QueryService: @@ -16,19 +16,17 @@ class QueryService: async def reformulate_query_with_chat_history( user_query: str, session: AsyncSession, - user_id: str, search_space_id: int, chat_history_str: str | None = 
None, ) -> str: """ - Reformulate the user query using the user's strategic LLM to make it more + Reformulate the user query using the search space's strategic LLM to make it more effective for information retrieval and research purposes. Args: user_query: The original user query - session: Database session for accessing user LLM configs - user_id: User ID to get their specific LLM configuration - search_space_id: Search Space ID to get user's LLM preferences + session: Database session for accessing LLM configs + search_space_id: Search Space ID to get LLM preferences chat_history_str: Optional chat history string Returns: @@ -38,11 +36,11 @@ class QueryService: return user_query try: - # Get the user's strategic LLM instance - llm = await get_user_strategic_llm(session, user_id, search_space_id) + # Get the search space's strategic LLM instance + llm = await get_strategic_llm(session, search_space_id) if not llm: print( - f"Warning: No strategic LLM configured for user {user_id} in search space {search_space_id}. Using original query." + f"Warning: No strategic LLM configured for search space {search_space_id}. Using original query." 
) return user_query diff --git a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py index 5e6907499..b735741fe 100644 --- a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py +++ b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py @@ -600,3 +600,46 @@ async def _index_elasticsearch_documents( await run_elasticsearch_indexing( session, connector_id, search_space_id, user_id, start_date, end_date ) + + +@celery_app.task(name="index_crawled_urls", bind=True) +def index_crawled_urls_task( + self, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """Celery task to index Web page Urls.""" + import asyncio + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + try: + loop.run_until_complete( + _index_crawled_urls( + connector_id, search_space_id, user_id, start_date, end_date + ) + ) + finally: + loop.close() + + +async def _index_crawled_urls( + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """Index Web page Urls with new session.""" + from app.routes.search_source_connectors_routes import ( + run_web_page_indexing, + ) + + async with get_celery_session_maker()() as session: + await run_web_page_indexing( + session, connector_id, search_space_id, user_id, start_date, end_date + ) diff --git a/surfsense_backend/app/tasks/celery_tasks/document_tasks.py b/surfsense_backend/app/tasks/celery_tasks/document_tasks.py index 73af21550..5cf5a662a 100644 --- a/surfsense_backend/app/tasks/celery_tasks/document_tasks.py +++ b/surfsense_backend/app/tasks/celery_tasks/document_tasks.py @@ -9,7 +9,6 @@ from app.celery_app import celery_app from app.config import config from app.services.task_logging_service import TaskLoggingService from app.tasks.document_processors import ( - add_crawled_url_document, add_extension_received_document, add_youtube_video_document, ) @@ -120,71 
+119,6 @@ async def _process_extension_document( raise -@celery_app.task(name="process_crawled_url", bind=True) -def process_crawled_url_task(self, url: str, search_space_id: int, user_id: str): - """ - Celery task to process crawled URL. - - Args: - url: URL to crawl and process - search_space_id: ID of the search space - user_id: ID of the user - """ - import asyncio - - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - try: - loop.run_until_complete(_process_crawled_url(url, search_space_id, user_id)) - finally: - loop.close() - - -async def _process_crawled_url(url: str, search_space_id: int, user_id: str): - """Process crawled URL with new session.""" - async with get_celery_session_maker()() as session: - task_logger = TaskLoggingService(session, search_space_id) - - log_entry = await task_logger.log_task_start( - task_name="process_crawled_url", - source="document_processor", - message=f"Starting URL crawling and processing for: {url}", - metadata={"document_type": "CRAWLED_URL", "url": url, "user_id": user_id}, - ) - - try: - result = await add_crawled_url_document( - session, url, search_space_id, user_id - ) - - if result: - await task_logger.log_task_success( - log_entry, - f"Successfully crawled and processed URL: {url}", - { - "document_id": result.id, - "title": result.title, - "content_hash": result.content_hash, - }, - ) - else: - await task_logger.log_task_success( - log_entry, - f"URL document already exists (duplicate): {url}", - {"duplicate_detected": True}, - ) - except Exception as e: - await task_logger.log_task_failure( - log_entry, - f"Failed to crawl URL: {url}", - str(e), - {"error_type": type(e).__name__}, - ) - logger.error(f"Error processing crawled URL: {e!s}") - raise - - @celery_app.task(name="process_youtube_video", bind=True) def process_youtube_video_task(self, url: str, search_space_id: int, user_id: str): """ diff --git a/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py 
b/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py index 39d6bf840..dbc326406 100644 --- a/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py +++ b/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py @@ -67,6 +67,7 @@ async def _check_and_trigger_schedules(): index_airtable_records_task, index_clickup_tasks_task, index_confluence_pages_task, + index_crawled_urls_task, index_discord_messages_task, index_elasticsearch_documents_task, index_github_repos_task, @@ -94,6 +95,7 @@ async def _check_and_trigger_schedules(): SearchSourceConnectorType.DISCORD_CONNECTOR: index_discord_messages_task, SearchSourceConnectorType.LUMA_CONNECTOR: index_luma_events_task, SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task, + SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task, } # Trigger indexing for each due connector diff --git a/surfsense_backend/app/tasks/connector_indexers/__init__.py b/surfsense_backend/app/tasks/connector_indexers/__init__.py index 766506f70..f62739679 100644 --- a/surfsense_backend/app/tasks/connector_indexers/__init__.py +++ b/surfsense_backend/app/tasks/connector_indexers/__init__.py @@ -17,6 +17,7 @@ Available indexers: - Google Gmail: Index messages from Google Gmail - Google Calendar: Index events from Google Calendar - Luma: Index events from Luma +- Webcrawler: Index crawled URLs - Elasticsearch: Index documents from Elasticsearch instances """ @@ -41,6 +42,7 @@ from .luma_indexer import index_luma_events # Documentation and knowledge management from .notion_indexer import index_notion_pages from .slack_indexer import index_slack_messages +from .webcrawler_indexer import index_crawled_urls __all__ = [ # noqa: RUF022 "index_airtable_records", @@ -58,6 +60,7 @@ __all__ = [ # noqa: RUF022 "index_linear_issues", # Documentation and knowledge management "index_notion_pages", + "index_crawled_urls", # Communication platforms "index_slack_messages", 
"index_google_gmail_messages", diff --git a/surfsense_backend/app/tasks/connector_indexers/webcrawler_indexer.py b/surfsense_backend/app/tasks/connector_indexers/webcrawler_indexer.py new file mode 100644 index 000000000..6a6cb0ef8 --- /dev/null +++ b/surfsense_backend/app/tasks/connector_indexers/webcrawler_indexer.py @@ -0,0 +1,450 @@ +""" +Webcrawler connector indexer. +""" + +from datetime import datetime + +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import config +from app.connectors.webcrawler_connector import WebCrawlerConnector +from app.db import Document, DocumentType, SearchSourceConnectorType +from app.services.llm_service import get_user_long_context_llm +from app.services.task_logging_service import TaskLoggingService +from app.utils.document_converters import ( + create_document_chunks, + generate_content_hash, + generate_document_summary, + generate_unique_identifier_hash, +) + +from .base import ( + check_document_by_unique_identifier, + get_connector_by_id, + logger, + update_connector_last_indexed, +) + + +async def index_crawled_urls( + session: AsyncSession, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str | None = None, + end_date: str | None = None, + update_last_indexed: bool = True, +) -> tuple[int, str | None]: + """ + Index web page URLs. 
+ + Args: + session: Database session + connector_id: ID of the webcrawler connector + search_space_id: ID of the search space to store documents in + user_id: User ID + start_date: Start date for filtering (YYYY-MM-DD format) - optional + end_date: End date for filtering (YYYY-MM-DD format) - optional + update_last_indexed: Whether to update the last_indexed_at timestamp (default: True) + + Returns: + Tuple containing (number of documents indexed, error message or None) + """ + task_logger = TaskLoggingService(session, search_space_id) + + # Log task start + log_entry = await task_logger.log_task_start( + task_name="crawled_url_indexing", + source="connector_indexing_task", + message=f"Starting web page URL indexing for connector {connector_id}", + metadata={ + "connector_id": connector_id, + "user_id": str(user_id), + "start_date": start_date, + "end_date": end_date, + }, + ) + + try: + # Get the connector + await task_logger.log_task_progress( + log_entry, + f"Retrieving webcrawler connector {connector_id} from database", + {"stage": "connector_retrieval"}, + ) + + # Get the connector from the database + connector = await get_connector_by_id( + session, connector_id, SearchSourceConnectorType.WEBCRAWLER_CONNECTOR + ) + + if not connector: + await task_logger.log_task_failure( + log_entry, + f"Connector with ID {connector_id} not found or is not a webcrawler connector", + "Connector not found", + {"error_type": "ConnectorNotFound"}, + ) + return ( + 0, + f"Connector with ID {connector_id} not found or is not a webcrawler connector", + ) + + # Get the Firecrawl API key from the connector config (optional) + api_key = connector.config.get("FIRECRAWL_API_KEY") + + # Get URLs from connector config + initial_urls = connector.config.get("INITIAL_URLS", "") + if isinstance(initial_urls, str): + urls = [url.strip() for url in initial_urls.split("\n") if url.strip()] + elif isinstance(initial_urls, list): + urls = [url.strip() for url in initial_urls if url.strip()] + 
else: + urls = [] + + logger.info( + f"Starting crawled web page indexing for connector {connector_id} with {len(urls)} URLs" + ) + + # Initialize webcrawler client + await task_logger.log_task_progress( + log_entry, + f"Initializing webcrawler client for connector {connector_id}", + { + "stage": "client_initialization", + "use_firecrawl": bool(api_key), + }, + ) + + crawler = WebCrawlerConnector(firecrawl_api_key=api_key) + + # Validate URLs + if not urls: + await task_logger.log_task_failure( + log_entry, + "No URLs provided for indexing", + "Empty URL list", + {"error_type": "ValidationError"}, + ) + return 0, "No URLs provided for indexing" + + await task_logger.log_task_progress( + log_entry, + f"Starting to crawl {len(urls)} URLs", + { + "stage": "crawling", + "total_urls": len(urls), + }, + ) + + documents_indexed = 0 + documents_updated = 0 + documents_skipped = 0 + failed_urls = [] + + for idx, url in enumerate(urls, 1): + try: + logger.info(f"Processing URL {idx}/{len(urls)}: {url}") + + await task_logger.log_task_progress( + log_entry, + f"Crawling URL {idx}/{len(urls)}: {url}", + { + "stage": "crawling_url", + "url_index": idx, + "url": url, + }, + ) + + # Crawl the URL + crawl_result, error = await crawler.crawl_url(url) + + if error or not crawl_result: + logger.warning(f"Failed to crawl URL {url}: {error}") + failed_urls.append((url, error or "Unknown error")) + continue + + # Extract content and metadata + content = crawl_result.get("content", "") + metadata = crawl_result.get("metadata", {}) + crawler_type = crawl_result.get("crawler_type", "unknown") + + if not content.strip(): + logger.warning(f"Skipping URL with no content: {url}") + failed_urls.append((url, "No content extracted")) + documents_skipped += 1 + continue + + # Format content as structured document + structured_document = crawler.format_to_structured_document( + crawl_result + ) + + # Generate unique identifier hash for this URL + unique_identifier_hash = 
generate_unique_identifier_hash( + DocumentType.CRAWLED_URL, url, search_space_id + ) + + # Generate content hash + # TODO: To fix this by not including dynamic content like date, time, etc. + content_hash = generate_content_hash( + structured_document, search_space_id + ) + + # Check if document with this unique identifier already exists + existing_document = await check_document_by_unique_identifier( + session, unique_identifier_hash + ) + + # Extract useful metadata + title = metadata.get("title", url) + description = metadata.get("description", "") + language = metadata.get("language", "") + + if existing_document: + # Document exists - check if content has changed + if existing_document.content_hash == content_hash: + logger.info(f"Document for URL {url} unchanged. Skipping.") + documents_skipped += 1 + continue + else: + # Content has changed - update the existing document + logger.info( + f"Content changed for URL {url}. Updating document." + ) + + # Generate summary with metadata + user_llm = await get_user_long_context_llm( + session, user_id, search_space_id + ) + + if user_llm: + document_metadata = { + "url": url, + "title": title, + "description": description, + "language": language, + "document_type": "Crawled URL", + "crawler_type": crawler_type, + } + ( + summary_content, + summary_embedding, + ) = await generate_document_summary( + structured_document, user_llm, document_metadata + ) + else: + # Fallback to simple summary if no LLM configured + summary_content = f"Crawled URL: {title}\n\n" + summary_content += f"URL: {url}\n" + if description: + summary_content += f"Description: {description}\n" + if language: + summary_content += f"Language: {language}\n" + summary_content += f"Crawler: {crawler_type}\n\n" + + # Add content preview + content_preview = content[:1000] + if len(content) > 1000: + content_preview += "..." 
+ summary_content += f"Content Preview:\n{content_preview}\n" + + summary_embedding = config.embedding_model_instance.embed( + summary_content + ) + + # Process chunks + chunks = await create_document_chunks(content) + + # Update existing document + existing_document.title = title + existing_document.content = summary_content + existing_document.content_hash = content_hash + existing_document.embedding = summary_embedding + existing_document.document_metadata = { + **metadata, + "crawler_type": crawler_type, + "last_crawled_at": datetime.now().strftime( + "%Y-%m-%d %H:%M:%S" + ), + } + existing_document.chunks = chunks + + documents_updated += 1 + logger.info(f"Successfully updated URL {url}") + continue + + # Document doesn't exist - create new one + # Generate summary with metadata + user_llm = await get_user_long_context_llm( + session, user_id, search_space_id + ) + + if user_llm: + document_metadata = { + "url": url, + "title": title, + "description": description, + "language": language, + "document_type": "Crawled URL", + "crawler_type": crawler_type, + } + ( + summary_content, + summary_embedding, + ) = await generate_document_summary( + structured_document, user_llm, document_metadata + ) + else: + # Fallback to simple summary if no LLM configured + summary_content = f"Crawled URL: {title}\n\n" + summary_content += f"URL: {url}\n" + if description: + summary_content += f"Description: {description}\n" + if language: + summary_content += f"Language: {language}\n" + summary_content += f"Crawler: {crawler_type}\n\n" + + # Add content preview + content_preview = content[:1000] + if len(content) > 1000: + content_preview += "..." 
+ summary_content += f"Content Preview:\n{content_preview}\n" + + summary_embedding = config.embedding_model_instance.embed( + summary_content + ) + + chunks = await create_document_chunks(content) + + document = Document( + search_space_id=search_space_id, + title=title, + document_type=DocumentType.CRAWLED_URL, + document_metadata={ + **metadata, + "crawler_type": crawler_type, + "indexed_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + }, + content=summary_content, + content_hash=content_hash, + unique_identifier_hash=unique_identifier_hash, + embedding=summary_embedding, + chunks=chunks, + ) + + session.add(document) + documents_indexed += 1 + logger.info(f"Successfully indexed new URL {url}") + + # Batch commit every 10 documents + if (documents_indexed + documents_updated) % 10 == 0: + logger.info( + f"Committing batch: {documents_indexed + documents_updated} URLs processed so far" + ) + await session.commit() + + except Exception as e: + logger.error( + f"Error processing URL {url}: {e!s}", + exc_info=True, + ) + failed_urls.append((url, str(e))) + continue + + total_processed = documents_indexed + documents_updated + + if total_processed > 0: + await update_connector_last_indexed(session, connector, update_last_indexed) + + # Final commit for any remaining documents not yet committed in batches + logger.info( + f"Final commit: Total {documents_indexed} new, {documents_updated} updated URLs processed" + ) + await session.commit() + + # Build result message + result_message = None + if failed_urls: + failed_summary = "; ".join( + [f"{url}: {error}" for url, error in failed_urls[:5]] + ) + if len(failed_urls) > 5: + failed_summary += f" (and {len(failed_urls) - 5} more)" + result_message = ( + f"Completed with {len(failed_urls)} failures: {failed_summary}" + ) + + await task_logger.log_task_success( + log_entry, + f"Successfully completed crawled web page indexing for connector {connector_id}", + { + "urls_processed": total_processed, + 
"documents_indexed": documents_indexed, + "documents_updated": documents_updated, + "documents_skipped": documents_skipped, + "failed_urls_count": len(failed_urls), + }, + ) + + logger.info( + f"Web page indexing completed: {documents_indexed} new, " + f"{documents_updated} updated, {documents_skipped} skipped, " + f"{len(failed_urls)} failed" + ) + return total_processed, result_message + + except SQLAlchemyError as db_error: + await session.rollback() + await task_logger.log_task_failure( + log_entry, + f"Database error during web page indexing for connector {connector_id}", + str(db_error), + {"error_type": "SQLAlchemyError"}, + ) + logger.error(f"Database error: {db_error!s}", exc_info=True) + return 0, f"Database error: {db_error!s}" + except Exception as e: + await session.rollback() + await task_logger.log_task_failure( + log_entry, + f"Failed to index web page URLs for connector {connector_id}", + str(e), + {"error_type": type(e).__name__}, + ) + logger.error(f"Failed to index web page URLs: {e!s}", exc_info=True) + return 0, f"Failed to index web page URLs: {e!s}" + + +async def get_crawled_url_documents( + session: AsyncSession, + search_space_id: int, + connector_id: int | None = None, +) -> list[Document]: + """ + Get all crawled URL documents for a search space. 
+ + Args: + session: Database session + search_space_id: ID of the search space + connector_id: Optional connector ID to filter by + + Returns: + List of Document objects + """ + from sqlalchemy import select + + query = select(Document).filter( + Document.search_space_id == search_space_id, + Document.document_type == DocumentType.CRAWLED_URL, + ) + + if connector_id: + # Filter by connector if needed - you might need to add a connector_id field to Document + # or filter by some other means depending on your schema + pass + + result = await session.execute(query) + documents = result.scalars().all() + return list(documents) diff --git a/surfsense_backend/app/tasks/document_processors/__init__.py b/surfsense_backend/app/tasks/document_processors/__init__.py index a238ac877..e70c41cb4 100644 --- a/surfsense_backend/app/tasks/document_processors/__init__.py +++ b/surfsense_backend/app/tasks/document_processors/__init__.py @@ -6,7 +6,6 @@ and sources. Each processor is responsible for handling a specific type of docum processing task in the background. 
Available processors: -- URL crawler: Process web pages from URLs - Extension processor: Handle documents from browser extension - Markdown processor: Process markdown files - File processors: Handle files using different ETL services (Unstructured, LlamaCloud, Docling) @@ -26,14 +25,11 @@ from .file_processors import ( # Markdown processor from .markdown_processor import add_received_markdown_file_document -from .url_crawler import add_crawled_url_document # YouTube processor from .youtube_processor import add_youtube_video_document __all__ = [ - # URL processing - "add_crawled_url_document", # Extension processing "add_extension_received_document", "add_received_file_document_using_docling", diff --git a/surfsense_backend/app/tasks/document_processors/url_crawler.py b/surfsense_backend/app/tasks/document_processors/url_crawler.py deleted file mode 100644 index 1b516b5bf..000000000 --- a/surfsense_backend/app/tasks/document_processors/url_crawler.py +++ /dev/null @@ -1,342 +0,0 @@ -""" -URL crawler document processor. -""" - -import logging - -import validators -from firecrawl import AsyncFirecrawlApp -from langchain_community.document_loaders import AsyncChromiumLoader -from langchain_core.documents import Document as LangchainDocument -from sqlalchemy.exc import SQLAlchemyError -from sqlalchemy.ext.asyncio import AsyncSession - -from app.config import config -from app.db import Document, DocumentType -from app.services.llm_service import get_user_long_context_llm -from app.services.task_logging_service import TaskLoggingService -from app.utils.document_converters import ( - create_document_chunks, - generate_content_hash, - generate_document_summary, - generate_unique_identifier_hash, -) - -from .base import ( - check_document_by_unique_identifier, - md, -) - - -async def add_crawled_url_document( - session: AsyncSession, url: str, search_space_id: int, user_id: str -) -> Document | None: - """ - Process and store a document from a crawled URL. 
- - Args: - session: Database session - url: URL to crawl - search_space_id: ID of the search space - user_id: ID of the user - - Returns: - Document object if successful, None if failed - """ - task_logger = TaskLoggingService(session, search_space_id) - - # Log task start - log_entry = await task_logger.log_task_start( - task_name="crawl_url_document", - source="background_task", - message=f"Starting URL crawling process for: {url}", - metadata={"url": url, "user_id": str(user_id)}, - ) - - try: - # URL validation step - await task_logger.log_task_progress( - log_entry, f"Validating URL: {url}", {"stage": "validation"} - ) - - if not validators.url(url): - raise ValueError(f"Url {url} is not a valid URL address") - - # Set up crawler - await task_logger.log_task_progress( - log_entry, - f"Setting up crawler for URL: {url}", - { - "stage": "crawler_setup", - "firecrawl_available": bool(config.FIRECRAWL_API_KEY), - }, - ) - - use_firecrawl = bool(config.FIRECRAWL_API_KEY) - - if use_firecrawl: - # Use Firecrawl SDK directly - firecrawl_app = AsyncFirecrawlApp(api_key=config.FIRECRAWL_API_KEY) - else: - crawl_loader = AsyncChromiumLoader(urls=[url], headless=True) - - # Perform crawling - await task_logger.log_task_progress( - log_entry, - f"Crawling URL content: {url}", - { - "stage": "crawling", - "crawler_type": "AsyncFirecrawlApp" - if use_firecrawl - else "AsyncChromiumLoader", - }, - ) - - if use_firecrawl: - # Use async Firecrawl SDK with v1 API - properly awaited - scrape_result = await firecrawl_app.scrape_url( - url=url, formats=["markdown"] - ) - - # scrape_result is a Pydantic ScrapeResponse object - # Access attributes directly - if scrape_result and scrape_result.success: - # Extract markdown content - markdown_content = scrape_result.markdown or "" - - # Extract metadata - this is a DICT - metadata = scrape_result.metadata if scrape_result.metadata else {} - - # Convert to LangChain Document format - url_crawled = [ - LangchainDocument( - 
page_content=markdown_content, - metadata={ - "source": url, - "title": metadata.get("title", url), - "description": metadata.get("description", ""), - "language": metadata.get("language", ""), - "sourceURL": metadata.get("sourceURL", url), - **metadata, # Include all other metadata fields - }, - ) - ] - content_in_markdown = url_crawled[0].page_content - else: - error_msg = ( - scrape_result.error - if scrape_result and hasattr(scrape_result, "error") - else "Unknown error" - ) - raise ValueError(f"Firecrawl failed to scrape URL: {error_msg}") - else: - # Use AsyncChromiumLoader as fallback - url_crawled = await crawl_loader.aload() - content_in_markdown = md.transform_documents(url_crawled)[0].page_content - - # Format document - await task_logger.log_task_progress( - log_entry, - f"Processing crawled content from: {url}", - {"stage": "content_processing", "content_length": len(content_in_markdown)}, - ) - - # Format document metadata in a more maintainable way - metadata_sections = [ - ( - "METADATA", - [ - f"{key.upper()}: {value}" - for key, value in url_crawled[0].metadata.items() - ], - ), - ( - "CONTENT", - ["FORMAT: markdown", "TEXT_START", content_in_markdown, "TEXT_END"], - ), - ] - - # Build the document string more efficiently - document_parts = [] - document_parts.append("") - - for section_title, section_content in metadata_sections: - document_parts.append(f"<{section_title}>") - document_parts.extend(section_content) - document_parts.append(f"") - - document_parts.append("") - combined_document_string = "\n".join(document_parts) - - # Generate unique identifier hash for this URL - unique_identifier_hash = generate_unique_identifier_hash( - DocumentType.CRAWLED_URL, url, search_space_id - ) - - # Generate content hash - content_hash = generate_content_hash(combined_document_string, search_space_id) - - # Check if document with this unique identifier already exists - await task_logger.log_task_progress( - log_entry, - f"Checking for existing URL: 
{url}", - {"stage": "duplicate_check", "url": url}, - ) - - existing_document = await check_document_by_unique_identifier( - session, unique_identifier_hash - ) - - if existing_document: - # Document exists - check if content has changed - if existing_document.content_hash == content_hash: - await task_logger.log_task_success( - log_entry, - f"URL document unchanged: {url}", - { - "duplicate_detected": True, - "existing_document_id": existing_document.id, - }, - ) - logging.info(f"Document for URL {url} unchanged. Skipping.") - return existing_document - else: - # Content has changed - update the existing document - logging.info(f"Content changed for URL {url}. Updating document.") - await task_logger.log_task_progress( - log_entry, - f"Updating URL document: {url}", - {"stage": "document_update", "url": url}, - ) - - # Get LLM for summary generation (needed for both create and update) - await task_logger.log_task_progress( - log_entry, - f"Preparing for summary generation: {url}", - {"stage": "llm_setup"}, - ) - - # Get user's long context LLM - user_llm = await get_user_long_context_llm(session, user_id, search_space_id) - if not user_llm: - raise RuntimeError( - f"No long context LLM configured for user {user_id} in search space {search_space_id}" - ) - - # Generate summary - await task_logger.log_task_progress( - log_entry, - f"Generating summary for URL content: {url}", - {"stage": "summary_generation"}, - ) - - # Generate summary with metadata - document_metadata = { - "url": url, - "title": url_crawled[0].metadata.get("title", url), - "document_type": "Crawled URL Document", - "crawler_type": "FirecrawlApp" if use_firecrawl else "AsyncChromiumLoader", - } - summary_content, summary_embedding = await generate_document_summary( - combined_document_string, user_llm, document_metadata - ) - - # Process chunks - await task_logger.log_task_progress( - log_entry, - f"Processing content chunks for URL: {url}", - {"stage": "chunk_processing"}, - ) - - from 
app.utils.blocknote_converter import convert_markdown_to_blocknote - - # Convert markdown to BlockNote JSON - blocknote_json = await convert_markdown_to_blocknote(combined_document_string) - if not blocknote_json: - logging.warning( - f"Failed to convert crawled URL '{url}' to BlockNote JSON, " - "document will not be editable" - ) - - chunks = await create_document_chunks(content_in_markdown) - - # Update or create document - if existing_document: - # Update existing document - await task_logger.log_task_progress( - log_entry, - f"Updating document in database for URL: {url}", - {"stage": "document_update", "chunks_count": len(chunks)}, - ) - - existing_document.title = url_crawled[0].metadata.get( - "title", url_crawled[0].metadata.get("source", url) - ) - existing_document.content = summary_content - existing_document.content_hash = content_hash - existing_document.embedding = summary_embedding - existing_document.document_metadata = url_crawled[0].metadata - existing_document.chunks = chunks - existing_document.blocknote_document = blocknote_json - - document = existing_document - else: - # Create new document - await task_logger.log_task_progress( - log_entry, - f"Creating document in database for URL: {url}", - {"stage": "document_creation", "chunks_count": len(chunks)}, - ) - - document = Document( - search_space_id=search_space_id, - title=url_crawled[0].metadata.get( - "title", url_crawled[0].metadata.get("source", url) - ), - document_type=DocumentType.CRAWLED_URL, - document_metadata=url_crawled[0].metadata, - content=summary_content, - embedding=summary_embedding, - chunks=chunks, - content_hash=content_hash, - unique_identifier_hash=unique_identifier_hash, - blocknote_document=blocknote_json, - ) - - session.add(document) - await session.commit() - await session.refresh(document) - - # Log success - await task_logger.log_task_success( - log_entry, - f"Successfully crawled and processed URL: {url}", - { - "document_id": document.id, - "title": 
document.title, - "content_hash": content_hash, - "chunks_count": len(chunks), - "summary_length": len(summary_content), - }, - ) - - return document - - except SQLAlchemyError as db_error: - await session.rollback() - await task_logger.log_task_failure( - log_entry, - f"Database error while processing URL: {url}", - str(db_error), - {"error_type": "SQLAlchemyError"}, - ) - raise db_error - except Exception as e: - await session.rollback() - await task_logger.log_task_failure( - log_entry, - f"Failed to crawl URL: {url}", - str(e), - {"error_type": type(e).__name__}, - ) - raise RuntimeError(f"Failed to crawl URL: {e!s}") from e diff --git a/surfsense_backend/app/utils/check_ownership.py b/surfsense_backend/app/utils/check_ownership.py deleted file mode 100644 index 0bd290ff3..000000000 --- a/surfsense_backend/app/utils/check_ownership.py +++ /dev/null @@ -1,19 +0,0 @@ -from fastapi import HTTPException -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select - -from app.db import User - - -# Helper function to check user ownership -async def check_ownership(session: AsyncSession, model, item_id: int, user: User): - item = await session.execute( - select(model).filter(model.id == item_id, model.user_id == user.id) - ) - item = item.scalars().first() - if not item: - raise HTTPException( - status_code=404, - detail="Item not found or you don't have permission to access it", - ) - return item diff --git a/surfsense_backend/app/utils/periodic_scheduler.py b/surfsense_backend/app/utils/periodic_scheduler.py index 225425714..e33661d65 100644 --- a/surfsense_backend/app/utils/periodic_scheduler.py +++ b/surfsense_backend/app/utils/periodic_scheduler.py @@ -31,6 +31,7 @@ CONNECTOR_TASK_MAP = { SearchSourceConnectorType.DISCORD_CONNECTOR: "index_discord_messages", SearchSourceConnectorType.LUMA_CONNECTOR: "index_luma_events", SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: "index_elasticsearch_documents", + 
SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: "index_crawled_urls", } @@ -69,6 +70,7 @@ def create_periodic_schedule( index_airtable_records_task, index_clickup_tasks_task, index_confluence_pages_task, + index_crawled_urls_task, index_discord_messages_task, index_elasticsearch_documents_task, index_github_repos_task, @@ -96,6 +98,7 @@ def create_periodic_schedule( SearchSourceConnectorType.DISCORD_CONNECTOR: index_discord_messages_task, SearchSourceConnectorType.LUMA_CONNECTOR: index_luma_events_task, SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task, + SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task, } # Trigger the first run immediately diff --git a/surfsense_backend/app/utils/rbac.py b/surfsense_backend/app/utils/rbac.py new file mode 100644 index 000000000..6cb180d80 --- /dev/null +++ b/surfsense_backend/app/utils/rbac.py @@ -0,0 +1,274 @@ +""" +RBAC (Role-Based Access Control) utility functions. +Provides helpers for checking user permissions in search spaces. +""" + +import secrets +from uuid import UUID + +from fastapi import HTTPException +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select +from sqlalchemy.orm import selectinload + +from app.db import ( + Permission, + SearchSpace, + SearchSpaceMembership, + SearchSpaceRole, + User, + has_permission, +) + + +async def get_user_membership( + session: AsyncSession, + user_id: UUID, + search_space_id: int, +) -> SearchSpaceMembership | None: + """ + Get the user's membership in a search space. 
+ + Args: + session: Database session + user_id: User UUID + search_space_id: Search space ID + + Returns: + SearchSpaceMembership if found, None otherwise + """ + result = await session.execute( + select(SearchSpaceMembership) + .options(selectinload(SearchSpaceMembership.role)) + .filter( + SearchSpaceMembership.user_id == user_id, + SearchSpaceMembership.search_space_id == search_space_id, + ) + ) + return result.scalars().first() + + +async def get_user_permissions( + session: AsyncSession, + user_id: UUID, + search_space_id: int, +) -> list[str]: + """ + Get the user's permissions in a search space. + + Args: + session: Database session + user_id: User UUID + search_space_id: Search space ID + + Returns: + List of permission strings + """ + membership = await get_user_membership(session, user_id, search_space_id) + + if not membership: + return [] + + # Owners always have full access + if membership.is_owner: + return [Permission.FULL_ACCESS.value] + + # Get permissions from role + if membership.role: + return membership.role.permissions or [] + + return [] + + +async def check_permission( + session: AsyncSession, + user: User, + search_space_id: int, + required_permission: str, + error_message: str = "You don't have permission to perform this action", +) -> SearchSpaceMembership: + """ + Check if a user has a specific permission in a search space. + Raises HTTPException if permission is denied. 
+ + Args: + session: Database session + user: User object + search_space_id: Search space ID + required_permission: Permission string to check + error_message: Custom error message for permission denied + + Returns: + SearchSpaceMembership if permission granted + + Raises: + HTTPException: If user doesn't have access or permission + """ + membership = await get_user_membership(session, user.id, search_space_id) + + if not membership: + raise HTTPException( + status_code=403, + detail="You don't have access to this search space", + ) + + # Get user's permissions + if membership.is_owner: + permissions = [Permission.FULL_ACCESS.value] + elif membership.role: + permissions = membership.role.permissions or [] + else: + permissions = [] + + if not has_permission(permissions, required_permission): + raise HTTPException(status_code=403, detail=error_message) + + return membership + + +async def check_search_space_access( + session: AsyncSession, + user: User, + search_space_id: int, +) -> SearchSpaceMembership: + """ + Check if a user has any access to a search space. + This is used for basic access control (user is a member). + + Args: + session: Database session + user: User object + search_space_id: Search space ID + + Returns: + SearchSpaceMembership if user has access + + Raises: + HTTPException: If user doesn't have access + """ + membership = await get_user_membership(session, user.id, search_space_id) + + if not membership: + raise HTTPException( + status_code=403, + detail="You don't have access to this search space", + ) + + return membership + + +async def is_search_space_owner( + session: AsyncSession, + user_id: UUID, + search_space_id: int, +) -> bool: + """ + Check if a user is the owner of a search space. 
+ + Args: + session: Database session + user_id: User UUID + search_space_id: Search space ID + + Returns: + True if user is the owner, False otherwise + """ + membership = await get_user_membership(session, user_id, search_space_id) + return membership is not None and membership.is_owner + + +async def get_search_space_with_access_check( + session: AsyncSession, + user: User, + search_space_id: int, + required_permission: str | None = None, +) -> tuple[SearchSpace, SearchSpaceMembership]: + """ + Get a search space with access and optional permission check. + + Args: + session: Database session + user: User object + search_space_id: Search space ID + required_permission: Optional permission to check + + Returns: + Tuple of (SearchSpace, SearchSpaceMembership) + + Raises: + HTTPException: If search space not found or user lacks access/permission + """ + # Get the search space + result = await session.execute( + select(SearchSpace).filter(SearchSpace.id == search_space_id) + ) + search_space = result.scalars().first() + + if not search_space: + raise HTTPException(status_code=404, detail="Search space not found") + + # Check access + if required_permission: + membership = await check_permission( + session, user, search_space_id, required_permission + ) + else: + membership = await check_search_space_access(session, user, search_space_id) + + return search_space, membership + + +def generate_invite_code() -> str: + """ + Generate a unique invite code for search space invites. + + Returns: + A 32-character URL-safe invite code + """ + return secrets.token_urlsafe(24) + + +async def get_default_role( + session: AsyncSession, + search_space_id: int, +) -> SearchSpaceRole | None: + """ + Get the default role for a search space (used when accepting invites without a specific role). 
+ + Args: + session: Database session + search_space_id: Search space ID + + Returns: + Default SearchSpaceRole or None + """ + result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.search_space_id == search_space_id, + SearchSpaceRole.is_default == True, # noqa: E712 + ) + ) + return result.scalars().first() + + +async def get_owner_role( + session: AsyncSession, + search_space_id: int, +) -> SearchSpaceRole | None: + """ + Get the Owner role for a search space. + + Args: + session: Database session + search_space_id: Search space ID + + Returns: + Owner SearchSpaceRole or None + """ + result = await session.execute( + select(SearchSpaceRole).filter( + SearchSpaceRole.search_space_id == search_space_id, + SearchSpaceRole.name == "Owner", + ) + ) + return result.scalars().first() diff --git a/surfsense_backend/app/utils/validators.py b/surfsense_backend/app/utils/validators.py index a8460cd14..6b69fb3e1 100644 --- a/surfsense_backend/app/utils/validators.py +++ b/surfsense_backend/app/utils/validators.py @@ -469,6 +469,22 @@ def validate_connector_config( if not isinstance(value, list) or not value: raise ValueError(f"{field_name} must be a non-empty list of strings") + def validate_firecrawl_api_key_format() -> None: + """Validate Firecrawl API key format if provided.""" + api_key = config.get("FIRECRAWL_API_KEY", "") + if api_key and api_key.strip() and not api_key.strip().startswith("fc-"): + raise ValueError( + "Firecrawl API key should start with 'fc-'. Please verify your API key." 
+ ) + + def validate_initial_urls() -> None: + initial_urls = config.get("INITIAL_URLS", "") + if initial_urls and initial_urls.strip(): + urls = [url.strip() for url in initial_urls.split("\n") if url.strip()] + for url in urls: + if not validators.url(url): + raise ValueError(f"Invalid URL format in INITIAL_URLS: {url}") + # Lookup table for connector validation rules connector_rules = { "SERPER_API": {"required": ["SERPER_API_KEY"], "validators": {}}, @@ -550,6 +566,14 @@ def validate_connector_config( # "validators": {} # }, "LUMA_CONNECTOR": {"required": ["LUMA_API_KEY"], "validators": {}}, + "WEBCRAWLER_CONNECTOR": { + "required": [], # No required fields - API key is optional + "optional": ["FIRECRAWL_API_KEY", "INITIAL_URLS"], + "validators": { + "FIRECRAWL_API_KEY": lambda: validate_firecrawl_api_key_format(), + "INITIAL_URLS": lambda: validate_initial_urls(), + }, + }, } rules = connector_rules.get(connector_type_str) diff --git a/surfsense_backend/pyproject.toml b/surfsense_backend/pyproject.toml index 58511a101..1951afdd0 100644 --- a/surfsense_backend/pyproject.toml +++ b/surfsense_backend/pyproject.toml @@ -11,7 +11,6 @@ dependencies = [ "docling>=2.15.0", "fastapi>=0.115.8", "fastapi-users[oauth,sqlalchemy]>=14.0.1", - "firecrawl-py>=1.12.0", "github3.py==4.0.1", "google-api-python-client>=2.156.0", "google-auth-oauthlib>=1.2.1", @@ -49,6 +48,7 @@ dependencies = [ "flower>=2.0.1", "redis>=5.2.1", "chonkie[all]>=1.4.0", + "firecrawl-py>=4.9.0", ] [dependency-groups] diff --git a/surfsense_backend/uv.lock b/surfsense_backend/uv.lock index d367c10c7..7509cfadb 100644 --- a/surfsense_backend/uv.lock +++ b/surfsense_backend/uv.lock @@ -1541,19 +1541,20 @@ wheels = [ [[package]] name = "firecrawl-py" -version = "2.8.0" +version = "4.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, + { name = "httpx" }, { name = "nest-asyncio" }, { name = "pydantic" }, { name = "python-dotenv" }, { name = "requests" }, { name = 
"websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/83/64127a0faafb027c2870c3919aae13fd6f8f8066d000bea93c880ab9772a/firecrawl_py-2.8.0.tar.gz", hash = "sha256:657795b6ddd63f0bd38b38bf0571187e0a66becda23d97c032801895257403c9", size = 37941 } +sdist = { url = "https://files.pythonhosted.org/packages/a5/2e/e4112ebd229bc03202584f5ad2ece81c26cb2a7bad0cd4773b8705d996e9/firecrawl_py-4.9.0.tar.gz", hash = "sha256:8e5740ed923c89e6066dfd63b0449f049bbd274652dfac3d735c9ae0572c4b0c", size = 153395 } wheels = [ - { url = "https://files.pythonhosted.org/packages/74/e6/e69bd2156856f2b1849244ca3b1d993676175b16acbf704ad85580ebaa3c/firecrawl_py-2.8.0-py3-none-any.whl", hash = "sha256:f2e148086aa1ca42f603a56009577b4f66a2c23893eaa71f7c9c0082b4fdcf60", size = 173118 }, + { url = "https://files.pythonhosted.org/packages/3a/cf/99848233303ca9c9d84cf22de08adc1051e8b6df672aeed14f32272df86b/firecrawl_py-4.9.0-py3-none-any.whl", hash = "sha256:adb027ed8bdda712201dc9727ead1a051dc3d114c2a0051de1f159c420703684", size = 190971 }, ] [[package]] @@ -5926,7 +5927,7 @@ requires-dist = [ { name = "fastapi", specifier = ">=0.115.8" }, { name = "fastapi-users", extras = ["oauth", "sqlalchemy"], specifier = ">=14.0.1" }, { name = "faster-whisper", specifier = ">=1.1.0" }, - { name = "firecrawl-py", specifier = ">=1.12.0" }, + { name = "firecrawl-py", specifier = ">=4.9.0" }, { name = "flower", specifier = ">=2.0.1" }, { name = "github3-py", specifier = "==4.0.1" }, { name = "google-api-python-client", specifier = ">=2.156.0" }, diff --git a/surfsense_browser_extension/package.json b/surfsense_browser_extension/package.json index 985f27733..e45af9f39 100644 --- a/surfsense_browser_extension/package.json +++ b/surfsense_browser_extension/package.json @@ -4,6 +4,15 @@ "version": "0.0.8", "description": "Extension to collect Browsing History for SurfSense.", "author": "https://github.com/MODSetter", + "engines": { + "node": ">=18.0.0 <23.0.0", + "pnpm": ">=8.0.0" + }, + "pnpm": { + 
"overrides": { + "sharp": "^0.33.5" + } + }, "scripts": { "dev": "plasmo dev", "build": "plasmo build", @@ -24,13 +33,14 @@ "dom-to-semantic-markdown": "^1.2.11", "linkedom": "0.1.34", "lucide-react": "^0.454.0", - "plasmo": "0.89.4", + "plasmo": "0.90.5", "postcss-loader": "^8.1.1", "radix-ui": "^1.0.1", "react": "18.2.0", "react-dom": "18.2.0", "react-hooks-global-state": "^2.1.0", "react-router-dom": "^6.26.1", + "sharp": "^0.33.5", "tailwind-merge": "^2.5.4", "tailwindcss-animate": "^1.0.7" }, diff --git a/surfsense_browser_extension/pnpm-lock.yaml b/surfsense_browser_extension/pnpm-lock.yaml index e1239fa9b..308f0277d 100644 --- a/surfsense_browser_extension/pnpm-lock.yaml +++ b/surfsense_browser_extension/pnpm-lock.yaml @@ -4,6 +4,9 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false +overrides: + sharp: ^0.33.5 + importers: .: @@ -51,8 +54,8 @@ importers: specifier: ^0.454.0 version: 0.454.0(react@18.2.0) plasmo: - specifier: 0.89.4 - version: 0.89.4(@swc/core@1.7.14(@swc/helpers@0.5.12))(@swc/helpers@0.5.12)(lodash@4.17.21)(postcss@8.4.41)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + specifier: 0.90.5 + version: 0.90.5(@swc/core@1.7.14(@swc/helpers@0.5.12))(@swc/helpers@0.5.12)(@types/node@20.11.5)(lodash@4.17.21)(postcss@8.4.41)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) postcss-loader: specifier: ^8.1.1 version: 8.1.1(postcss@8.4.41)(typescript@5.3.3) @@ -71,6 +74,9 @@ importers: react-router-dom: specifier: ^6.26.1 version: 6.26.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + sharp: + specifier: ^0.33.5 + version: 0.33.5 tailwind-merge: specifier: ^2.5.4 version: 2.5.4 @@ -244,6 +250,9 @@ packages: cpu: [x64] os: [win32] + '@emnapi/runtime@1.7.1': + resolution: {integrity: sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==} + '@esbuild/android-arm64@0.18.20': resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==} engines: 
{node: '>=12'} @@ -395,6 +404,245 @@ packages: '@floating-ui/utils@0.2.8': resolution: {integrity: sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==} + '@img/sharp-darwin-arm64@0.33.5': + resolution: {integrity: sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [darwin] + + '@img/sharp-darwin-x64@0.33.5': + resolution: {integrity: sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [darwin] + + '@img/sharp-libvips-darwin-arm64@1.0.4': + resolution: {integrity: sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==} + cpu: [arm64] + os: [darwin] + + '@img/sharp-libvips-darwin-x64@1.0.4': + resolution: {integrity: sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==} + cpu: [x64] + os: [darwin] + + '@img/sharp-libvips-linux-arm64@1.0.4': + resolution: {integrity: sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==} + cpu: [arm64] + os: [linux] + + '@img/sharp-libvips-linux-arm@1.0.5': + resolution: {integrity: sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==} + cpu: [arm] + os: [linux] + + '@img/sharp-libvips-linux-s390x@1.0.4': + resolution: {integrity: sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==} + cpu: [s390x] + os: [linux] + + '@img/sharp-libvips-linux-x64@1.0.4': + resolution: {integrity: sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==} + cpu: [x64] + os: [linux] + + '@img/sharp-libvips-linuxmusl-arm64@1.0.4': + resolution: {integrity: 
sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==} + cpu: [arm64] + os: [linux] + + '@img/sharp-libvips-linuxmusl-x64@1.0.4': + resolution: {integrity: sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==} + cpu: [x64] + os: [linux] + + '@img/sharp-linux-arm64@0.33.5': + resolution: {integrity: sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [linux] + + '@img/sharp-linux-arm@0.33.5': + resolution: {integrity: sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm] + os: [linux] + + '@img/sharp-linux-s390x@0.33.5': + resolution: {integrity: sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [s390x] + os: [linux] + + '@img/sharp-linux-x64@0.33.5': + resolution: {integrity: sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [linux] + + '@img/sharp-linuxmusl-arm64@0.33.5': + resolution: {integrity: sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [linux] + + '@img/sharp-linuxmusl-x64@0.33.5': + resolution: {integrity: sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [linux] + + '@img/sharp-wasm32@0.33.5': + resolution: {integrity: sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [wasm32] + + 
'@img/sharp-win32-ia32@0.33.5': + resolution: {integrity: sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [ia32] + os: [win32] + + '@img/sharp-win32-x64@0.33.5': + resolution: {integrity: sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [win32] + + '@inquirer/ansi@1.0.2': + resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==} + engines: {node: '>=18'} + + '@inquirer/checkbox@4.3.2': + resolution: {integrity: sha512-VXukHf0RR1doGe6Sm4F0Em7SWYLTHSsbGfJdS9Ja2bX5/D5uwVOEjr07cncLROdBvmnvCATYEWlHqYmXv2IlQA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/confirm@5.1.21': + resolution: {integrity: sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/core@10.3.2': + resolution: {integrity: sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/editor@4.2.23': + resolution: {integrity: sha512-aLSROkEwirotxZ1pBaP8tugXRFCxW94gwrQLxXfrZsKkfjOYC1aRvAZuhpJOb5cu4IBTJdsCigUlf2iCOu4ZDQ==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/expand@4.0.23': + resolution: {integrity: sha512-nRzdOyFYnpeYTTR2qFwEVmIWypzdAx/sIkCMeTNTcflFOovfqUk+HcFhQQVBftAh9gmGrpFj6QcGEqrDMDOiew==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' 
+ peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/external-editor@1.0.3': + resolution: {integrity: sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/figures@1.0.15': + resolution: {integrity: sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==} + engines: {node: '>=18'} + + '@inquirer/input@4.3.1': + resolution: {integrity: sha512-kN0pAM4yPrLjJ1XJBjDxyfDduXOuQHrBB8aLDMueuwUGn+vNpF7Gq7TvyVxx8u4SHlFFj4trmj+a2cbpG4Jn1g==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/number@3.0.23': + resolution: {integrity: sha512-5Smv0OK7K0KUzUfYUXDXQc9jrf8OHo4ktlEayFlelCjwMXz0299Y8OrI+lj7i4gCBY15UObk76q0QtxjzFcFcg==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/password@4.0.23': + resolution: {integrity: sha512-zREJHjhT5vJBMZX/IUbyI9zVtVfOLiTO66MrF/3GFZYZ7T4YILW5MSkEYHceSii/KtRk+4i3RE7E1CUXA2jHcA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/prompts@7.10.1': + resolution: {integrity: sha512-Dx/y9bCQcXLI5ooQ5KyvA4FTgeo2jYj/7plWfV5Ak5wDPKQZgudKez2ixyfz7tKXzcJciTxqLeK7R9HItwiByg==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/rawlist@4.1.11': + resolution: {integrity: sha512-+LLQB8XGr3I5LZN/GuAHo+GpDJegQwuPARLChlMICNdwW7OwV2izlCSCxN6cqpL0sMXmbKbFcItJgdQq5EBXTw==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/search@3.2.2': + resolution: {integrity: 
sha512-p2bvRfENXCZdWF/U2BXvnSI9h+tuA8iNqtUKb9UWbmLYCRQxd8WkvwWvYn+3NgYaNwdUkHytJMGG4MMLucI1kA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/select@4.4.2': + resolution: {integrity: sha512-l4xMuJo55MAe+N7Qr4rX90vypFwCajSakx59qe/tMaC1aEHWLyw68wF4o0A4SLAY4E0nd+Vt+EyskeDIqu1M6w==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/type@3.0.10': + resolution: {integrity: sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -429,10 +677,6 @@ packages: '@lezer/lr@1.4.2': resolution: {integrity: sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==} - '@ljharb/through@2.3.13': - resolution: {integrity: sha512-/gKJun8NNiWGZJkGzI/Ragc53cOdcLNdzjLaIa+GEjguQs0ulsurx8WN0jijdK9yPqDvziX995sMRLyLt1uZMQ==} - engines: {node: '>= 0.4'} - '@lmdb/lmdb-darwin-arm64@2.5.2': resolution: {integrity: sha512-+F8ioQIUN68B4UFiIBYu0QQvgb9FmlKw2ctQMSBfW2QBrZIxz9vD9jCGqTCPqZBRbPHAS/vG1zSXnKqnS2ch/A==} cpu: [arm64] @@ -844,68 +1088,86 @@ packages: resolution: {integrity: sha512-cesanjtj/oLehW8Waq9JFPmAImhoiHX03ihc3JTWkrvJYSbD7wYKCDgPAM3JiRAqvh1LZ6P699uITrYWNoRLUg==} engines: {node: '>= 12.0.0'} - '@parcel/watcher-android-arm64@2.2.0': - resolution: {integrity: sha512-nU2wh00CTQT9rr1TIKTjdQ9lAGYpmz6XuKw0nAwAN+S2A5YiD55BK1u+E5WMCT8YOIDe/n6gaj4o/Bi9294SSQ==} + '@parcel/watcher-android-arm64@2.5.1': + resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} engines: 
{node: '>= 10.0.0'} cpu: [arm64] os: [android] - '@parcel/watcher-darwin-arm64@2.2.0': - resolution: {integrity: sha512-cJl0UZDcodciy3TDMomoK/Huxpjlkkim3SyMgWzjovHGOZKNce9guLz2dzuFwfObBFCjfznbFMIvAZ5syXotYw==} + '@parcel/watcher-darwin-arm64@2.5.1': + resolution: {integrity: sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==} engines: {node: '>= 10.0.0'} cpu: [arm64] os: [darwin] - '@parcel/watcher-darwin-x64@2.2.0': - resolution: {integrity: sha512-QI77zxaGrCV1StKcoRYfsUfmUmvPMPfQrubkBBy5XujV2fwaLgZivQOTQMBgp5K2+E19u1ufpspKXAPqSzpbyg==} + '@parcel/watcher-darwin-x64@2.5.1': + resolution: {integrity: sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==} engines: {node: '>= 10.0.0'} cpu: [x64] os: [darwin] - '@parcel/watcher-linux-arm-glibc@2.2.0': - resolution: {integrity: sha512-I2GPBcAXazPzabCmfsa3HRRW+MGlqxYd8g8RIueJU+a4o5nyNZDz0CR1cu0INT0QSQXEZV7w6UE8Hz9CF8u3Pg==} + '@parcel/watcher-freebsd-x64@2.5.1': + resolution: {integrity: sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [freebsd] + + '@parcel/watcher-linux-arm-glibc@2.5.1': + resolution: {integrity: sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==} engines: {node: '>= 10.0.0'} cpu: [arm] os: [linux] - '@parcel/watcher-linux-arm64-glibc@2.2.0': - resolution: {integrity: sha512-St5mlfp+2lS9AmgixUqfwJa/DwVmTCJxC1HcOubUTz6YFOKIlkHCeUa1Bxi4E/tR/HSez8+heXHL8HQkJ4Bd8g==} + '@parcel/watcher-linux-arm-musl@2.5.1': + resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + + '@parcel/watcher-linux-arm64-glibc@2.5.1': + resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==} engines: {node: '>= 
10.0.0'} cpu: [arm64] os: [linux] - '@parcel/watcher-linux-arm64-musl@2.2.0': - resolution: {integrity: sha512-jS+qfhhoOBVWwMLP65MaG8xdInMK30pPW8wqTCg2AAuVJh5xepMbzkhHJ4zURqHiyY3EiIRuYu4ONJKCxt8iqA==} + '@parcel/watcher-linux-arm64-musl@2.5.1': + resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==} engines: {node: '>= 10.0.0'} cpu: [arm64] os: [linux] - '@parcel/watcher-linux-x64-glibc@2.2.0': - resolution: {integrity: sha512-xJvJ7R2wJdi47WZBFS691RDOWvP1j/IAs3EXaWVhDI8FFITbWrWaln7KoNcR0Y3T+ZwimFY/cfb0PNht1q895g==} + '@parcel/watcher-linux-x64-glibc@2.5.1': + resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==} engines: {node: '>= 10.0.0'} cpu: [x64] os: [linux] - '@parcel/watcher-linux-x64-musl@2.2.0': - resolution: {integrity: sha512-D+NMpgr23a+RI5mu8ZPKWy7AqjBOkURFDgP5iIXXEf/K3hm0jJ3ogzi0Ed2237B/CdYREimCgXyeiAlE/FtwyA==} + '@parcel/watcher-linux-x64-musl@2.5.1': + resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==} engines: {node: '>= 10.0.0'} cpu: [x64] os: [linux] - '@parcel/watcher-win32-arm64@2.2.0': - resolution: {integrity: sha512-z225cPn3aygJsyVUOWwfyW+fY0Tvk7N3XCOl66qUPFxpbuXeZuiuuJemmtm8vxyqa3Ur7peU/qJxrpC64aeI7Q==} + '@parcel/watcher-win32-arm64@2.5.1': + resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==} engines: {node: '>= 10.0.0'} cpu: [arm64] os: [win32] - '@parcel/watcher-win32-x64@2.2.0': - resolution: {integrity: sha512-JqGW0RJ61BkKx+yYzIURt9s53P7xMVbv0uxYPzAXLBINGaFmkIKSuUPyBVfy8TMbvp93lvF4SPBNDzVRJfvgOw==} + '@parcel/watcher-win32-ia32@2.5.1': + resolution: {integrity: sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==} + engines: {node: '>= 10.0.0'} + cpu: [ia32] + os: [win32] + + '@parcel/watcher-win32-x64@2.5.1': + resolution: 
{integrity: sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==} engines: {node: '>= 10.0.0'} cpu: [x64] os: [win32] - '@parcel/watcher@2.2.0': - resolution: {integrity: sha512-71S4TF+IMyAn24PK4KSkdKtqJDR3zRzb0HE3yXpacItqTM7XfF2f5q9NEGLEVl0dAaBAGfNwDCjH120y25F6Tg==} + '@parcel/watcher@2.5.1': + resolution: {integrity: sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==} engines: {node: '>= 10.0.0'} '@parcel/workers@2.8.3': @@ -1087,11 +1349,11 @@ packages: resolution: {integrity: sha512-9zcF39XIBzauYLERoGNVSy7qR1MzEqjhQn16RrlCpZ1AyNMlBJ3B28SmnUpBQNgne8JOHTtcx6cUVm1IvM3J+g==} engines: {parcel: '>= 2.8.0'} - '@plasmohq/parcel-config@0.41.1': - resolution: {integrity: sha512-peNpm+F1tVIZmDx8Mca8b3769cxc2IWS7q0+0Y4BLT1+2kis7X4b46IAYgOXtsDRZCb9pvxQhRhrVHpwGtdVLg==} + '@plasmohq/parcel-config@0.42.0': + resolution: {integrity: sha512-GHtipmFGA84UsBVLO4v9qrc14XD3iKQA1PfHKiUW/xvGL2+gFzV8+WOvOnTslsh+VpOfJdVQQ5nWqVIH9yRiXg==} - '@plasmohq/parcel-core@0.1.10': - resolution: {integrity: sha512-XbJrqlgPNo+uQaukWayfRDZnAvdkYrmcydCOz0wfmuksTjDisyGkL3ZbWeX86QPN65CXFyou11/9h3+U9IsHfA==} + '@plasmohq/parcel-core@0.1.11': + resolution: {integrity: sha512-Jy/6xHSewP8CGUgBLONI2H02LKGhltySp31E0NbRP7qJ+AX58AMd7SKE8xsVB1pTgJ/bRLl9HXw8/929UDLrew==} engines: {parcel: '>= 2.7.0'} '@plasmohq/parcel-namer-manifest@0.3.12': @@ -1118,8 +1380,8 @@ packages: resolution: {integrity: sha512-1nmmMI7N5rtpni2TpUyPkI8XU1wIk/lTDGNZXLxtkzOoFiFP2sc2xZq4OGhmnRYvWphZYrnhMjRrjNJmqOFqxw==} engines: {parcel: '>= 2.7.0'} - '@plasmohq/parcel-runtime@0.25.1': - resolution: {integrity: sha512-asr4DMXJSKPilye0uiyZf51NUC3WZAr0Y6mzl+mtRGIcywuv42+X52qnZl9a9xYkVZeYlVJq62Kfk4+wPthakg==} + '@plasmohq/parcel-runtime@0.25.2': + resolution: {integrity: sha512-oeW/JKIYBkkB8vtFAvCTODYH+UeXjh78iFchUyIkdGh69SPViPqW91xS45M7G8Q+0kNV7In/Byv701XyS3W4sg==} engines: {parcel: '>= 2.7.0'} '@plasmohq/parcel-transformer-inject-env@0.2.12': @@ -1130,8 
+1392,8 @@ packages: resolution: {integrity: sha512-EUSwEowFNSgC/F1q/V4H4NXJ23wwLzlmRI6lvIr6S0mIuG/FCga+lAV3IZ+yAuXqUM2VexX6JyYYpNVidrMSxw==} engines: {parcel: '>= 2.7.0'} - '@plasmohq/parcel-transformer-manifest@0.20.1': - resolution: {integrity: sha512-fA2d+u7eAURr8Vyi1HAB8zwndBW2czi5YcLgZRVwEqHODYYIyNcmqMJHLt7TAQYTD+POG+z4WpM81AKdhcq8mg==} + '@plasmohq/parcel-transformer-manifest@0.21.0': + resolution: {integrity: sha512-swxCJWU/tfCTbcQl2u5TpoQCcxlp3xjdPKk9RB709CWN4fsmNhFDUCAKo5kpl7+YGwCqlGr09b5sqWJrriUBrw==} engines: {parcel: '>= 2.7.0'} '@plasmohq/parcel-transformer-svelte@0.6.0': @@ -1168,11 +1430,11 @@ packages: '@radix-ui/primitive@1.1.0': resolution: {integrity: sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==} - '@radix-ui/primitive@1.1.2': - resolution: {integrity: sha512-XnbHrrprsNqZKQhStrSwgRUQzoCI1glLzdw79xiZPoofhGICeZRSQ3dIxAKH1gb3OHfNf4d6f+vAv3kil2eggA==} + '@radix-ui/primitive@1.1.3': + resolution: {integrity: sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==} - '@radix-ui/react-accessible-icon@1.1.7': - resolution: {integrity: sha512-XM+E4WXl0OqUJFovy6GjmxxFyx9opfCAIUku4dlKRd5YEPqt4kALOkQOp0Of6reHuUkJuiPBEc5k0o4z4lTC8A==} + '@radix-ui/react-accessible-icon@1.1.8': + resolution: {integrity: sha512-1k/SvTk5yW2x0eqepOxVjZyG8GBuYyj7z4/R5c9FYox7zb6vV08fNj6Wwv+TsHWZfPUd0tMlzM/6OguZO7F1eQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1184,8 +1446,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-accordion@1.2.11': - resolution: {integrity: sha512-l3W5D54emV2ues7jjeG1xcyN7S3jnK3zE2zHqgn0CmMsy9lNJwmgcrmaxS+7ipw15FAivzKNzH3d5EcGoFKw0A==} + '@radix-ui/react-accordion@1.2.12': + resolution: {integrity: sha512-T4nygeh9YE9dLRPhAHSeOZi7HBXo+0kYIPJXayZfvWOWA0+n3dESrZbjfDPUABkUNym6Hd+f2IR113To8D2GPA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1197,8 +1459,8 @@ packages: '@types/react-dom': optional: true - 
'@radix-ui/react-alert-dialog@1.1.14': - resolution: {integrity: sha512-IOZfZ3nPvN6lXpJTBCunFQPRSvK8MDgSc1FB85xnIpUKOw9en0dJj8JmCAxV7BiZdtYlUpmrQjoTFkVYtdoWzQ==} + '@radix-ui/react-alert-dialog@1.1.15': + resolution: {integrity: sha512-oTVLkEw5GpdRe29BqJ0LSDFWI3qu0vR1M0mUkOQWDIUnY/QIkLpgDMWuKxP94c2NAC2LGcgVhG1ImF3jkZ5wXw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1236,8 +1498,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-aspect-ratio@1.1.7': - resolution: {integrity: sha512-Yq6lvO9HQyPwev1onK1daHCHqXVLzPhSVjmsNjCa2Zcxy2f7uJD2itDtxknv6FzAKCwD1qQkeVDmX/cev13n/g==} + '@radix-ui/react-aspect-ratio@1.1.8': + resolution: {integrity: sha512-5nZrJTF7gH+e0nZS7/QxFz6tJV4VimhQb1avEgtsJxvvIp5JilL+c58HICsKzPxghdwaDt48hEfPM1au4zGy+w==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1249,8 +1511,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-avatar@1.1.10': - resolution: {integrity: sha512-V8piFfWapM5OmNCXTzVQY+E1rDa53zY+MQ4Y7356v4fFz6vqCyUtIz2rUD44ZEdwg78/jKmMJHj07+C/Z/rcog==} + '@radix-ui/react-avatar@1.1.11': + resolution: {integrity: sha512-0Qk603AHGV28BOBO34p7IgD5m+V5Sg/YovfayABkoDDBM5d3NCx0Mp4gGrjzLGes1jV5eNOE1r3itqOR33VC6Q==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1262,8 +1524,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-checkbox@1.3.2': - resolution: {integrity: sha512-yd+dI56KZqawxKZrJ31eENUwqc1QSqg4OZ15rybGjF2ZNwMO+wCyHzAVLRp9qoYJf7kYy0YpZ2b0JCzJ42HZpA==} + '@radix-ui/react-checkbox@1.3.3': + resolution: {integrity: sha512-wBbpv+NQftHDdG86Qc0pIyXk5IR3tM8Vd0nWLKDcX8nNn4nXFOFwsKuqw2okA/1D/mpaAkmuyndrPJTYDNZtFw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1275,8 +1537,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-collapsible@1.1.11': - resolution: {integrity: sha512-2qrRsVGSCYasSz1RFOorXwl0H7g7J1frQtgpQgYrt+MOidtPAINHn9CPovQXb83r8ahapdx3Tu0fa/pdFFSdPg==} + 
'@radix-ui/react-collapsible@1.1.12': + resolution: {integrity: sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1332,8 +1594,8 @@ packages: '@types/react': optional: true - '@radix-ui/react-context-menu@2.2.15': - resolution: {integrity: sha512-UsQUMjcYTsBjTSXw0P3GO0werEQvUY2plgRQuKoCTtkNr45q1DiL51j4m7gxhABzZ0BadoXNsIbg7F3KwiUBbw==} + '@radix-ui/react-context-menu@2.2.16': + resolution: {integrity: sha512-O8morBEW+HsVG28gYDZPTrT9UUovQUlJue5YO836tiTJhuIWBm/zQHc7j388sHWtdH/xUZurK9olD2+pcqx5ww==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1372,8 +1634,17 @@ packages: '@types/react': optional: true - '@radix-ui/react-dialog@1.1.14': - resolution: {integrity: sha512-+CpweKjqpzTmwRwcYECQcNYbI8V9VSQt0SNFKeEBLgfucbsLssU6Ppq7wUdNXEGb573bMjFhVjKVll8rmV6zMw==} + '@radix-ui/react-context@1.1.3': + resolution: {integrity: sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-dialog@1.1.15': + resolution: {integrity: sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1420,8 +1691,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-dismissable-layer@1.1.10': - resolution: {integrity: sha512-IM1zzRV4W3HtVgftdQiiOmA0AdJlCtMLe00FXaHwgt3rAnNsIyDqshvkIW3hj/iu5hu8ERP7KIYki6NkqDxAwQ==} + '@radix-ui/react-dismissable-layer@1.1.11': + resolution: {integrity: sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1433,8 +1704,8 @@ packages: '@types/react-dom': optional: true - 
'@radix-ui/react-dropdown-menu@2.1.15': - resolution: {integrity: sha512-mIBnOjgwo9AH3FyKaSWoSu/dYj6VdhJ7frEPiGTeXCdUFHjl9h3mFh2wwhEtINOmYXWhdpf1rY2minFsmaNgVQ==} + '@radix-ui/react-dropdown-menu@2.1.16': + resolution: {integrity: sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1455,8 +1726,8 @@ packages: '@types/react': optional: true - '@radix-ui/react-focus-guards@1.1.2': - resolution: {integrity: sha512-fyjAACV62oPV925xFCrH8DR5xWhg9KYtJT4s3u54jxp+L/hbpTY2kIeEFFbFe+a/HCE94zGQMZLIpVTPVZDhaA==} + '@radix-ui/react-focus-guards@1.1.3': + resolution: {integrity: sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==} peerDependencies: '@types/react': '*' react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc @@ -1490,8 +1761,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-hover-card@1.1.14': - resolution: {integrity: sha512-CPYZ24Mhirm+g6D8jArmLzjYu4Eyg3TTUHswR26QgzXBHBe64BO/RHOJKzmF/Dxb4y4f9PKyJdwm/O/AhNkb+Q==} + '@radix-ui/react-hover-card@1.1.15': + resolution: {integrity: sha512-qgTkjNT1CfKMoP0rcasmlH2r1DAiYicWsDsufxl940sT2wHNEWWv6FMWIQXWhVdmC1d/HYfbhQx60KYyAtKxjg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1539,8 +1810,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-menu@2.1.15': - resolution: {integrity: sha512-tVlmA3Vb9n8SZSd+YSbuFR66l87Wiy4du+YE+0hzKQEANA+7cWKH1WgqcEX4pXqxUFQKrWQGHdvEfw00TjFiew==} + '@radix-ui/react-label@2.1.8': + resolution: {integrity: sha512-FmXs37I6hSBVDlO4y764TNz1rLgKwjJMQ0EGte6F3Cb3f4bIuHB/iLa/8I9VKkmOy+gNHq8rql3j686ACVV21A==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1552,8 +1823,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-navigation-menu@1.2.13': - resolution: {integrity: sha512-WG8wWfDiJlSF5hELjwfjSGOXcBR/ZMhBFCGYe8vERpC39CQYZeq1PQ2kaYHdye3V95d06H89KGMsVCIE4LWo3g==} 
+ '@radix-ui/react-menu@2.1.16': + resolution: {integrity: sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1565,8 +1836,21 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-popover@1.1.14': - resolution: {integrity: sha512-ODz16+1iIbGUfFEfKx2HTPKizg2MN39uIOV8MXeHnmdd3i/N9Wt7vU46wbHsqA0xoaQyXVcs0KIlBdOA2Y95bw==} + '@radix-ui/react-navigation-menu@1.2.14': + resolution: {integrity: sha512-YB9mTFQvCOAQMHU+C/jVl96WmuWeltyUEpRJJky51huhds5W2FQr1J8D/16sQlf0ozxkPK8uF3niQMdUwZPv5w==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-popover@1.1.15': + resolution: {integrity: sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1604,8 +1888,21 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-popper@1.2.7': - resolution: {integrity: sha512-IUFAccz1JyKcf/RjB552PlWwxjeCJB8/4KxT7EhBHOJM+mN7LdW+B3kacJXILm32xawcMMjb2i0cIZpo+f9kiQ==} + '@radix-ui/react-popper@1.2.8': + resolution: {integrity: sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-portal@1.1.10': + resolution: {integrity: sha512-4kY9IVa6+9nJPsYmngK5Uk2kUmZnv7ChhHAFeQ5oaj8jrR1bIi3xww8nH71pz1/Ve4d/cXO3YxT8eikt1B0a8w==} peerDependencies: '@types/react': '*' '@types/react-dom': 
'*' @@ -1656,8 +1953,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-presence@1.1.4': - resolution: {integrity: sha512-ueDqRbdc4/bkaQT3GIpLQssRlFgWaL/U2z/S31qRwwLWoxHLgry3SIfCwhxeQNbirEUXFa+lq3RL3oBYXtcmIA==} + '@radix-ui/react-presence@1.1.5': + resolution: {integrity: sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1695,8 +1992,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-progress@1.1.7': - resolution: {integrity: sha512-vPdg/tF6YC/ynuBIJlk1mm7Le0VgW6ub6J2UWnTQ7/D23KXcPI1qy+0vBkgKgd38RCMJavBXpB83HPNFMTb0Fg==} + '@radix-ui/react-primitive@2.1.4': + resolution: {integrity: sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1708,8 +2005,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-radio-group@1.3.7': - resolution: {integrity: sha512-9w5XhD0KPOrm92OTTE0SysH3sYzHsSTHNvZgUBo/VZ80VdYyB5RneDbc0dKpURS24IxkoFRu/hI0i4XyfFwY6g==} + '@radix-ui/react-progress@1.1.8': + resolution: {integrity: sha512-+gISHcSPUJ7ktBy9RnTqbdKW78bcGke3t6taawyZ71pio1JewwGSJizycs7rLhGTvMJYCQB1DBK4KQsxs7U8dA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1721,8 +2018,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-roving-focus@1.1.10': - resolution: {integrity: sha512-dT9aOXUen9JSsxnMPv/0VqySQf5eDQ6LCk5Sw28kamz8wSOW2bJdlX2Bg5VUIIcV+6XlHpWTIuTPCf/UNIyq8Q==} + '@radix-ui/react-radio-group@1.3.8': + resolution: {integrity: sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1734,8 +2031,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-scroll-area@1.2.9': - resolution: {integrity: 
sha512-YSjEfBXnhUELsO2VzjdtYYD4CfQjvao+lhhrX5XsHD7/cyUNzljF1FHEbgTPN7LH2MClfwRMIsYlqTYpKTTe2A==} + '@radix-ui/react-roving-focus@1.1.11': + resolution: {integrity: sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1747,8 +2044,21 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-select@2.2.5': - resolution: {integrity: sha512-HnMTdXEVuuyzx63ME0ut4+sEMYW6oouHWNGUZc7ddvUWIcfCva/AMoqEW/3wnEllriMWBa0RHspCYnfCWJQYmA==} + '@radix-ui/react-scroll-area@1.2.10': + resolution: {integrity: sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-select@2.2.6': + resolution: {integrity: sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1773,8 +2083,21 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-slider@1.3.5': - resolution: {integrity: sha512-rkfe2pU2NBAYfGaxa3Mqosi7VZEWX5CxKaanRv0vZd4Zhl9fvQrg0VM93dv3xGLGfrHuoTRF3JXH8nb9g+B3fw==} + '@radix-ui/react-separator@1.1.8': + resolution: {integrity: sha512-sDvqVY4itsKwwSMEe0jtKgfTh+72Sy3gPmQpjqcQneqQ4PFmr/1I0YA+2/puilhggCe2gJcx5EBAYFkWkdpa5g==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-slider@1.3.6': + resolution: {integrity: 
sha512-JPYb1GuM1bxfjMRlNLE+BcmBC8onfCi60Blk7OBqi2MLTFdS+8401U4uFjnwkOr49BLmXxLC6JHkvAsx5OJvHw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1804,8 +2127,17 @@ packages: '@types/react': optional: true - '@radix-ui/react-switch@1.2.5': - resolution: {integrity: sha512-5ijLkak6ZMylXsaImpZ8u4Rlf5grRmoc0p0QeX9VJtlrM4f5m3nCTX8tWga/zOA8PZYIR/t0p2Mnvd7InrJ6yQ==} + '@radix-ui/react-slot@1.2.4': + resolution: {integrity: sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-switch@1.2.6': + resolution: {integrity: sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1817,8 +2149,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-tabs@1.1.12': - resolution: {integrity: sha512-GTVAlRVrQrSw3cEARM0nAx73ixrWDPNZAruETn3oHCNP6SbZ/hNxdxp+u7VkIEv3/sFoLq1PfcHrl7Pnp0CDpw==} + '@radix-ui/react-tabs@1.1.13': + resolution: {integrity: sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1830,8 +2162,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-toast@1.2.14': - resolution: {integrity: sha512-nAP5FBxBJGQ/YfUB+r+O6USFVkWq3gAInkxyEnmvEV5jtSbfDhfa4hwX8CraCnbjMLsE7XSf/K75l9xXY7joWg==} + '@radix-ui/react-toast@1.2.15': + resolution: {integrity: sha512-3OSz3TacUWy4WtOXV38DggwxoqJK4+eDkNMl5Z/MJZaoUPaP4/9lf81xXMe1I2ReTAptverZUpbPY4wWwWyL5g==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1856,8 +2188,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-toggle-group@1.1.10': - resolution: {integrity: 
sha512-kiU694Km3WFLTC75DdqgM/3Jauf3rD9wxeS9XtyWFKsBUeZA337lC+6uUazT7I1DhanZ5gyD5Stf8uf2dbQxOQ==} + '@radix-ui/react-toggle-group@1.1.11': + resolution: {integrity: sha512-5umnS0T8JQzQT6HbPyO7Hh9dgd82NmS36DQr+X/YJ9ctFNCiiQd6IJAYYZ33LUwm8M+taCz5t2ui29fHZc4Y6Q==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1869,8 +2201,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-toggle@1.1.9': - resolution: {integrity: sha512-ZoFkBBz9zv9GWer7wIjvdRxmh2wyc2oKWw6C6CseWd6/yq1DK/l5lJ+wnsmFwJZbBYqr02mrf8A2q/CVCuM3ZA==} + '@radix-ui/react-toggle@1.1.10': + resolution: {integrity: sha512-lS1odchhFTeZv3xwHH31YPObmJn8gOg7Lq12inrr0+BH/l3Tsq32VfjqH1oh80ARM3mlkfMic15n0kg4sD1poQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1882,8 +2214,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-toolbar@1.1.10': - resolution: {integrity: sha512-jiwQsduEL++M4YBIurjSa+voD86OIytCod0/dbIxFZDLD8NfO1//keXYMfsW8BPcfqwoNjt+y06XcJqAb4KR7A==} + '@radix-ui/react-toolbar@1.1.11': + resolution: {integrity: sha512-4ol06/1bLoFu1nwUqzdD4Y5RZ9oDdKeiHIsntug54Hcr1pgaHiPqHFEaXI1IFP/EsOfROQZ8Mig9VTIRza6Tjg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1895,8 +2227,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-tooltip@1.2.7': - resolution: {integrity: sha512-Ap+fNYwKTYJ9pzqW+Xe2HtMRbQ/EeWkj2qykZ6SuEV4iS/o1bZI5ssJbk4D2r8XuDuOBVz/tIx2JObtuqU+5Zw==} + '@radix-ui/react-tooltip@1.2.8': + resolution: {integrity: sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2069,6 +2401,19 @@ packages: '@types/react-dom': optional: true + '@radix-ui/react-visually-hidden@1.2.4': + resolution: {integrity: sha512-kaeiyGCe844dkb9AVF+rb4yTyb1LiLN/e3es3nLiRyN4dC8AduBYPMnnNlDjX2VDOcvDEiPnRNMJeWCfsX0txg==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || 
^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + '@radix-ui/rect@1.1.0': resolution: {integrity: sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==} @@ -2079,10 +2424,17 @@ packages: resolution: {integrity: sha512-S45oynt/WH19bHbIXjtli6QmwNYvaz+vtnubvNpNDvUOoA/OWh6j1OikIP3G+v5GHdxyC6EXoChG3HgYGEUfcg==} engines: {node: '>=14.0.0'} + '@sec-ant/readable-stream@0.4.1': + resolution: {integrity: sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==} + '@sindresorhus/is@5.6.0': resolution: {integrity: sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==} engines: {node: '>=14.16'} + '@sindresorhus/is@7.1.1': + resolution: {integrity: sha512-rO92VvpgMc3kfiTjGT52LEtJ8Yc5kCWhZjLQ3LwlA4pSgPpQO7bVpYXParOD8Jwf+cVQECJo3yP/4I8aZtUQTQ==} + engines: {node: '>=18'} + '@svgr/babel-plugin-add-jsx-attribute@6.5.1': resolution: {integrity: sha512-9PYGcXrAxitycIjRmZB+Q0JaN07GZIWaTBIGQzfaZv+qr1n8X1XUEJ5rZ/vx6OVD9RRYlrNnXWExQXcmZeD/BQ==} engines: {node: '>=10'} @@ -2330,9 +2682,6 @@ packages: '@types/http-cache-semantics@4.0.4': resolution: {integrity: sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==} - '@types/json-schema@7.0.15': - resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} - '@types/node@20.11.5': resolution: {integrity: sha512-g557vgQjUUfN76MZAN/dt1z3dzcUsimuysco0KeluHgrPdJXkP/XdAURgyO2W9fZWHRtRBiVKzKn8vyOAwlG+w==} @@ -2395,6 +2744,9 @@ packages: abortcontroller-polyfill@1.7.5: resolution: {integrity: sha512-JMJ5soJWP18htbbxJjG7bG6yuI6pRhgJ0scHHTfkUjf6wjP912xZWvM+A4sJK3gqd9E8fcPbDnOefbA9Th/FIQ==} + abortcontroller-polyfill@1.7.8: + resolution: {integrity: 
sha512-9f1iZ2uWh92VcrU9Y8x+LdM4DLj75VE0MJB8zuF1iUnroEptStw+DQ8EQPMUdfe5k+PkB1uUfDQfWbhstH8LrQ==} + acorn@8.12.1: resolution: {integrity: sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==} engines: {node: '>=0.4.0'} @@ -2459,27 +2811,9 @@ packages: resolution: {integrity: sha512-aPTElBrbifBU1krmZxGZOlBkslORe7Ll7+BDnI50Wy4LgOt69luMgevkDfTq1O/ZgprooPCtWpjCwKSZw/iZ4A==} engines: {node: '>= 0.4'} - b4a@1.6.6: - resolution: {integrity: sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==} - balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - bare-events@2.4.2: - resolution: {integrity: sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==} - - bare-fs@2.3.1: - resolution: {integrity: sha512-W/Hfxc/6VehXlsgFtbB5B4xFcsCl+pAh30cYhoFyXErf6oGrwjh8SwiPAdHgpmWonKuYpZgGywN0SXt7dgsADA==} - - bare-os@2.4.0: - resolution: {integrity: sha512-v8DTT08AS/G0F9xrhyLtepoo9EJBJ85FRSMbu1pQUlAf6A8T0tEEQGMVObWeqpjhSPXsE0VGlluFBJu2fdoTNg==} - - bare-path@2.1.3: - resolution: {integrity: sha512-lh/eITfU8hrj9Ru5quUp0Io1kJWIk1bTjzo7JH1P5dWmQ2EL4hFUlfI8FonAhSlgIfhn63p84CDY/x+PisgcXA==} - - bare-stream@2.1.3: - resolution: {integrity: sha512-tiDAH9H/kP+tvNO5sczyn9ZAA7utrSMobyDchsnyyXBuUe2FSQWbxhtuHB8jwpHYYevVo2UJpcmvvjrbHboUUQ==} - base-x@3.0.10: resolution: {integrity: sha512-7d0s06rR9rYaIWHkpfLIFICM/tkSVdoPC9qYAQRpxn9DdKNWNsKC0uk++akckyLq16Tx2WIinnZ6WRriAt6njQ==} @@ -2490,9 +2824,6 @@ packages: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - bl@4.1.0: - resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} - bluebird@3.7.2: resolution: {integrity: 
sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==} @@ -2516,9 +2847,6 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true - buffer@5.7.1: - resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} - buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} @@ -2540,9 +2868,9 @@ packages: resolution: {integrity: sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==} engines: {node: '>=14.16'} - call-bind@1.0.7: - resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==} - engines: {node: '>= 0.4'} + cacheable-request@12.0.1: + resolution: {integrity: sha512-Yo9wGIQUaAfIbk+qY0X4cDQgCosecfBe3V9NSyeY4qPC2SAkbCS4Xj79VP8WOzitpJUZKc/wsRCYF5ariDIwkg==} + engines: {node: '>=18'} callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} @@ -2567,23 +2895,20 @@ packages: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} - chalk@5.3.0: - resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} + chalk@5.4.1: + resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - change-case@5.1.2: - resolution: {integrity: sha512-CAtbGEDulyjzs05RXy3uKcwqeztz/dMEuAc1Xu9NQBsbrhuGMneL0u9Dj5SoutLKBFYun8txxYIwhjtLNfUmCA==} + change-case@5.4.4: + resolution: {integrity: sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==} - chardet@0.7.0: - resolution: {integrity: 
sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} + chardet@2.1.1: + resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} chokidar@3.6.0: resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} engines: {node: '>= 8.10.0'} - chownr@1.1.4: - resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} - chrome-trace-event@1.0.4: resolution: {integrity: sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==} engines: {node: '>=6.0'} @@ -2591,22 +2916,10 @@ packages: class-variance-authority@0.7.0: resolution: {integrity: sha512-jFI8IQw4hczaL4ALINxqLEXQbWcNjoSkloa4IaufXCJr6QawJyw7tuRysRsrE8w2p/4gGaxKIt/hX3qz/IbD1A==} - cli-cursor@3.1.0: - resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} - engines: {node: '>=8'} - - cli-spinners@2.9.2: - resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} - engines: {node: '>=6'} - cli-width@4.1.0: resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} engines: {node: '>= 12'} - clone@1.0.4: - resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} - engines: {node: '>=0.8'} - clone@2.1.2: resolution: {integrity: sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==} engines: {node: '>=0.8'} @@ -2745,17 +3058,10 @@ packages: resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} engines: {node: '>=0.10.0'} - defaults@1.0.4: - resolution: {integrity: 
sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} - defer-to-connect@2.0.1: resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==} engines: {node: '>=10'} - define-data-property@1.1.4: - resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} - engines: {node: '>= 0.4'} - dequal@2.0.3: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} @@ -2798,15 +3104,15 @@ packages: domutils@2.8.0: resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==} - dotenv-expand@10.0.0: - resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==} + dotenv-expand@12.0.1: + resolution: {integrity: sha512-LaKRbou8gt0RNID/9RoI+J2rvXsBRPMV7p+ElHlPhcSARbCPDYcYG2s1TIzAfWv4YSgyY5taidWzzs31lNV3yQ==} engines: {node: '>=12'} dotenv-expand@5.1.0: resolution: {integrity: sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==} - dotenv@16.3.1: - resolution: {integrity: sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==} + dotenv@16.4.7: + resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==} engines: {node: '>=12'} dotenv@7.0.0: @@ -2825,9 +3131,6 @@ packages: emoji-regex@9.2.2: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} - end-of-stream@1.4.4: - resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} - entities@2.2.0: resolution: {integrity: 
sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==} @@ -2850,14 +3153,6 @@ packages: error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} - es-define-property@1.0.0: - resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==} - engines: {node: '>= 0.4'} - - es-errors@1.3.0: - resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} - engines: {node: '>= 0.4'} - esbuild@0.18.20: resolution: {integrity: sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==} engines: {node: '>=12'} @@ -2871,10 +3166,6 @@ packages: resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} engines: {node: '>=0.8.0'} - escape-string-regexp@5.0.0: - resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} - engines: {node: '>=12'} - estree-walker@2.0.2: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} @@ -2889,30 +3180,19 @@ packages: resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} engines: {node: '>=10'} - expand-template@2.0.3: - resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} - engines: {node: '>=6'} - - external-editor@3.1.0: - resolution: {integrity: sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==} - engines: {node: '>=4'} - - fast-fifo@1.3.2: - resolution: {integrity: sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==} - fast-glob@3.3.2: resolution: {integrity: 
sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} engines: {node: '>=8.6.0'} + fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: '>=8.6.0'} + fastq@1.17.1: resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} - fflate@0.8.1: - resolution: {integrity: sha512-/exOvEuc+/iaUm105QIiOt4LpBdMTWsXxqR0HDF35vx3fmaKzw7354gTilCh5rkzEt8WYyG//ku3h3nRmd7CHQ==} - - figures@5.0.0: - resolution: {integrity: sha512-ej8ksPF4x6e5wvK9yevct0UCXh8TTFlWGVLlgjZuoBH1HwjIfKE/IdL5mq89sFA7zELi1VhKpmtDnrs7zWyeyg==} - engines: {node: '>=14'} + fflate@0.8.2: + resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==} fill-range@7.1.1: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} @@ -2926,12 +3206,13 @@ packages: resolution: {integrity: sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==} engines: {node: '>= 14.17'} + form-data-encoder@4.1.0: + resolution: {integrity: sha512-G6NsmEW15s0Uw9XnCg+33H3ViYRyiM0hMrMhhqQOR8NFc5GhYrI+6I3u7OTw7b91J2g8rtvMBZJDbcGb2YUniw==} + engines: {node: '>= 18'} + fraction.js@4.3.7: resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==} - fs-constants@1.0.0: - resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} - fs-extra@11.1.1: resolution: {integrity: sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ==} engines: {node: '>=14.14'} @@ -2948,24 +3229,21 @@ packages: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: 
{node: '>=6.9.0'} - get-intrinsic@1.2.4: - resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==} - engines: {node: '>= 0.4'} - get-nonce@1.0.1: resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==} engines: {node: '>=6'} - get-port@7.0.0: - resolution: {integrity: sha512-mDHFgApoQd+azgMdwylJrv2DX47ywGq1i5VFJE7fZ0dttNq3iQMfsU4IvEgBHojA3KqEudyu7Vq+oN8kNaNkWw==} + get-port@7.1.0: + resolution: {integrity: sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw==} engines: {node: '>=16'} get-stream@6.0.1: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} engines: {node: '>=10'} - github-from-package@0.0.0: - resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==} + get-stream@9.0.1: + resolution: {integrity: sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==} + engines: {node: '>=18'} glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} @@ -2991,17 +3269,14 @@ packages: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} engines: {node: '>=10'} - gopd@1.0.1: - resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} - - got@12.6.1: - resolution: {integrity: sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==} - engines: {node: '>=14.16'} - got@13.0.0: resolution: {integrity: sha512-XfBk1CxOOScDcMr9O1yKkNaQyy865NbYs+F7dr4H0LZMVgCj2Le59k6PqbNHoL5ToeaEQUYh6c6yMfVcc6SJxA==} engines: {node: '>=16'} + got@14.4.6: + resolution: {integrity: 
sha512-rnhwfM/PhMNJ1i17k3DuDqgj0cKx3IHxBKVv/WX1uDKqrhi2Gv3l7rhPThR/Cc6uU++dD97W9c8Y0qyw9x0jag==} + engines: {node: '>=20'} + graceful-fs@4.2.10: resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} @@ -3023,17 +3298,6 @@ packages: resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} engines: {node: '>=8'} - has-property-descriptors@1.0.2: - resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==} - - has-proto@1.0.3: - resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==} - engines: {node: '>= 0.4'} - - has-symbols@1.0.3: - resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} - engines: {node: '>= 0.4'} - hasown@2.0.2: resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} engines: {node: '>= 0.4'} @@ -3087,14 +3351,14 @@ packages: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} engines: {node: '>=10.17.0'} - iconv-lite@0.4.24: - resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} - engines: {node: '>=0.10.0'} - iconv-lite@0.6.3: resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} + iconv-lite@0.7.0: + resolution: {integrity: sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==} + engines: {node: '>=0.10.0'} + ieee754@1.2.1: resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} @@ -3102,6 +3366,10 @@ packages: resolution: 
{integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==} engines: {node: '>= 4'} + ignore@7.0.3: + resolution: {integrity: sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==} + engines: {node: '>= 4'} + image-size@0.5.5: resolution: {integrity: sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==} engines: {node: '>=0.10.0'} @@ -3114,15 +3382,17 @@ packages: resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} engines: {node: '>=6'} - inherits@2.0.4: - resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - ini@1.3.8: resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} - inquirer@9.2.12: - resolution: {integrity: sha512-mg3Fh9g2zfuVWJn6lhST0O7x4n03k7G8Tx5nvikJkbq8/CK47WDVm+UznF0G6s5Zi0KcyUisr6DU8T67N5U+1Q==} - engines: {node: '>=14.18.0'} + inquirer@12.5.0: + resolution: {integrity: sha512-aiBBq5aKF1k87MTxXDylLfwpRwToShiHrSv4EmB07EYyLgmnjEz5B3rn0aGw1X3JA/64Ngf2T54oGwc+BCsPIQ==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true invariant@2.2.4: resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} @@ -3153,10 +3423,6 @@ packages: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} - is-interactive@1.0.0: - resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} - engines: {node: '>=8'} - is-json@2.0.1: resolution: {integrity: sha512-6BEnpVn1rcf3ngfmViLM6vjUjGErbdrL4rwlv+u1NO1XO8kqT4YGL8+19Q+Z/bas8tY90BTWMk2+fW1g6hQjbA==} @@ 
-3179,13 +3445,9 @@ packages: resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - is-unicode-supported@0.1.0: - resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} - engines: {node: '>=10'} - - is-unicode-supported@1.3.0: - resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} - engines: {node: '>=12'} + is-stream@4.0.1: + resolution: {integrity: sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==} + engines: {node: '>=18'} is-what@3.14.1: resolution: {integrity: sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==} @@ -3226,8 +3488,8 @@ packages: json-parse-even-better-errors@2.3.1: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - json-schema-to-ts@2.9.2: - resolution: {integrity: sha512-h9WqLkTVpBbiaPb5OmeUpz/FBLS/kvIJw4oRCPiEisIu2WjMh+aai0QIY2LoOhRFx5r92taGLcerIrzxKBAP6g==} + json-schema-to-ts@3.1.1: + resolution: {integrity: sha512-+DWg8jCJG2TEnpy7kOm/7/AxaYoaRbjVB4LFZLySZlWn8exGs3A4OLJR966cVvU26N7X9TWxl+Jsw7dzAqKT6g==} engines: {node: '>=16'} json5@2.2.3: @@ -3241,6 +3503,10 @@ packages: keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + ky@1.14.0: + resolution: {integrity: sha512-Rczb6FMM6JT0lvrOlP5WUOCB7s9XKxzwgErzhKlKde1bEV90FXplV1o87fpt4PU/asJFiqjYJxAJyzJhcrxOsQ==} + engines: {node: '>=18'} + less@4.2.0: resolution: {integrity: sha512-P3b3HJDBtSzsXUl0im2L7gTO5Ubg8mEN6G8qoTS77iXxXX4Hvu4Qj540PZDvQ8V6DmX6iXo98k7Md0Cm1PrLaA==} engines: {node: '>=6'} @@ -3402,10 +3668,6 @@ packages: lodash@4.17.21: resolution: {integrity: 
sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} - log-symbols@4.1.0: - resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} - engines: {node: '>=10'} - loose-envify@1.4.0: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true @@ -3486,9 +3748,6 @@ packages: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} - mkdirp-classic@0.5.3: - resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} - mnemonic-id@3.2.7: resolution: {integrity: sha512-kysx9gAGbvrzuFYxKkcRjnsg/NK61ovJOV4F1cHTRl9T5leg+bo6WI0pWIvOFh1Z/yDL0cjA5R3EEGPPLDv/XA==} @@ -3505,9 +3764,9 @@ packages: msgpackr@1.8.5: resolution: {integrity: sha512-mpPs3qqTug6ahbblkThoUY2DQdNXcm4IapwOS3Vm/87vmpzLVelvp9h3It1y9l1VPpiFLV11vfOXnmeEwiIXwg==} - mute-stream@1.0.0: - resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + mute-stream@2.0.0: + resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} + engines: {node: ^18.17.0 || >=20.5.0} mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -3522,24 +3781,14 @@ packages: engines: {node: ^18 || >=20} hasBin: true - napi-build-utils@1.0.2: - resolution: {integrity: sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==} - needle@3.3.1: resolution: {integrity: sha512-6k0YULvhpw+RoLNiQCRKOl09Rv1dPLr8hHnVjHqdolKwDrdNyk+Hmrthi4lIGPPz3r39dLx0hsF5s40sZ3Us4Q==} engines: {node: '>= 4.4.x'} hasBin: true - 
node-abi@3.67.0: - resolution: {integrity: sha512-bLn/fU/ALVBE9wj+p4Y21ZJWYFjUXLXPi/IewyLZkx3ApxKDNBWCKdReeKOtD8dWpOdDCeMyLh6ZewzcLsG2Nw==} - engines: {node: '>=10'} - node-addon-api@4.3.0: resolution: {integrity: sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==} - node-addon-api@6.1.0: - resolution: {integrity: sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==} - node-addon-api@7.1.1: resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==} @@ -3555,8 +3804,8 @@ packages: resolution: {integrity: sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==} hasBin: true - node-object-hash@3.0.0: - resolution: {integrity: sha512-jLF6tlyletktvSAawuPmH1SReP0YfZQ+tBrDiTCK+Ai7eXPMS9odi5xW/iKC7ZhrWJJ0Z5xYcW/x+1fVMn1Qvw==} + node-object-hash@3.1.1: + resolution: {integrity: sha512-A32kRGjXtwQ+uSa3GrXiCl8HVFY0Jy6IiKFO7UjagAKSaOOrruxB2Qf/w7TP5QtNfB3uOiHTu3cjhp8k/C0PCg==} engines: {node: '>=16', pnpm: '>=8'} node-releases@2.0.18: @@ -3592,34 +3841,27 @@ packages: resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==} engines: {node: '>= 6'} - once@1.4.0: - resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - onetime@5.1.2: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} - ora@5.4.1: - resolution: {integrity: sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} - engines: {node: '>=10'} - ordered-binary@1.5.1: resolution: {integrity: sha512-5VyHfHY3cd0iza71JepYG50My+YUbrFtGoUz2ooEydPyPM7Aai/JW098juLr+RG6+rDJuzNNTsEQu2DZa1A41A==} - os-tmpdir@1.0.2: - resolution: {integrity: 
sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} - engines: {node: '>=0.10.0'} - p-cancelable@3.0.0: resolution: {integrity: sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==} engines: {node: '>=12.20'} + p-cancelable@4.0.1: + resolution: {integrity: sha512-wBowNApzd45EIKdO1LaU+LrMBwAcjfPaYtVzV3lmfM3gf8Z4CHZsiIqlM8TZZ8okYvh5A1cP6gTfCRQtwUpaUg==} + engines: {node: '>=14.16'} + package-json-from-dist@1.0.0: resolution: {integrity: sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==} - package-json@8.1.1: - resolution: {integrity: sha512-cbH9IAIJHNj9uXi196JVsRlt7cHKak6u/e6AkL/bkRelZ7rlL3X1YKxsZwa36xipOEKAsdtmaG6aAJoM1fx2zA==} - engines: {node: '>=14.16'} + package-json@10.0.1: + resolution: {integrity: sha512-ua1L4OgXSBdsu1FPb7F3tYH0F48a6kxvod4pLUlGY9COeJAJQNX/sNH2IiEmsxw7lqYiAwrdHMjz1FctOsyDQg==} + engines: {node: '>=18'} parent-module@1.0.1: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} @@ -3674,8 +3916,8 @@ packages: resolution: {integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==} engines: {node: '>= 6'} - plasmo@0.89.4: - resolution: {integrity: sha512-vsoMe8ts0tyW27fZxwQLqWR/58NKqRepLFrZMVBH4ceSIyPDryfPpXzVxmBDH43odbiUVFdh8BGAt2ri2vQuGw==} + plasmo@0.90.5: + resolution: {integrity: sha512-VRFsRCHTKCDSRz7ZGmN4hCFqrHE8z7vDYqJK63v5gjRs+EUFdfEciQyGhPmG5NkT+yPvmMZO+R/j1HU/pg2BKA==} hasBin: true postcss-import@15.1.0: @@ -3748,11 +3990,6 @@ packages: resolution: {integrity: sha512-JcEmHlyLK/o0uGAlj65vgg+7LIms0xKXe60lcDOTU7oVX/3LuEuLwrQpW3VJ7de5TaFKiW4kWkaIpJL42FEgxQ==} engines: {node: '>=12.0.0'} - prebuild-install@7.1.2: - resolution: {integrity: sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ==} - engines: {node: '>=10'} - hasBin: true - process@0.11.10: 
resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} engines: {node: '>= 0.6.0'} @@ -3763,9 +4000,6 @@ packages: prr@1.0.1: resolution: {integrity: sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==} - pump@3.0.0: - resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==} - punycode@2.3.1: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} @@ -3773,9 +4007,6 @@ packages: queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - queue-tick@1.0.1: - resolution: {integrity: sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==} - quick-lru@5.1.1: resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} engines: {node: '>=10'} @@ -3891,10 +4122,6 @@ packages: read-cache@1.0.0: resolution: {integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==} - readable-stream@3.6.2: - resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} - engines: {node: '>= 6'} - readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -3932,10 +4159,6 @@ packages: resolution: {integrity: sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==} engines: {node: '>=14.16'} - restore-cursor@3.1.0: - resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} - engines: {node: '>=8'} - reusify@1.0.4: resolution: 
{integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -3952,8 +4175,8 @@ packages: run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - rxjs@7.8.1: - resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==} + rxjs@7.8.2: + resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==} safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} @@ -3990,13 +4213,14 @@ packages: engines: {node: '>=10'} hasBin: true - set-function-length@1.2.2: - resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==} - engines: {node: '>= 0.4'} + semver@7.7.1: + resolution: {integrity: sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==} + engines: {node: '>=10'} + hasBin: true - sharp@0.32.6: - resolution: {integrity: sha512-KyLTWwgcR9Oe4d9HwCwNM2l7+J0dUQwn/yf7S0EnTtb0eVS4RxO0eUSvxPtzT4F3SY+C4K6fqdv/DO27sJ/v/w==} - engines: {node: '>=14.15.0'} + sharp@0.33.5: + resolution: {integrity: sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} @@ -4013,12 +4237,6 @@ packages: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} - simple-concat@1.0.1: - resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==} 
- - simple-get@4.0.1: - resolution: {integrity: sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==} - simple-swizzle@0.2.2: resolution: {integrity: sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==} @@ -4037,6 +4255,7 @@ packages: source-map@0.8.0-beta.0: resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} engines: {node: '>= 8'} + deprecated: The work that was done in this beta branch won't be included in future versions srcset@4.0.0: resolution: {integrity: sha512-wvLeHgcVHKO8Sc/H/5lkGreJQVeYMm9rlmt8PuR1xE31rIuXhuzznUUqAt8MqLhB3MqJdFzlNAfpcWnxiFUcPw==} @@ -4046,9 +4265,6 @@ packages: resolution: {integrity: sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==} deprecated: 'Modern JS already guarantees Array#sort() is a stable sort, so this library is deprecated. See the compatibility table on MDN: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort#browser_compatibility' - streamx@2.19.0: - resolution: {integrity: sha512-5z6CNR4gtkPbwlxyEqoDGDmWIzoNJqCBt4Eac1ICP9YaIT08ct712cFj0u1rx4F8luAuL+3Qc+RFIdI4OX00kg==} - string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -4057,9 +4273,6 @@ packages: resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} engines: {node: '>=12'} - string_decoder@1.3.0: - resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} - strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -4118,19 +4331,6 @@ packages: engines: {node: '>=14.0.0'} hasBin: true - 
tar-fs@2.1.1: - resolution: {integrity: sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==} - - tar-fs@3.0.6: - resolution: {integrity: sha512-iokBDQQkUyeXhgPYaZxmczGPhnhXZ0CmrqI+MOb/WFGS9DW5wnfrLgtjUJBvz50vQ3qfRwJ62QVoCFu8mPVu5w==} - - tar-stream@2.2.0: - resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} - engines: {node: '>=6'} - - tar-stream@3.1.7: - resolution: {integrity: sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==} - temp-dir@3.0.0: resolution: {integrity: sha512-nHc6S/bwIilKHNRgK/3jlhDoIHcp45YgyiwcAk46Tr0LfEqGBVpmiAyuiuxeVE44m3mXnEeVhaipLOEWmH+Njw==} engines: {node: '>=14.16'} @@ -4139,9 +4339,6 @@ packages: resolution: {integrity: sha512-7jDLIdD2Zp0bDe5r3D2qtkd1QOCacylBuL7oa4udvN6v2pqr4+LcCr67C8DR1zkpaZ8XosF5m1yQSabKAW6f2g==} engines: {node: '>=14.16'} - text-decoder@1.1.1: - resolution: {integrity: sha512-8zll7REEv4GDD3x4/0pW+ppIxSNs7H1J10IKFZsuOMscumCdM2a+toDGLPA3T+1+fLBql4zbt5z83GEQGGV5VA==} - thenify-all@1.6.0: resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} engines: {node: '>=0.8'} @@ -4152,10 +4349,6 @@ packages: timsort@0.3.0: resolution: {integrity: sha512-qsdtZH+vMoCARQtyod4imc2nIJwg9Cc7lPRrw9CzF8ZKR0khdr8+2nX80PBhET3tcyTtJDxAffGh2rXH4tyU8A==} - tmp@0.0.33: - resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} - engines: {node: '>=0.6.0'} - to-fast-properties@2.0.0: resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} engines: {node: '>=4'} @@ -4171,8 +4364,8 @@ packages: resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} hasBin: true - ts-algebra@1.2.2: - resolution: {integrity: 
sha512-kloPhf1hq3JbCPOTYoOWDKxebWjNb2o/LKnNfkWhxVVisFFmMJPPdJeGoGmM+iRLyoXAR61e08Pb+vUXINg8aA==} + ts-algebra@2.0.0: + resolution: {integrity: sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==} ts-interface-checker@0.1.13: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} @@ -4196,9 +4389,6 @@ packages: typescript: optional: true - tunnel-agent@0.6.0: - resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==} - type-fest@0.20.2: resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} engines: {node: '>=10'} @@ -4215,6 +4405,10 @@ packages: resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} engines: {node: '>=12.20'} + type-fest@4.41.0: + resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==} + engines: {node: '>=16'} + typescript@5.2.2: resolution: {integrity: sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} engines: {node: '>=14.17'} @@ -4225,6 +4419,11 @@ packages: engines: {node: '>=14.17'} hasBin: true + typescript@5.8.2: + resolution: {integrity: sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==} + engines: {node: '>=14.17'} + hasBin: true + uhyphen@0.1.0: resolution: {integrity: sha512-o0QVGuFg24FK765Qdd5kk0zU/U4dEsCtN/GSiwNI9i8xsSVtjIAOdTaVhLwZ1nrbWxFVMxNDDl+9fednsOMsBw==} @@ -4310,9 +4509,6 @@ packages: vue@3.3.4: resolution: {integrity: sha512-VTyEYn3yvIeY1Py0WaYGZsXnz3y5UnGi62GjVEqvEGPl6nxbOrCXbVOTQWBEJUqAyTUk2uJ5JLVnYJ6ZzGbrSw==} - wcwidth@1.0.1: - resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} - 
weak-lru-cache@1.2.2: resolution: {integrity: sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==} @@ -4339,9 +4535,6 @@ packages: resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} engines: {node: '>=12'} - wrappy@1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - xxhash-wasm@0.4.2: resolution: {integrity: sha512-/eyHVRJQCirEkSZ1agRSCwriMhwlyUcFkXD5TPVSLP+IPzjsqMVzZwdoczLp1SoQU0R3dxz1RpIK+4YNQbCVOA==} @@ -4360,6 +4553,10 @@ packages: engines: {node: '>= 14'} hasBin: true + yoctocolors-cjs@2.1.3: + resolution: {integrity: sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==} + engines: {node: '>=18'} + zustand@4.0.0: resolution: {integrity: sha512-OrsfQTnRXF1LZ9/vR/IqN9ws5EXUhb149xmPjErZnUrkgxS/gAHGy2dPNIVkVvoxrVe1sIydn4JjF0dYHmGeeQ==} engines: {node: '>=12.7.0'} @@ -4532,6 +4729,11 @@ snapshots: '@biomejs/cli-win32-x64@2.1.2': optional: true + '@emnapi/runtime@1.7.1': + dependencies: + tslib: 2.7.0 + optional: true + '@esbuild/android-arm64@0.18.20': optional: true @@ -4619,6 +4821,206 @@ snapshots: '@floating-ui/utils@0.2.8': {} + '@img/sharp-darwin-arm64@0.33.5': + optionalDependencies: + '@img/sharp-libvips-darwin-arm64': 1.0.4 + optional: true + + '@img/sharp-darwin-x64@0.33.5': + optionalDependencies: + '@img/sharp-libvips-darwin-x64': 1.0.4 + optional: true + + '@img/sharp-libvips-darwin-arm64@1.0.4': + optional: true + + '@img/sharp-libvips-darwin-x64@1.0.4': + optional: true + + '@img/sharp-libvips-linux-arm64@1.0.4': + optional: true + + '@img/sharp-libvips-linux-arm@1.0.5': + optional: true + + '@img/sharp-libvips-linux-s390x@1.0.4': + optional: true + + '@img/sharp-libvips-linux-x64@1.0.4': + optional: true + + '@img/sharp-libvips-linuxmusl-arm64@1.0.4': + optional: true + + '@img/sharp-libvips-linuxmusl-x64@1.0.4': + 
optional: true + + '@img/sharp-linux-arm64@0.33.5': + optionalDependencies: + '@img/sharp-libvips-linux-arm64': 1.0.4 + optional: true + + '@img/sharp-linux-arm@0.33.5': + optionalDependencies: + '@img/sharp-libvips-linux-arm': 1.0.5 + optional: true + + '@img/sharp-linux-s390x@0.33.5': + optionalDependencies: + '@img/sharp-libvips-linux-s390x': 1.0.4 + optional: true + + '@img/sharp-linux-x64@0.33.5': + optionalDependencies: + '@img/sharp-libvips-linux-x64': 1.0.4 + optional: true + + '@img/sharp-linuxmusl-arm64@0.33.5': + optionalDependencies: + '@img/sharp-libvips-linuxmusl-arm64': 1.0.4 + optional: true + + '@img/sharp-linuxmusl-x64@0.33.5': + optionalDependencies: + '@img/sharp-libvips-linuxmusl-x64': 1.0.4 + optional: true + + '@img/sharp-wasm32@0.33.5': + dependencies: + '@emnapi/runtime': 1.7.1 + optional: true + + '@img/sharp-win32-ia32@0.33.5': + optional: true + + '@img/sharp-win32-x64@0.33.5': + optional: true + + '@inquirer/ansi@1.0.2': {} + + '@inquirer/checkbox@4.3.2(@types/node@20.11.5)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@20.11.5) + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/confirm@5.1.21(@types/node@20.11.5)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/type': 3.0.10(@types/node@20.11.5) + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/core@10.3.2(@types/node@20.11.5)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@20.11.5) + cli-width: 4.1.0 + mute-stream: 2.0.0 + signal-exit: 4.1.0 + wrap-ansi: 6.2.0 + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/editor@4.2.23(@types/node@20.11.5)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/external-editor': 1.0.3(@types/node@20.11.5) + '@inquirer/type': 
3.0.10(@types/node@20.11.5) + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/expand@4.0.23(@types/node@20.11.5)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/type': 3.0.10(@types/node@20.11.5) + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/external-editor@1.0.3(@types/node@20.11.5)': + dependencies: + chardet: 2.1.1 + iconv-lite: 0.7.0 + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/figures@1.0.15': {} + + '@inquirer/input@4.3.1(@types/node@20.11.5)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/type': 3.0.10(@types/node@20.11.5) + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/number@3.0.23(@types/node@20.11.5)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/type': 3.0.10(@types/node@20.11.5) + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/password@4.0.23(@types/node@20.11.5)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/type': 3.0.10(@types/node@20.11.5) + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/prompts@7.10.1(@types/node@20.11.5)': + dependencies: + '@inquirer/checkbox': 4.3.2(@types/node@20.11.5) + '@inquirer/confirm': 5.1.21(@types/node@20.11.5) + '@inquirer/editor': 4.2.23(@types/node@20.11.5) + '@inquirer/expand': 4.0.23(@types/node@20.11.5) + '@inquirer/input': 4.3.1(@types/node@20.11.5) + '@inquirer/number': 3.0.23(@types/node@20.11.5) + '@inquirer/password': 4.0.23(@types/node@20.11.5) + '@inquirer/rawlist': 4.1.11(@types/node@20.11.5) + '@inquirer/search': 3.2.2(@types/node@20.11.5) + '@inquirer/select': 4.4.2(@types/node@20.11.5) + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/rawlist@4.1.11(@types/node@20.11.5)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/type': 3.0.10(@types/node@20.11.5) + yoctocolors-cjs: 2.1.3 + 
optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/search@3.2.2(@types/node@20.11.5)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@20.11.5) + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/select@4.4.2(@types/node@20.11.5)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@20.11.5) + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 20.11.5 + + '@inquirer/type@3.0.10(@types/node@20.11.5)': + optionalDependencies: + '@types/node': 20.11.5 + '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -4657,10 +5059,6 @@ snapshots: dependencies: '@lezer/common': 1.2.1 - '@ljharb/through@2.3.13': - dependencies: - call-bind: 1.0.7 - '@lmdb/lmdb-darwin-arm64@2.5.2': optional: true @@ -4780,14 +5178,14 @@ snapshots: transitivePeerDependencies: - '@parcel/core' - '@parcel/config-default@2.9.3(@parcel/core@2.9.3)(@swc/helpers@0.5.12)(postcss@8.4.41)(typescript@5.2.2)': + '@parcel/config-default@2.9.3(@parcel/core@2.9.3)(@swc/helpers@0.5.12)(postcss@8.4.41)(typescript@5.8.2)': dependencies: '@parcel/bundler-default': 2.9.3(@parcel/core@2.9.3) '@parcel/compressor-raw': 2.9.3(@parcel/core@2.9.3) '@parcel/core': 2.9.3 '@parcel/namer-default': 2.9.3(@parcel/core@2.9.3) '@parcel/optimizer-css': 2.9.3(@parcel/core@2.9.3) - '@parcel/optimizer-htmlnano': 2.9.3(@parcel/core@2.9.3)(postcss@8.4.41)(typescript@5.2.2) + '@parcel/optimizer-htmlnano': 2.9.3(@parcel/core@2.9.3)(postcss@8.4.41)(typescript@5.8.2) '@parcel/optimizer-image': 2.9.3(@parcel/core@2.9.3) '@parcel/optimizer-svgo': 2.9.3(@parcel/core@2.9.3) '@parcel/optimizer-swc': 2.9.3(@parcel/core@2.9.3)(@swc/helpers@0.5.12) @@ -4850,7 +5248,7 @@ snapshots: json5: 2.2.3 msgpackr: 1.11.0 nullthrows: 1.1.1 - semver: 7.6.3 + semver: 7.7.1 '@parcel/diagnostic@2.8.3': 
dependencies: @@ -4878,7 +5276,7 @@ snapshots: '@parcel/fs-search': 2.8.3 '@parcel/types': 2.8.3(@parcel/core@2.9.3) '@parcel/utils': 2.8.3 - '@parcel/watcher': 2.2.0 + '@parcel/watcher': 2.5.1 '@parcel/workers': 2.8.3(@parcel/core@2.9.3) '@parcel/fs@2.9.3(@parcel/core@2.9.3)': @@ -4887,7 +5285,7 @@ snapshots: '@parcel/fs-search': 2.9.3 '@parcel/types': 2.9.3(@parcel/core@2.9.3) '@parcel/utils': 2.9.3 - '@parcel/watcher': 2.2.0 + '@parcel/watcher': 2.5.1 '@parcel/workers': 2.9.3(@parcel/core@2.9.3) '@parcel/graph@2.9.3': @@ -4936,7 +5334,7 @@ snapshots: '@parcel/fs': 2.9.3(@parcel/core@2.9.3) '@parcel/utils': 2.9.3 nullthrows: 1.1.1 - semver: 7.6.3 + semver: 7.7.1 transitivePeerDependencies: - '@parcel/core' @@ -4961,10 +5359,10 @@ snapshots: transitivePeerDependencies: - '@parcel/core' - '@parcel/optimizer-htmlnano@2.9.3(@parcel/core@2.9.3)(postcss@8.4.41)(typescript@5.2.2)': + '@parcel/optimizer-htmlnano@2.9.3(@parcel/core@2.9.3)(postcss@8.4.41)(typescript@5.8.2)': dependencies: '@parcel/plugin': 2.9.3(@parcel/core@2.9.3) - htmlnano: 2.1.1(postcss@8.4.41)(svgo@2.8.0)(typescript@5.2.2) + htmlnano: 2.1.1(postcss@8.4.41)(svgo@2.8.0)(typescript@5.8.2) nullthrows: 1.1.1 posthtml: 0.16.6 svgo: 2.8.0 @@ -5029,7 +5427,7 @@ snapshots: '@parcel/types': 2.9.3(@parcel/core@2.9.3) '@parcel/utils': 2.9.3 '@parcel/workers': 2.9.3(@parcel/core@2.9.3) - semver: 7.6.3 + semver: 7.7.1 '@parcel/packager-css@2.9.3(@parcel/core@2.9.3)': dependencies: @@ -5170,7 +5568,7 @@ snapshots: browserslist: 4.23.3 json5: 2.2.3 nullthrows: 1.1.1 - semver: 7.6.3 + semver: 7.7.1 transitivePeerDependencies: - '@parcel/core' @@ -5203,7 +5601,7 @@ snapshots: posthtml: 0.16.6 posthtml-parser: 0.10.2 posthtml-render: 3.0.0 - semver: 7.6.3 + semver: 7.7.1 srcset: 4.0.0 transitivePeerDependencies: - '@parcel/core' @@ -5234,7 +5632,7 @@ snapshots: browserslist: 4.23.3 nullthrows: 1.1.1 regenerator-runtime: 0.13.11 - semver: 7.6.3 + semver: 7.7.1 '@parcel/transformer-json@2.9.3(@parcel/core@2.9.3)': 
dependencies: @@ -5260,7 +5658,7 @@ snapshots: clone: 2.1.2 nullthrows: 1.1.1 postcss-value-parser: 4.2.0 - semver: 7.6.3 + semver: 7.7.1 transitivePeerDependencies: - '@parcel/core' @@ -5272,7 +5670,7 @@ snapshots: posthtml: 0.16.6 posthtml-parser: 0.10.2 posthtml-render: 3.0.0 - semver: 7.6.3 + semver: 7.7.1 transitivePeerDependencies: - '@parcel/core' @@ -5317,7 +5715,7 @@ snapshots: posthtml: 0.16.6 posthtml-parser: 0.10.2 posthtml-render: 3.0.0 - semver: 7.6.3 + semver: 7.7.1 transitivePeerDependencies: - '@parcel/core' @@ -5372,53 +5770,65 @@ snapshots: chalk: 4.1.2 nullthrows: 1.1.1 - '@parcel/watcher-android-arm64@2.2.0': + '@parcel/watcher-android-arm64@2.5.1': optional: true - '@parcel/watcher-darwin-arm64@2.2.0': + '@parcel/watcher-darwin-arm64@2.5.1': optional: true - '@parcel/watcher-darwin-x64@2.2.0': + '@parcel/watcher-darwin-x64@2.5.1': optional: true - '@parcel/watcher-linux-arm-glibc@2.2.0': + '@parcel/watcher-freebsd-x64@2.5.1': optional: true - '@parcel/watcher-linux-arm64-glibc@2.2.0': + '@parcel/watcher-linux-arm-glibc@2.5.1': optional: true - '@parcel/watcher-linux-arm64-musl@2.2.0': + '@parcel/watcher-linux-arm-musl@2.5.1': optional: true - '@parcel/watcher-linux-x64-glibc@2.2.0': + '@parcel/watcher-linux-arm64-glibc@2.5.1': optional: true - '@parcel/watcher-linux-x64-musl@2.2.0': + '@parcel/watcher-linux-arm64-musl@2.5.1': optional: true - '@parcel/watcher-win32-arm64@2.2.0': + '@parcel/watcher-linux-x64-glibc@2.5.1': optional: true - '@parcel/watcher-win32-x64@2.2.0': + '@parcel/watcher-linux-x64-musl@2.5.1': optional: true - '@parcel/watcher@2.2.0': + '@parcel/watcher-win32-arm64@2.5.1': + optional: true + + '@parcel/watcher-win32-ia32@2.5.1': + optional: true + + '@parcel/watcher-win32-x64@2.5.1': + optional: true + + '@parcel/watcher@2.5.1': dependencies: detect-libc: 1.0.3 is-glob: 4.0.3 micromatch: 4.0.8 node-addon-api: 7.1.1 optionalDependencies: - '@parcel/watcher-android-arm64': 2.2.0 - '@parcel/watcher-darwin-arm64': 2.2.0 - 
'@parcel/watcher-darwin-x64': 2.2.0 - '@parcel/watcher-linux-arm-glibc': 2.2.0 - '@parcel/watcher-linux-arm64-glibc': 2.2.0 - '@parcel/watcher-linux-arm64-musl': 2.2.0 - '@parcel/watcher-linux-x64-glibc': 2.2.0 - '@parcel/watcher-linux-x64-musl': 2.2.0 - '@parcel/watcher-win32-arm64': 2.2.0 - '@parcel/watcher-win32-x64': 2.2.0 + '@parcel/watcher-android-arm64': 2.5.1 + '@parcel/watcher-darwin-arm64': 2.5.1 + '@parcel/watcher-darwin-x64': 2.5.1 + '@parcel/watcher-freebsd-x64': 2.5.1 + '@parcel/watcher-linux-arm-glibc': 2.5.1 + '@parcel/watcher-linux-arm-musl': 2.5.1 + '@parcel/watcher-linux-arm64-glibc': 2.5.1 + '@parcel/watcher-linux-arm64-musl': 2.5.1 + '@parcel/watcher-linux-x64-glibc': 2.5.1 + '@parcel/watcher-linux-x64-musl': 2.5.1 + '@parcel/watcher-win32-arm64': 2.5.1 + '@parcel/watcher-win32-ia32': 2.5.1 + '@parcel/watcher-win32-x64': 2.5.1 '@parcel/workers@2.8.3(@parcel/core@2.9.3)': dependencies: @@ -5475,10 +5885,10 @@ snapshots: transitivePeerDependencies: - '@parcel/core' - '@plasmohq/parcel-config@0.41.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@swc/helpers@0.5.12)(lodash@4.17.21)(postcss@8.4.41)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(typescript@5.2.2)': + '@plasmohq/parcel-config@0.42.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(@swc/helpers@0.5.12)(lodash@4.17.21)(postcss@8.4.41)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(typescript@5.8.2)': dependencies: '@parcel/compressor-raw': 2.9.3(@parcel/core@2.9.3) - '@parcel/config-default': 2.9.3(@parcel/core@2.9.3)(@swc/helpers@0.5.12)(postcss@8.4.41)(typescript@5.2.2) + '@parcel/config-default': 2.9.3(@parcel/core@2.9.3)(@swc/helpers@0.5.12)(postcss@8.4.41)(typescript@5.8.2) '@parcel/core': 2.9.3 '@parcel/optimizer-data-url': 2.9.3(@parcel/core@2.9.3) '@parcel/reporter-bundle-buddy': 2.9.3(@parcel/core@2.9.3) @@ -5506,10 +5916,10 @@ snapshots: '@plasmohq/parcel-packager': 0.6.15 '@plasmohq/parcel-resolver': 0.14.1 '@plasmohq/parcel-resolver-post': 
0.4.5(@swc/core@1.7.14(@swc/helpers@0.5.12))(postcss@8.4.41) - '@plasmohq/parcel-runtime': 0.25.1 + '@plasmohq/parcel-runtime': 0.25.2 '@plasmohq/parcel-transformer-inject-env': 0.2.12 '@plasmohq/parcel-transformer-inline-css': 0.3.11 - '@plasmohq/parcel-transformer-manifest': 0.20.1 + '@plasmohq/parcel-transformer-manifest': 0.21.0 '@plasmohq/parcel-transformer-svelte': 0.6.0 '@plasmohq/parcel-transformer-vue': 0.5.0(lodash@4.17.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) transitivePeerDependencies: @@ -5572,7 +5982,7 @@ snapshots: - walrus - whiskers - '@plasmohq/parcel-core@0.1.10': + '@plasmohq/parcel-core@0.1.11': dependencies: '@parcel/cache': 2.9.3(@parcel/core@2.9.3) '@parcel/core': 2.9.3 @@ -5587,9 +5997,9 @@ snapshots: '@parcel/source-map': 2.1.1 '@parcel/types': 2.9.3(@parcel/core@2.9.3) '@parcel/utils': 2.9.3 - '@parcel/watcher': 2.2.0 + '@parcel/watcher': 2.5.1 '@parcel/workers': 2.9.3(@parcel/core@2.9.3) - abortcontroller-polyfill: 1.7.5 + abortcontroller-polyfill: 1.7.8 nullthrows: 1.1.1 '@plasmohq/parcel-namer-manifest@0.3.12': @@ -5650,7 +6060,7 @@ snapshots: fs-extra: 11.1.1 got: 13.0.0 - '@plasmohq/parcel-runtime@0.25.1': + '@plasmohq/parcel-runtime@0.25.2': dependencies: '@parcel/core': 2.9.3 '@parcel/plugin': 2.9.3(@parcel/core@2.9.3) @@ -5671,7 +6081,7 @@ snapshots: browserslist: 4.22.1 lightningcss: 1.21.8 - '@plasmohq/parcel-transformer-manifest@0.20.1': + '@plasmohq/parcel-transformer-manifest@0.21.0': dependencies: '@mischnic/json-sourcemap': 0.1.0 '@parcel/core': 2.9.3 @@ -5681,7 +6091,7 @@ snapshots: '@parcel/types': 2.9.3(@parcel/core@2.9.3) '@parcel/utils': 2.9.3 content-security-policy-parser: 0.4.1 - json-schema-to-ts: 2.9.2 + json-schema-to-ts: 3.1.1 nullthrows: 1.1.1 '@plasmohq/parcel-transformer-svelte@0.6.0': @@ -5776,21 +6186,21 @@ snapshots: '@radix-ui/primitive@1.1.0': {} - '@radix-ui/primitive@1.1.2': {} + '@radix-ui/primitive@1.1.3': {} - 
'@radix-ui/react-accessible-icon@1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-accessible-icon@1.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-visually-hidden': 1.2.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) optionalDependencies: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-accordion@1.2.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-accordion@1.2.12(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-collapsible': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collapsible': 1.1.12(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) @@ -5804,12 +6214,12 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-alert-dialog@1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-alert-dialog@1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 
'@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dialog': 1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-dialog': 1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-slot': 1.2.3(@types/react@18.2.48)(react@18.2.0) react: 18.2.0 @@ -5836,19 +6246,19 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-aspect-ratio@1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-aspect-ratio@1.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) optionalDependencies: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-avatar@1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-avatar@1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-context': 1.1.3(@types/react@18.2.48)(react@18.2.0) + '@radix-ui/react-primitive': 
2.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-is-hydrated': 0.1.0(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.2.48)(react@18.2.0) @@ -5858,12 +6268,12 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-checkbox@1.3.2(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-checkbox@1.3.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-previous': 1.1.1(@types/react@18.2.48)(react@18.2.0) @@ -5874,13 +6284,13 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-collapsible@1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-collapsible@1.1.12(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 
1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.2.48)(react@18.2.0) @@ -5926,11 +6336,11 @@ snapshots: optionalDependencies: '@types/react': 18.2.48 - '@radix-ui/react-context-menu@2.2.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-context-menu@2.2.16(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-menu': 2.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-menu': 2.1.16(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) @@ -5958,17 +6368,23 @@ snapshots: optionalDependencies: '@types/react': 18.2.48 - '@radix-ui/react-dialog@1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-context@1.1.3(@types/react@18.2.48)(react@18.2.0)': dependencies: - 
'@radix-ui/primitive': 1.1.2 + react: 18.2.0 + optionalDependencies: + '@types/react': 18.2.48 + + '@radix-ui/react-dialog@1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dismissable-layer': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-focus-guards': 1.1.2(@types/react@18.2.48)(react@18.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-slot': 1.2.3(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) @@ -6021,9 +6437,9 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-dismissable-layer@1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + 
'@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.2.48)(react@18.2.0) @@ -6034,13 +6450,13 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-dropdown-menu@2.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-dropdown-menu@2.1.16(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-menu': 2.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-menu': 2.1.16(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) react: 18.2.0 @@ -6055,7 +6471,7 @@ snapshots: optionalDependencies: '@types/react': 18.2.48 - '@radix-ui/react-focus-guards@1.1.2(@types/react@18.2.48)(react@18.2.0)': + '@radix-ui/react-focus-guards@1.1.3(@types/react@18.2.48)(react@18.2.0)': dependencies: react: 18.2.0 optionalDependencies: @@ -6083,15 +6499,15 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - 
'@radix-ui/react-hover-card@1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-hover-card@1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dismissable-layer': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-popper': 1.2.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) react: 18.2.0 @@ -6127,22 +6543,31 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-menu@2.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-label@2.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - 
'@radix-ui/primitive': 1.1.2 + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + optionalDependencies: + '@types/react': 18.2.48 + '@types/react-dom': 18.2.18 + + '@radix-ui/react-menu@2.1.16(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dismissable-layer': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-focus-guards': 1.1.2(@types/react@18.2.48)(react@18.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-popper': 1.2.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 
1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-roving-focus': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-slot': 1.2.3(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.2.48)(react@18.2.0) aria-hidden: 1.2.4 @@ -6153,16 +6578,16 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-navigation-menu@1.2.13(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-navigation-menu@1.2.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dismissable-layer': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 
1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) @@ -6175,18 +6600,18 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-popover@1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-popover@1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dismissable-layer': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-focus-guards': 1.1.2(@types/react@18.2.48)(react@18.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-popper': 1.2.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-presence': 
1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-slot': 1.2.3(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) @@ -6239,7 +6664,7 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-popper@1.2.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-popper@1.2.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@floating-ui/react-dom': 2.1.2(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-arrow': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) @@ -6257,6 +6682,16 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 + '@radix-ui/react-portal@1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.2.48)(react@18.2.0) + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + optionalDependencies: + '@types/react': 18.2.48 + '@types/react-dom': 18.2.18 + '@radix-ui/react-portal@1.1.2(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) @@ -6287,7 +6722,7 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - 
'@radix-ui/react-presence@1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-presence@1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.2.48)(react@18.2.0) @@ -6315,25 +6750,34 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-progress@1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-primitive@2.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-slot': 1.2.4(@types/react@18.2.48)(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) optionalDependencies: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-radio-group@1.3.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-progress@1.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/react-context': 1.1.3(@types/react@18.2.48)(react@18.2.0) + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + optionalDependencies: + '@types/react': 18.2.48 + '@types/react-dom': 18.2.18 + + '@radix-ui/react-radio-group@1.3.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@radix-ui/primitive': 1.1.3 
'@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-roving-focus': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-previous': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-size': 1.1.1(@types/react@18.2.48)(react@18.2.0) @@ -6343,9 +6787,9 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-roving-focus@1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-roving-focus@1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) @@ -6360,14 +6804,14 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - 
'@radix-ui/react-scroll-area@1.2.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-scroll-area@1.2.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@radix-ui/number': 1.1.1 - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.2.48)(react@18.2.0) @@ -6377,19 +6821,19 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-select@2.2.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-select@2.2.6(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@radix-ui/number': 1.1.1 - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dismissable-layer': 
1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-focus-guards': 1.1.2(@types/react@18.2.48)(react@18.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-popper': 1.2.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-slot': 1.2.3(@types/react@18.2.48)(react@18.2.0) @@ -6415,10 +6859,19 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-slider@1.3.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-separator@1.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + optionalDependencies: + '@types/react': 18.2.48 + '@types/react-dom': 18.2.18 + + '@radix-ui/react-slider@1.3.6(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@radix-ui/number': 1.1.1 - '@radix-ui/primitive': 1.1.2 + 
'@radix-ui/primitive': 1.1.3 '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) @@ -6448,9 +6901,16 @@ snapshots: optionalDependencies: '@types/react': 18.2.48 - '@radix-ui/react-switch@1.2.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-slot@1.2.4(@types/react@18.2.48)(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) + react: 18.2.0 + optionalDependencies: + '@types/react': 18.2.48 + + '@radix-ui/react-switch@1.2.6(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) @@ -6463,15 +6923,15 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-tabs@1.1.12(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-tabs@1.1.13(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 
1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-roving-focus': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) @@ -6479,15 +6939,15 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-toast@1.2.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-toast@1.2.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dismissable-layer': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) 
'@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) @@ -6519,14 +6979,14 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-toggle-group@1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-toggle-group@1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-roving-focus': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-toggle': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-toggle': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) @@ -6534,9 +6994,9 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-toggle@1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-toggle@1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': 
dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) react: 18.2.0 @@ -6545,31 +7005,31 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-toolbar@1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@radix-ui/react-toolbar@1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-roving-focus': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-separator': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-toggle-group': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-toggle-group': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) optionalDependencies: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 - '@radix-ui/react-tooltip@1.2.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + 
'@radix-ui/react-tooltip@1.2.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@radix-ui/primitive': 1.1.2 + '@radix-ui/primitive': 1.1.3 '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-context': 1.1.2(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dismissable-layer': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-id': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-popper': 1.2.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-slot': 1.2.3(@types/react@18.2.48)(react@18.2.0) '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.2.48)(react@18.2.0) @@ -6699,14 +7159,27 @@ snapshots: '@types/react': 18.2.48 '@types/react-dom': 18.2.18 + '@radix-ui/react-visually-hidden@1.2.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + react: 18.2.0 + 
react-dom: 18.2.0(react@18.2.0) + optionalDependencies: + '@types/react': 18.2.48 + '@types/react-dom': 18.2.18 + '@radix-ui/rect@1.1.0': {} '@radix-ui/rect@1.1.1': {} '@remix-run/router@1.19.1': {} + '@sec-ant/readable-stream@0.4.1': {} + '@sindresorhus/is@5.6.0': {} + '@sindresorhus/is@7.1.1': {} + '@svgr/babel-plugin-add-jsx-attribute@6.5.1(@babel/core@7.25.2)': dependencies: '@babel/core': 7.25.2 @@ -6910,8 +7383,6 @@ snapshots: '@types/http-cache-semantics@4.0.4': {} - '@types/json-schema@7.0.15': {} - '@types/node@20.11.5': dependencies: undici-types: 5.26.5 @@ -7001,6 +7472,8 @@ snapshots: abortcontroller-polyfill@1.7.5: {} + abortcontroller-polyfill@1.7.8: {} + acorn@8.12.1: {} ansi-escapes@4.3.2: @@ -7054,33 +7527,8 @@ snapshots: axobject-query@3.2.4: {} - b4a@1.6.6: {} - balanced-match@1.0.2: {} - bare-events@2.4.2: - optional: true - - bare-fs@2.3.1: - dependencies: - bare-events: 2.4.2 - bare-path: 2.1.3 - bare-stream: 2.1.3 - optional: true - - bare-os@2.4.0: - optional: true - - bare-path@2.1.3: - dependencies: - bare-os: 2.4.0 - optional: true - - bare-stream@2.1.3: - dependencies: - streamx: 2.19.0 - optional: true - base-x@3.0.10: dependencies: safe-buffer: 5.2.1 @@ -7089,12 +7537,6 @@ snapshots: binary-extensions@2.3.0: {} - bl@4.1.0: - dependencies: - buffer: 5.7.1 - inherits: 2.0.4 - readable-stream: 3.6.2 - bluebird@3.7.2: {} boolbase@1.0.0: {} @@ -7121,11 +7563,6 @@ snapshots: node-releases: 2.0.18 update-browserslist-db: 1.1.0(browserslist@4.23.3) - buffer@5.7.1: - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - buffer@6.0.3: dependencies: base64-js: 1.5.1 @@ -7150,13 +7587,15 @@ snapshots: normalize-url: 8.0.1 responselike: 3.0.0 - call-bind@1.0.7: + cacheable-request@12.0.1: dependencies: - es-define-property: 1.0.0 - es-errors: 1.3.0 - function-bind: 1.1.2 - get-intrinsic: 1.2.4 - set-function-length: 1.2.2 + '@types/http-cache-semantics': 4.0.4 + get-stream: 9.0.1 + http-cache-semantics: 4.1.1 + keyv: 4.5.4 + mimic-response: 4.0.0 + 
normalize-url: 8.0.1 + responselike: 3.0.0 callsites@3.1.0: {} @@ -7177,11 +7616,11 @@ snapshots: ansi-styles: 4.3.0 supports-color: 7.2.0 - chalk@5.3.0: {} + chalk@5.4.1: {} - change-case@5.1.2: {} + change-case@5.4.4: {} - chardet@0.7.0: {} + chardet@2.1.1: {} chokidar@3.6.0: dependencies: @@ -7195,24 +7634,14 @@ snapshots: optionalDependencies: fsevents: 2.3.3 - chownr@1.1.4: {} - chrome-trace-event@1.0.4: {} class-variance-authority@0.7.0: dependencies: clsx: 2.0.0 - cli-cursor@3.1.0: - dependencies: - restore-cursor: 3.1.0 - - cli-spinners@2.9.2: {} - cli-width@4.1.0: {} - clone@1.0.4: {} - clone@2.1.2: {} clsx@2.0.0: {} @@ -7286,15 +7715,6 @@ snapshots: path-type: 4.0.0 yaml: 1.10.2 - cosmiconfig@9.0.0(typescript@5.2.2): - dependencies: - env-paths: 2.2.1 - import-fresh: 3.3.0 - js-yaml: 4.1.0 - parse-json: 5.2.0 - optionalDependencies: - typescript: 5.2.2 - cosmiconfig@9.0.0(typescript@5.3.3): dependencies: env-paths: 2.2.1 @@ -7304,6 +7724,15 @@ snapshots: optionalDependencies: typescript: 5.3.3 + cosmiconfig@9.0.0(typescript@5.8.2): + dependencies: + env-paths: 2.2.1 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + parse-json: 5.2.0 + optionalDependencies: + typescript: 5.8.2 + cross-spawn@7.0.3: dependencies: path-key: 3.1.1 @@ -7364,18 +7793,8 @@ snapshots: deepmerge@4.3.1: {} - defaults@1.0.4: - dependencies: - clone: 1.0.4 - defer-to-connect@2.0.1: {} - define-data-property@1.1.4: - dependencies: - es-define-property: 1.0.0 - es-errors: 1.3.0 - gopd: 1.0.1 - dequal@2.0.3: {} detect-libc@1.0.3: {} @@ -7412,11 +7831,13 @@ snapshots: domelementtype: 2.3.0 domhandler: 4.3.1 - dotenv-expand@10.0.0: {} + dotenv-expand@12.0.1: + dependencies: + dotenv: 16.4.7 dotenv-expand@5.1.0: {} - dotenv@16.3.1: {} + dotenv@16.4.7: {} dotenv@7.0.0: {} @@ -7428,10 +7849,6 @@ snapshots: emoji-regex@9.2.2: {} - end-of-stream@1.4.4: - dependencies: - once: 1.4.0 - entities@2.2.0: {} entities@3.0.1: {} @@ -7449,12 +7866,6 @@ snapshots: dependencies: is-arrayish: 0.2.1 - 
es-define-property@1.0.0: - dependencies: - get-intrinsic: 1.2.4 - - es-errors@1.3.0: {} - esbuild@0.18.20: optionalDependencies: '@esbuild/android-arm': 0.18.20 @@ -7484,8 +7895,6 @@ snapshots: escape-string-regexp@1.0.5: {} - escape-string-regexp@5.0.0: {} - estree-walker@2.0.2: {} estree-walker@3.0.3: @@ -7506,16 +7915,6 @@ snapshots: signal-exit: 3.0.7 strip-final-newline: 2.0.0 - expand-template@2.0.3: {} - - external-editor@3.1.0: - dependencies: - chardet: 0.7.0 - iconv-lite: 0.4.24 - tmp: 0.0.33 - - fast-fifo@1.3.2: {} - fast-glob@3.3.2: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -7524,16 +7923,19 @@ snapshots: merge2: 1.4.1 micromatch: 4.0.8 + fast-glob@3.3.3: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + fastq@1.17.1: dependencies: reusify: 1.0.4 - fflate@0.8.1: {} - - figures@5.0.0: - dependencies: - escape-string-regexp: 5.0.0 - is-unicode-supported: 1.3.0 + fflate@0.8.2: {} fill-range@7.1.1: dependencies: @@ -7546,9 +7948,9 @@ snapshots: form-data-encoder@2.1.4: {} - fraction.js@4.3.7: {} + form-data-encoder@4.1.0: {} - fs-constants@1.0.0: {} + fraction.js@4.3.7: {} fs-extra@11.1.1: dependencies: @@ -7563,21 +7965,16 @@ snapshots: gensync@1.0.0-beta.2: {} - get-intrinsic@1.2.4: - dependencies: - es-errors: 1.3.0 - function-bind: 1.1.2 - has-proto: 1.0.3 - has-symbols: 1.0.3 - hasown: 2.0.2 - get-nonce@1.0.1: {} - get-port@7.0.0: {} + get-port@7.1.0: {} get-stream@6.0.1: {} - github-from-package@0.0.0: {} + get-stream@9.0.1: + dependencies: + '@sec-ant/readable-stream': 0.4.1 + is-stream: 4.0.1 glob-parent@5.1.2: dependencies: @@ -7606,29 +8003,11 @@ snapshots: dependencies: array-union: 2.1.0 dir-glob: 3.0.1 - fast-glob: 3.3.2 + fast-glob: 3.3.3 ignore: 5.2.4 merge2: 1.4.1 slash: 3.0.0 - gopd@1.0.1: - dependencies: - get-intrinsic: 1.2.4 - - got@12.6.1: - dependencies: - '@sindresorhus/is': 5.6.0 - '@szmarczak/http-timer': 5.0.1 - cacheable-lookup: 7.0.0 - 
cacheable-request: 10.2.14 - decompress-response: 6.0.0 - form-data-encoder: 2.1.4 - get-stream: 6.0.1 - http2-wrapper: 2.2.1 - lowercase-keys: 3.0.0 - p-cancelable: 3.0.0 - responselike: 3.0.0 - got@13.0.0: dependencies: '@sindresorhus/is': 5.6.0 @@ -7643,6 +8022,20 @@ snapshots: p-cancelable: 3.0.0 responselike: 3.0.0 + got@14.4.6: + dependencies: + '@sindresorhus/is': 7.1.1 + '@szmarczak/http-timer': 5.0.1 + cacheable-lookup: 7.0.0 + cacheable-request: 12.0.1 + decompress-response: 6.0.0 + form-data-encoder: 4.1.0 + http2-wrapper: 2.2.1 + lowercase-keys: 3.0.0 + p-cancelable: 4.0.1 + responselike: 3.0.0 + type-fest: 4.41.0 + graceful-fs@4.2.10: {} graceful-fs@4.2.11: {} @@ -7657,23 +8050,15 @@ snapshots: has-flag@4.0.0: {} - has-property-descriptors@1.0.2: - dependencies: - es-define-property: 1.0.0 - - has-proto@1.0.3: {} - - has-symbols@1.0.3: {} - hasown@2.0.2: dependencies: function-bind: 1.1.2 html-escaper@3.0.3: {} - htmlnano@2.1.1(postcss@8.4.41)(svgo@2.8.0)(typescript@5.2.2): + htmlnano@2.1.1(postcss@8.4.41)(svgo@2.8.0)(typescript@5.8.2): dependencies: - cosmiconfig: 9.0.0(typescript@5.2.2) + cosmiconfig: 9.0.0(typescript@5.8.2) posthtml: 0.16.6 timsort: 0.3.0 optionalDependencies: @@ -7705,19 +8090,21 @@ snapshots: human-signals@2.1.0: {} - iconv-lite@0.4.24: - dependencies: - safer-buffer: 2.1.2 - iconv-lite@0.6.3: dependencies: safer-buffer: 2.1.2 optional: true + iconv-lite@0.7.0: + dependencies: + safer-buffer: 2.1.2 + ieee754@1.2.1: {} ignore@5.2.4: {} + ignore@7.0.3: {} + image-size@0.5.5: optional: true @@ -7728,27 +8115,19 @@ snapshots: parent-module: 1.0.1 resolve-from: 4.0.0 - inherits@2.0.4: {} - ini@1.3.8: {} - inquirer@9.2.12: + inquirer@12.5.0(@types/node@20.11.5): dependencies: - '@ljharb/through': 2.3.13 + '@inquirer/core': 10.3.2(@types/node@20.11.5) + '@inquirer/prompts': 7.10.1(@types/node@20.11.5) + '@inquirer/type': 3.0.10(@types/node@20.11.5) ansi-escapes: 4.3.2 - chalk: 5.3.0 - cli-cursor: 3.1.0 - cli-width: 4.1.0 - 
external-editor: 3.1.0 - figures: 5.0.0 - lodash: 4.17.21 - mute-stream: 1.0.0 - ora: 5.4.1 + mute-stream: 2.0.0 run-async: 3.0.0 - rxjs: 7.8.1 - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 6.2.0 + rxjs: 7.8.2 + optionalDependencies: + '@types/node': 20.11.5 invariant@2.2.4: dependencies: @@ -7774,8 +8153,6 @@ snapshots: dependencies: is-extglob: 2.1.1 - is-interactive@1.0.0: {} - is-json@2.0.1: {} is-number@7.0.0: {} @@ -7790,9 +8167,7 @@ snapshots: is-stream@3.0.0: {} - is-unicode-supported@0.1.0: {} - - is-unicode-supported@1.3.0: {} + is-stream@4.0.1: {} is-what@3.14.1: {} @@ -7822,11 +8197,10 @@ snapshots: json-parse-even-better-errors@2.3.1: {} - json-schema-to-ts@2.9.2: + json-schema-to-ts@3.1.1: dependencies: '@babel/runtime': 7.25.4 - '@types/json-schema': 7.0.15 - ts-algebra: 1.2.2 + ts-algebra: 2.0.0 json5@2.2.3: {} @@ -7840,6 +8214,8 @@ snapshots: dependencies: json-buffer: 3.0.1 + ky@1.14.0: {} + less@4.2.0: dependencies: copy-anything: 2.0.6 @@ -7991,12 +8367,8 @@ snapshots: lodash.sortby@4.7.0: {} - lodash@4.17.21: {} - - log-symbols@4.1.0: - dependencies: - chalk: 4.1.2 - is-unicode-supported: 0.1.0 + lodash@4.17.21: + optional: true loose-envify@1.4.0: dependencies: @@ -8060,8 +8432,6 @@ snapshots: minipass@7.1.2: {} - mkdirp-classic@0.5.3: {} - mnemonic-id@3.2.7: {} ms@2.1.2: {} @@ -8086,7 +8456,7 @@ snapshots: optionalDependencies: msgpackr-extract: 3.0.3 - mute-stream@1.0.0: {} + mute-stream@2.0.0: {} mz@2.7.0: dependencies: @@ -8098,22 +8468,14 @@ snapshots: nanoid@5.0.3: {} - napi-build-utils@1.0.2: {} - needle@3.3.1: dependencies: iconv-lite: 0.6.3 sax: 1.4.1 optional: true - node-abi@3.67.0: - dependencies: - semver: 7.6.3 - node-addon-api@4.3.0: {} - node-addon-api@6.1.0: {} - node-addon-api@7.1.1: {} node-gyp-build-optional-packages@5.0.3: {} @@ -8125,7 +8487,7 @@ snapshots: detect-libc: 2.0.3 optional: true - node-object-hash@3.0.0: {} + node-object-hash@3.1.1: {} node-releases@2.0.18: {} @@ -8149,40 +8511,24 @@ snapshots: 
object-hash@3.0.0: {} - once@1.4.0: - dependencies: - wrappy: 1.0.2 - onetime@5.1.2: dependencies: mimic-fn: 2.1.0 - ora@5.4.1: - dependencies: - bl: 4.1.0 - chalk: 4.1.2 - cli-cursor: 3.1.0 - cli-spinners: 2.9.2 - is-interactive: 1.0.0 - is-unicode-supported: 0.1.0 - log-symbols: 4.1.0 - strip-ansi: 6.0.1 - wcwidth: 1.0.1 - ordered-binary@1.5.1: {} - os-tmpdir@1.0.2: {} - p-cancelable@3.0.0: {} + p-cancelable@4.0.1: {} + package-json-from-dist@1.0.0: {} - package-json@8.1.1: + package-json@10.0.1: dependencies: - got: 12.6.1 + ky: 1.14.0 registry-auth-token: 5.0.2 registry-url: 6.0.1 - semver: 7.6.3 + semver: 7.7.1 parent-module@1.0.1: dependencies: @@ -8227,41 +8573,42 @@ snapshots: pirates@4.0.6: {} - plasmo@0.89.4(@swc/core@1.7.14(@swc/helpers@0.5.12))(@swc/helpers@0.5.12)(lodash@4.17.21)(postcss@8.4.41)(react-dom@18.2.0(react@18.2.0))(react@18.2.0): + plasmo@0.90.5(@swc/core@1.7.14(@swc/helpers@0.5.12))(@swc/helpers@0.5.12)(@types/node@20.11.5)(lodash@4.17.21)(postcss@8.4.41)(react-dom@18.2.0(react@18.2.0))(react@18.2.0): dependencies: '@expo/spawn-async': 1.7.2 '@parcel/core': 2.9.3 '@parcel/fs': 2.9.3(@parcel/core@2.9.3) '@parcel/package-manager': 2.9.3(@parcel/core@2.9.3) - '@parcel/watcher': 2.2.0 + '@parcel/watcher': 2.5.1 '@plasmohq/init': 0.7.0 - '@plasmohq/parcel-config': 0.41.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@swc/helpers@0.5.12)(lodash@4.17.21)(postcss@8.4.41)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(typescript@5.2.2) - '@plasmohq/parcel-core': 0.1.10 + '@plasmohq/parcel-config': 0.42.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(@swc/helpers@0.5.12)(lodash@4.17.21)(postcss@8.4.41)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(typescript@5.8.2) + '@plasmohq/parcel-core': 0.1.11 buffer: 6.0.3 - chalk: 5.3.0 - change-case: 5.1.2 - dotenv: 16.3.1 - dotenv-expand: 10.0.0 + chalk: 5.4.1 + change-case: 5.4.4 + dotenv: 16.4.7 + dotenv-expand: 12.0.1 events: 3.3.0 - fast-glob: 3.3.2 - fflate: 0.8.1 - get-port: 7.0.0 - got: 13.0.0 - ignore: 5.2.4 - 
inquirer: 9.2.12 + fast-glob: 3.3.3 + fflate: 0.8.2 + get-port: 7.1.0 + got: 14.4.6 + ignore: 7.0.3 + inquirer: 12.5.0(@types/node@20.11.5) is-path-inside: 4.0.0 json5: 2.2.3 mnemonic-id: 3.2.7 - node-object-hash: 3.0.0 - package-json: 8.1.1 + node-object-hash: 3.1.1 + package-json: 10.0.1 process: 0.11.10 - semver: 7.5.4 - sharp: 0.32.6 + semver: 7.7.1 + sharp: 0.33.5 tempy: 3.1.0 - typescript: 5.2.2 + typescript: 5.8.2 transitivePeerDependencies: - '@swc/core' - '@swc/helpers' + - '@types/node' - arc-templates - atpl - babel-core @@ -8381,21 +8728,6 @@ snapshots: posthtml-parser: 0.11.0 posthtml-render: 3.0.0 - prebuild-install@7.1.2: - dependencies: - detect-libc: 2.0.3 - expand-template: 2.0.3 - github-from-package: 0.0.0 - minimist: 1.2.8 - mkdirp-classic: 0.5.3 - napi-build-utils: 1.0.2 - node-abi: 3.67.0 - pump: 3.0.0 - rc: 1.2.8 - simple-get: 4.0.1 - tar-fs: 2.1.1 - tunnel-agent: 0.6.0 - process@0.11.10: {} proto-list@1.2.4: {} @@ -8403,52 +8735,45 @@ snapshots: prr@1.0.1: optional: true - pump@3.0.0: - dependencies: - end-of-stream: 1.4.4 - once: 1.4.0 - punycode@2.3.1: {} queue-microtask@1.2.3: {} - queue-tick@1.0.1: {} - quick-lru@5.1.1: {} radix-ui@1.0.1(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0): dependencies: - '@radix-ui/react-accessible-icon': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-accordion': 1.2.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-alert-dialog': 1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-aspect-ratio': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-avatar': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-checkbox': 
1.3.2(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-collapsible': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-context-menu': 2.2.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-dialog': 1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-accessible-icon': 1.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-accordion': 1.2.12(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-alert-dialog': 1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-aspect-ratio': 1.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-avatar': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-checkbox': 1.3.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-collapsible': 1.1.12(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-context-menu': 2.2.16(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-dialog': 1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@radix-ui/react-direction': 1.1.1(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-dropdown-menu': 2.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-hover-card': 
1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-label': 2.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-navigation-menu': 1.2.13(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-popover': 1.1.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-progress': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-radio-group': 1.3.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-scroll-area': 1.2.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-select': 2.2.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-separator': 1.1.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-slider': 1.3.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-slot': 1.2.3(@types/react@18.2.48)(react@18.2.0) - '@radix-ui/react-switch': 1.2.5(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-tabs': 1.1.12(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-toast': 1.2.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-toggle': 1.1.9(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - 
'@radix-ui/react-toggle-group': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-toolbar': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-tooltip': 1.2.7(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-dropdown-menu': 2.1.16(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-hover-card': 1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-label': 2.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-navigation-menu': 1.2.14(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-popover': 1.1.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-portal': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-progress': 1.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-radio-group': 1.3.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-scroll-area': 1.2.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-select': 2.2.6(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-separator': 1.1.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + 
'@radix-ui/react-slider': 1.3.6(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-slot': 1.2.4(@types/react@18.2.48)(react@18.2.0) + '@radix-ui/react-switch': 1.2.6(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-tabs': 1.1.13(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-toast': 1.2.15(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-toggle': 1.1.10(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-toggle-group': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-toolbar': 1.1.11(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-tooltip': 1.2.8(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@radix-ui/react-visually-hidden': 1.2.4(@types/react-dom@18.2.18)(@types/react@18.2.48)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) transitivePeerDependencies: @@ -8556,12 +8881,6 @@ snapshots: dependencies: pify: 2.3.0 - readable-stream@3.6.2: - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - readdirp@3.6.0: dependencies: picomatch: 2.3.1 @@ -8594,11 +8913,6 @@ snapshots: dependencies: lowercase-keys: 3.0.0 - restore-cursor@3.1.0: - dependencies: - onetime: 5.1.2 - signal-exit: 3.0.7 - reusify@1.0.4: {} rollup@3.29.5: @@ -8611,7 +8925,7 @@ snapshots: dependencies: queue-microtask: 1.2.3 - rxjs@7.8.1: + rxjs@7.8.2: dependencies: tslib: 2.7.0 @@ -8642,25 +8956,33 @@ snapshots: semver@7.6.3: {} - set-function-length@1.2.2: - dependencies: - define-data-property: 1.1.4 - es-errors: 1.3.0 - function-bind: 
1.1.2 - get-intrinsic: 1.2.4 - gopd: 1.0.1 - has-property-descriptors: 1.0.2 + semver@7.7.1: {} - sharp@0.32.6: + sharp@0.33.5: dependencies: color: 4.2.3 detect-libc: 2.0.3 - node-addon-api: 6.1.0 - prebuild-install: 7.1.2 semver: 7.6.3 - simple-get: 4.0.1 - tar-fs: 3.0.6 - tunnel-agent: 0.6.0 + optionalDependencies: + '@img/sharp-darwin-arm64': 0.33.5 + '@img/sharp-darwin-x64': 0.33.5 + '@img/sharp-libvips-darwin-arm64': 1.0.4 + '@img/sharp-libvips-darwin-x64': 1.0.4 + '@img/sharp-libvips-linux-arm': 1.0.5 + '@img/sharp-libvips-linux-arm64': 1.0.4 + '@img/sharp-libvips-linux-s390x': 1.0.4 + '@img/sharp-libvips-linux-x64': 1.0.4 + '@img/sharp-libvips-linuxmusl-arm64': 1.0.4 + '@img/sharp-libvips-linuxmusl-x64': 1.0.4 + '@img/sharp-linux-arm': 0.33.5 + '@img/sharp-linux-arm64': 0.33.5 + '@img/sharp-linux-s390x': 0.33.5 + '@img/sharp-linux-x64': 0.33.5 + '@img/sharp-linuxmusl-arm64': 0.33.5 + '@img/sharp-linuxmusl-x64': 0.33.5 + '@img/sharp-wasm32': 0.33.5 + '@img/sharp-win32-ia32': 0.33.5 + '@img/sharp-win32-x64': 0.33.5 shebang-command@2.0.0: dependencies: @@ -8672,14 +8994,6 @@ snapshots: signal-exit@4.1.0: {} - simple-concat@1.0.1: {} - - simple-get@4.0.1: - dependencies: - decompress-response: 6.0.0 - once: 1.4.0 - simple-concat: 1.0.1 - simple-swizzle@0.2.2: dependencies: is-arrayish: 0.3.2 @@ -8698,14 +9012,6 @@ snapshots: stable@0.1.8: {} - streamx@2.19.0: - dependencies: - fast-fifo: 1.3.2 - queue-tick: 1.0.1 - text-decoder: 1.1.1 - optionalDependencies: - bare-events: 2.4.2 - string-width@4.2.3: dependencies: emoji-regex: 8.0.0 @@ -8718,10 +9024,6 @@ snapshots: emoji-regex: 9.2.2 strip-ansi: 7.1.0 - string_decoder@1.3.0: - dependencies: - safe-buffer: 5.2.1 - strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 @@ -8815,35 +9117,6 @@ snapshots: transitivePeerDependencies: - ts-node - tar-fs@2.1.1: - dependencies: - chownr: 1.1.4 - mkdirp-classic: 0.5.3 - pump: 3.0.0 - tar-stream: 2.2.0 - - tar-fs@3.0.6: - dependencies: - pump: 3.0.0 - tar-stream: 3.1.7 - 
optionalDependencies: - bare-fs: 2.3.1 - bare-path: 2.1.3 - - tar-stream@2.2.0: - dependencies: - bl: 4.1.0 - end-of-stream: 1.4.4 - fs-constants: 1.0.0 - inherits: 2.0.4 - readable-stream: 3.6.2 - - tar-stream@3.1.7: - dependencies: - b4a: 1.6.6 - fast-fifo: 1.3.2 - streamx: 2.19.0 - temp-dir@3.0.0: {} tempy@3.1.0: @@ -8853,10 +9126,6 @@ snapshots: type-fest: 2.19.0 unique-string: 3.0.0 - text-decoder@1.1.1: - dependencies: - b4a: 1.6.6 - thenify-all@1.6.0: dependencies: thenify: 3.3.1 @@ -8867,10 +9136,6 @@ snapshots: timsort@0.3.0: {} - tmp@0.0.33: - dependencies: - os-tmpdir: 1.0.2 - to-fast-properties@2.0.0: {} to-regex-range@5.0.1: @@ -8883,7 +9148,7 @@ snapshots: tree-kill@1.2.2: {} - ts-algebra@1.2.2: {} + ts-algebra@2.0.0: {} ts-interface-checker@0.1.13: {} @@ -8913,10 +9178,6 @@ snapshots: - supports-color - ts-node - tunnel-agent@0.6.0: - dependencies: - safe-buffer: 5.2.1 - type-fest@0.20.2: {} type-fest@0.21.3: {} @@ -8925,10 +9186,14 @@ snapshots: type-fest@2.19.0: {} + type-fest@4.41.0: {} + typescript@5.2.2: {} typescript@5.3.3: {} + typescript@5.8.2: {} + uhyphen@0.1.0: {} undici-types@5.26.5: {} @@ -9005,10 +9270,6 @@ snapshots: '@vue/server-renderer': 3.3.4(vue@3.3.4) '@vue/shared': 3.3.4 - wcwidth@1.0.1: - dependencies: - defaults: 1.0.4 - weak-lru-cache@1.2.2: {} webidl-conversions@4.0.2: {} @@ -9041,8 +9302,6 @@ snapshots: string-width: 5.1.2 strip-ansi: 7.1.0 - wrappy@1.0.2: {} - xxhash-wasm@0.4.2: {} yallist@3.1.1: {} @@ -9053,6 +9312,8 @@ snapshots: yaml@2.5.0: {} + yoctocolors-cjs@2.1.3: {} + zustand@4.0.0(react@18.2.0): dependencies: use-sync-external-store: 1.2.0(react@18.2.0) diff --git a/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx b/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx index 4ec8046a4..105c21e26 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx @@ -18,6 +18,7 @@ import { Card, CardContent, 
CardDescription, CardHeader, CardTitle } from "@/com import { Separator } from "@/components/ui/separator"; import { SidebarInset, SidebarProvider, SidebarTrigger } from "@/components/ui/sidebar"; import { useLLMPreferences } from "@/hooks/use-llm-configs"; +import { useUserAccess } from "@/hooks/use-rbac"; import { cn } from "@/lib/utils"; export function DashboardClientLayout({ @@ -60,11 +61,15 @@ export function DashboardClientLayout({ }, [activeChatId, isChatPannelOpen]); const { loading, error, isOnboardingComplete } = useLLMPreferences(searchSpaceIdNum); + const { access, loading: accessLoading } = useUserAccess(searchSpaceIdNum); const [hasCheckedOnboarding, setHasCheckedOnboarding] = useState(false); // Skip onboarding check if we're already on the onboarding page const isOnboardingPage = pathname?.includes("/onboard"); + // Only owners should see onboarding - invited members use existing config + const isOwner = access?.is_owner ?? false; + // Translate navigation items const tNavMenu = useTranslations("nav_menu"); const translatedNavMain = useMemo(() => { @@ -102,11 +107,13 @@ export function DashboardClientLayout({ return; } - // Only check once after preferences have loaded - if (!loading && !hasCheckedOnboarding) { + // Wait for both preferences and access data to load + if (!loading && !accessLoading && !hasCheckedOnboarding) { const onboardingComplete = isOnboardingComplete(); - if (!onboardingComplete) { + // Only redirect to onboarding if user is the owner and onboarding is not complete + // Invited members (non-owners) should skip onboarding and use existing config + if (!onboardingComplete && isOwner) { router.push(`/dashboard/${searchSpaceId}/onboard`); } @@ -114,8 +121,10 @@ export function DashboardClientLayout({ } }, [ loading, + accessLoading, isOnboardingComplete, isOnboardingPage, + isOwner, router, searchSpaceId, hasCheckedOnboarding, @@ -145,7 +154,7 @@ export function DashboardClientLayout({ }, [chat_id, search_space_id]); // Show 
loading screen while checking onboarding status (only on first load) - if (!hasCheckedOnboarding && loading && !isOnboardingPage) { + if (!hasCheckedOnboarding && (loading || accessLoading) && !isOnboardingPage) { return (
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx index f09069521..ac580bbfc 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx @@ -18,7 +18,16 @@ import { CardHeader, CardTitle, } from "@/components/ui/card"; -import { Form } from "@/components/ui/form"; +import { + Form, + FormControl, + FormDescription, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "@/components/ui/form"; +import { Textarea } from "@/components/ui/textarea"; import { getConnectorIcon } from "@/contracts/enums/connectorIcons"; import { useConnectorEditPage } from "@/hooks/use-connector-edit-page"; // Import Utils, Types, Hook, and Components @@ -282,6 +291,40 @@ export default function EditConnectorPage() { placeholder="Your Elasticsearch API Key" /> )} + + {/* == Webcrawler == */} + {connector.connector_type === "WEBCRAWLER_CONNECTOR" && ( +
+ + ( + + URLs to Crawl + +