diff --git a/.serena/.gitignore b/.serena/.gitignore new file mode 100644 index 000000000..14d86ad62 --- /dev/null +++ b/.serena/.gitignore @@ -0,0 +1 @@ +/cache diff --git a/.serena/project.yml b/.serena/project.yml new file mode 100644 index 000000000..ef139f883 --- /dev/null +++ b/.serena/project.yml @@ -0,0 +1,68 @@ +# language of the project (csharp, python, rust, java, typescript, go, cpp, or ruby) +# * For C, use cpp +# * For JavaScript, use typescript +# Special requirements: +# * csharp: Requires the presence of a .sln file in the project folder. +language: typescript + +# whether to use the project's gitignore file to ignore files +# Added on 2025-04-07 +ignore_all_files_in_gitignore: true +# list of additional paths to ignore +# same syntax as gitignore, so you can use * and ** +# Was previously called `ignored_dirs`, please update your config if you are using that. +# Added (renamed) on 2025-04-07 +ignored_paths: [] + +# whether the project is in read-only mode +# If set to true, all editing tools will be disabled and attempts to use them will result in an error +# Added on 2025-04-18 +read_only: false + + +# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details. +# Below is the complete list of tools for convenience. +# To make sure you have the latest list of tools, and to view their descriptions, +# execute `uv run scripts/print_tool_overview.py`. +# +# * `activate_project`: Activates a project by name. +# * `check_onboarding_performed`: Checks whether project onboarding was already performed. +# * `create_text_file`: Creates/overwrites a file in the project directory. +# * `delete_lines`: Deletes a range of lines within a file. +# * `delete_memory`: Deletes a memory from Serena's project-specific memory store. +# * `execute_shell_command`: Executes a shell command. +# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced. 
+# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type). +# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type). +# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes. +# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file. +# * `initial_instructions`: Gets the initial instructions for the current project. +# Should only be used in settings where the system prompt cannot be set, +# e.g. in clients you have no control over, like Claude Desktop. +# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol. +# * `insert_at_line`: Inserts content at a given line in a file. +# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol. +# * `list_dir`: Lists files and directories in the given directory (optionally with recursion). +# * `list_memories`: Lists memories in Serena's project-specific memory store. +# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building). +# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context). +# * `read_file`: Reads a file within the project directory. +# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store. +# * `remove_project`: Removes a project from the Serena configuration. +# * `replace_lines`: Replaces a range of lines within a file with new content. +# * `replace_symbol_body`: Replaces the full definition of a symbol. +# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen. 
+# * `search_for_pattern`: Performs a search for a pattern in the project. +# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase. +# * `switch_modes`: Activates modes by providing a list of their names +# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information. +# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task. +# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed. +# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store. +excluded_tools: [] + +# initial prompt for the project. It will always be given to the LLM upon activating the project +# (contrary to the memories, which are loaded on demand). +initial_prompt: "" + +project_name: "SurfSense" diff --git a/_bmad-output/admin-guide.md b/_bmad-output/admin-guide.md index 008263f54..59316dda7 100644 --- a/_bmad-output/admin-guide.md +++ b/_bmad-output/admin-guide.md @@ -31,8 +31,22 @@ Tài liệu này hướng dẫn administrators cách quản lý và vận hành --- +--- + +## 🔑 Default Admin Account + +**Tài khoản quản trị mặc định:** +- **Email:** `admin@surfsense.ai` +- **Password:** `password123` + +> [!WARNING] +> **Bảo mật quan trọng:** Đổi mật khẩu ngay sau khi đăng nhập lần đầu! 
+ +--- + ## 👥 Quản Lý Users + ### Tạo User Mới **Via CLI:** diff --git a/surfsense_backend/app/utils/validators.py b/surfsense_backend/app/utils/validators.py index 6a87679ec..69ffd3ce3 100644 --- a/surfsense_backend/app/utils/validators.py +++ b/surfsense_backend/app/utils/validators.py @@ -485,6 +485,56 @@ def validate_connector_config( if not validators.url(url): raise ValueError(f"Invalid URL format in INITIAL_URLS: {url}") + def validate_dexscreener_tokens() -> None: + """Validate DexScreener tokens configuration.""" + tokens = config.get("tokens") + if not isinstance(tokens, list) or not tokens: + raise ValueError("tokens must be a non-empty list") + + # Valid blockchain names supported by DexScreener + valid_chains = [ + "ethereum", "bsc", "polygon", "arbitrum", "optimism", "base", + "solana", "avalanche", "fantom", "cronos", "moonbeam", "moonriver", + "celo", "aurora", "harmony", "metis", "boba", "fuse", "okex", + "heco", "elastos", "telos", "iotex", "thundercore", "tomochain", + "velas", "wanchain", "kardia", "pulsechain", "dogechain", "evmos", + "kava", "step", "godwoken", "milkomeda", "dfk", "swimmer", "rei", + "vision", "smartbch", "redlight", "astar", "shiden", "clover", + "bitgert", "sx", "oasis", "energi", "tombchain", "canto", "kcc", + "ethw", "ethf", "core", "zksync", "polygonzkevm", "linea", "scroll", + "mantle", "manta", "blast", "mode", "xlayer", "merlin", "zkfair", + "opbnb", "taiko", "zeta", "sei", "berachain" + ] + + for i, token in enumerate(tokens): + if not isinstance(token, dict): + raise ValueError(f"tokens[{i}] must be a dictionary") + + # Validate required fields + if "chain" not in token: + raise ValueError(f"tokens[{i}] must have 'chain' field") + if "address" not in token: + raise ValueError(f"tokens[{i}] must have 'address' field") + + # Validate chain is valid + chain = token["chain"] + if not isinstance(chain, str) or chain.lower() not in valid_chains: + raise ValueError( + f"tokens[{i}].chain must be one of the supported 
blockchains. " + f"Got: {chain}. See DexScreener documentation for valid chains." + ) + + # Validate address format (basic check) + address = token["address"] + if not isinstance(address, str) or not address.strip(): + raise ValueError(f"tokens[{i}].address cannot be empty") + + # Optional: validate name field if present + if "name" in token: + name = token["name"] + if not isinstance(name, str): + raise ValueError(f"tokens[{i}].name must be a string if provided") + # Lookup table for connector validation rules connector_rules = { "SERPER_API": {"required": ["SERPER_API_KEY"], "validators": {}}, @@ -578,6 +628,12 @@ def validate_connector_config( "INITIAL_URLS": lambda: validate_initial_urls(), }, }, + "DEXSCREENER_CONNECTOR": { + "required": ["tokens"], + "validators": { + "tokens": lambda: validate_dexscreener_tokens() + }, + }, } rules = connector_rules.get(connector_type_str) diff --git a/surfsense_backend/backend.log b/surfsense_backend/backend.log index 9f14dc22b..4fa9b4560 100644 --- a/surfsense_backend/backend.log +++ b/surfsense_backend/backend.log @@ -939,3 +939,322 @@ INFO: 127.0.0.1:54519 - "GET /api/v1/searchspaces?limit=10&skip=0&owned_only INFO: 127.0.0.1:54522 - "GET /api/v1/messages/27/comments HTTP/1.1" 200 OK INFO: 127.0.0.1:54525 - "GET /api/v1/messages/29/comments HTTP/1.1" 200 OK INFO: 127.0.0.1:54516 - "GET /api/v1/messages/31/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58489 - "OPTIONS /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:58490 - "OPTIONS /api/v1/global-new-llm-configs HTTP/1.1" 200 OK +INFO: 127.0.0.1:58489 - "OPTIONS /api/v1/search-source-connectors?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58493 - "OPTIONS /api/v1/search-spaces/2/llm-preferences HTTP/1.1" 200 OK +INFO: 127.0.0.1:58490 - "OPTIONS /api/v1/searchspaces?limit=10&skip=0&owned_only=false HTTP/1.1" 200 OK +INFO: 127.0.0.1:58494 - "OPTIONS /api/v1/searchspaces/2/my-access HTTP/1.1" 200 OK +INFO: 127.0.0.1:58493 - "OPTIONS /api/v1/searchspaces/2 HTTP/1.1" 200 OK 
+INFO: 127.0.0.1:58489 - "OPTIONS /api/v1/threads?search_space_id=2&limit=40 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58497 - "OPTIONS /api/v1/threads?search_space_id=2&limit=1 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58490 - "OPTIONS /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:58494 - "OPTIONS /api/v1/new-llm-configs?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58498 - "OPTIONS /api/v1/documents/type-counts?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58497 - "OPTIONS /api/v1/messages/23/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58493 - "OPTIONS /api/v1/messages/25/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58489 - "OPTIONS /api/v1/messages/27/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58490 - "OPTIONS /api/v1/messages/29/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58498 - "OPTIONS /api/v1/messages/31/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58489 - "GET /api/v1/global-new-llm-configs HTTP/1.1" 200 OK +INFO: 127.0.0.1:58494 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:58497 - "GET /api/v1/search-source-connectors?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58490 - "GET /api/v1/searchspaces?limit=10&skip=0&owned_only=false HTTP/1.1" 200 OK +INFO: 127.0.0.1:58494 - "GET /api/v1/searchspaces/2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58489 - "GET /api/v1/threads?search_space_id=2&limit=1 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58490 - "GET /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:58494 - "GET /api/v1/documents/type-counts?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58489 - "GET /api/v1/new-llm-configs?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58493 - "GET /api/v1/search-spaces/2/llm-preferences HTTP/1.1" 200 OK +INFO: 127.0.0.1:58490 - "GET /api/v1/messages/27/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58493 - "GET /api/v1/messages/29/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58489 - "GET /api/v1/messages/25/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:58494 - "GET /api/v1/messages/23/comments HTTP/1.1" 
200 OK +INFO: 127.0.0.1:58498 - "GET /api/v1/searchspaces/2/my-access HTTP/1.1" 200 OK +INFO: 127.0.0.1:58497 - "GET /api/v1/threads?search_space_id=2&limit=40 HTTP/1.1" 200 OK +INFO: 127.0.0.1:58490 - "GET /api/v1/messages/31/comments HTTP/1.1" 200 OK +INFO: 127.0.0.1:50485 - "GET /health HTTP/1.1" 404 Not Found +INFO: 127.0.0.1:50506 - "POST /api/auth/register HTTP/1.1" 404 Not Found +INFO: 127.0.0.1:50535 - "POST /api/auth/login HTTP/1.1" 404 Not Found +INFO: 127.0.0.1:50544 - "GET /docs HTTP/1.1" 200 OK +INFO: 127.0.0.1:50554 - "GET /openapi.json HTTP/1.1" 200 OK +INFO: 127.0.0.1:50580 - "GET /openapi.json HTTP/1.1" 200 OK +INFO: 127.0.0.1:50625 - "POST /connectors/dexscreener/add HTTP/1.1" 404 Not Found +INFO: 127.0.0.1:50643 - "POST /api/v1/connectors/dexscreener/add HTTP/1.1" 404 Not Found +INFO: 127.0.0.1:50655 - "GET /openapi.json HTTP/1.1" 200 OK +INFO: Shutting down +INFO: Waiting for application shutdown. +INFO: Application shutdown complete. +INFO: Finished server process [59799] +/Users/mac_1/.local/share/uv/python/cpython-3.12.9-macos-aarch64-none/lib/python3.12/multiprocessing/resource_tracker.py:255: UserWarning: resource_tracker: There appear to be 1 leaked semaphore objects to clean up at shutdown + warnings.warn('resource_tracker: There appear to be %d ' +/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/chonkie/chunker/code.py:82: UserWarning: The language is set to `auto`. This would adversely affect the performance of the chunker. Consider setting the `language` parameter to a specific language to improve performance. + warnings.warn("The language is set to `auto`. This would adversely affect the performance of the chunker. " + +INFO: Will watch for changes in these directories: ['/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/app'] +INFO: Started server process [75345] +INFO: Waiting for application startup. 
+17:02:04 - LiteLLM Router:INFO: router.py:711 - Routing strategy: usage-based-routing +2026-01-31 17:02:04 - LiteLLM Router - INFO - Routing strategy: usage-based-routing +2026-01-31 17:02:04 - app.services.llm_router_service - INFO - LLM Router initialized with 4 deployments, strategy: usage-based-routing +2026-01-31 17:02:04 - app.tasks.surfsense_docs_indexer - INFO - Starting Surfsense docs indexing... +2026-01-31 17:02:04 - app.tasks.surfsense_docs_indexer - INFO - Found 24 MDX files to index +2026-01-31 17:02:04 - app.tasks.surfsense_docs_indexer - INFO - Indexing complete: 0 created, 0 updated, 24 skipped, 0 deleted +2026-01-31 17:02:04 - app.tasks.surfsense_docs_indexer - INFO - Surfsense docs indexing complete: created=0, updated=0, skipped=24, deleted=0 +INFO: Application startup complete. +INFO: Uvicorn running on http://0.0.0.0:8000 (Press CTRL+C to quit) +[Checkpointer] PostgreSQL checkpoint tables ready +Info: LLM Router initialized with 4 models (strategy: usage-based-routing) +INFO: 127.0.0.1:50750 - "GET /openapi.json HTTP/1.1" 200 OK +INFO: 127.0.0.1:50775 - "POST /api/v1/connectors/dexscreener/add HTTP/1.1" 401 Unauthorized +INFO: 127.0.0.1:50777 - "GET /api/v1/connectors/dexscreener/test?chain=ethereum&address=0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2 HTTP/1.1" 401 Unauthorized +2026-01-31 17:05:43 - app.users - INFO - User f34b4612-6556-47c8-bd37-c0e1fd6c9b30 has registered. Creating default search space... 
+2026-01-31 17:05:43 - app.users - INFO - Created default search space (ID: 3) for user f34b4612-6556-47c8-bd37-c0e1fd6c9b30 +INFO: 127.0.0.1:50909 - "POST /auth/register HTTP/1.1" 201 Created +INFO: 127.0.0.1:51070 - "POST /auth/jwt/login HTTP/1.1" 200 OK +2026-01-31 17:08:45 - app.routes.dexscreener_add_connector_route - ERROR - Unexpected error adding DexScreener connector: (sqlalchemy.dialects.postgresql.asyncpg.Error) : invalid input value for enum searchsourceconnectortype: "DEXSCREENER_CONNECTOR" +[SQL: SELECT search_source_connectors.name, search_source_connectors.connector_type, search_source_connectors.is_indexable, search_source_connectors.last_indexed_at, search_source_connectors.config, search_source_connectors.periodic_indexing_enabled, search_source_connectors.indexing_frequency_minutes, search_source_connectors.next_scheduled_at, search_source_connectors.search_space_id, search_source_connectors.user_id, search_source_connectors.id, search_source_connectors.created_at +FROM search_source_connectors +WHERE search_source_connectors.search_space_id = $1::INTEGER AND search_source_connectors.user_id = $2::UUID AND search_source_connectors.connector_type = $3::searchsourceconnectortype] +[parameters: (1, UUID('f34b4612-6556-47c8-bd37-c0e1fd6c9b30'), 'DEXSCREENER_CONNECTOR')] +(Background on this error at: https://sqlalche.me/e/20/dbapi) +Traceback (most recent call last): + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 545, in _prepare_and_execute + self._rows = deque(await prepared_stmt.fetch(*parameters)) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/asyncpg/prepared_stmt.py", line 176, in fetch + data = await self.__bind_execute(args, 0, timeout) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File 
"/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/asyncpg/prepared_stmt.py", line 267, in __bind_execute + data, status, _ = await self.__do_execute( + ^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/asyncpg/prepared_stmt.py", line 256, in __do_execute + return await executor(protocol) + ^^^^^^^^^^^^^^^^^^^^^^^^ + File "asyncpg/protocol/protocol.pyx", line 206, in bind_execute +asyncpg.exceptions.InvalidTextRepresentationError: invalid input value for enum searchsourceconnectortype: "DEXSCREENER_CONNECTOR" + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1963, in _exec_single_context + self.dialect.do_execute( + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py", line 943, in do_execute + cursor.execute(statement, parameters) + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 580, in execute + self._adapt_connection.await_( + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 132, in await_only + return current.parent.switch(awaitable) # type: ignore[no-any-return,attr-defined] # noqa: E501 + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 196, in greenlet_spawn + value = await result + ^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 558, in 
_prepare_and_execute + self._handle_exception(error) + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 508, in _handle_exception + self._adapt_connection._handle_exception(error) + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 792, in _handle_exception + raise translated_error from error +sqlalchemy.dialects.postgresql.asyncpg.AsyncAdapt_asyncpg_dbapi.Error: : invalid input value for enum searchsourceconnectortype: "DEXSCREENER_CONNECTOR" + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/app/routes/dexscreener_add_connector_route.py", line 79, in add_dexscreener_connector + result = await session.execute( + ^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/session.py", line 463, in execute + result = await greenlet_spawn( + ^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 201, in greenlet_spawn + result = context.throw(*sys.exc_info()) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py", line 2365, in execute + return self._execute_internal( + ^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py", line 2251, in _execute_internal + result: Result[Any] = compile_state_cls.orm_execute_statement( + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File 
"/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/orm/context.py", line 306, in orm_execute_statement + result = conn.execute( + ^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1415, in execute + return meth( + ^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/sql/elements.py", line 523, in _execute_on_connection + return connection._execute_clauseelement( + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1637, in _execute_clauseelement + ret = self._execute_context( + ^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1842, in _execute_context + return self._exec_single_context( + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1982, in _exec_single_context + self._handle_dbapi_exception( + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 2351, in _handle_dbapi_exception + raise sqlalchemy_exception.with_traceback(exc_info[2]) from e + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1963, in _exec_single_context + self.dialect.do_execute( + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py", line 943, in do_execute + cursor.execute(statement, parameters) + File 
"/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 580, in execute + self._adapt_connection.await_( + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 132, in await_only + return current.parent.switch(awaitable) # type: ignore[no-any-return,attr-defined] # noqa: E501 + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 196, in greenlet_spawn + value = await result + ^^^^^^^^^^^^ + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 558, in _prepare_and_execute + self._handle_exception(error) + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 508, in _handle_exception + self._adapt_connection._handle_exception(error) + File "/Users/mac_1/Documents/GitHub/SurfSense/surfsense_backend/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 792, in _handle_exception + raise translated_error from error +sqlalchemy.exc.DBAPIError: (sqlalchemy.dialects.postgresql.asyncpg.Error) : invalid input value for enum searchsourceconnectortype: "DEXSCREENER_CONNECTOR" +[SQL: SELECT search_source_connectors.name, search_source_connectors.connector_type, search_source_connectors.is_indexable, search_source_connectors.last_indexed_at, search_source_connectors.config, search_source_connectors.periodic_indexing_enabled, search_source_connectors.indexing_frequency_minutes, search_source_connectors.next_scheduled_at, search_source_connectors.search_space_id, search_source_connectors.user_id, search_source_connectors.id, search_source_connectors.created_at +FROM 
search_source_connectors +WHERE search_source_connectors.search_space_id = $1::INTEGER AND search_source_connectors.user_id = $2::UUID AND search_source_connectors.connector_type = $3::searchsourceconnectortype] +[parameters: (1, UUID('f34b4612-6556-47c8-bd37-c0e1fd6c9b30'), 'DEXSCREENER_CONNECTOR')] +(Background on this error at: https://sqlalche.me/e/20/dbapi) +INFO: 127.0.0.1:51071 - "POST /api/v1/connectors/dexscreener/add HTTP/1.1" 500 Internal Server Error +INFO: 127.0.0.1:51072 - "GET /api/v1/connectors/dexscreener HTTP/1.1" 405 Method Not Allowed +INFO: 127.0.0.1:51073 - "GET /api/v1/connectors/dexscreener/test?chain=ethereum&address=0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2 HTTP/1.1" 422 Unprocessable Entity +INFO: 127.0.0.1:51170 - "POST /auth/jwt/login HTTP/1.1" 200 OK +2026-01-31 17:10:00 - app.routes.dexscreener_add_connector_route - INFO - Successfully created DexScreener connector for user f34b4612-6556-47c8-bd37-c0e1fd6c9b30 with ID 3 +INFO: 127.0.0.1:51171 - "POST /api/v1/connectors/dexscreener/add HTTP/1.1" 200 OK +INFO: 127.0.0.1:51172 - "GET /api/v1/connectors/dexscreener HTTP/1.1" 405 Method Not Allowed +INFO: 127.0.0.1:51173 - "GET /api/v1/connectors/dexscreener/test?chain=ethereum&address=0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2 HTTP/1.1" 422 Unprocessable Entity +INFO: 127.0.0.1:51257 - "POST /auth/jwt/login HTTP/1.1" 200 OK +2026-01-31 17:10:42 - app.routes.dexscreener_add_connector_route - INFO - Updated existing DexScreener connector for user f34b4612-6556-47c8-bd37-c0e1fd6c9b30 in space 1 +INFO: 127.0.0.1:51258 - "POST /api/v1/connectors/dexscreener/add HTTP/1.1" 200 OK +2026-01-31 17:10:43 - app.connectors.dexscreener_connector - INFO - Token not found: tokens/ethereum/0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2 +INFO: 127.0.0.1:51259 - "GET /api/v1/connectors/dexscreener/test?space_id=1 HTTP/1.1" 400 Bad Request +INFO: 127.0.0.1:52249 - "POST /auth/jwt/login HTTP/1.1" 200 OK +2026-01-31 17:23:19 - 
app.routes.dexscreener_add_connector_route - INFO - Updated existing DexScreener connector for user f34b4612-6556-47c8-bd37-c0e1fd6c9b30 in space 1 +INFO: 127.0.0.1:52250 - "POST /api/v1/connectors/dexscreener/add HTTP/1.1" 200 OK +2026-01-31 17:23:19 - app.connectors.dexscreener_connector - INFO - Token not found: tokens/ethereum/0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2 +INFO: 127.0.0.1:52251 - "GET /api/v1/connectors/dexscreener/test?space_id=1 HTTP/1.1" 400 Bad Request +INFO: 127.0.0.1:52255 - "GET /api/v1/connectors/dexscreener/test?space_id=1 HTTP/1.1" 401 Unauthorized +INFO: 127.0.0.1:52260 - "POST /api/v1/connectors/dexscreener/add HTTP/1.1" 401 Unauthorized +INFO: 127.0.0.1:52262 - "DELETE /api/v1/connectors/dexscreener?space_id=1 HTTP/1.1" 401 Unauthorized +INFO: 127.0.0.1:53087 - "OPTIONS /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53090 - "OPTIONS /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53087 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53087 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53117 - "OPTIONS /api/v1/searchspaces?limit=10&skip=0&owned_only=false HTTP/1.1" 200 OK +INFO: 127.0.0.1:53115 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53115 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53117 - "GET /api/v1/searchspaces?limit=10&skip=0&owned_only=false HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "OPTIONS /api/v1/global-new-llm-configs HTTP/1.1" 200 OK +INFO: 127.0.0.1:53127 - "OPTIONS /api/v1/search-spaces/2/llm-preferences HTTP/1.1" 200 OK +INFO: 127.0.0.1:53129 - "OPTIONS /api/v1/searchspaces/2/my-access HTTP/1.1" 200 OK +INFO: 127.0.0.1:53129 - "GET /api/v1/global-new-llm-configs HTTP/1.1" 200 OK +INFO: 127.0.0.1:53127 - "GET /api/v1/search-spaces/2/llm-preferences HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "GET /api/v1/searchspaces/2/my-access HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "OPTIONS /api/v1/threads?search_space_id=2&limit=1 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53127 - "OPTIONS 
/api/v1/documents/type-counts?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "OPTIONS /api/v1/search-source-connectors?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53133 - "OPTIONS /api/v1/searchspaces/2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53135 - "OPTIONS /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:53133 - "OPTIONS /api/v1/notifications/unread-count?search_space_id=2&type=new_mention HTTP/1.1" 200 OK +INFO: 127.0.0.1:53137 - "OPTIONS /api/v1/threads?search_space_id=2&limit=40 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53135 - "OPTIONS /api/v1/notifications/unread-count?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "GET /api/v1/search-source-connectors?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53127 - "GET /api/v1/documents/type-counts?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "GET /api/v1/notifications/unread-count?search_space_id=2&type=new_mention HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "OPTIONS /api/v1/new-llm-configs?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "OPTIONS /api/v1/notifications/unread-count?search_space_id=2&type=connector_indexing HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "OPTIONS /api/v1/notifications?search_space_id=2&type=new_mention&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "OPTIONS /api/v1/notifications?search_space_id=2&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "OPTIONS /api/v1/notifications?search_space_id=2&type=connector_indexing&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53129 - "GET /api/v1/threads?search_space_id=2&limit=1 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53127 - "GET /api/v1/notifications/unread-count?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53135 - "GET /api/v1/threads?search_space_id=2&limit=40 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "GET /api/v1/new-llm-configs?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53133 - "GET /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:53129 - "GET 
/api/v1/notifications/unread-count?search_space_id=2&type=connector_indexing HTTP/1.1" 200 OK +INFO: 127.0.0.1:53137 - "GET /api/v1/searchspaces/2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53127 - "GET /api/v1/notifications?search_space_id=2&type=new_mention&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53135 - "GET /api/v1/notifications?search_space_id=2&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53125 - "GET /api/v1/notifications?search_space_id=2&type=connector_indexing&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53316 - "OPTIONS /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53318 - "OPTIONS /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53320 - "OPTIONS /api/v1/searchspaces?limit=10&skip=0&owned_only=false HTTP/1.1" 200 OK +INFO: 127.0.0.1:53320 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53320 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53318 - "GET /api/v1/searchspaces?limit=10&skip=0&owned_only=false HTTP/1.1" 200 OK +INFO: 127.0.0.1:53318 - "OPTIONS /api/v1/global-new-llm-configs HTTP/1.1" 200 OK +INFO: 127.0.0.1:53320 - "OPTIONS /api/v1/search-spaces/2/llm-preferences HTTP/1.1" 200 OK +INFO: 127.0.0.1:53320 - "OPTIONS /api/v1/searchspaces/2/my-access HTTP/1.1" 200 OK +INFO: 127.0.0.1:53318 - "GET /api/v1/global-new-llm-configs HTTP/1.1" 200 OK +INFO: 127.0.0.1:53316 - "GET /api/v1/search-spaces/2/llm-preferences HTTP/1.1" 200 OK +INFO: 127.0.0.1:53320 - "GET /api/v1/searchspaces/2/my-access HTTP/1.1" 200 OK +INFO: 127.0.0.1:53328 - "OPTIONS /api/v1/threads?search_space_id=2&limit=1 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53329 - "OPTIONS /api/v1/documents/type-counts?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53332 - "OPTIONS /api/v1/search-source-connectors?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53334 - "OPTIONS /api/v1/searchspaces/2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53335 - "OPTIONS /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:53332 - "OPTIONS /api/v1/threads?search_space_id=2&limit=40 HTTP/1.1" 200 OK +INFO: 
127.0.0.1:53334 - "GET /api/v1/searchspaces/2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53334 - "OPTIONS /api/v1/new-llm-configs?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53337 - "GET /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:53335 - "GET /api/v1/search-source-connectors?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53328 - "GET /api/v1/documents/type-counts?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53329 - "GET /api/v1/threads?search_space_id=2&limit=1 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53334 - "GET /api/v1/new-llm-configs?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53332 - "GET /api/v1/threads?search_space_id=2&limit=40 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53332 - "OPTIONS /api/v1/notifications/unread-count?search_space_id=2&type=connector_indexing HTTP/1.1" 200 OK +INFO: 127.0.0.1:53334 - "OPTIONS /api/v1/notifications/unread-count?search_space_id=2&type=new_mention HTTP/1.1" 200 OK +INFO: 127.0.0.1:53329 - "OPTIONS /api/v1/notifications/unread-count?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53328 - "OPTIONS /api/v1/notifications?search_space_id=2&type=connector_indexing&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53335 - "OPTIONS /api/v1/notifications?search_space_id=2&type=new_mention&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53337 - "OPTIONS /api/v1/notifications?search_space_id=2&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53332 - "GET /api/v1/notifications/unread-count?search_space_id=2&type=connector_indexing HTTP/1.1" 200 OK +INFO: 127.0.0.1:53329 - "GET /api/v1/notifications/unread-count?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53334 - "GET /api/v1/notifications/unread-count?search_space_id=2&type=new_mention HTTP/1.1" 200 OK +INFO: 127.0.0.1:53335 - "GET /api/v1/notifications?search_space_id=2&type=new_mention&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53337 - "GET /api/v1/notifications?search_space_id=2&type=connector_indexing&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53328 - "GET 
/api/v1/notifications?search_space_id=2&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53382 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53384 - "GET /api/v1/global-new-llm-configs HTTP/1.1" 200 OK +INFO: 127.0.0.1:53382 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53386 - "GET /api/v1/search-spaces/2/llm-preferences HTTP/1.1" 200 OK +INFO: 127.0.0.1:53388 - "GET /api/v1/searchspaces/2/my-access HTTP/1.1" 200 OK +INFO: 127.0.0.1:53384 - "GET /api/v1/searchspaces/2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53382 - "GET /api/v1/search-source-connectors?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53391 - "GET /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:53388 - "GET /api/v1/threads?search_space_id=2&limit=1 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53386 - "GET /api/v1/documents/type-counts?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53382 - "GET /api/v1/new-llm-configs?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53384 - "GET /api/v1/threads?search_space_id=2&limit=40 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53393 - "GET /api/v1/searchspaces?limit=10&skip=0&owned_only=false HTTP/1.1" 200 OK +INFO: 127.0.0.1:53397 - "GET /api/v1/notifications/unread-count?search_space_id=2&type=connector_indexing HTTP/1.1" 200 OK +INFO: 127.0.0.1:53399 - "GET /api/v1/notifications/unread-count?search_space_id=2&type=new_mention HTTP/1.1" 200 OK +INFO: 127.0.0.1:53403 - "GET /api/v1/notifications?search_space_id=2&type=connector_indexing&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53401 - "GET /api/v1/notifications/unread-count?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53405 - "GET /api/v1/notifications?search_space_id=2&type=new_mention&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53407 - "GET /api/v1/notifications?search_space_id=2&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53418 - "GET /api/v1/searchspaces/2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53421 - "GET /api/v1/notifications/unread-count?search_space_id=2&type=connector_indexing HTTP/1.1" 200 OK 
+INFO: 127.0.0.1:53424 - "GET /api/v1/notifications?search_space_id=2&type=connector_indexing&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53419 - "GET /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:53529 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53529 - "GET /users/me HTTP/1.1" 200 OK +INFO: 127.0.0.1:53531 - "GET /api/v1/searchspaces?limit=10&skip=0&owned_only=false HTTP/1.1" 200 OK +INFO: 127.0.0.1:53531 - "GET /api/v1/global-new-llm-configs HTTP/1.1" 200 OK +INFO: 127.0.0.1:53529 - "GET /api/v1/search-spaces/2/llm-preferences HTTP/1.1" 200 OK +INFO: 127.0.0.1:53538 - "GET /api/v1/searchspaces/2/my-access HTTP/1.1" 200 OK +INFO: 127.0.0.1:53540 - "GET /api/v1/searchspaces/2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53538 - "GET /api/v1/threads?search_space_id=2&limit=1 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53542 - "GET /api/v1/searchspaces/2/members HTTP/1.1" 200 OK +INFO: 127.0.0.1:53529 - "GET /api/v1/documents/type-counts?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53531 - "GET /api/v1/search-source-connectors?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53542 - "GET /api/v1/new-llm-configs?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53544 - "GET /api/v1/threads?search_space_id=2&limit=40 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53544 - "GET /api/v1/notifications/unread-count?search_space_id=2&type=connector_indexing HTTP/1.1" 200 OK +INFO: 127.0.0.1:53542 - "GET /api/v1/notifications/unread-count?search_space_id=2&type=new_mention HTTP/1.1" 200 OK +INFO: 127.0.0.1:53538 - "GET /api/v1/notifications?search_space_id=2&type=new_mention&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53529 - "GET /api/v1/notifications?search_space_id=2&type=connector_indexing&limit=50 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53531 - "GET /api/v1/notifications/unread-count?search_space_id=2 HTTP/1.1" 200 OK +INFO: 127.0.0.1:53540 - "GET /api/v1/notifications?search_space_id=2&limit=50 HTTP/1.1" 200 OK diff --git a/test_dexscreener_search.py 
#!/usr/bin/env python3
"""
Test script to debug DexScreener RAG retrieval.

Directly calls ``search_knowledge_base_async`` to inspect which documents are
retrieved for a given query, printing the sources and document previews.
"""

import asyncio
import sys
import os

# Add backend to path so `app.*` imports resolve when run from the repo root.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'surfsense_backend'))

from app.agents.new_chat.tools.knowledge_base import search_knowledge_base_async
from app.db import get_async_session
from sqlalchemy.ext.asyncio import AsyncSession  # NOTE(review): unused in this script — confirm and drop


def _preview(value, limit=200):
    """Return up to *limit* characters of *value* as a string.

    Tolerates ``None`` (prints ``N/A`` instead). This fixes a TypeError in the
    original code: ``source.get('description', 'N/A')[:200]`` crashed when the
    key was present but its value was None, because ``dict.get`` only falls
    back on a *missing* key.
    """
    return str(value if value is not None else "N/A")[:limit]


async def test_dexscreener_search(
    user_query="WETH price",
    search_space_id=7,
    user_id="1951c010-436f-4636-89ca-5d86f57951df",  # admin user
    top_k=5,
):
    """Run a knowledge-base search and print the retrieved sources/documents.

    All parameters default to the original hard-coded debug values, so calling
    with no arguments behaves exactly as before; pass arguments to reuse the
    script for other queries/spaces/users.
    """

    # get_async_session is an async-generator dependency; consume one session.
    async for session in get_async_session():
        try:
            print("=" * 80)
            print("Testing DexScreener RAG Retrieval")
            print("=" * 80)

            print(f"\nQuery: {user_query}")
            print(f"Search Space ID: {search_space_id}")
            print(f"Top K: {top_k}")
            print("\n" + "-" * 80)

            # Call search function
            result = await search_knowledge_base_async(
                query=user_query,
                search_space_id=search_space_id,
                user_id=user_id,
                top_k=top_k,
            )

            print(f"\nSearch Result Type: {type(result)}")
            print(f"Result Length: {len(result) if isinstance(result, (list, dict)) else 'N/A'}")

            if isinstance(result, dict):
                print("\nResult Keys:", list(result.keys()))

                # Print sources
                if 'sources' in result:
                    sources = result['sources']
                    print(f"\nNumber of Sources: {len(sources)}")

                    for i, source in enumerate(sources, 1):
                        print(f"\n--- Source {i} ---")
                        print(f"Title: {source.get('title', 'N/A')}")
                        print(f"URL: {source.get('url', 'N/A')}")
                        print(f"Type: {source.get('type', 'N/A')}")
                        # _preview avoids slicing None when description is None.
                        print(f"Description: {_preview(source.get('description'))}...")

                        # Check for DexScreener-specific fields
                        if 'extra_fields' in source:
                            extra = source['extra_fields']
                            print(f"Extra Fields: {extra}")

                # Print documents
                if 'documents' in result:
                    docs = result['documents']
                    print(f"\n\nNumber of Documents: {len(docs)}")

                    for i, doc in enumerate(docs[:3], 1):  # Show first 3 docs
                        print(f"\n--- Document {i} ---")
                        print(f"Type: {type(doc)}")
                        if hasattr(doc, 'page_content'):
                            print(f"Content Preview: {_preview(doc.page_content, 300)}...")
                        if hasattr(doc, 'metadata'):
                            print(f"Metadata: {doc.metadata}")

            elif isinstance(result, list):
                print(f"\nResult is a list with {len(result)} items")
                for i, item in enumerate(result[:3], 1):
                    print(f"\n--- Item {i} ---")
                    print(f"Type: {type(item)}")
                    print(f"Content: {_preview(item)}...")

            else:
                print(f"\nResult: {result}")

            print("\n" + "=" * 80)
            print("Test Complete")
            print("=" * 80)

        except Exception as e:
            # Debug script: show the full traceback rather than failing silently.
            print(f"\n❌ Error: {e}")
            import traceback
            traceback.print_exc()
        finally:
            await session.close()
        # One-shot test: only the first yielded session is needed.
        break


if __name__ == "__main__":
    asyncio.run(test_dexscreener_search())