mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-12 09:12:40 +02:00
test(backend): enhance Drive file filtering and add unit tests for _drive_list_files
This commit is contained in:
parent
074b06441f
commit
8536bac29a
5 changed files with 139 additions and 24 deletions
|
|
@ -320,15 +320,54 @@ def _drive_list_files(args: dict[str, Any]) -> dict[str, Any]:
|
|||
except IndexError:
|
||||
folder_id = "root"
|
||||
|
||||
files = _DRIVE_FIXTURE.get(folder_id, [])
|
||||
files = _filter_drive_files_for_query(q, _DRIVE_FIXTURE.get(folder_id, []))
|
||||
return {
|
||||
"data": {
|
||||
"files": list(files),
|
||||
"files": files,
|
||||
"nextPageToken": None,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def _extract_quoted_value(q: str, anchor: str) -> str | None:
|
||||
anchor_idx = q.find(anchor)
|
||||
if anchor_idx == -1:
|
||||
return None
|
||||
|
||||
after_anchor = q[anchor_idx + len(anchor) :]
|
||||
first_quote_idx = after_anchor.find("'")
|
||||
if first_quote_idx == -1:
|
||||
return None
|
||||
|
||||
after_first_quote = after_anchor[first_quote_idx + 1 :]
|
||||
second_quote_idx = after_first_quote.find("'")
|
||||
if second_quote_idx == -1:
|
||||
return None
|
||||
|
||||
return after_first_quote[:second_quote_idx]
|
||||
|
||||
|
||||
def _filter_drive_files_for_query(q: str, files: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Apply a minimal subset of Drive ``q`` query semantics to fixture entries.

    Supports the three clauses the fake needs: ``trashed = false``,
    ``mimeType != '...'`` and ``mimeType = '...'``. Entries must satisfy
    every clause present in the query; a new list is always returned.
    """
    predicates = []

    if "trashed = false" in q:
        # Anything whose "trashed" flag is literally True is excluded.
        predicates.append(lambda entry: entry.get("trashed") is not True)

    excluded_mime_type = _extract_quoted_value(q, "mimeType !=")
    if excluded_mime_type:
        predicates.append(lambda entry: entry.get("mimeType") != excluded_mime_type)

    included_mime_type = _extract_quoted_value(q, "mimeType =")
    if included_mime_type:
        predicates.append(lambda entry: entry.get("mimeType") == included_mime_type)

    return [entry for entry in files if all(check(entry) for check in predicates)]
|
||||
|
||||
|
||||
def _drive_download_file(args: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Mimic GOOGLEDRIVE_DOWNLOAD_FILE.
|
||||
|
||||
|
|
|
|||
|
|
@ -34,6 +34,21 @@
|
|||
"mimeType": "text/csv",
|
||||
"modifiedTime": "2025-01-25T13:45:00.000Z",
|
||||
"createdTime": "2025-01-25T13:45:00.000Z"
|
||||
},
|
||||
{
|
||||
"id": "fake-shortcut-canary",
|
||||
"name": "Shortcut to Canary",
|
||||
"mimeType": "application/vnd.google-apps.shortcut",
|
||||
"modifiedTime": "2025-02-10T12:00:00.000Z",
|
||||
"createdTime": "2025-02-10T12:00:00.000Z"
|
||||
},
|
||||
{
|
||||
"id": "fake-file-trashed",
|
||||
"name": "trashed-e2e-note.txt",
|
||||
"mimeType": "text/plain",
|
||||
"modifiedTime": "2025-02-11T09:00:00.000Z",
|
||||
"createdTime": "2025-02-11T09:00:00.000Z",
|
||||
"trashed": true
|
||||
}
|
||||
],
|
||||
"fake-folder-projects": [
|
||||
|
|
|
|||
|
|
@ -0,0 +1,38 @@
|
|||
from tests.e2e.fakes.composio_module import _drive_list_files
|
||||
|
||||
|
||||
def _ids(result: dict) -> set[str]:
|
||||
return {item["id"] for item in result["data"]["files"]}
|
||||
|
||||
|
||||
def test_drive_list_files_filters_shortcuts_and_trashed_items():
    """Listing root with trash/shortcut exclusions hides those entries."""
    query = (
        "'root' in parents and trashed = false and "
        "mimeType != 'application/vnd.google-apps.shortcut'"
    )
    listed = _ids(_drive_list_files({"q": query}))

    # Regular file survives; shortcut and trashed entries are filtered out.
    assert "fake-file-canary" in listed
    assert "fake-shortcut-canary" not in listed
    assert "fake-file-trashed" not in listed
|
||||
|
||||
|
||||
def test_drive_list_files_filters_to_exact_mime_type():
    """An exact mimeType clause keeps only files with that type."""
    query = "'root' in parents and trashed = false and mimeType = 'text/plain'"
    listing = _drive_list_files({"q": query})

    assert _ids(listing) == {"fake-file-canary"}
|
||||
|
||||
|
||||
def test_drive_list_files_uses_requested_parent_folder():
    """The folder id named in the parents clause selects that fixture bucket."""
    query = "'fake-folder-projects' in parents and trashed = false"
    listing = _drive_list_files({"q": query})

    assert _ids(listing) == {"fake-file-roadmap"}
|
||||
Loading…
Add table
Add a link
Reference in a new issue