mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-05 22:02:39 +02:00
feat: introduce SurfSense plugin for Obsidian with syncing capabilities and enhanced settings management
This commit is contained in:
parent
ee2fb79e75
commit
60d9e7ed8c
19 changed files with 2044 additions and 175 deletions
248
surfsense_obsidian/src/api-client.ts
Normal file
248
surfsense_obsidian/src/api-client.ts
Normal file
|
|
@ -0,0 +1,248 @@
|
|||
import { Notice, requestUrl, type RequestUrlParam, type RequestUrlResponse } from "obsidian";
|
||||
import type {
|
||||
ConnectResponse,
|
||||
HealthResponse,
|
||||
ManifestResponse,
|
||||
NotePayload,
|
||||
RenameItem,
|
||||
SearchSpace,
|
||||
} from "./types";
|
||||
|
||||
/**
|
||||
* SurfSense backend client used by the Obsidian plugin.
|
||||
*
|
||||
* Mobile-safety contract (must hold for every transitive import):
|
||||
* - Use Obsidian `requestUrl` only — no `fetch`, no `axios`, no
|
||||
* `node:http`, no `node:https`. CORS is bypassed and mobile works.
|
||||
* - No top-level `node:*` imports anywhere reachable from this file.
|
||||
* - Hashing happens elsewhere via Web Crypto, not `node:crypto`.
|
||||
*
|
||||
* Auth + wire contract:
|
||||
* - Every request carries `Authorization: Bearer <token>` only. No
|
||||
* custom headers — the backend identifies the caller from the JWT
|
||||
* and feature-detects the API via the `capabilities` array on
|
||||
* `/health` and `/connect`.
|
||||
* - 401 surfaces as `AuthError` so the orchestrator can show the
|
||||
* "token expired, paste a fresh one" UX.
|
||||
* - HealthResponse / ConnectResponse use index signatures so any
|
||||
* additive backend field (e.g. new capabilities) parses without
|
||||
* breaking the decoder. This mirrors `ConfigDict(extra='ignore')`
|
||||
* on the server side.
|
||||
*/
|
||||
|
||||
export class AuthError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = "AuthError";
|
||||
}
|
||||
}
|
||||
|
||||
export class TransientError extends Error {
|
||||
readonly status: number;
|
||||
constructor(status: number, message: string) {
|
||||
super(message);
|
||||
this.name = "TransientError";
|
||||
this.status = status;
|
||||
}
|
||||
}
|
||||
|
||||
export class PermanentError extends Error {
|
||||
readonly status: number;
|
||||
constructor(status: number, message: string) {
|
||||
super(message);
|
||||
this.name = "PermanentError";
|
||||
this.status = status;
|
||||
}
|
||||
}
|
||||
|
||||
/** Dependencies injected into the SurfSense API client. */
export interface ApiClientOptions {
	/** Returns the configured backend base URL; read freshly on every request. */
	getServerUrl: () => string;
	/** Returns the current bearer token; read freshly on every request. */
	getToken: () => string;
	/** Plugin version string sent in the `/connect` handshake body. */
	pluginVersion: string;
	/** Invoked whenever a request comes back 401, before AuthError is thrown. */
	onAuthError?: () => void;
}
|
||||
|
||||
export class SurfSenseApiClient {
|
||||
private readonly opts: ApiClientOptions;
|
||||
|
||||
constructor(opts: ApiClientOptions) {
|
||||
this.opts = opts;
|
||||
}
|
||||
|
||||
updateOptions(partial: Partial<ApiClientOptions>): void {
|
||||
Object.assign(this.opts, partial);
|
||||
}
|
||||
|
||||
get pluginVersion(): string {
|
||||
return this.opts.pluginVersion;
|
||||
}
|
||||
|
||||
async health(): Promise<HealthResponse> {
|
||||
return await this.request<HealthResponse>("GET", "/api/v1/obsidian/health");
|
||||
}
|
||||
|
||||
async listSearchSpaces(): Promise<SearchSpace[]> {
|
||||
const resp = await this.request<SearchSpace[] | { items: SearchSpace[] }>(
|
||||
"GET",
|
||||
"/api/v1/searchspaces/"
|
||||
);
|
||||
if (Array.isArray(resp)) return resp;
|
||||
if (resp && Array.isArray((resp as { items?: SearchSpace[] }).items)) {
|
||||
return (resp as { items: SearchSpace[] }).items;
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
async verifyToken(): Promise<{ ok: true; health: HealthResponse }> {
|
||||
// /health is gated by current_active_user, so a successful response
|
||||
// transitively proves the token works. Cheaper than fetching a list.
|
||||
const health = await this.health();
|
||||
return { ok: true, health };
|
||||
}
|
||||
|
||||
async connect(input: {
|
||||
searchSpaceId: number;
|
||||
vaultId: string;
|
||||
vaultName: string;
|
||||
deviceId: string;
|
||||
deviceLabel: string;
|
||||
}): Promise<ConnectResponse> {
|
||||
return await this.request<ConnectResponse>(
|
||||
"POST",
|
||||
`/api/v1/obsidian/connect?search_space_id=${encodeURIComponent(
|
||||
String(input.searchSpaceId)
|
||||
)}`,
|
||||
{
|
||||
vault_id: input.vaultId,
|
||||
vault_name: input.vaultName,
|
||||
plugin_version: this.opts.pluginVersion,
|
||||
device_id: input.deviceId,
|
||||
device_label: input.deviceLabel,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
async syncBatch(input: {
|
||||
vaultId: string;
|
||||
notes: NotePayload[];
|
||||
}): Promise<{ accepted: number; rejected: string[] }> {
|
||||
const resp = await this.request<{ accepted?: number; rejected?: string[] }>(
|
||||
"POST",
|
||||
"/api/v1/obsidian/sync",
|
||||
{ vault_id: input.vaultId, notes: input.notes }
|
||||
);
|
||||
return {
|
||||
accepted: typeof resp.accepted === "number" ? resp.accepted : input.notes.length,
|
||||
rejected: Array.isArray(resp.rejected) ? resp.rejected : [],
|
||||
};
|
||||
}
|
||||
|
||||
async renameBatch(input: {
|
||||
vaultId: string;
|
||||
renames: Pick<RenameItem, "oldPath" | "newPath">[];
|
||||
}): Promise<{ renamed: number }> {
|
||||
const resp = await this.request<{ renamed?: number }>(
|
||||
"POST",
|
||||
"/api/v1/obsidian/rename",
|
||||
{
|
||||
vault_id: input.vaultId,
|
||||
renames: input.renames.map((r) => ({
|
||||
old_path: r.oldPath,
|
||||
new_path: r.newPath,
|
||||
})),
|
||||
}
|
||||
);
|
||||
return { renamed: typeof resp.renamed === "number" ? resp.renamed : 0 };
|
||||
}
|
||||
|
||||
async deleteBatch(input: {
|
||||
vaultId: string;
|
||||
paths: string[];
|
||||
}): Promise<{ deleted: number }> {
|
||||
const resp = await this.request<{ deleted?: number }>(
|
||||
"DELETE",
|
||||
"/api/v1/obsidian/notes",
|
||||
{ vault_id: input.vaultId, paths: input.paths }
|
||||
);
|
||||
return { deleted: typeof resp.deleted === "number" ? resp.deleted : 0 };
|
||||
}
|
||||
|
||||
async getManifest(vaultId: string): Promise<ManifestResponse> {
|
||||
return await this.request<ManifestResponse>(
|
||||
"GET",
|
||||
`/api/v1/obsidian/manifest?vault_id=${encodeURIComponent(vaultId)}`
|
||||
);
|
||||
}
|
||||
|
||||
private async request<T>(
|
||||
method: RequestUrlParam["method"],
|
||||
path: string,
|
||||
body?: unknown
|
||||
): Promise<T> {
|
||||
const baseUrl = this.opts.getServerUrl().replace(/\/+$/, "");
|
||||
const token = this.opts.getToken();
|
||||
if (!token) {
|
||||
throw new AuthError("Missing API token. Open SurfSense settings to paste one.");
|
||||
}
|
||||
const headers: Record<string, string> = {
|
||||
Authorization: `Bearer ${token}`,
|
||||
Accept: "application/json",
|
||||
};
|
||||
if (body !== undefined) headers["Content-Type"] = "application/json";
|
||||
|
||||
let resp: RequestUrlResponse;
|
||||
try {
|
||||
resp = await requestUrl({
|
||||
url: `${baseUrl}${path}`,
|
||||
method,
|
||||
headers,
|
||||
body: body === undefined ? undefined : JSON.stringify(body),
|
||||
throw: false,
|
||||
});
|
||||
} catch (err) {
|
||||
throw new TransientError(0, `Network error: ${(err as Error).message}`);
|
||||
}
|
||||
|
||||
if (resp.status >= 200 && resp.status < 300) {
|
||||
return parseJson<T>(resp);
|
||||
}
|
||||
|
||||
const detail = extractDetail(resp);
|
||||
|
||||
if (resp.status === 401) {
|
||||
this.opts.onAuthError?.();
|
||||
new Notice("Surfsense: token expired or invalid. Paste a fresh token in settings.");
|
||||
throw new AuthError(detail || "Unauthorized");
|
||||
}
|
||||
|
||||
if (resp.status >= 500 || resp.status === 429) {
|
||||
throw new TransientError(resp.status, detail || `HTTP ${resp.status}`);
|
||||
}
|
||||
|
||||
throw new PermanentError(resp.status, detail || `HTTP ${resp.status}`);
|
||||
}
|
||||
}
|
||||
|
||||
function parseJson<T>(resp: RequestUrlResponse): T {
|
||||
if (resp.text === undefined || resp.text === "") return undefined as unknown as T;
|
||||
try {
|
||||
return JSON.parse(resp.text) as T;
|
||||
} catch {
|
||||
return undefined as unknown as T;
|
||||
}
|
||||
}
|
||||
|
||||
function safeJson(resp: RequestUrlResponse): Record<string, unknown> {
|
||||
try {
|
||||
return resp.text ? (JSON.parse(resp.text) as Record<string, unknown>) : {};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
function extractDetail(resp: RequestUrlResponse): string {
|
||||
const json = safeJson(resp);
|
||||
if (typeof json.detail === "string") return json.detail;
|
||||
if (typeof json.message === "string") return json.message;
|
||||
return resp.text?.slice(0, 200) ?? "";
|
||||
}
|
||||
66
surfsense_obsidian/src/excludes.ts
Normal file
66
surfsense_obsidian/src/excludes.ts
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
/**
|
||||
* Tiny glob matcher for exclude patterns.
|
||||
*
|
||||
* Supports `*` (any chars except `/`), `**` (any chars including `/`), and
|
||||
* literal segments. Patterns without a slash are matched against any path
|
||||
* segment (so `templates` excludes `templates/foo.md` and `notes/templates/x.md`).
|
||||
*
|
||||
* Intentionally not a full minimatch — Obsidian users overwhelmingly type
|
||||
* folder names ("templates", ".trash") and the obvious wildcards. Avoiding
|
||||
* the dependency keeps the bundle small and the mobile attack surface tiny.
|
||||
*/
|
||||
|
||||
const cache = new Map<string, RegExp>();
|
||||
|
||||
function compile(pattern: string): RegExp {
|
||||
const cached = cache.get(pattern);
|
||||
if (cached) return cached;
|
||||
|
||||
let body = "";
|
||||
let i = 0;
|
||||
while (i < pattern.length) {
|
||||
const ch = pattern[i] ?? "";
|
||||
if (ch === "*") {
|
||||
if (pattern[i + 1] === "*") {
|
||||
body += ".*";
|
||||
i += 2;
|
||||
if (pattern[i] === "/") i += 1;
|
||||
continue;
|
||||
}
|
||||
body += "[^/]*";
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
if (".+^${}()|[]\\".includes(ch)) {
|
||||
body += "\\" + ch;
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
body += ch;
|
||||
i += 1;
|
||||
}
|
||||
|
||||
const anchored = pattern.includes("/")
|
||||
? `^${body}(/.*)?$`
|
||||
: `(^|/)${body}(/.*)?$`;
|
||||
const re = new RegExp(anchored);
|
||||
cache.set(pattern, re);
|
||||
return re;
|
||||
}
|
||||
|
||||
export function isExcluded(path: string, patterns: string[]): boolean {
|
||||
if (!patterns.length) return false;
|
||||
for (const raw of patterns) {
|
||||
const trimmed = raw.trim();
|
||||
if (!trimmed || trimmed.startsWith("#")) continue;
|
||||
if (compile(trimmed).test(path)) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function parseExcludePatterns(raw: string): string[] {
|
||||
return raw
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0 && !line.startsWith("#"));
|
||||
}
|
||||
|
|
@ -1,99 +1,216 @@
|
|||
import {App, Editor, MarkdownView, Modal, Notice, Plugin} from 'obsidian';
|
||||
import {DEFAULT_SETTINGS, MyPluginSettings, SampleSettingTab} from "./settings";
|
||||
import { Notice, Plugin } from "obsidian";
|
||||
import { SurfSenseApiClient } from "./api-client";
|
||||
import { PersistentQueue } from "./queue";
|
||||
import { SurfSenseSettingTab } from "./settings";
|
||||
import { StatusBar } from "./status-bar";
|
||||
import { SyncEngine } from "./sync-engine";
|
||||
import {
|
||||
DEFAULT_SETTINGS,
|
||||
type QueueItem,
|
||||
type StatusState,
|
||||
type SurfsensePluginSettings,
|
||||
} from "./types";
|
||||
|
||||
// Remember to rename these classes and interfaces!
|
||||
|
||||
export default class MyPlugin extends Plugin {
|
||||
settings: MyPluginSettings;
|
||||
/**
|
||||
* SurfSense plugin entry point.
|
||||
*
|
||||
* Replaces the obsidian-sample-plugin SampleModal/ribbon stub. Lifecycle:
|
||||
*
|
||||
* onload():
|
||||
* load settings → seed identity (vault_id, device_id) →
|
||||
* wire api client + queue + sync engine + status bar →
|
||||
* register settings tab → register vault + metadataCache events →
|
||||
* register commands (resync, sync current note, open settings) →
|
||||
* register status bar item →
|
||||
* kick off engine.start() (health → drain → reconcile).
|
||||
*
|
||||
* onunload():
|
||||
* stop the queue's debounce timer; unregistered events and DOM
|
||||
* handles auto-clean via the Plugin base class.
|
||||
*/
|
||||
export default class SurfSensePlugin extends Plugin {
|
||||
settings!: SurfsensePluginSettings;
|
||||
api!: SurfSenseApiClient;
|
||||
queue!: PersistentQueue;
|
||||
engine!: SyncEngine;
|
||||
private statusBar: StatusBar | null = null;
|
||||
lastStatus: StatusState = { kind: "idle", queueDepth: 0 };
|
||||
serverCapabilities: string[] = [];
|
||||
serverApiVersion: string | null = null;
|
||||
private settingTab: SurfSenseSettingTab | null = null;
|
||||
|
||||
/**
 * Wire the whole plugin together. (This span also contained interleaved
 * pre-refactor sample-plugin residue — ribbon icon, SampleModal commands,
 * demo DOM/interval hooks — which is removed here.)
 *
 * Order matters: settings + identity first (everything else reads them),
 * then api/queue/engine, then UI (settings tab, status bar), then vault
 * events and commands, and finally `engine.start()` once the layout is
 * ready so the metadataCache is warm before reconcile.
 */
async onload() {
	await this.loadSettings();
	this.seedIdentity();
	await this.saveSettings();

	const pluginVersion = this.manifest.version;

	this.api = new SurfSenseApiClient({
		getServerUrl: () => this.settings.serverUrl,
		getToken: () => this.settings.apiToken,
		pluginVersion,
	});

	// The queue persists into the settings JSON so a crash mid-flight loses nothing.
	this.queue = new PersistentQueue(this.settings.queue ?? [], {
		persist: async (items) => {
			this.settings.queue = items;
			await this.saveData(this.settings);
		},
	});

	this.engine = new SyncEngine({
		app: this.app,
		apiClient: this.api,
		queue: this.queue,
		getSettings: () => this.settings,
		saveSettings: async (mut) => {
			mut(this.settings);
			await this.saveSettings();
			this.settingTab?.renderStatus();
		},
		setStatus: (s) => {
			this.lastStatus = s;
			this.statusBar?.update(s);
			this.settingTab?.renderStatus();
		},
		onCapabilities: (caps, apiVersion) => {
			this.serverCapabilities = [...caps];
			this.serverApiVersion = apiVersion;
			this.settingTab?.renderStatus();
		},
	});

	// The debounced flush only fires in auto mode; manual mode drains on command.
	this.queue.setFlushHandler(() => {
		if (this.settings.syncMode !== "auto") return;
		void this.engine.flushQueue();
	});

	this.settingTab = new SurfSenseSettingTab(this.app, this);
	this.addSettingTab(this.settingTab);

	const statusHost = this.addStatusBarItem();
	this.statusBar = new StatusBar(statusHost);
	this.statusBar.update(this.lastStatus);

	this.registerEvent(
		this.app.vault.on("create", (file) => this.engine.onCreate(file)),
	);
	this.registerEvent(
		this.app.vault.on("modify", (file) => this.engine.onModify(file)),
	);
	this.registerEvent(
		this.app.vault.on("delete", (file) => this.engine.onDelete(file)),
	);
	this.registerEvent(
		this.app.vault.on("rename", (file, oldPath) =>
			this.engine.onRename(file, oldPath),
		),
	);
	this.registerEvent(
		this.app.metadataCache.on("changed", (file, data, cache) =>
			this.engine.onMetadataChanged(file, data, cache),
		),
	);

	this.addCommand({
		id: "resync-vault",
		name: "Re-sync entire vault",
		callback: async () => {
			try {
				await this.engine.maybeReconcile(true);
				new Notice("Surfsense: re-sync started.");
			} catch (err) {
				new Notice(`Surfsense: re-sync failed — ${(err as Error).message}`);
			}
		},
	});

	this.addCommand({
		id: "sync-current-note",
		name: "Sync current note",
		checkCallback: (checking) => {
			const file = this.app.workspace.getActiveFile();
			if (!file || file.extension.toLowerCase() !== "md") return false;
			if (checking) return true;
			this.queue.enqueueUpsert(file.path);
			void this.engine.flushQueue();
			return true;
		},
	});

	this.addCommand({
		id: "open-settings",
		name: "Open settings",
		callback: () => {
			// Obsidian exposes this through the Setting host on the workspace;
			// fall back silently if the API moves so we never throw.
			type SettingHost = {
				open?: () => void;
				openTabById?: (id: string) => void;
			};
			const setting = (this.app as unknown as { setting?: SettingHost }).setting;
			if (setting?.open) setting.open();
			if (setting?.openTabById) setting.openTabById(this.manifest.id);
		},
	});

	// Kick off the start sequence after Obsidian finishes its own
	// startup work, so the metadataCache is warm before reconcile.
	this.app.workspace.onLayoutReady(() => {
		void this.engine.start();
	});
}
|
||||
|
||||
/**
 * Teardown: clear the queue's pending debounce timer and signal any
 * in-flight drain loop to stop. Registered events and UI handles are
 * auto-cleaned by the Plugin base class (see the class doc above).
 * `?.` guards against unload before onload finished constructing the queue.
 */
onunload() {
	this.queue?.cancelFlush();
	this.queue?.requestStop();
}
|
||||
|
||||
/** Number of pending queue items (0 before the queue is constructed). */
get queueDepth(): number {
	return this.queue?.size ?? 0;
}
|
||||
|
||||
/**
 * Load persisted settings and merge them over DEFAULT_SETTINGS.
 *
 * The leftover pre-refactor line (`Object.assign(... MyPluginSettings)`)
 * is removed: it referenced a type this file no longer imports and issued
 * a redundant second `loadData()` call.
 *
 * Queue/tombstone/exclude values from disk are copied (not aliased) so
 * later in-place mutation of `this.settings` never mutates
 * DEFAULT_SETTINGS or the raw loadData result. Note an empty
 * excludePatterns array falls back to the defaults rather than meaning
 * "exclude nothing".
 */
async loadSettings() {
	const data = (await this.loadData()) as Partial<SurfsensePluginSettings> | null;
	this.settings = {
		...DEFAULT_SETTINGS,
		...(data ?? {}),
		queue: (data?.queue ?? []).map((i: QueueItem) => ({ ...i })),
		tombstones: { ...(data?.tombstones ?? {}) },
		excludePatterns: data?.excludePatterns?.length
			? [...data.excludePatterns]
			: [...DEFAULT_SETTINGS.excludePatterns],
	};
}
|
||||
|
||||
/** Persist the full settings object (including queue and tombstones) to disk. */
async saveSettings() {
	await this.saveData(this.settings);
}
|
||||
}
|
||||
|
||||
class SampleModal extends Modal {
|
||||
constructor(app: App) {
|
||||
super(app);
|
||||
}
|
||||
|
||||
onOpen() {
|
||||
let {contentEl} = this;
|
||||
contentEl.setText('Woah!');
|
||||
}
|
||||
|
||||
onClose() {
|
||||
const {contentEl} = this;
|
||||
contentEl.empty();
|
||||
private seedIdentity(): void {
|
||||
if (!this.settings.vaultId) {
|
||||
this.settings.vaultId = generateUuid();
|
||||
}
|
||||
if (!this.settings.deviceId) {
|
||||
this.settings.deviceId = generateUuid();
|
||||
}
|
||||
if (!this.settings.vaultName) {
|
||||
this.settings.vaultName = this.app.vault.getName();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function generateUuid(): string {
|
||||
const c = globalThis.crypto;
|
||||
if (c?.randomUUID) return c.randomUUID();
|
||||
const buf = new Uint8Array(16);
|
||||
c.getRandomValues(buf);
|
||||
buf[6] = ((buf[6] ?? 0) & 0x0f) | 0x40;
|
||||
buf[8] = ((buf[8] ?? 0) & 0x3f) | 0x80;
|
||||
const hex = Array.from(buf, (b) => b.toString(16).padStart(2, "0")).join("");
|
||||
return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(
|
||||
16,
|
||||
20,
|
||||
)}-${hex.slice(20)}`;
|
||||
}
|
||||
|
|
|
|||
162
surfsense_obsidian/src/payload.ts
Normal file
162
surfsense_obsidian/src/payload.ts
Normal file
|
|
@ -0,0 +1,162 @@
|
|||
import {
|
||||
type App,
|
||||
type CachedMetadata,
|
||||
type FrontMatterCache,
|
||||
type HeadingCache,
|
||||
type ReferenceCache,
|
||||
type TFile,
|
||||
} from "obsidian";
|
||||
import type { HeadingRef, NotePayload } from "./types";
|
||||
|
||||
/**
|
||||
* Build a NotePayload from an Obsidian TFile.
|
||||
*
|
||||
* Mobile-safety contract:
|
||||
* - No top-level `node:fs` / `node:path` / `node:crypto` imports.
|
||||
* File IO uses `vault.cachedRead` (works on the mobile WASM adapter).
|
||||
* Hashing uses Web Crypto `subtle.digest`.
|
||||
* - Caller MUST first wait for `metadataCache.changed` before calling
|
||||
* this for a `.md` file, otherwise `frontmatter`/`tags`/`headings`
|
||||
* can lag the actual file contents.
|
||||
*/
|
||||
export async function buildNotePayload(
|
||||
app: App,
|
||||
file: TFile,
|
||||
vaultId: string,
|
||||
): Promise<NotePayload> {
|
||||
const content = await app.vault.cachedRead(file);
|
||||
const cache: CachedMetadata | null = app.metadataCache.getFileCache(file);
|
||||
|
||||
const frontmatter = normalizeFrontmatter(cache?.frontmatter);
|
||||
const tags = collectTags(cache);
|
||||
const headings = collectHeadings(cache?.headings ?? []);
|
||||
const aliases = collectAliases(frontmatter);
|
||||
const { embeds, internalLinks } = collectLinks(cache);
|
||||
const { resolved, unresolved } = resolveLinkTargets(
|
||||
app,
|
||||
file.path,
|
||||
internalLinks,
|
||||
);
|
||||
const contentHash = await computeContentHash(content);
|
||||
|
||||
return {
|
||||
vault_id: vaultId,
|
||||
path: file.path,
|
||||
name: file.basename,
|
||||
extension: file.extension,
|
||||
content,
|
||||
frontmatter,
|
||||
tags,
|
||||
headings,
|
||||
resolved_links: resolved,
|
||||
unresolved_links: unresolved,
|
||||
embeds,
|
||||
aliases,
|
||||
content_hash: contentHash,
|
||||
mtime: file.stat.mtime,
|
||||
ctime: file.stat.ctime,
|
||||
};
|
||||
}
|
||||
|
||||
export async function computeContentHash(content: string): Promise<string> {
|
||||
const bytes = new TextEncoder().encode(content);
|
||||
const digest = await crypto.subtle.digest("SHA-256", bytes);
|
||||
return bufferToHex(digest);
|
||||
}
|
||||
|
||||
function bufferToHex(buf: ArrayBuffer): string {
|
||||
const view = new Uint8Array(buf);
|
||||
let hex = "";
|
||||
for (let i = 0; i < view.length; i++) {
|
||||
hex += (view[i] ?? 0).toString(16).padStart(2, "0");
|
||||
}
|
||||
return hex;
|
||||
}
|
||||
|
||||
function normalizeFrontmatter(
|
||||
fm: FrontMatterCache | undefined,
|
||||
): Record<string, unknown> {
|
||||
if (!fm) return {};
|
||||
// FrontMatterCache extends a plain object; strip the `position` key
|
||||
// the cache adds so the wire payload stays clean.
|
||||
const rest: Record<string, unknown> = { ...(fm as Record<string, unknown>) };
|
||||
delete rest.position;
|
||||
return rest;
|
||||
}
|
||||
|
||||
function collectTags(cache: CachedMetadata | null): string[] {
|
||||
const out = new Set<string>();
|
||||
for (const t of cache?.tags ?? []) {
|
||||
const tag = t.tag.startsWith("#") ? t.tag.slice(1) : t.tag;
|
||||
if (tag) out.add(tag);
|
||||
}
|
||||
const fmTags: unknown =
|
||||
cache?.frontmatter?.tags ?? cache?.frontmatter?.tag;
|
||||
if (Array.isArray(fmTags)) {
|
||||
for (const t of fmTags) {
|
||||
if (typeof t === "string" && t) out.add(t.replace(/^#/, ""));
|
||||
}
|
||||
} else if (typeof fmTags === "string" && fmTags) {
|
||||
for (const t of fmTags.split(/[\s,]+/)) {
|
||||
if (t) out.add(t.replace(/^#/, ""));
|
||||
}
|
||||
}
|
||||
return [...out];
|
||||
}
|
||||
|
||||
function collectHeadings(items: HeadingCache[]): HeadingRef[] {
|
||||
return items.map((h) => ({ heading: h.heading, level: h.level }));
|
||||
}
|
||||
|
||||
function collectAliases(frontmatter: Record<string, unknown>): string[] {
|
||||
const raw = frontmatter.aliases ?? frontmatter.alias;
|
||||
if (Array.isArray(raw)) {
|
||||
return raw.filter((x): x is string => typeof x === "string" && x.length > 0);
|
||||
}
|
||||
if (typeof raw === "string" && raw) return [raw];
|
||||
return [];
|
||||
}
|
||||
|
||||
function collectLinks(cache: CachedMetadata | null): {
|
||||
embeds: string[];
|
||||
internalLinks: ReferenceCache[];
|
||||
} {
|
||||
const linkRefs: ReferenceCache[] = [
|
||||
...((cache?.links) ?? []),
|
||||
...((cache?.embeds as ReferenceCache[] | undefined) ?? []),
|
||||
];
|
||||
const embeds = ((cache?.embeds as ReferenceCache[] | undefined) ?? []).map(
|
||||
(e) => e.link,
|
||||
);
|
||||
return { embeds, internalLinks: linkRefs };
|
||||
}
|
||||
|
||||
function resolveLinkTargets(
|
||||
app: App,
|
||||
sourcePath: string,
|
||||
links: ReferenceCache[],
|
||||
): { resolved: string[]; unresolved: string[] } {
|
||||
const resolved = new Set<string>();
|
||||
const unresolved = new Set<string>();
|
||||
for (const link of links) {
|
||||
const target = app.metadataCache.getFirstLinkpathDest(
|
||||
stripSubpath(link.link),
|
||||
sourcePath,
|
||||
);
|
||||
if (target) {
|
||||
resolved.add(target.path);
|
||||
} else {
|
||||
unresolved.add(link.link);
|
||||
}
|
||||
}
|
||||
return { resolved: [...resolved], unresolved: [...unresolved] };
|
||||
}
|
||||
|
||||
function stripSubpath(link: string): string {
|
||||
const hashIdx = link.indexOf("#");
|
||||
const pipeIdx = link.indexOf("|");
|
||||
let end = link.length;
|
||||
if (hashIdx !== -1) end = Math.min(end, hashIdx);
|
||||
if (pipeIdx !== -1) end = Math.min(end, pipeIdx);
|
||||
return link.slice(0, end);
|
||||
}
|
||||
237
surfsense_obsidian/src/queue.ts
Normal file
237
surfsense_obsidian/src/queue.ts
Normal file
|
|
@ -0,0 +1,237 @@
|
|||
import type { QueueItem } from "./types";
|
||||
|
||||
/**
|
||||
* Persistent upload queue.
|
||||
*
|
||||
* Mobile-safety contract:
|
||||
* - Persistence is delegated to a save callback (which the plugin wires
|
||||
* to `plugin.saveData()`); never `node:fs`. Items also live in the
|
||||
* plugin's settings JSON so a crash mid-flight loses nothing.
|
||||
* - No top-level `node:*` imports.
|
||||
*
|
||||
* Behavioural contract:
|
||||
* - Per-file debounce: enqueueing the same path coalesces, the latest
|
||||
* `enqueuedAt` wins so we don't ship a stale snapshot.
|
||||
* - `delete` for a path drops any pending `upsert` for that path
|
||||
* (otherwise we'd resurrect a note the user just deleted).
|
||||
* - `rename` is a first-class op so the backend can update
|
||||
* `unique_identifier_hash` instead of "delete + create" (which would
|
||||
* blow away document versions, citations, and the document_id used
|
||||
* in chat history).
|
||||
* - Drain takes a worker, returns once the worker either succeeds for
|
||||
* every batch or hits a stop signal (transient error, mid-drain
|
||||
* stop request).
|
||||
*/
|
||||
|
||||
/** Uploader invoked by `drain()`; processes one batch of queue items at a time. */
export interface QueueWorker {
	/** Upload one batch and classify every item into a BatchResult bucket. */
	processBatch(batch: QueueItem[]): Promise<BatchResult>;
}
|
||||
|
||||
/**
 * Outcome of one `QueueWorker.processBatch` call. Every input item should
 * land in exactly one of `acked`/`retry`/`dropped`; items the worker does
 * not account for are treated as retries by the drain loop so work is
 * never silently lost.
 */
export interface BatchResult {
	/** Items that succeeded; they will be ack'd off the queue. */
	acked: QueueItem[];
	/** Items that should be retried; their `attempt` is bumped. */
	retry: QueueItem[];
	/** Items that failed permanently (4xx). They get dropped. */
	dropped: QueueItem[];
	/** If true, the drain loop stops (e.g. transient/network error). */
	stop: boolean;
	/** Optional retry-after for transient errors (ms). */
	backoffMs?: number;
}
|
||||
|
||||
/** Construction options for {@link PersistentQueue}; see DEFAULTS for fallbacks. */
export interface PersistentQueueOptions {
	/** Quiet period before the flush handler fires (default 2000 ms). */
	debounceMs?: number;
	/** Batch size tuning knob (default 15). */
	batchSize?: number;
	/** Attempt ceiling; a retried item whose bumped attempt exceeds this is dropped (default 8). */
	maxAttempts?: number;
	/** Persists the current queue snapshot (the plugin wires this to `saveData()`). */
	persist: (items: QueueItem[]) => Promise<void> | void;
	/** Clock override for tests; defaults to `Date.now`. */
	now?: () => number;
}
|
||||
|
||||
// Fallbacks applied in the constructor when the corresponding option is omitted.
const DEFAULTS = {
	debounceMs: 2000, // ms of enqueue quiet before the flush handler fires
	batchSize: 15, // items handed to the worker per batch
	maxAttempts: 8, // retry ceiling before an item is dropped
};
|
||||
|
||||
export class PersistentQueue {
|
||||
private items: QueueItem[];
|
||||
private readonly opts: Required<
|
||||
Omit<PersistentQueueOptions, "persist" | "now">
|
||||
> & {
|
||||
persist: PersistentQueueOptions["persist"];
|
||||
now: () => number;
|
||||
};
|
||||
private draining = false;
|
||||
private stopRequested = false;
|
||||
private flushTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
private onFlush: (() => void) | null = null;
|
||||
|
||||
/**
 * @param initial Items restored from persisted settings; the array is
 *   shallow-copied so later pushes never mutate the caller's array.
 * @param opts Persistence hook plus optional tuning/clock overrides
 *   (missing values fall back to DEFAULTS).
 */
constructor(initial: QueueItem[], opts: PersistentQueueOptions) {
	this.items = [...initial];
	this.opts = {
		debounceMs: opts.debounceMs ?? DEFAULTS.debounceMs,
		batchSize: opts.batchSize ?? DEFAULTS.batchSize,
		maxAttempts: opts.maxAttempts ?? DEFAULTS.maxAttempts,
		persist: opts.persist,
		now: opts.now ?? (() => Date.now()),
	};
}
|
||||
|
||||
/** Number of pending items. */
get size(): number {
	return this.items.length;
}
|
||||
|
||||
/** Shallow-copied view of the pending items, safe for the caller to mutate. */
snapshot(): QueueItem[] {
	const copy: QueueItem[] = [];
	for (const item of this.items) copy.push({ ...item });
	return copy;
}
|
||||
|
||||
/** Register the callback fired after `debounceMs` of enqueue quiet. */
setFlushHandler(handler: () => void): void {
	this.onFlush = handler;
}
|
||||
|
||||
enqueueUpsert(path: string): void {
|
||||
const now = this.opts.now();
|
||||
this.items = this.items.filter(
|
||||
(i) => !(i.op === "upsert" && i.path === path),
|
||||
);
|
||||
this.items.push({ op: "upsert", path, enqueuedAt: now, attempt: 0 });
|
||||
void this.persist();
|
||||
this.scheduleFlush();
|
||||
}
|
||||
|
||||
enqueueDelete(path: string): void {
|
||||
const now = this.opts.now();
|
||||
// A delete supersedes any pending upsert for the same path.
|
||||
this.items = this.items.filter(
|
||||
(i) =>
|
||||
!(
|
||||
(i.op === "upsert" && i.path === path) ||
|
||||
(i.op === "delete" && i.path === path)
|
||||
),
|
||||
);
|
||||
this.items.push({ op: "delete", path, enqueuedAt: now, attempt: 0 });
|
||||
void this.persist();
|
||||
this.scheduleFlush();
|
||||
}
|
||||
|
||||
enqueueRename(oldPath: string, newPath: string): void {
|
||||
const now = this.opts.now();
|
||||
this.items = this.items.filter(
|
||||
(i) =>
|
||||
!(
|
||||
(i.op === "upsert" && (i.path === oldPath || i.path === newPath)) ||
|
||||
(i.op === "rename" && i.oldPath === oldPath && i.newPath === newPath)
|
||||
),
|
||||
);
|
||||
this.items.push({
|
||||
op: "rename",
|
||||
oldPath,
|
||||
newPath,
|
||||
enqueuedAt: now,
|
||||
attempt: 0,
|
||||
});
|
||||
// Also enqueue an upsert of the new path so its content/metadata
|
||||
// reflects whatever the editor flushed alongside the rename.
|
||||
this.items.push({ op: "upsert", path: newPath, enqueuedAt: now, attempt: 0 });
|
||||
void this.persist();
|
||||
this.scheduleFlush();
|
||||
}
|
||||
|
||||
/** Ask an in-flight drain() to stop after the current batch completes. */
requestStop(): void {
	this.stopRequested = true;
}
|
||||
|
||||
cancelFlush(): void {
|
||||
if (this.flushTimer !== null) {
|
||||
clearTimeout(this.flushTimer);
|
||||
this.flushTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
private scheduleFlush(): void {
|
||||
if (!this.onFlush) return;
|
||||
if (this.flushTimer !== null) clearTimeout(this.flushTimer);
|
||||
this.flushTimer = setTimeout(() => {
|
||||
this.flushTimer = null;
|
||||
this.onFlush?.();
|
||||
}, this.opts.debounceMs);
|
||||
}
|
||||
|
||||
/**
 * Drain the queue through `worker`, batch by batch, until empty or a
 * stop is requested (by the worker or requestStop()).
 *
 * Accounting rules:
 * - Items the worker acks or drops are counted and removed.
 * - Items in `retry`, plus any item the worker did NOT account for,
 *   get attempt+1 and go back to the FRONT of the queue (preserving
 *   order) unless they exceed maxAttempts, in which case they count
 *   as dropped.
 * - Re-entrant calls return an empty summary immediately.
 */
async drain(worker: QueueWorker): Promise<DrainSummary> {
	// Re-entrancy guard: only one drain at a time.
	if (this.draining) return { batches: 0, acked: 0, dropped: 0, stopped: false };
	this.draining = true;
	this.stopRequested = false;
	const summary: DrainSummary = {
		batches: 0,
		acked: 0,
		dropped: 0,
		stopped: false,
	};
	try {
		while (this.items.length > 0 && !this.stopRequested) {
			const batch = this.takeBatch();
			summary.batches += 1;

			const result = await worker.processBatch(batch);
			summary.acked += result.acked.length;
			summary.dropped += result.dropped.length;

			const ackKeys = new Set(result.acked.map(itemKey));
			const dropKeys = new Set(result.dropped.map(itemKey));
			const retryKeys = new Set(result.retry.map(itemKey));

			// Keep any item we didn't explicitly account for in `retry`
			// so a partial-batch drop never silently loses work.
			const unhandled = batch.filter(
				(b) =>
					!ackKeys.has(itemKey(b)) &&
					!dropKeys.has(itemKey(b)) &&
					!retryKeys.has(itemKey(b)),
			);
			const retry = [...result.retry, ...unhandled].map((i) => ({
				...i,
				attempt: i.attempt + 1,
			}));
			// Items past the attempt budget are abandoned and counted dropped.
			const survivors = retry.filter((i) => i.attempt <= this.opts.maxAttempts);
			summary.dropped += retry.length - survivors.length;

			// Survivors return to the FRONT so relative ordering holds.
			this.items = [...survivors, ...this.items];
			await this.persist();

			if (result.stop) {
				summary.stopped = true;
				if (result.backoffMs) summary.backoffMs = result.backoffMs;
				break;
			}
		}
		if (this.stopRequested) summary.stopped = true;
		return summary;
	} finally {
		this.draining = false;
	}
}
|
||||
|
||||
/** Remove and return up to batchSize items from the head of the queue. */
private takeBatch(): QueueItem[] {
	const { batchSize } = this.opts;
	const head = this.items.slice(0, batchSize);
	this.items = this.items.slice(batchSize);
	return head;
}
|
||||
|
||||
/** Hand a defensive snapshot to the injected persistence callback. */
private async persist(): Promise<void> {
	await this.opts.persist(this.snapshot());
}
|
||||
}
|
||||
|
||||
/** Aggregate result of one drain() run. */
export interface DrainSummary {
	// Number of batches handed to the worker.
	batches: number;
	// Items the worker confirmed as delivered.
	acked: number;
	// Items abandoned: worker-dropped plus items past maxAttempts.
	dropped: number;
	// True if the drain ended early (worker stop or requestStop()).
	stopped: boolean;
	// Worker-suggested wait before the next drain, when stopping.
	backoffMs?: number;
}
|
||||
|
||||
export function itemKey(i: QueueItem): string {
|
||||
if (i.op === "rename") return `rename:${i.oldPath}=>${i.newPath}`;
|
||||
return `${i.op}:${i.path}`;
|
||||
}
|
||||
|
|
@ -1,36 +1,322 @@
|
|||
import {App, PluginSettingTab, Setting} from "obsidian";
|
||||
import MyPlugin from "./main";
|
||||
import {
|
||||
type App,
|
||||
Notice,
|
||||
PluginSettingTab,
|
||||
Setting,
|
||||
} from "obsidian";
|
||||
import { AuthError } from "./api-client";
|
||||
import { parseExcludePatterns } from "./excludes";
|
||||
import type SurfSensePlugin from "./main";
|
||||
import type { SearchSpace } from "./types";
|
||||
|
||||
export interface MyPluginSettings {
|
||||
mySetting: string;
|
||||
}
|
||||
/**
|
||||
* Plugin settings tab.
|
||||
*
|
||||
* Replaces the obsidian-sample-plugin SampleSettingTab stub. Same module
|
||||
* path so existing imports from main.ts keep resolving.
|
||||
*
|
||||
* Surface mirrors the per-plan list:
|
||||
* server URL · api token · search space · vault name · sync mode ·
|
||||
* exclude patterns · include attachments · status panel.
|
||||
*
|
||||
* Vault id, device id, and device label are auto-generated UUIDs the
|
||||
* first time settings load — they're displayed (read-only) so users can
|
||||
* audit them, but never editable. Vault id is decoupled from the OS
|
||||
* folder name so renaming the vault doesn't invalidate the connector
|
||||
* (edge case #5 from the plan).
|
||||
*/
|
||||
|
||||
export const DEFAULT_SETTINGS: MyPluginSettings = {
|
||||
mySetting: 'default'
|
||||
}
|
||||
export class SurfSenseSettingTab extends PluginSettingTab {
|
||||
private readonly plugin: SurfSensePlugin;
|
||||
private searchSpaces: SearchSpace[] = [];
|
||||
private loadingSpaces = false;
|
||||
private statusEl: HTMLElement | null = null;
|
||||
|
||||
export class SampleSettingTab extends PluginSettingTab {
|
||||
plugin: MyPlugin;
|
||||
|
||||
constructor(app: App, plugin: MyPlugin) {
|
||||
constructor(app: App, plugin: SurfSensePlugin) {
|
||||
super(app, plugin);
|
||||
this.plugin = plugin;
|
||||
}
|
||||
|
||||
display(): void {
|
||||
const {containerEl} = this;
|
||||
|
||||
const { containerEl } = this;
|
||||
containerEl.empty();
|
||||
containerEl.addClass("surfsense-settings");
|
||||
|
||||
const settings = this.plugin.settings;
|
||||
|
||||
new Setting(containerEl).setName("Connection").setHeading();
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName('Settings #1')
|
||||
.setDesc('It\'s a secret')
|
||||
.addText(text => text
|
||||
.setPlaceholder('Enter your secret')
|
||||
.setValue(this.plugin.settings.mySetting)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.mySetting = value;
|
||||
.setName("Server URL")
|
||||
.setDesc(
|
||||
"https://api.surfsense.com for SurfSense Cloud, or your self-hosted URL.",
|
||||
)
|
||||
.addText((text) =>
|
||||
text
|
||||
.setPlaceholder("https://api.surfsense.com")
|
||||
.setValue(settings.serverUrl)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.serverUrl = value.trim();
|
||||
await this.plugin.saveSettings();
|
||||
}),
|
||||
);
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("API token")
|
||||
.setDesc(
|
||||
"Paste your Surfsense API token (expires after 24 hours; re-paste when you see an auth error).",
|
||||
)
|
||||
.addText((text) => {
|
||||
text.inputEl.type = "password";
|
||||
text.inputEl.autocomplete = "off";
|
||||
text.inputEl.spellcheck = false;
|
||||
text
|
||||
.setPlaceholder("Paste token")
|
||||
.setValue(settings.apiToken)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.apiToken = value.trim();
|
||||
await this.plugin.saveSettings();
|
||||
});
|
||||
})
|
||||
.addButton((btn) =>
|
||||
btn
|
||||
.setButtonText("Verify")
|
||||
.setCta()
|
||||
.onClick(async () => {
|
||||
btn.setDisabled(true);
|
||||
try {
|
||||
await this.plugin.api.verifyToken();
|
||||
new Notice("Surfsense: token verified.");
|
||||
await this.refreshSearchSpaces();
|
||||
this.display();
|
||||
} catch (err) {
|
||||
this.handleApiError(err);
|
||||
} finally {
|
||||
btn.setDisabled(false);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("Search space")
|
||||
.setDesc(
|
||||
"Which Surfsense search space this vault syncs into. Reload after changing your token.",
|
||||
)
|
||||
.addDropdown((drop) => {
|
||||
drop.addOption("", this.loadingSpaces ? "Loading…" : "Select a search space");
|
||||
for (const space of this.searchSpaces) {
|
||||
drop.addOption(String(space.id), space.name);
|
||||
}
|
||||
if (settings.searchSpaceId !== null) {
|
||||
drop.setValue(String(settings.searchSpaceId));
|
||||
}
|
||||
drop.onChange(async (value) => {
|
||||
this.plugin.settings.searchSpaceId = value ? Number(value) : null;
|
||||
this.plugin.settings.connectorId = null;
|
||||
await this.plugin.saveSettings();
|
||||
}));
|
||||
if (this.plugin.settings.searchSpaceId !== null) {
|
||||
try {
|
||||
await this.plugin.engine.ensureConnected();
|
||||
new Notice("Surfsense: vault connected.");
|
||||
} catch (err) {
|
||||
this.handleApiError(err);
|
||||
}
|
||||
}
|
||||
this.renderStatus();
|
||||
});
|
||||
})
|
||||
.addExtraButton((btn) =>
|
||||
btn
|
||||
.setIcon("refresh-ccw")
|
||||
.setTooltip("Reload search spaces")
|
||||
.onClick(async () => {
|
||||
await this.refreshSearchSpaces();
|
||||
this.display();
|
||||
}),
|
||||
);
|
||||
|
||||
new Setting(containerEl).setName("Vault").setHeading();
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("Vault name")
|
||||
.setDesc(
|
||||
"Friendly name for this vault. Defaults to your Obsidian vault folder name.",
|
||||
)
|
||||
.addText((text) =>
|
||||
text
|
||||
.setValue(settings.vaultName)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.vaultName = value.trim() || this.app.vault.getName();
|
||||
await this.plugin.saveSettings();
|
||||
}),
|
||||
);
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("Device label")
|
||||
.setDesc(
|
||||
"Optional human-readable label shown next to the device ID in the Surfsense web app.",
|
||||
)
|
||||
.addText((text) =>
|
||||
text
|
||||
.setPlaceholder("My laptop")
|
||||
.setValue(settings.deviceLabel)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.deviceLabel = value.trim();
|
||||
await this.plugin.saveSettings();
|
||||
}),
|
||||
);
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("Sync mode")
|
||||
.setDesc("Auto syncs on every edit. Manual only syncs when you trigger it via the command palette.")
|
||||
.addDropdown((drop) =>
|
||||
drop
|
||||
.addOption("auto", "Auto")
|
||||
.addOption("manual", "Manual")
|
||||
.setValue(settings.syncMode)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.syncMode = value === "manual" ? "manual" : "auto";
|
||||
await this.plugin.saveSettings();
|
||||
}),
|
||||
);
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("Exclude patterns")
|
||||
.setDesc(
|
||||
"One pattern per line. Supports * and **. Lines starting with # are comments. Files matching any pattern are skipped.",
|
||||
)
|
||||
.addTextArea((area) => {
|
||||
area.inputEl.rows = 4;
|
||||
area
|
||||
.setPlaceholder(".trash\n_attachments\ntemplates/**")
|
||||
.setValue(settings.excludePatterns.join("\n"))
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.excludePatterns = parseExcludePatterns(value);
|
||||
await this.plugin.saveSettings();
|
||||
});
|
||||
});
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("Include attachments")
|
||||
.setDesc(
|
||||
"Sync non-Markdown files (images, PDFs, …). Off by default — Markdown only.",
|
||||
)
|
||||
.addToggle((toggle) =>
|
||||
toggle
|
||||
.setValue(settings.includeAttachments)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.includeAttachments = value;
|
||||
await this.plugin.saveSettings();
|
||||
}),
|
||||
);
|
||||
|
||||
new Setting(containerEl).setName("Identity").setHeading();
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("Vault ID")
|
||||
.setDesc("Stable identifier for this vault. Used by the backend to keep separate vaults distinct even if their folder names change.")
|
||||
.addText((text) => {
|
||||
text.inputEl.disabled = true;
|
||||
text.setValue(settings.vaultId);
|
||||
});
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName("Device ID")
|
||||
.setDesc("Stable identifier for this install. Used by the backend so you can revoke a single device without disconnecting the others.")
|
||||
.addText((text) => {
|
||||
text.inputEl.disabled = true;
|
||||
text.setValue(settings.deviceId);
|
||||
});
|
||||
|
||||
new Setting(containerEl).setName("Status").setHeading();
|
||||
this.statusEl = containerEl.createDiv({ cls: "surfsense-settings__status" });
|
||||
this.renderStatus();
|
||||
|
||||
new Setting(containerEl)
|
||||
.addButton((btn) =>
|
||||
btn
|
||||
.setButtonText("Re-sync entire vault")
|
||||
.onClick(async () => {
|
||||
btn.setDisabled(true);
|
||||
try {
|
||||
await this.plugin.engine.maybeReconcile(true);
|
||||
new Notice("Surfsense: re-sync requested.");
|
||||
} catch (err) {
|
||||
this.handleApiError(err);
|
||||
} finally {
|
||||
btn.setDisabled(false);
|
||||
this.renderStatus();
|
||||
}
|
||||
}),
|
||||
)
|
||||
.addButton((btn) =>
|
||||
btn.setButtonText("Open releases").onClick(() => {
|
||||
window.open(
|
||||
"https://github.com/MODSetter/SurfSense/releases?q=obsidian",
|
||||
"_blank",
|
||||
);
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
/** Drop the cached status element so renderStatus() no-ops once the tab closes. */
hide(): void {
	this.statusEl = null;
}
|
||||
|
||||
private async refreshSearchSpaces(): Promise<void> {
|
||||
this.loadingSpaces = true;
|
||||
try {
|
||||
this.searchSpaces = await this.plugin.api.listSearchSpaces();
|
||||
} catch (err) {
|
||||
this.handleApiError(err);
|
||||
this.searchSpaces = [];
|
||||
} finally {
|
||||
this.loadingSpaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
renderStatus(): void {
|
||||
if (!this.statusEl) return;
|
||||
const s = this.plugin.settings;
|
||||
this.statusEl.empty();
|
||||
|
||||
const rows: { label: string; value: string }[] = [
|
||||
{ label: "Status", value: this.plugin.lastStatus.kind },
|
||||
{
|
||||
label: "Last sync",
|
||||
value: s.lastSyncAt ? new Date(s.lastSyncAt).toLocaleString() : "—",
|
||||
},
|
||||
{
|
||||
label: "Last reconcile",
|
||||
value: s.lastReconcileAt ? new Date(s.lastReconcileAt).toLocaleString() : "—",
|
||||
},
|
||||
{ label: "Files synced", value: String(s.filesSynced ?? 0) },
|
||||
{ label: "Queue depth", value: String(this.plugin.queueDepth) },
|
||||
{
|
||||
label: "API version",
|
||||
value: this.plugin.serverApiVersion ?? "(not yet handshaken)",
|
||||
},
|
||||
{
|
||||
label: "Capabilities",
|
||||
value: this.plugin.serverCapabilities.length
|
||||
? this.plugin.serverCapabilities.join(", ")
|
||||
: "(not yet handshaken)",
|
||||
},
|
||||
];
|
||||
for (const row of rows) {
|
||||
const wrap = this.statusEl.createDiv({ cls: "surfsense-settings__status-row" });
|
||||
wrap.createSpan({ cls: "surfsense-settings__status-label", text: row.label });
|
||||
wrap.createSpan({ cls: "surfsense-settings__status-value", text: row.value });
|
||||
}
|
||||
}
|
||||
|
||||
private handleApiError(err: unknown): void {
|
||||
if (err instanceof AuthError) {
|
||||
new Notice(`SurfSense: ${err.message}`);
|
||||
return;
|
||||
}
|
||||
new Notice(
|
||||
`SurfSense: request failed — ${(err as Error).message ?? "unknown error"}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
61
surfsense_obsidian/src/status-bar.ts
Normal file
61
surfsense_obsidian/src/status-bar.ts
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
import { setIcon } from "obsidian";
|
||||
import type { StatusKind, StatusState } from "./types";
|
||||
|
||||
/**
|
||||
* Tiny status-bar adornment.
|
||||
*
|
||||
* Plain DOM (no HTML strings, no CSS-in-JS) so it stays cheap on mobile
|
||||
* and Obsidian's lint doesn't complain about innerHTML.
|
||||
*/
|
||||
|
||||
/** Visual treatment for one status kind. */
interface StatusVisual {
	// Icon id passed to Obsidian's setIcon().
	icon: string;
	// Short human-readable label shown in the status bar.
	label: string;
	// CSS modifier class applied to the host element.
	cls: string;
}
|
||||
|
||||
// One visual per StatusKind; Record<StatusKind, …> keeps the table
// exhaustive so update() can index it without a missing-key check.
const VISUALS: Record<StatusKind, StatusVisual> = {
	idle: { icon: "check-circle", label: "Synced", cls: "surfsense-status--ok" },
	syncing: { icon: "refresh-ccw", label: "Syncing", cls: "surfsense-status--syncing" },
	queued: { icon: "upload", label: "Queued", cls: "surfsense-status--syncing" },
	offline: { icon: "wifi-off", label: "Offline", cls: "surfsense-status--warn" },
	"auth-error": { icon: "lock", label: "Auth error", cls: "surfsense-status--err" },
	error: { icon: "alert-circle", label: "Error", cls: "surfsense-status--err" },
};
|
||||
|
||||
export class StatusBar {
|
||||
private readonly el: HTMLElement;
|
||||
private readonly icon: HTMLElement;
|
||||
private readonly text: HTMLElement;
|
||||
|
||||
constructor(host: HTMLElement) {
|
||||
this.el = host;
|
||||
this.el.addClass("surfsense-status");
|
||||
this.icon = this.el.createSpan({ cls: "surfsense-status__icon" });
|
||||
this.text = this.el.createSpan({ cls: "surfsense-status__text" });
|
||||
this.update({ kind: "idle", queueDepth: 0 });
|
||||
}
|
||||
|
||||
update(state: StatusState): void {
|
||||
const visual = VISUALS[state.kind];
|
||||
this.el.removeClass(
|
||||
"surfsense-status--ok",
|
||||
"surfsense-status--syncing",
|
||||
"surfsense-status--warn",
|
||||
"surfsense-status--err",
|
||||
);
|
||||
this.el.addClass(visual.cls);
|
||||
setIcon(this.icon, visual.icon);
|
||||
|
||||
let label = `SurfSense: ${visual.label}`;
|
||||
if (state.queueDepth > 0 && state.kind !== "idle") {
|
||||
label += ` (${state.queueDepth})`;
|
||||
}
|
||||
this.text.setText(label);
|
||||
this.el.setAttr(
|
||||
"aria-label",
|
||||
state.detail ? `${label} — ${state.detail}` : label,
|
||||
);
|
||||
this.el.setAttr("title", state.detail ?? label);
|
||||
}
|
||||
}
|
||||
505
surfsense_obsidian/src/sync-engine.ts
Normal file
505
surfsense_obsidian/src/sync-engine.ts
Normal file
|
|
@ -0,0 +1,505 @@
|
|||
import { Notice, TFile, type App, type CachedMetadata, type TAbstractFile } from "obsidian";
|
||||
import {
|
||||
AuthError,
|
||||
PermanentError,
|
||||
type SurfSenseApiClient,
|
||||
TransientError,
|
||||
} from "./api-client";
|
||||
import { isExcluded } from "./excludes";
|
||||
import { buildNotePayload, computeContentHash } from "./payload";
|
||||
import { type BatchResult, PersistentQueue } from "./queue";
|
||||
import type {
|
||||
HealthResponse,
|
||||
NotePayload,
|
||||
QueueItem,
|
||||
StatusKind,
|
||||
StatusState,
|
||||
} from "./types";
|
||||
|
||||
/**
|
||||
* Owner of "what does the vault look like vs the server" reasoning.
|
||||
*
|
||||
* Onload sequence (per plan §p4_plugin_sync_engine, in this exact order):
|
||||
* 1. apiClient.health() — proves connectivity and pulls the capabilities
|
||||
* handshake before we issue any sync traffic.
|
||||
* 2. Cache health.capabilities + api_version on the plugin instance
|
||||
* so feature gating (e.g. "attachments_v2" before syncing binaries)
|
||||
* reads from local state instead of round-tripping.
|
||||
* 3. Drain queue — items persisted from the previous session land first.
|
||||
* 4. Reconcile — GET /manifest, diff against vault, queue uploads/deletes.
|
||||
* 5. Subscribe events — only after the above so the user's first edit
|
||||
* after launching Obsidian doesn't race with the manifest diff.
|
||||
*
|
||||
* Reconcile skips itself if last successful reconcile is < RECONCILE_MIN_INTERVAL_MS
|
||||
* ago. ConnectResponse already carries handshake fields so first connect
|
||||
* does not need a separate /health round-trip.
|
||||
*/
|
||||
|
||||
/** Everything the engine needs, injected so it stays testable. */
export interface SyncEngineDeps {
	app: App;
	apiClient: SurfSenseApiClient;
	queue: PersistentQueue;
	// Settings go through a getter so the engine always reads the
	// latest values after the settings tab mutates them.
	getSettings: () => SyncEngineSettings;
	// Mutate-then-save: the callback edits settings in place.
	saveSettings: (mut: (s: SyncEngineSettings) => void) => Promise<void>;
	// Pushes status changes to the status bar / settings panel.
	setStatus: (s: StatusState) => void;
	// Called after each handshake with the server's capability list.
	onCapabilities: (caps: string[], apiVersion: string) => void;
}
|
||||
|
||||
/** Slice of plugin settings the sync engine reads and writes. */
export interface SyncEngineSettings {
	// Stable identifier; decoupled from the vault folder name.
	vaultId: string;
	vaultName: string;
	// null until the first successful connect().
	connectorId: number | null;
	searchSpaceId: number | null;
	deviceId: string;
	deviceLabel: string;
	excludePatterns: string[];
	includeAttachments: boolean;
	syncMode: "auto" | "manual";
	// Epoch ms of the last successful reconcile, or null.
	lastReconcileAt: number | null;
	// Epoch ms of the last successful sync, or null.
	lastSyncAt: number | null;
	filesSynced: number;
	// path -> deletion epoch ms; stops reconcile from resurrecting
	// files whose delete is still queued.
	tombstones: Record<string, number>;
}
|
||||
|
||||
// Minimum gap between automatic reconciles; a forced re-sync bypasses it.
export const RECONCILE_MIN_INTERVAL_MS = 5 * 60 * 1000;
const TOMBSTONE_TTL_MS = 24 * 60 * 60 * 1000; // 1 day
// Debounce window for markdown edits before enqueueing an upsert.
const PENDING_DEBOUNCE_MS = 1500;
|
||||
|
||||
export class SyncEngine {
|
||||
private readonly deps: SyncEngineDeps;
|
||||
private capabilities: string[] = [];
|
||||
private apiVersion: string | null = null;
|
||||
private pendingMdEdits = new Map<string, ReturnType<typeof setTimeout>>();
|
||||
|
||||
// Dependencies are injected; no work happens until start() is called.
constructor(deps: SyncEngineDeps) {
	this.deps = deps;
}
|
||||
|
||||
/** Capabilities reported by the most recent handshake (empty before one). */
getCapabilities(): readonly string[] {
	return this.capabilities;
}
|
||||
|
||||
/** True if the server advertised `capability` in the last handshake. */
supports(capability: string): boolean {
	return this.capabilities.includes(capability);
}
|
||||
|
||||
/** Run the onload sequence described in this file's docstring. */
|
||||
async start(): Promise<void> {
	this.setStatus("syncing", "Connecting to SurfSense…");
	// Step 1+2: health handshake first; caches capabilities/api_version.
	try {
		const health = await this.deps.apiClient.health();
		this.applyHealth(health);
	} catch (err) {
		// Connectivity/auth failure: report and bail; no sync traffic yet.
		this.handleStartupError(err);
		return;
	}

	const settings = this.deps.getSettings();
	if (!settings.connectorId || !settings.searchSpaceId) {
		// No connector yet — settings tab will trigger ensureConnect once
		// the user picks a search space, then re-call start().
		this.setStatus("idle", "Pick a search space in settings to start syncing.");
		return;
	}

	// Step 3: deliver work persisted from the previous session first,
	// then step 4: reconcile against the server manifest.
	await this.flushQueue();
	await this.maybeReconcile();
	this.setStatus(this.queueStatusKind(), undefined);
}
|
||||
|
||||
/** Public entry point used after settings save to (re)connect the vault. */
|
||||
async ensureConnected(): Promise<void> {
	const settings = this.deps.getSettings();
	if (!settings.searchSpaceId) {
		// Nothing to connect to until the user picks a search space.
		this.setStatus("idle", "Pick a search space in settings.");
		return;
	}
	try {
		const resp = await this.deps.apiClient.connect({
			searchSpaceId: settings.searchSpaceId,
			vaultId: settings.vaultId,
			vaultName: settings.vaultName,
			deviceId: settings.deviceId,
			deviceLabel: settings.deviceLabel,
		});
		// ConnectResponse carries the handshake fields too, so this
		// doubles as a capabilities refresh (no extra /health call).
		this.applyHealth(resp);
		await this.deps.saveSettings((s) => {
			s.connectorId = resp.connector_id;
		});
	} catch (err) {
		this.handleStartupError(err);
	}
}
|
||||
|
||||
applyHealth(h: HealthResponse): void {
|
||||
this.capabilities = Array.isArray(h.capabilities) ? [...h.capabilities] : [];
|
||||
this.apiVersion = h.api_version ?? null;
|
||||
this.deps.onCapabilities(this.capabilities, this.apiVersion ?? "?");
|
||||
}
|
||||
|
||||
// ---- vault event handlers --------------------------------------------
|
||||
|
||||
onCreate(file: TAbstractFile): void {
|
||||
if (!this.shouldTrack(file)) return;
|
||||
const settings = this.deps.getSettings();
|
||||
if (this.isExcluded(file.path, settings)) return;
|
||||
if (this.isMarkdown(file)) {
|
||||
this.scheduleMdUpsert(file.path);
|
||||
return;
|
||||
}
|
||||
this.deps.queue.enqueueUpsert(file.path);
|
||||
}
|
||||
|
||||
onModify(file: TAbstractFile): void {
|
||||
if (!this.shouldTrack(file)) return;
|
||||
const settings = this.deps.getSettings();
|
||||
if (this.isExcluded(file.path, settings)) return;
|
||||
if (this.isMarkdown(file)) {
|
||||
// Defer to metadataCache.changed so payload fields are fresh.
|
||||
this.scheduleMdUpsert(file.path);
|
||||
return;
|
||||
}
|
||||
this.deps.queue.enqueueUpsert(file.path);
|
||||
}
|
||||
|
||||
onDelete(file: TAbstractFile): void {
|
||||
if (!this.shouldTrack(file)) return;
|
||||
this.deps.queue.enqueueDelete(file.path);
|
||||
void this.deps.saveSettings((s) => {
|
||||
s.tombstones[file.path] = Date.now();
|
||||
});
|
||||
}
|
||||
|
||||
/** Vault `rename` handler. */
onRename(file: TAbstractFile, oldPath: string): void {
	if (!this.shouldTrack(file)) return;
	const settings = this.deps.getSettings();
	if (this.isExcluded(file.path, settings)) {
		// Renamed INTO an excluded location: remotely this is a delete
		// of the old path; tombstone it so reconcile won't resurrect it.
		this.deps.queue.enqueueDelete(oldPath);
		void this.deps.saveSettings((s) => {
			s.tombstones[oldPath] = Date.now();
		});
		return;
	}
	this.deps.queue.enqueueRename(oldPath, file.path);
}
|
||||
|
||||
onMetadataChanged(file: TFile, _data: string, _cache: CachedMetadata): void {
|
||||
if (!this.shouldTrack(file)) return;
|
||||
const settings = this.deps.getSettings();
|
||||
if (this.isExcluded(file.path, settings)) return;
|
||||
if (!this.isMarkdown(file)) return;
|
||||
// Cancel any deferred upsert and enqueue with fresh metadata now.
|
||||
const pending = this.pendingMdEdits.get(file.path);
|
||||
if (pending) {
|
||||
clearTimeout(pending);
|
||||
this.pendingMdEdits.delete(file.path);
|
||||
}
|
||||
this.deps.queue.enqueueUpsert(file.path);
|
||||
}
|
||||
|
||||
private scheduleMdUpsert(path: string): void {
|
||||
const existing = this.pendingMdEdits.get(path);
|
||||
if (existing) clearTimeout(existing);
|
||||
this.pendingMdEdits.set(
|
||||
path,
|
||||
setTimeout(() => {
|
||||
this.pendingMdEdits.delete(path);
|
||||
this.deps.queue.enqueueUpsert(path);
|
||||
}, PENDING_DEBOUNCE_MS),
|
||||
);
|
||||
}
|
||||
|
||||
// ---- queue draining ---------------------------------------------------
|
||||
|
||||
/**
 * Drain the persistent queue through processBatch, then record
 * sync stats (lastSyncAt, filesSynced) when anything was delivered.
 */
async flushQueue(): Promise<void> {
	if (this.deps.queue.size === 0) return;
	this.setStatus("syncing", `Syncing ${this.deps.queue.size} item(s)…`);
	const summary = await this.deps.queue.drain({
		processBatch: (batch) => this.processBatch(batch),
	});
	if (summary.acked > 0) {
		await this.deps.saveSettings((s) => {
			s.lastSyncAt = Date.now();
			s.filesSynced = (s.filesSynced ?? 0) + summary.acked;
		});
	}
	this.setStatus(this.queueStatusKind(), this.statusDetail());
}
|
||||
|
||||
private async processBatch(batch: QueueItem[]): Promise<BatchResult> {
|
||||
const settings = this.deps.getSettings();
|
||||
const upserts = batch.filter((b): b is QueueItem & { op: "upsert" } => b.op === "upsert");
|
||||
const renames = batch.filter((b): b is QueueItem & { op: "rename" } => b.op === "rename");
|
||||
const deletes = batch.filter((b): b is QueueItem & { op: "delete" } => b.op === "delete");
|
||||
|
||||
const acked: QueueItem[] = [];
|
||||
const retry: QueueItem[] = [];
|
||||
const dropped: QueueItem[] = [];
|
||||
|
||||
// Renames first so paths line up server-side before content upserts.
|
||||
if (renames.length > 0) {
|
||||
try {
|
||||
await this.deps.apiClient.renameBatch({
|
||||
vaultId: settings.vaultId,
|
||||
renames: renames.map((r) => ({ oldPath: r.oldPath, newPath: r.newPath })),
|
||||
});
|
||||
acked.push(...renames);
|
||||
} catch (err) {
|
||||
const verdict = this.classify(err);
|
||||
if (verdict === "stop") return { acked, retry: [...retry, ...renames], dropped, stop: true };
|
||||
if (verdict === "retry") retry.push(...renames);
|
||||
else dropped.push(...renames);
|
||||
}
|
||||
}
|
||||
|
||||
if (deletes.length > 0) {
|
||||
try {
|
||||
await this.deps.apiClient.deleteBatch({
|
||||
vaultId: settings.vaultId,
|
||||
paths: deletes.map((d) => d.path),
|
||||
});
|
||||
acked.push(...deletes);
|
||||
} catch (err) {
|
||||
const verdict = this.classify(err);
|
||||
if (verdict === "stop") return { acked, retry: [...retry, ...deletes], dropped, stop: true };
|
||||
if (verdict === "retry") retry.push(...deletes);
|
||||
else dropped.push(...deletes);
|
||||
}
|
||||
}
|
||||
|
||||
if (upserts.length > 0) {
|
||||
const payloads: NotePayload[] = [];
|
||||
for (const item of upserts) {
|
||||
const file = this.deps.app.vault.getAbstractFileByPath(item.path);
|
||||
if (!file || !isTFile(file)) {
|
||||
// File vanished; treat as ack (delete will follow if user removed it).
|
||||
acked.push(item);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const payload = this.isMarkdown(file)
|
||||
? await buildNotePayload(this.deps.app, file, settings.vaultId)
|
||||
: await this.buildBinaryPayload(file, settings.vaultId);
|
||||
payloads.push(payload);
|
||||
} catch (err) {
|
||||
console.error("SurfSense: failed to build payload", item.path, err);
|
||||
retry.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
if (payloads.length > 0) {
|
||||
try {
|
||||
const resp = await this.deps.apiClient.syncBatch({
|
||||
vaultId: settings.vaultId,
|
||||
notes: payloads,
|
||||
});
|
||||
const rejected = new Set(resp.rejected ?? []);
|
||||
for (const item of upserts) {
|
||||
if (retry.find((r) => r === item)) continue;
|
||||
if (rejected.has(item.path)) dropped.push(item);
|
||||
else acked.push(item);
|
||||
}
|
||||
} catch (err) {
|
||||
const verdict = this.classify(err);
|
||||
if (verdict === "stop")
|
||||
return { acked, retry: [...retry, ...upserts], dropped, stop: true };
|
||||
if (verdict === "retry") retry.push(...upserts);
|
||||
else dropped.push(...upserts);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { acked, retry, dropped, stop: false };
|
||||
}
|
||||
|
||||
/**
 * Build a NotePayload for a non-markdown attachment.
 *
 * Uses Web Crypto (not node:crypto) per the mobile-safety contract, so
 * this works on Obsidian mobile. The markdown-only fields are sent
 * empty; only the hash, stat times, and is_binary flag carry meaning.
 */
private async buildBinaryPayload(file: TFile, vaultId: string): Promise<NotePayload> {
	// Plain attachments don't go through buildNotePayload (no markdown
	// metadata to extract). We still need a stable hash + file stat so
	// the backend can de-dupe and the manifest diff still works.
	const buf = await this.deps.app.vault.readBinary(file);
	const digest = await crypto.subtle.digest("SHA-256", buf);
	const hash = bufferToHex(digest);
	return {
		vault_id: vaultId,
		path: file.path,
		name: file.basename,
		extension: file.extension,
		content: "",
		frontmatter: {},
		tags: [],
		headings: [],
		resolved_links: [],
		unresolved_links: [],
		embeds: [],
		aliases: [],
		content_hash: hash,
		mtime: file.stat.mtime,
		ctime: file.stat.ctime,
		is_binary: true,
	};
}
|
||||
|
||||
// ---- reconcile --------------------------------------------------------
|
||||
|
||||
/**
 * Diff the vault against the server manifest and queue the difference,
 * then flush. Skipped when not yet connected, and rate-limited to
 * RECONCILE_MIN_INTERVAL_MS unless `force` is true.
 */
async maybeReconcile(force = false): Promise<void> {
	const settings = this.deps.getSettings();
	if (!settings.connectorId) return;
	if (!force && settings.lastReconcileAt) {
		if (Date.now() - settings.lastReconcileAt < RECONCILE_MIN_INTERVAL_MS) return;
	}

	this.setStatus("syncing", "Reconciling vault with server…");
	try {
		const manifest = await this.deps.apiClient.getManifest(settings.vaultId);
		const remote = manifest.entries ?? {};
		await this.diffAndQueue(settings, remote);
		await this.deps.saveSettings((s) => {
			s.lastReconcileAt = Date.now();
			// Expired tombstones are no longer needed once reconciled.
			s.tombstones = pruneTombstones(s.tombstones);
		});
		await this.flushQueue();
	} catch (err) {
		this.classifyAndStatus(err, "Reconcile failed");
	}
}
|
||||
|
||||
/**
 * Compute vault-vs-manifest differences and enqueue the fixes.
 *
 * Upsert when: local-only, local mtime newer than remote by > 1s
 * (slack absorbs clock/rounding skew), or markdown content hash
 * differs. Delete when: remote-only and not protected by a fresh
 * tombstone (a queued delete will handle those).
 */
private async diffAndQueue(
	settings: SyncEngineSettings,
	remote: Record<string, { hash: string; mtime: number }>,
): Promise<void> {
	const localFiles = this.deps.app.vault.getFiles().filter((f) => {
		if (!this.shouldTrack(f)) return false;
		if (this.isExcluded(f.path, settings)) return false;
		return true;
	});
	const localPaths = new Set(localFiles.map((f) => f.path));

	// Local-only or content-changed → upsert.
	for (const file of localFiles) {
		const remoteEntry = remote[file.path];
		if (!remoteEntry) {
			this.deps.queue.enqueueUpsert(file.path);
			continue;
		}
		// 1s slack so rounded/server-side mtimes don't force re-uploads.
		if (file.stat.mtime > remoteEntry.mtime + 1000) {
			this.deps.queue.enqueueUpsert(file.path);
			continue;
		}
		if (this.isMarkdown(file)) {
			const content = await this.deps.app.vault.cachedRead(file);
			const hash = await computeContentHash(content);
			if (hash !== remoteEntry.hash) {
				this.deps.queue.enqueueUpsert(file.path);
			}
		}
	}

	// Remote-only → delete, but only if NOT a fresh tombstone (which
	// the queue will deliver) and NOT a path we already plan to upsert.
	for (const path of Object.keys(remote)) {
		if (localPaths.has(path)) continue;
		const tombstone = settings.tombstones[path];
		if (tombstone && Date.now() - tombstone < TOMBSTONE_TTL_MS) continue;
		this.deps.queue.enqueueDelete(path);
	}
}
|
||||
|
||||
// ---- status helpers ---------------------------------------------------
|
||||
|
||||
private setStatus(kind: StatusKind, detail?: string): void {
|
||||
this.deps.setStatus({ kind, detail, queueDepth: this.deps.queue.size });
|
||||
}
|
||||
|
||||
/** "queued" while work is pending in the queue, otherwise "idle". */
private queueStatusKind(): StatusKind {
	return this.deps.queue.size > 0 ? "queued" : "idle";
}
|
||||
|
||||
/** Human-readable "last sync" note, or undefined before the first sync. */
private statusDetail(): string | undefined {
	const { lastSyncAt } = this.deps.getSettings();
	return lastSyncAt ? `Last sync ${formatRelative(lastSyncAt)}` : undefined;
}
|
||||
|
||||
private handleStartupError(err: unknown): void {
|
||||
if (err instanceof AuthError) {
|
||||
this.setStatus("auth-error", err.message);
|
||||
return;
|
||||
}
|
||||
if (err instanceof TransientError) {
|
||||
this.setStatus("offline", err.message);
|
||||
return;
|
||||
}
|
||||
this.setStatus("error", (err as Error).message ?? "Unknown error");
|
||||
}
|
||||
|
||||
/**
 * Decide how the caller should react to an error:
 * - "stop": auth or transient failure — keep the batch, halt processing.
 * - "drop": permanent failure — discard the batch and notify the user.
 * - "retry": unknown failure — keep the batch for a later attempt.
 * Side effect: updates the status bar and/or shows a Notice.
 */
private classify(err: unknown): "ack" | "retry" | "drop" | "stop" {
	if (err instanceof AuthError || err instanceof TransientError) {
		// Both halt the queue; only the status-bar kind differs.
		const kind = err instanceof AuthError ? "auth-error" : "offline";
		this.setStatus(kind, err.message);
		return "stop";
	}
	if (err instanceof PermanentError) {
		console.warn("SurfSense: permanent error, dropping batch", err);
		new Notice(`SurfSense: ${err.message}`);
		return "drop";
	}
	console.error("SurfSense: unknown error", err);
	return "retry";
}
|
||||
|
||||
private classifyAndStatus(err: unknown, prefix: string): void {
|
||||
this.classify(err);
|
||||
this.setStatus(this.queueStatusKind(), `${prefix}: ${(err as Error).message}`);
|
||||
}
|
||||
|
||||
// ---- predicates -------------------------------------------------------
|
||||
|
||||
/**
 * True when a vault entry is a file the engine syncs at all: markdown
 * always, other extensions only when attachments are enabled.
 * Exclusion patterns are checked separately via isExcluded().
 */
private shouldTrack(file: TAbstractFile): boolean {
	if (!isTFile(file)) return false;
	if (this.isMarkdown(file)) return true;
	return this.deps.getSettings().includeAttachments;
}
|
||||
|
||||
/** Delegate to the module-level isExcluded() with the configured patterns. */
private isExcluded(path: string, settings: SyncEngineSettings): boolean {
	const { excludePatterns } = settings;
	return isExcluded(path, excludePatterns);
}
|
||||
|
||||
/** True for concrete files whose extension is "md" (case-insensitive). */
private isMarkdown(file: TAbstractFile): boolean {
	if (!isTFile(file)) return false;
	return file.extension.toLowerCase() === "md";
}
|
||||
}
|
||||
|
||||
/** Type guard narrowing an abstract vault entry to a concrete TFile. */
function isTFile(f: TAbstractFile): f is TFile {
	return f instanceof TFile;
}
|
||||
|
||||
function bufferToHex(buf: ArrayBuffer): string {
|
||||
const view = new Uint8Array(buf);
|
||||
let hex = "";
|
||||
for (let i = 0; i < view.length; i++) hex += (view[i] ?? 0).toString(16).padStart(2, "0");
|
||||
return hex;
|
||||
}
|
||||
|
||||
function formatRelative(ts: number): string {
|
||||
const diff = Date.now() - ts;
|
||||
if (diff < 60_000) return "just now";
|
||||
if (diff < 3600_000) return `${Math.round(diff / 60_000)}m ago`;
|
||||
if (diff < 86_400_000) return `${Math.round(diff / 3600_000)}h ago`;
|
||||
return `${Math.round(diff / 86_400_000)}d ago`;
|
||||
}
|
||||
|
||||
function pruneTombstones(tombstones: Record<string, number>): Record<string, number> {
|
||||
const out: Record<string, number> = {};
|
||||
const cutoff = Date.now() - TOMBSTONE_TTL_MS;
|
||||
for (const [k, v] of Object.entries(tombstones)) {
|
||||
if (v >= cutoff) out[k] = v;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
145
surfsense_obsidian/src/types.ts
Normal file
145
surfsense_obsidian/src/types.ts
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
/**
|
||||
* Shared types for the SurfSense Obsidian plugin.
|
||||
*
|
||||
* Kept in a leaf module with no other src/ imports so it can be imported
|
||||
* from anywhere (settings, api-client, sync-engine, status-bar, main)
|
||||
* without creating cycles.
|
||||
*/
|
||||
|
||||
/**
 * Persisted plugin settings, including sync bookkeeping (queue,
 * tombstones, timestamps) so the engine can resume after a restart.
 */
export interface SurfsensePluginSettings {
	// SurfSense backend base URL.
	serverUrl: string;
	// Bearer token sent on API requests.
	apiToken: string;
	// Backend search-space id for this vault (null until connected).
	searchSpaceId: number | null;
	// Backend connector id for this vault (null until connected).
	connectorId: number | null;
	// Identifier for this vault on the backend.
	vaultId: string;
	vaultName: string;
	// Identifier/label for this plugin installation.
	deviceId: string;
	deviceLabel: string;
	// "auto" vs "manual" syncing — semantics defined by the sync engine.
	syncMode: "auto" | "manual";
	// Path patterns excluded from syncing.
	excludePatterns: string[];
	// When false, only markdown files are tracked.
	includeAttachments: boolean;
	// Epoch-ms timestamp of the last successful sync (null if never).
	lastSyncAt: number | null;
	// Epoch-ms timestamp of the last manifest reconcile (null if never).
	lastReconcileAt: number | null;
	// Running count of files synced.
	filesSynced: number;
	// Persisted outbound operation queue.
	queue: QueueItem[];
	// path → epoch-ms deletion time; entries are pruned after a TTL.
	tombstones: Record<string, number>;
}
|
||||
|
||||
/**
 * Initial settings used before the user has connected the plugin.
 * Credentials, ids and sync bookkeeping all start empty/null/zero.
 */
export const DEFAULT_SETTINGS: SurfsensePluginSettings = {
	serverUrl: "https://api.surfsense.com",
	apiToken: "",
	searchSpaceId: null,
	connectorId: null,
	vaultId: "",
	vaultName: "",
	deviceId: "",
	deviceLabel: "",
	// Sync automatically by default.
	syncMode: "auto",
	// Folders skipped out of the box.
	excludePatterns: [".trash", "_attachments", "templates"],
	includeAttachments: false,
	lastSyncAt: null,
	lastReconcileAt: null,
	filesSynced: 0,
	queue: [],
	tombstones: {},
};
|
||||
|
||||
/** Operations a queued sync item can represent. */
export type QueueOp = "upsert" | "delete" | "rename";

/** Queued upload (create or update) of the file at `path`. */
export interface UpsertItem {
	op: "upsert";
	// Vault-relative path of the file.
	path: string;
	// Epoch ms when the item entered the queue.
	enqueuedAt: number;
	// Delivery attempts made so far.
	attempt: number;
}

/** Queued deletion of the backend document for `path`. */
export interface DeleteItem {
	op: "delete";
	path: string;
	enqueuedAt: number;
	attempt: number;
}

/** Queued rename from `oldPath` to `newPath`. */
export interface RenameItem {
	op: "rename";
	oldPath: string;
	newPath: string;
	enqueuedAt: number;
	attempt: number;
}

/** Discriminated union over queue operations, tagged by `op`. */
export type QueueItem = UpsertItem | DeleteItem | RenameItem;
|
||||
|
||||
/**
 * Wire payload for a single note sent to the backend (snake_case keys
 * match the server schema). The index signature lets additive fields
 * pass through without breaking the type.
 */
export interface NotePayload {
	vault_id: string;
	// Vault-relative file path.
	path: string;
	name: string;
	extension: string;
	// Full note body.
	content: string;
	// Parsed frontmatter — presumably {} when absent; confirm against producer.
	frontmatter: Record<string, unknown>;
	tags: string[];
	headings: HeadingRef[];
	// Links that resolve to vault files vs. those that do not.
	resolved_links: string[];
	unresolved_links: string[];
	embeds: string[];
	aliases: string[];
	// Content hash used for change detection against the server manifest.
	content_hash: string;
	// File timestamps (epoch ms).
	mtime: number;
	ctime: number;
	// Tolerate additive fields.
	[key: string]: unknown;
}

/** A note heading: its text and outline level. */
export interface HeadingRef {
	heading: string;
	level: number;
}
|
||||
|
||||
/** A backend search space the user can pick as the sync target. */
export interface SearchSpace {
	id: number;
	name: string;
	description?: string;
	// Extra backend fields are tolerated.
	[key: string]: unknown;
}

/**
 * Response from the backend connect call: the ids binding this vault
 * to a connector/search space, plus API feature-detection info.
 * The index signature lets additive backend fields (e.g. new
 * capabilities-related data) parse without breaking the decoder.
 */
export interface ConnectResponse {
	connector_id: number;
	vault_id: string;
	search_space_id: number;
	api_version: string;
	// Feature flags used for API feature detection.
	capabilities: string[];
	server_time_utc: string;
	[key: string]: unknown;
}

/** Response from the backend health endpoint: version + capabilities. */
export interface HealthResponse {
	api_version: string;
	capabilities: string[];
	server_time_utc: string;
	// Additive backend fields parse without breaking the decoder.
	[key: string]: unknown;
}
|
||||
|
||||
/** Per-file manifest record: content hash plus modification time. */
export interface ManifestEntry {
	hash: string;
	// Modification time — compared against Obsidian's ms-based file mtime.
	mtime: number;
	[key: string]: unknown;
}

/**
 * Server-side manifest of a vault, used by the sync engine to diff
 * against local files.
 */
export interface ManifestResponse {
	vault_id: string;
	// Keyed by vault-relative path.
	entries: Record<string, ManifestEntry>;
	[key: string]: unknown;
}
|
||||
|
||||
/** High-level sync states surfaced to the UI. */
export type StatusKind =
	| "idle"
	| "syncing"
	| "queued"
	| "offline"
	| "auth-error"
	| "error";

/** Snapshot of sync status pushed to UI components. */
export interface StatusState {
	kind: StatusKind;
	// Optional human-readable context, e.g. an error message.
	detail?: string;
	// Items currently waiting in the sync queue.
	queueDepth: number;
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue