From 5a27b998c396880d4e86de3dae2c802789eb7dc7 Mon Sep 17 00:00:00 2001 From: svemagie <869694+svemagie@users.noreply.github.com> Date: Wed, 4 Mar 2026 21:13:28 +0100 Subject: [PATCH] Fix vault index: prevent stuck state and wait for layout ready - VaultSearch.buildIndex(): wrap body in try/finally so this.indexing is always reset to false even if an unexpected error occurs. Previously any outer exception left indexing=true permanently, blocking all retries. - main.ts: replace unreliable setTimeout(3000) with onLayoutReady() so the index is built only after Obsidian's vault cache is fully resolved. Co-Authored-By: Claude Sonnet 4.6 --- main.js | 109 ++++++++++++++++++++-------------------- src/VaultSearch.ts | 120 +++++++++++++++++++++++---------------------- src/main.ts | 6 +-- 3 files changed, 121 insertions(+), 114 deletions(-) diff --git a/main.js b/main.js index 07317f3..39653d2 100644 --- a/main.js +++ b/main.js @@ -510,60 +510,63 @@ var VaultSearch = class { this.docVectors.clear(); this.idf.clear(); this.docContents.clear(); - const files = this.app.vault.getMarkdownFiles(); - const total = files.length; - const df = /* @__PURE__ */ new Map(); - const tfs = /* @__PURE__ */ new Map(); - for (let i = 0; i < files.length; i++) { - const file = files[i]; - if (this.onProgress && i % 100 === 0) - this.onProgress(i, total); - try { - const raw = await this.app.vault.cachedRead(file); - const clean = this.cleanContent(raw); - this.docContents.set(file.path, clean); - const tokens = this.tokenize(clean + " " + file.basename); - const tf = /* @__PURE__ */ new Map(); - for (const t of tokens) { - tf.set(t, ((_a = tf.get(t)) != null ? _a : 0) + 1); - } - const maxTf = Math.max(...tf.values(), 1); - const normalizedTf = /* @__PURE__ */ new Map(); - for (const [t, count] of tf) { - normalizedTf.set(t, count / maxTf); - } - tfs.set(file.path, normalizedTf); - for (const t of tf.keys()) { - df.set(t, ((_b = df.get(t)) != null ? 
_b : 0) + 1); - } - } catch (e) { - } - } - const N = files.length; - for (const [term, docCount] of df) { - this.idf.set(term, Math.log(N / docCount + 1)); - } - for (const [path, tf] of tfs) { - const vec = /* @__PURE__ */ new Map(); - let norm = 0; - for (const [term, tfVal] of tf) { - const idfVal = (_c = this.idf.get(term)) != null ? _c : 0; - const tfidf = tfVal * idfVal; - vec.set(term, tfidf); - norm += tfidf * tfidf; - } - norm = Math.sqrt(norm); - if (norm > 0) { - for (const [term, val] of vec) { - vec.set(term, val / norm); + try { + const files = this.app.vault.getMarkdownFiles(); + const total = files.length; + const df = /* @__PURE__ */ new Map(); + const tfs = /* @__PURE__ */ new Map(); + for (let i = 0; i < files.length; i++) { + const file = files[i]; + if (this.onProgress && i % 100 === 0) + this.onProgress(i, total); + try { + const raw = await this.app.vault.cachedRead(file); + const clean = this.cleanContent(raw); + this.docContents.set(file.path, clean); + const tokens = this.tokenize(clean + " " + file.basename); + const tf = /* @__PURE__ */ new Map(); + for (const t of tokens) { + tf.set(t, ((_a = tf.get(t)) != null ? _a : 0) + 1); + } + const maxTf = Math.max(...tf.values(), 1); + const normalizedTf = /* @__PURE__ */ new Map(); + for (const [t, count] of tf) { + normalizedTf.set(t, count / maxTf); + } + tfs.set(file.path, normalizedTf); + for (const t of tf.keys()) { + df.set(t, ((_b = df.get(t)) != null ? _b : 0) + 1); + } + } catch (e) { } } - this.docVectors.set(path, vec); + const N = files.length; + for (const [term, docCount] of df) { + this.idf.set(term, Math.log(N / docCount + 1)); + } + for (const [path, tf] of tfs) { + const vec = /* @__PURE__ */ new Map(); + let norm = 0; + for (const [term, tfVal] of tf) { + const idfVal = (_c = this.idf.get(term)) != null ? 
_c : 0; + const tfidf = tfVal * idfVal; + vec.set(term, tfidf); + norm += tfidf * tfidf; + } + norm = Math.sqrt(norm); + if (norm > 0) { + for (const [term, val] of vec) { + vec.set(term, val / norm); + } + } + this.docVectors.set(path, vec); + } + this.indexed = true; + if (this.onProgress) + this.onProgress(total, total); + } finally { + this.indexing = false; } - this.indexed = true; - this.indexing = false; - if (this.onProgress) - this.onProgress(total, total); } isIndexed() { return this.indexed; @@ -865,11 +868,11 @@ var MemexChatPlugin = class extends import_obsidian4.Plugin { } }); this.addSettingTab(new MemexChatSettingsTab(this.app, this)); - setTimeout(() => { + this.app.workspace.onLayoutReady(() => { if (!this.search.isIndexed()) { this.search.buildIndex().catch(console.error); } - }, 3e3); + }); console.log("[Memex Chat] Plugin geladen"); } onunload() { diff --git a/src/VaultSearch.ts b/src/VaultSearch.ts index 2afba38..a014560 100644 --- a/src/VaultSearch.ts +++ b/src/VaultSearch.ts @@ -59,70 +59,74 @@ export class VaultSearch { this.idf.clear(); this.docContents.clear(); - const files = this.app.vault.getMarkdownFiles(); - const total = files.length; - const df: Map<string, number> = new Map(); // term -> doc count + try { + const files = this.app.vault.getMarkdownFiles(); + const total = files.length; + const df: Map<string, number> = new Map(); // term -> doc count - // Step 1: Read all files, compute TF - const tfs: Map<string, Map<string, number>> = new Map(); - for (let i = 0; i < files.length; i++) { - const file = files[i]; - if (this.onProgress && i % 100 === 0) this.onProgress(i, total); - try { - const raw = await this.app.vault.cachedRead(file); - const clean = this.cleanContent(raw); - this.docContents.set(file.path, clean); + // Step 1: Read all files, compute TF + const tfs: Map<string, Map<string, number>> = new Map(); + for (let i = 0; i < files.length; i++) { + const file = files[i]; + if (this.onProgress && i % 100 === 0) this.onProgress(i, total); + try { + const raw = await this.app.vault.cachedRead(file); + const 
clean = this.cleanContent(raw); + this.docContents.set(file.path, clean); - const tokens = this.tokenize(clean + " " + file.basename); - const tf: Map<string, number> = new Map(); - for (const t of tokens) { - tf.set(t, (tf.get(t) ?? 0) + 1); - } - // Normalize TF - const maxTf = Math.max(...tf.values(), 1); - const normalizedTf: Map<string, number> = new Map(); - for (const [t, count] of tf) { - normalizedTf.set(t, count / maxTf); - } - tfs.set(file.path, normalizedTf); + const tokens = this.tokenize(clean + " " + file.basename); + const tf: Map<string, number> = new Map(); + for (const t of tokens) { + tf.set(t, (tf.get(t) ?? 0) + 1); + } + // Normalize TF + const maxTf = Math.max(...tf.values(), 1); + const normalizedTf: Map<string, number> = new Map(); + for (const [t, count] of tf) { + normalizedTf.set(t, count / maxTf); + } + tfs.set(file.path, normalizedTf); - // Update DF - for (const t of tf.keys()) { - df.set(t, (df.get(t) ?? 0) + 1); - } - } catch { - // skip unreadable files - } - } - - // Step 2: Compute IDF and TF-IDF vectors - const N = files.length; - for (const [term, docCount] of df) { - this.idf.set(term, Math.log(N / docCount + 1)); - } - - for (const [path, tf] of tfs) { - const vec: Map<string, number> = new Map(); - let norm = 0; - for (const [term, tfVal] of tf) { - const idfVal = this.idf.get(term) ?? 0; - const tfidf = tfVal * idfVal; - vec.set(term, tfidf); - norm += tfidf * tfidf; - } - // L2 normalize - norm = Math.sqrt(norm); - if (norm > 0) { - for (const [term, val] of vec) { - vec.set(term, val / norm); + // Update DF + for (const t of tf.keys()) { + df.set(t, (df.get(t) ?? 
0) + 1); + } + } catch { + // skip unreadable files } } - this.docVectors.set(path, vec); - } - this.indexed = true; - this.indexing = false; - if (this.onProgress) this.onProgress(total, total); + // Step 2: Compute IDF and TF-IDF vectors + const N = files.length; + for (const [term, docCount] of df) { + this.idf.set(term, Math.log(N / docCount + 1)); + } + + for (const [path, tf] of tfs) { + const vec: Map<string, number> = new Map(); + let norm = 0; + for (const [term, tfVal] of tf) { + const idfVal = this.idf.get(term) ?? 0; + const tfidf = tfVal * idfVal; + vec.set(term, tfidf); + norm += tfidf * tfidf; + } + // L2 normalize + norm = Math.sqrt(norm); + if (norm > 0) { + for (const [term, val] of vec) { + vec.set(term, val / norm); + } + } + this.docVectors.set(path, vec); + } + + this.indexed = true; + if (this.onProgress) this.onProgress(total, total); + } finally { + // Always reset indexing so retries are possible if an error occurred + this.indexing = false; + } } isIndexed(): boolean { diff --git a/src/main.ts b/src/main.ts index c7684db..769e587 100644 --- a/src/main.ts +++ b/src/main.ts @@ -73,12 +73,12 @@ export default class MemexChatPlugin extends Plugin { // Settings tab this.addSettingTab(new MemexChatSettingsTab(this.app, this)); - // Build index in background after startup - setTimeout(() => { + // Build index once the workspace layout (and vault cache) is fully ready + this.app.workspace.onLayoutReady(() => { if (!this.search.isIndexed()) { this.search.buildIndex().catch(console.error); } - }, 3000); + }); console.log("[Memex Chat] Plugin geladen"); }