Fix vault index: prevent stuck state and wait for layout ready

- VaultSearch.buildIndex(): wrap body in try/finally so this.indexing
  is always reset to false even if an unexpected error occurs. Previously
  any outer exception left indexing=true permanently, blocking all retries.
- main.ts: replace the unreliable fixed setTimeout(3000) with onLayoutReady()
  so the index is built once Obsidian's workspace layout is ready, instead of
  after an arbitrary delay.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
svemagie
2026-03-04 21:13:28 +01:00
parent fbb8545890
commit 5a27b998c3
3 changed files with 121 additions and 114 deletions
+56 -53
View File
@@ -510,60 +510,63 @@ var VaultSearch = class {
this.docVectors.clear();
this.idf.clear();
this.docContents.clear();
const files = this.app.vault.getMarkdownFiles();
const total = files.length;
const df = /* @__PURE__ */ new Map();
const tfs = /* @__PURE__ */ new Map();
for (let i = 0; i < files.length; i++) {
const file = files[i];
if (this.onProgress && i % 100 === 0)
this.onProgress(i, total);
try {
const raw = await this.app.vault.cachedRead(file);
const clean = this.cleanContent(raw);
this.docContents.set(file.path, clean);
const tokens = this.tokenize(clean + " " + file.basename);
const tf = /* @__PURE__ */ new Map();
for (const t of tokens) {
tf.set(t, ((_a = tf.get(t)) != null ? _a : 0) + 1);
}
const maxTf = Math.max(...tf.values(), 1);
const normalizedTf = /* @__PURE__ */ new Map();
for (const [t, count] of tf) {
normalizedTf.set(t, count / maxTf);
}
tfs.set(file.path, normalizedTf);
for (const t of tf.keys()) {
df.set(t, ((_b = df.get(t)) != null ? _b : 0) + 1);
}
} catch (e) {
}
}
const N = files.length;
for (const [term, docCount] of df) {
this.idf.set(term, Math.log(N / docCount + 1));
}
for (const [path, tf] of tfs) {
const vec = /* @__PURE__ */ new Map();
let norm = 0;
for (const [term, tfVal] of tf) {
const idfVal = (_c = this.idf.get(term)) != null ? _c : 0;
const tfidf = tfVal * idfVal;
vec.set(term, tfidf);
norm += tfidf * tfidf;
}
norm = Math.sqrt(norm);
if (norm > 0) {
for (const [term, val] of vec) {
vec.set(term, val / norm);
try {
const files = this.app.vault.getMarkdownFiles();
const total = files.length;
const df = /* @__PURE__ */ new Map();
const tfs = /* @__PURE__ */ new Map();
for (let i = 0; i < files.length; i++) {
const file = files[i];
if (this.onProgress && i % 100 === 0)
this.onProgress(i, total);
try {
const raw = await this.app.vault.cachedRead(file);
const clean = this.cleanContent(raw);
this.docContents.set(file.path, clean);
const tokens = this.tokenize(clean + " " + file.basename);
const tf = /* @__PURE__ */ new Map();
for (const t of tokens) {
tf.set(t, ((_a = tf.get(t)) != null ? _a : 0) + 1);
}
const maxTf = Math.max(...tf.values(), 1);
const normalizedTf = /* @__PURE__ */ new Map();
for (const [t, count] of tf) {
normalizedTf.set(t, count / maxTf);
}
tfs.set(file.path, normalizedTf);
for (const t of tf.keys()) {
df.set(t, ((_b = df.get(t)) != null ? _b : 0) + 1);
}
} catch (e) {
}
}
this.docVectors.set(path, vec);
const N = files.length;
for (const [term, docCount] of df) {
this.idf.set(term, Math.log(N / docCount + 1));
}
for (const [path, tf] of tfs) {
const vec = /* @__PURE__ */ new Map();
let norm = 0;
for (const [term, tfVal] of tf) {
const idfVal = (_c = this.idf.get(term)) != null ? _c : 0;
const tfidf = tfVal * idfVal;
vec.set(term, tfidf);
norm += tfidf * tfidf;
}
norm = Math.sqrt(norm);
if (norm > 0) {
for (const [term, val] of vec) {
vec.set(term, val / norm);
}
}
this.docVectors.set(path, vec);
}
this.indexed = true;
if (this.onProgress)
this.onProgress(total, total);
} finally {
this.indexing = false;
}
this.indexed = true;
this.indexing = false;
if (this.onProgress)
this.onProgress(total, total);
}
isIndexed() {
return this.indexed;
@@ -865,11 +868,11 @@ var MemexChatPlugin = class extends import_obsidian4.Plugin {
}
});
this.addSettingTab(new MemexChatSettingsTab(this.app, this));
setTimeout(() => {
this.app.workspace.onLayoutReady(() => {
if (!this.search.isIndexed()) {
this.search.buildIndex().catch(console.error);
}
}, 3e3);
});
console.log("[Memex Chat] Plugin geladen");
}
onunload() {
+62 -58
View File
@@ -59,70 +59,74 @@ export class VaultSearch {
this.idf.clear();
this.docContents.clear();
const files = this.app.vault.getMarkdownFiles();
const total = files.length;
const df: Map<string, number> = new Map(); // term -> doc count
try {
const files = this.app.vault.getMarkdownFiles();
const total = files.length;
const df: Map<string, number> = new Map(); // term -> doc count
// Step 1: Read all files, compute TF
const tfs: Map<string, Map<string, number>> = new Map();
for (let i = 0; i < files.length; i++) {
const file = files[i];
if (this.onProgress && i % 100 === 0) this.onProgress(i, total);
try {
const raw = await this.app.vault.cachedRead(file);
const clean = this.cleanContent(raw);
this.docContents.set(file.path, clean);
// Step 1: Read all files, compute TF
const tfs: Map<string, Map<string, number>> = new Map();
for (let i = 0; i < files.length; i++) {
const file = files[i];
if (this.onProgress && i % 100 === 0) this.onProgress(i, total);
try {
const raw = await this.app.vault.cachedRead(file);
const clean = this.cleanContent(raw);
this.docContents.set(file.path, clean);
const tokens = this.tokenize(clean + " " + file.basename);
const tf: Map<string, number> = new Map();
for (const t of tokens) {
tf.set(t, (tf.get(t) ?? 0) + 1);
}
// Normalize TF
const maxTf = Math.max(...tf.values(), 1);
const normalizedTf: Map<string, number> = new Map();
for (const [t, count] of tf) {
normalizedTf.set(t, count / maxTf);
}
tfs.set(file.path, normalizedTf);
const tokens = this.tokenize(clean + " " + file.basename);
const tf: Map<string, number> = new Map();
for (const t of tokens) {
tf.set(t, (tf.get(t) ?? 0) + 1);
}
// Normalize TF
const maxTf = Math.max(...tf.values(), 1);
const normalizedTf: Map<string, number> = new Map();
for (const [t, count] of tf) {
normalizedTf.set(t, count / maxTf);
}
tfs.set(file.path, normalizedTf);
// Update DF
for (const t of tf.keys()) {
df.set(t, (df.get(t) ?? 0) + 1);
}
} catch {
// skip unreadable files
}
}
// Step 2: Compute IDF and TF-IDF vectors
const N = files.length;
for (const [term, docCount] of df) {
this.idf.set(term, Math.log(N / docCount + 1));
}
for (const [path, tf] of tfs) {
const vec: Map<string, number> = new Map();
let norm = 0;
for (const [term, tfVal] of tf) {
const idfVal = this.idf.get(term) ?? 0;
const tfidf = tfVal * idfVal;
vec.set(term, tfidf);
norm += tfidf * tfidf;
}
// L2 normalize
norm = Math.sqrt(norm);
if (norm > 0) {
for (const [term, val] of vec) {
vec.set(term, val / norm);
// Update DF
for (const t of tf.keys()) {
df.set(t, (df.get(t) ?? 0) + 1);
}
} catch {
// skip unreadable files
}
}
this.docVectors.set(path, vec);
}
this.indexed = true;
this.indexing = false;
if (this.onProgress) this.onProgress(total, total);
// Step 2: Compute IDF and TF-IDF vectors
const N = files.length;
for (const [term, docCount] of df) {
this.idf.set(term, Math.log(N / docCount + 1));
}
for (const [path, tf] of tfs) {
const vec: Map<string, number> = new Map();
let norm = 0;
for (const [term, tfVal] of tf) {
const idfVal = this.idf.get(term) ?? 0;
const tfidf = tfVal * idfVal;
vec.set(term, tfidf);
norm += tfidf * tfidf;
}
// L2 normalize
norm = Math.sqrt(norm);
if (norm > 0) {
for (const [term, val] of vec) {
vec.set(term, val / norm);
}
}
this.docVectors.set(path, vec);
}
this.indexed = true;
if (this.onProgress) this.onProgress(total, total);
} finally {
// Always reset indexing so retries are possible if an error occurred
this.indexing = false;
}
}
isIndexed(): boolean {
+3 -3
View File
@@ -73,12 +73,12 @@ export default class MemexChatPlugin extends Plugin {
// Settings tab
this.addSettingTab(new MemexChatSettingsTab(this.app, this));
// Build index in background after startup
setTimeout(() => {
// Build index once the workspace layout (and vault cache) is fully ready
this.app.workspace.onLayoutReady(() => {
if (!this.search.isIndexed()) {
this.search.buildIndex().catch(console.error);
}
}, 3000);
});
console.log("[Memex Chat] Plugin geladen");
}