Update patches for indiekit-endpoint-activitypub upgrade
Deploy Indiekit Server / deploy (push) Successful in 1m35s

Upstream switched from timestamp-based status IDs to MongoDB ObjectIds,
fixed the tautological null in status.js via replyIdMap, and pre-fixed
the objectId ReferenceError in DELETE was preemptively fixed upstream. Patches updated accordingly:

- patch-ap-mastodon-status-id: rewritten to capture addTimelineItem()
  return value and use _tlItem._id.toString() as the POST response ID,
  so in_reply_to_id lookups resolve correctly after ObjectId migration
- patch-ap-mastodon-delete-fix: Change B (objectId → item._id) already
  fixed upstream; Change C now matches upstream code directly
- patch-ap-status-reply-id: detects upstream replyIdMap fix and skips
  Change A silently instead of warning
- patch-micropub-gitea-dispatch-conditional: rewritten as standalone
  patch (injects _dispatchGiteaBuild helper + conditional guard),
  no longer requires the deleted base dispatch patch

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Sven
2026-04-09 15:57:57 +02:00
parent 13d6879f08
commit 5ac8f1f43b
5 changed files with 141 additions and 78 deletions
+12 -5
View File
@@ -2418,14 +2418,15 @@
} }
}, },
"node_modules/@rmdes/indiekit-endpoint-activitypub": { "node_modules/@rmdes/indiekit-endpoint-activitypub": {
"version": "3.10.3", "version": "3.13.4",
"resolved": "git+https://gitea.giersig.eu/svemagie/indiekit-endpoint-activitypub#318720c90d123a6b5b57b689eca2b15f264dfd79", "resolved": "git+https://gitea.giersig.eu/svemagie/indiekit-endpoint-activitypub#c8ca9914f8e237b5d9b94e91cbfea798e87f536d",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@fedify/debugger": "^2.1.0", "@fedify/debugger": "^2.1.0",
"@fedify/fedify": "^2.1.0", "@fedify/fedify": "^2.1.0",
"@fedify/redis": "^2.1.0", "@fedify/redis": "^2.1.0",
"@js-temporal/polyfill": "^0.5.0", "@js-temporal/polyfill": "^0.5.0",
"@rmdes/indiekit-startup-gate": "^1.0.0",
"express": "^5.0.0", "express": "^5.0.0",
"express-rate-limit": "^7.5.1", "express-rate-limit": "^7.5.1",
"ioredis": "^5.9.3", "ioredis": "^5.9.3",
@@ -2436,9 +2437,9 @@
"node": ">=22" "node": ">=22"
}, },
"peerDependencies": { "peerDependencies": {
"@indiekit/endpoint-micropub": "^1.0.0-beta.25", "@indiekit/endpoint-micropub": "^1.0.0-beta.27",
"@indiekit/error": "^1.0.0-beta.25", "@indiekit/error": "^1.0.0-beta.27",
"@indiekit/frontend": "^1.0.0-beta.25" "@indiekit/frontend": "^1.0.0-beta.27"
} }
}, },
"node_modules/@rmdes/indiekit-endpoint-activitypub/node_modules/express-rate-limit": { "node_modules/@rmdes/indiekit-endpoint-activitypub/node_modules/express-rate-limit": {
@@ -2782,6 +2783,12 @@
"node": ">=20" "node": ">=20"
} }
}, },
"node_modules/@rmdes/indiekit-startup-gate": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@rmdes/indiekit-startup-gate/-/indiekit-startup-gate-1.0.0.tgz",
"integrity": "sha512-LrfSjTN9Ay4RiJH5xSvsvOEs7Zqw/GCC9+FhF7S6Ij8eDXpJOKQeHshAhzsqSmP/wksAyq0TIhqXZAPJXM+Tcg==",
"license": "MIT"
},
"node_modules/@rmdes/indiekit-syndicator-bluesky": { "node_modules/@rmdes/indiekit-syndicator-bluesky": {
"version": "1.0.20", "version": "1.0.20",
"resolved": "https://registry.npmjs.org/@rmdes/indiekit-syndicator-bluesky/-/indiekit-syndicator-bluesky-1.0.20.tgz", "resolved": "https://registry.npmjs.org/@rmdes/indiekit-syndicator-bluesky/-/indiekit-syndicator-bluesky-1.0.20.tgz",
+8 -32
View File
@@ -51,9 +51,11 @@ const NEW_DELETE_ONE = ` // Delete from timeline
await collections.ap_timeline.deleteOne({ _id: item._id }); ${MARKER}`; await collections.ap_timeline.deleteOne({ _id: item._id }); ${MARKER}`;
// ── Change C: call broadcastDelete after timeline removal (statuses.js) ─────── // ── Change C: call broadcastDelete after timeline removal (statuses.js) ───────
// NOTE: Change B (objectId → item._id) was already fixed upstream.
// OLD_AFTER_DELETE matches the upstream code directly (no MARKER dependency).
const OLD_AFTER_DELETE = ` // Delete from timeline const OLD_AFTER_DELETE = ` // Delete from timeline
await collections.ap_timeline.deleteOne({ _id: item._id }); ${MARKER} await collections.ap_timeline.deleteOne({ _id: item._id });
// Clean up interactions`; // Clean up interactions`;
@@ -113,40 +115,14 @@ for (const filePath of indexCandidates) {
if (ok) totalPatched++; if (ok) totalPatched++;
} }
// Patch statuses.js candidates (Changes B + C together, in sequence) // Patch statuses.js candidates (Change C only — Change B already fixed upstream)
for (const filePath of statusesCandidates) { for (const filePath of statusesCandidates) {
if (!(await exists(filePath))) continue; if (!(await exists(filePath))) continue;
totalChecked++; totalChecked++;
const ok = await patchFile(filePath, [
const source = await readFile(filePath, "utf8"); { old: OLD_AFTER_DELETE, newSnippet: NEW_AFTER_DELETE, label: "broadcastDelete call" },
if (source.includes(MARKER)) { ]);
console.log(`[postinstall] patch-ap-mastodon-delete-fix: already applied to ${filePath}`); if (ok) totalPatched++;
continue;
}
// Apply B first, then C (C depends on B's output)
let updated = source;
let applied = 0;
if (!updated.includes(OLD_DELETE_ONE)) {
console.warn(`[postinstall] patch-ap-mastodon-delete-fix: "objectId fix" snippet not found in ${filePath}`);
} else {
updated = updated.replace(OLD_DELETE_ONE, NEW_DELETE_ONE);
applied++;
}
if (!updated.includes(OLD_AFTER_DELETE)) {
console.warn(`[postinstall] patch-ap-mastodon-delete-fix: "broadcastDelete call" snippet not found in ${filePath}`);
} else {
updated = updated.replace(OLD_AFTER_DELETE, NEW_AFTER_DELETE);
applied++;
}
if (applied === 0) continue;
await writeFile(filePath, updated, "utf8");
console.log(`[postinstall] Applied patch-ap-mastodon-delete-fix to ${filePath} (${applied}/2 change(s))`);
totalPatched++;
} }
if (totalChecked === 0) { if (totalChecked === 0) {
+37 -30
View File
@@ -1,24 +1,23 @@
/** /**
* Patch: fix POST /api/v1/statuses response ID to match ap_timeline published date. * Patch: fix POST /api/v1/statuses response ID to match ap_timeline _id.
* *
* Root cause: * Root cause:
* The POST /api/v1/statuses handler returns `id: String(Date.now())` — the * The POST /api/v1/statuses handler returns `id: String(Date.now())` — the
* wall-clock time when the response is sent. The ap_timeline item inserted by * wall-clock time when the response is sent. The ap_timeline item inserted by
* patch-ap-mastodon-reply-threading uses `published: data.properties.published`, * patch-ap-mastodon-reply-threading uses addTimelineItem(), which stores the
* which is set BEFORE postContent.create() runs (before the Gitea write, which * item with a MongoDB-generated ObjectId as _id.
* can take several seconds).
* *
* When Phanpy/Elk receives the creation response and the user then replies to that * When Phanpy/Elk receives the creation response and the user then replies to
* post, the client sends `in_reply_to_id: <id from creation response>`. The handler * that post, the client sends `in_reply_to_id: <id from creation response>`.
* calls findTimelineItemById with that id. The ±1 second range query looks for * The handler calls findTimelineItemById which does:
* published ≈ Date.now(), but the stored item has published = postCreationTime * collection.findOne({ _id: new ObjectId(id) })
* (potentially 515 seconds earlier). The range misses → inReplyTo = null → * A String(Date.now()) value is not a valid ObjectId → lookup returns null →
* jf2["in-reply-to"] not set → getPostType returns "note" instead of "reply". * inReplyTo = null → jf2["in-reply-to"] not set → getPostType returns "note"
* instead of "reply".
* *
* Fix: * Fix:
* 1. Also import encodeCursor alongside decodeCursor. * 1. Capture the return value of addTimelineItem() into `_tlItem`.
* 2. Use encodeCursor(data.properties.published) as the status ID in the response. * 2. Use `_tlItem?._id?.toString() || String(Date.now())` as the status ID.
* Falls back to String(Date.now()) if published is missing/invalid.
* *
* This ensures the creation response ID matches what findTimelineItemById will * This ensures the creation response ID matches what findTimelineItemById will
* resolve in subsequent in_reply_to_id lookups. * resolve in subsequent in_reply_to_id lookups.
@@ -33,15 +32,18 @@ const candidates = [
"node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-endpoint-activitypub/lib/mastodon/routes/statuses.js", "node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-endpoint-activitypub/lib/mastodon/routes/statuses.js",
]; ];
// Change 1: add encodeCursor to the decodeCursor import // Change 1: capture return value of addTimelineItem
const OLD_IMPORT = `import { decodeCursor } from "../helpers/pagination.js";`; const OLD_TL_INSERT = ` await addTimelineItem(collections, { // [patch] ap-mastodon-reply-threading`;
const NEW_IMPORT = `import { decodeCursor, encodeCursor } from "../helpers/pagination.js"; ${MARKER}`; const NEW_TL_INSERT = ` _tlItem = await addTimelineItem(collections, { // [patch] ap-mastodon-reply-threading ${MARKER}`;
// Change 2: replace String(Date.now()) with encodeCursor(data.properties.published) // Change 2: declare _tlItem before the try block
const OLD_ID = ` res.json({ const OLD_TRY = ` try { // [patch] ap-mastodon-reply-threading`;
id: String(Date.now()),`; const NEW_TRY = ` let _tlItem = null; ${MARKER}
const NEW_ID = ` res.json({ try { // [patch] ap-mastodon-reply-threading`;
id: encodeCursor(data.properties.published) || String(Date.now()), ${MARKER}`;
// Change 3: use _tlItem._id as the status response ID
const OLD_ID = ` id: String(Date.now()),`;
const NEW_ID = ` id: _tlItem?._id?.toString() || String(Date.now()), ${MARKER}`;
async function exists(filePath) { async function exists(filePath) {
try { try {
@@ -66,18 +68,23 @@ for (const filePath of candidates) {
continue; continue;
} }
if (!source.includes(OLD_IMPORT)) { let missing = false;
console.warn(`[postinstall] patch-ap-mastodon-status-id: import snippet not found in ${filePath}`); for (const [label, snippet] of [
continue; ["await addTimelineItem", OLD_TL_INSERT],
} ["try block", OLD_TRY],
["response id", OLD_ID],
if (!source.includes(OLD_ID)) { ]) {
console.warn(`[postinstall] patch-ap-mastodon-status-id: response id snippet not found in ${filePath}`); if (!source.includes(snippet)) {
continue; console.warn(`[postinstall] patch-ap-mastodon-status-id: "${label}" snippet not found in ${filePath}`);
missing = true;
}
} }
if (missing) continue;
// Apply in order: TRY first (adds let _tlItem before try), then INSERT (changes await to assign), then ID
let updated = source let updated = source
.replace(OLD_IMPORT, NEW_IMPORT) .replace(OLD_TRY, NEW_TRY)
.replace(OLD_TL_INSERT, NEW_TL_INSERT)
.replace(OLD_ID, NEW_ID); .replace(OLD_ID, NEW_ID);
if (updated === source) { if (updated === source) {
+12 -2
View File
@@ -54,7 +54,11 @@ async function exists(p) {
try { await access(p); return true; } catch { return false; } try { await access(p); return true; } catch { return false; }
} }
async function applyPatch(candidates, oldSnippet, newSnippet, label) { // Upstream fix indicator for Change A — if present, the tautological null is
// already replaced by the upstream's replyIdMap-based lookup (better than our patch).
const UPSTREAM_FIX_A = `in_reply_to_id: replyIdMap?.get(item.inReplyTo)`;
async function applyPatch(candidates, oldSnippet, newSnippet, label, upstreamFix) {
let checked = 0; let checked = 0;
let patched = 0; let patched = 0;
@@ -68,6 +72,12 @@ async function applyPatch(candidates, oldSnippet, newSnippet, label) {
continue; continue;
} }
// If upstream has already fixed the issue (better fix), skip silently
if (upstreamFix && source.includes(upstreamFix)) {
console.log(`[postinstall] patch-ap-status-reply-id: ${label} already fixed upstream in ${filePath}`);
continue;
}
if (!source.includes(oldSnippet)) { if (!source.includes(oldSnippet)) {
console.warn(`[postinstall] patch-ap-status-reply-id: ${label} snippet not found in ${filePath}`); console.warn(`[postinstall] patch-ap-status-reply-id: ${label} snippet not found in ${filePath}`);
continue; continue;
@@ -81,7 +91,7 @@ async function applyPatch(candidates, oldSnippet, newSnippet, label) {
return { checked, patched }; return { checked, patched };
} }
const a = await applyPatch(statusEntityCandidates, OLD_TAUTOLOGY, NEW_REPLY_ID, "status entity"); const a = await applyPatch(statusEntityCandidates, OLD_TAUTOLOGY, NEW_REPLY_ID, "status entity", UPSTREAM_FIX_A);
const b = await applyPatch(statusesRouteCandidates, OLD_REPLY_INSERT, NEW_REPLY_INSERT, "timeline insert"); const b = await applyPatch(statusesRouteCandidates, OLD_REPLY_INSERT, NEW_REPLY_INSERT, "timeline insert");
const totalChecked = a.checked + b.checked; const totalChecked = a.checked + b.checked;
@@ -1,12 +1,22 @@
/** /**
* Patch: skip workflow_dispatch for delete actions. * Patch: fire a Gitea workflow_dispatch after each Micropub create/update/undelete,
* but NOT for delete (Gitea's Contents API DELETE commits trigger on:push CI already).
* *
* Gitea's Contents API DELETE commits DO trigger on:push CI, while * Gitea Contents API POST/PUT commits do NOT trigger `on: push` CI workflows.
* POST/PUT commits do not. Without this patch, delete fires both * DELETE commits DO trigger `on: push`. Without this distinction, delete would fire
* on:push and workflow_dispatch → 2 CI runs. * both on:push and workflow_dispatch → 2 CI runs.
* *
* create/update/undelete: only dispatch fires (1 CI run) ✓ * create/update/undelete: only dispatch fires (1 CI run) ✓
* delete: only on:push fires (1 CI run) ✓ * delete: only on:push fires (1 CI run) ✓
*
* This is a standalone patch (does not require a separate base dispatch patch).
* Env vars read at runtime (all have defaults):
* GITEA_BASE_URL Internal Gitea API base
* GITEA_CONTENT_USER Org/user that owns the blog repo
* GITEA_CONTENT_REPO Blog repo name
* GH_CONTENT_TOKEN Gitea PAT with Actions write permission
* GITEA_DISPATCH_WORKFLOW Workflow filename, default deploy.yml
* GITEA_DISPATCH_REF Branch to dispatch on, default main
*/ */
import { readFile, writeFile, access } from "node:fs/promises"; import { readFile, writeFile, access } from "node:fs/promises";
@@ -14,8 +24,44 @@ import { readFile, writeFile, access } from "node:fs/promises";
const TARGET = "node_modules/@indiekit/endpoint-micropub/lib/controllers/action.js"; const TARGET = "node_modules/@indiekit/endpoint-micropub/lib/controllers/action.js";
const MARKER = "// [patch] micropub-gitea-dispatch-conditional"; const MARKER = "// [patch] micropub-gitea-dispatch-conditional";
const OLD = ` _dispatchGiteaBuild().catch(() => {});`; const HELPER = `${MARKER}
const NEW = ` if (action !== "delete") _dispatchGiteaBuild().catch(() => {}); ${MARKER}`; async function _dispatchGiteaBuild() {
try {
const base = (process.env.GITEA_BASE_URL || "http://10.100.0.90:3000/api/v1/").replace(/\\/+$/, "");
const owner = process.env.GITEA_CONTENT_USER || "giersig.eu";
const repo = process.env.GITEA_CONTENT_REPO || "indiekit-blog";
const token = process.env.GH_CONTENT_TOKEN || process.env.GITHUB_TOKEN || "";
const workflow = process.env.GITEA_DISPATCH_WORKFLOW || "deploy.yml";
const ref = process.env.GITEA_DISPATCH_REF || "main";
if (!token) { console.warn("[micropub-gitea-dispatch] No token — skipping dispatch"); return; }
const url = \`\${base}/repos/\${owner}/\${repo}/actions/workflows/\${workflow}/dispatches\`;
const res = await fetch(url, {
method: "POST",
headers: { "Authorization": \`token \${token}\`, "Content-Type": "application/json" },
body: JSON.stringify({ ref }),
});
if (!res.ok) {
const text = await res.text().catch(() => "");
console.warn(\`[micropub-gitea-dispatch] Dispatch failed \${res.status}: \${text}\`);
} else {
console.log(\`[micropub-gitea-dispatch] Dispatched \${workflow} on \${owner}/\${repo}@\${ref}\`);
}
} catch (err) {
console.warn("[micropub-gitea-dispatch] Dispatch error:", err.message);
}
}
`;
const OLD_RESPONSE = ` response
.status(content.status)
.location(content.location)
.json(content.json);`;
const NEW_RESPONSE = ` if (action !== "delete") _dispatchGiteaBuild().catch(() => {}); ${MARKER}
response
.status(content.status)
.location(content.location)
.json(content.json);`;
async function exists(p) { async function exists(p) {
try { await access(p); return true; } catch { return false; } try { await access(p); return true; } catch { return false; }
@@ -33,10 +79,27 @@ if (source.includes(MARKER)) {
process.exit(0); process.exit(0);
} }
if (!source.includes(OLD)) { if (!source.includes(OLD_RESPONSE)) {
console.warn("[postinstall] micropub-gitea-dispatch-conditional: snippet not found — is patch-micropub-gitea-dispatch applied first?"); console.warn("[postinstall] micropub-gitea-dispatch-conditional: response snippet not found — skipping");
process.exit(0); process.exit(0);
} }
await writeFile(TARGET, source.replace(OLD, NEW), "utf8"); // Insert helper block after the last import statement
const allImportMatches = [...source.matchAll(/^import\s/gm)];
let insertAt = 0;
if (allImportMatches.length > 0) {
const lastImportStart = allImportMatches.at(-1).index;
const afterLastImport = source.slice(lastImportStart);
const fromMatch = afterLastImport.match(/from\s+["'][^"']+["']\s*;\s*\n/);
if (fromMatch) {
insertAt = lastImportStart + fromMatch.index + fromMatch[0].length;
}
}
const updated =
source.slice(0, insertAt) +
"\n" + HELPER + "\n" +
source.slice(insertAt).replace(OLD_RESPONSE, NEW_RESPONSE);
await writeFile(TARGET, updated, "utf8");
console.log(`[postinstall] micropub-gitea-dispatch-conditional: patched ${TARGET}`); console.log(`[postinstall] micropub-gitea-dispatch-conditional: patched ${TARGET}`);