chore: remove AP runtime patches now integrated into fork
Deploy Indiekit Server / deploy (push) Failing after 1m11s
Deploy Indiekit Server / deploy (push) Failing after 1m11s
All 10 AP patch scripts are now part of the svemagie/indiekit-endpoint-activitypub fork source code (commit 56a8b08). Runtime patching is no longer needed for these. Removed: accounts-id-cache-fallback, conversations-endpoint, federation-infra, mastodon-misc, mastodon-statuses, oauth-token-expiry, self-follow-guard, startup-gate-bypass, syndication, unify-dm-visibility Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,62 +0,0 @@
|
||||
/**
|
||||
* patch-ap-accounts-id-cache-fallback.mjs
|
||||
*
|
||||
* Fixes 404 on /api/v1/accounts/:id for actors resolved via /lookup but not
|
||||
* in followers/following/timeline. After resolveActorData() returns null,
|
||||
* checks in-memory idToUrl cache and ap_actor_cache MongoDB collection for
|
||||
* the actor URL, then calls resolveRemoteAccount() if found.
|
||||
*/
|
||||
import { readFileSync, writeFileSync } from "node:fs";
|
||||
|
||||
const MARKER = "// [patch] ap-accounts-id-cache-fallback";
|
||||
const TARGET = "/usr/local/indiekit/node_modules/@rmdes/indiekit-endpoint-activitypub/lib/mastodon/routes/accounts.js";
|
||||
|
||||
const src = readFileSync(TARGET, "utf8");
|
||||
|
||||
if (src.includes(MARKER)) {
|
||||
console.log("[postinstall] ap-accounts-id-cache-fallback: already applied");
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Find the 404 return in the GET /accounts/:id handler.
|
||||
// We need to insert a cache-fallback block BEFORE the 404 return.
|
||||
// The pattern: after the resolveActorData block, before `return res.status(404)`
|
||||
// in the :id handler.
|
||||
|
||||
const needle = ` return res.status(404).json({ error: "Record not found" });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
// ─── GET /api/v1/accounts/:id/statuses`;
|
||||
|
||||
if (!src.includes(needle)) {
|
||||
console.error("[postinstall] ap-accounts-id-cache-fallback: cannot find insertion point");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const replacement = ` // Cache fallback: actor not in followers/following/timeline, ${MARKER}
|
||||
// but may have been resolved via /lookup and cached in ap_actor_cache
|
||||
let cachedActorUrl = getActorUrlFromId(id); ${MARKER}
|
||||
if (!cachedActorUrl && collections.ap_actor_cache) { ${MARKER}
|
||||
const cached = await collections.ap_actor_cache.findOne({ _id: id }); ${MARKER}
|
||||
if (cached?.actorUrl) cachedActorUrl = cached.actorUrl; ${MARKER}
|
||||
} ${MARKER}
|
||||
if (cachedActorUrl) { ${MARKER}
|
||||
const cachedAccount = await resolveRemoteAccount( ${MARKER}
|
||||
cachedActorUrl, pluginOptions, baseUrl, collections, ${MARKER}
|
||||
); ${MARKER}
|
||||
if (cachedAccount) return res.json(cachedAccount); ${MARKER}
|
||||
} ${MARKER}
|
||||
|
||||
return res.status(404).json({ error: "Record not found" });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
// ─── GET /api/v1/accounts/:id/statuses`;
|
||||
|
||||
writeFileSync(TARGET, src.replace(needle, replacement));
|
||||
console.log("[postinstall] ap-accounts-id-cache-fallback: applied successfully");
|
||||
@@ -1,380 +0,0 @@
|
||||
/**
|
||||
* Patch: Implement /api/v1/conversations endpoint + backfill ap_messages.
|
||||
*
|
||||
* Problem: /api/v1/conversations was a stub returning []. DMs are stored in
|
||||
* ap_notifications (type:"dm") but ap_messages was empty (code added after
|
||||
* existing DMs were processed).
|
||||
*
|
||||
* Fix:
|
||||
* A) Backfill ap_messages from ap_notifications type:"dm" (idempotent upsert)
|
||||
* B) Replace the conversations stub with a real implementation that:
|
||||
* - Aggregates ap_messages grouped by conversationId (actor URL)
|
||||
* - Returns Mastodon Conversation entities
|
||||
* - Supports pagination (max_id, since_id, limit)
|
||||
* C) Add POST /api/v1/conversations/:id/read to mark conversations read
|
||||
*/
|
||||
|
||||
import { access, readFile, writeFile } from "node:fs/promises";
|
||||
|
||||
const AP_BASE = "@rmdes/indiekit-endpoint-activitypub";
|
||||
const AP_ROOTS = [
|
||||
`node_modules/${AP_BASE}`,
|
||||
`node_modules/@indiekit/indiekit/node_modules/${AP_BASE}`,
|
||||
];
|
||||
|
||||
function apPath(rel) {
|
||||
return AP_ROOTS.map(r => `${r}/${rel}`);
|
||||
}
|
||||
|
||||
async function fileExists(p) {
|
||||
try { await access(p); return true; } catch { return false; }
|
||||
}
|
||||
|
||||
const SCRIPT = "patch-ap-conversations-endpoint";
|
||||
const MARKER = "// [patch] ap-conversations-endpoint";
|
||||
let total = 0;
|
||||
|
||||
// ── Part A: Backfill ap_messages from ap_notifications ──────────────────────
|
||||
|
||||
async function backfillMessages() {
|
||||
let MongoClient;
|
||||
try {
|
||||
({ MongoClient } = await import("mongodb"));
|
||||
} catch {
|
||||
console.warn(`[postinstall] ${SCRIPT}: mongodb driver not available, skipping backfill`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Load .env for Mongo credentials
|
||||
const dotenv = await import("dotenv");
|
||||
const fs = await import("node:fs");
|
||||
let envVars = {};
|
||||
try {
|
||||
const envContent = fs.readFileSync(".env", "utf8");
|
||||
envVars = dotenv.parse(envContent);
|
||||
} catch {
|
||||
// .env not found — use process.env
|
||||
}
|
||||
const env = { ...process.env, ...envVars };
|
||||
|
||||
const mongoUsername = env.MONGO_USERNAME || env.MONGO_USER || "";
|
||||
const mongoPassword = env.MONGO_PASSWORD || "";
|
||||
const mongoHost = env.MONGO_HOST || "10.100.0.20";
|
||||
const mongoPort = env.MONGO_PORT || "27017";
|
||||
const mongoDatabase = env.MONGO_DATABASE || env.MONGO_DB || "indiekit";
|
||||
const mongoAuthSource = env.MONGO_AUTH_SOURCE || "admin";
|
||||
const mongoCredentials = mongoUsername && mongoPassword
|
||||
? `${encodeURIComponent(mongoUsername)}:${encodeURIComponent(mongoPassword)}@`
|
||||
: "";
|
||||
const mongoQuery = mongoCredentials && mongoAuthSource
|
||||
? `?authSource=${encodeURIComponent(mongoAuthSource)}`
|
||||
: "";
|
||||
const mongoUrl = env.MONGO_URL || `mongodb://${mongoCredentials}${mongoHost}:${mongoPort}/${mongoDatabase}${mongoQuery}`;
|
||||
|
||||
let client;
|
||||
try {
|
||||
client = new MongoClient(mongoUrl);
|
||||
await client.connect();
|
||||
const db = client.db(mongoDatabase);
|
||||
const notifications = db.collection("ap_notifications");
|
||||
const messages = db.collection("ap_messages");
|
||||
|
||||
// Find all DM notifications
|
||||
const dmNotifs = await notifications.find({ type: "dm" }).toArray();
|
||||
if (!dmNotifs.length) {
|
||||
console.log(`[postinstall] ${SCRIPT}: no DM notifications to backfill`);
|
||||
return;
|
||||
}
|
||||
|
||||
let backfilled = 0;
|
||||
for (const notif of dmNotifs) {
|
||||
// Build uid matching what addMessage uses
|
||||
const uid = notif.uid?.startsWith("dm:")
|
||||
? notif.uid.replace(/^dm:/, "")
|
||||
: notif.uid || `dm:${notif.actorUrl}:${Date.now()}`;
|
||||
|
||||
const result = await messages.updateOne(
|
||||
{ uid },
|
||||
{
|
||||
$setOnInsert: {
|
||||
uid,
|
||||
actorUrl: notif.actorUrl || "",
|
||||
actorName: notif.actorName || "",
|
||||
actorPhoto: notif.actorPhoto || "",
|
||||
actorHandle: notif.actorHandle || "",
|
||||
content: notif.content || { text: "", html: "" },
|
||||
inReplyTo: null,
|
||||
conversationId: notif.actorUrl || "",
|
||||
direction: "inbound",
|
||||
published: notif.published || notif.createdAt || new Date().toISOString(),
|
||||
createdAt: notif.createdAt || new Date().toISOString(),
|
||||
read: notif.read || false,
|
||||
},
|
||||
},
|
||||
{ upsert: true },
|
||||
);
|
||||
if (result.upsertedCount > 0) backfilled++;
|
||||
}
|
||||
|
||||
console.log(`[postinstall] ${SCRIPT}: backfilled ${backfilled}/${dmNotifs.length} DM notifications into ap_messages`);
|
||||
} catch (error) {
|
||||
console.warn(`[postinstall] ${SCRIPT}: backfill failed: ${error.message}`);
|
||||
} finally {
|
||||
if (client) await client.close().catch(() => {});
|
||||
}
|
||||
}
|
||||
|
||||
// ── Part B: Replace conversations stub in stubs.js ──────────────────────────
|
||||
|
||||
const STUBS_CANDIDATES = apPath("lib/mastodon/routes/stubs.js");
|
||||
|
||||
// The old stub — exact match
|
||||
const CONVERSATIONS_STUB = `router.get("/api/v1/conversations", (req, res) => {
|
||||
res.json([]);
|
||||
});`;
|
||||
|
||||
// The new implementation
|
||||
const CONVERSATIONS_IMPL = `// ─── Conversations (Direct Messages) ────────────────────────────────────────
|
||||
// Real implementation replacing the empty stub. ${MARKER}
|
||||
// Reads from ap_messages collection, groups by conversationId (actor URL).
|
||||
|
||||
router.get("/api/v1/conversations", async (req, res, next) => { ${MARKER}
|
||||
try { ${MARKER}
|
||||
const collections = req.app.locals.mastodonCollections; ${MARKER}
|
||||
const baseUrl = \`\${req.protocol}://\${req.get("host")}\`; ${MARKER}
|
||||
const { serializeAccount } = await import("../entities/account.js"); ${MARKER}
|
||||
const { remoteActorId } = await import("../helpers/id-mapping.js"); ${MARKER}
|
||||
const { parseLimit } = await import("../helpers/pagination.js"); ${MARKER}
|
||||
${MARKER}
|
||||
if (!collections?.ap_messages) { ${MARKER}
|
||||
return res.json([]); ${MARKER}
|
||||
} ${MARKER}
|
||||
${MARKER}
|
||||
const limit = parseLimit(req.query.limit, 20); ${MARKER}
|
||||
${MARKER}
|
||||
// Aggregate conversations: group by conversationId, get last message + unread count ${MARKER}
|
||||
const pipeline = [ ${MARKER}
|
||||
{ $sort: { published: -1 } }, ${MARKER}
|
||||
{ ${MARKER}
|
||||
$group: { ${MARKER}
|
||||
_id: "$conversationId", ${MARKER}
|
||||
lastMessageId: { $first: "$_id" }, ${MARKER}
|
||||
lastUid: { $first: "$uid" }, ${MARKER}
|
||||
lastContent: { $first: "$content" }, ${MARKER}
|
||||
lastPublished: { $first: "$published" }, ${MARKER}
|
||||
actorUrl: { $first: "$actorUrl" }, ${MARKER}
|
||||
actorName: { $first: "$actorName" }, ${MARKER}
|
||||
actorPhoto: { $first: "$actorPhoto" }, ${MARKER}
|
||||
actorHandle: { $first: "$actorHandle" }, ${MARKER}
|
||||
unreadCount: { ${MARKER}
|
||||
$sum: { $cond: [{ $eq: ["$read", false] }, 1, 0] }, ${MARKER}
|
||||
}, ${MARKER}
|
||||
}, ${MARKER}
|
||||
}, ${MARKER}
|
||||
{ $sort: { lastPublished: -1 } }, ${MARKER}
|
||||
]; ${MARKER}
|
||||
${MARKER}
|
||||
// Apply cursor pagination on the aggregation result ${MARKER}
|
||||
if (req.query.max_id) { ${MARKER}
|
||||
pipeline.splice(0, 0, { ${MARKER}
|
||||
$match: { _id: { $lt: req.query.max_id } }, ${MARKER}
|
||||
}); ${MARKER}
|
||||
} ${MARKER}
|
||||
${MARKER}
|
||||
pipeline.push({ $limit: limit }); ${MARKER}
|
||||
${MARKER}
|
||||
const conversations = await collections.ap_messages ${MARKER}
|
||||
.aggregate(pipeline) ${MARKER}
|
||||
.toArray(); ${MARKER}
|
||||
${MARKER}
|
||||
const result = conversations.map((conv) => { ${MARKER}
|
||||
const convId = remoteActorId(conv._id || conv.actorUrl); ${MARKER}
|
||||
${MARKER}
|
||||
// Build a minimal Mastodon Status for last_status ${MARKER}
|
||||
const lastStatus = { ${MARKER}
|
||||
id: conv.lastMessageId.toString(), ${MARKER}
|
||||
created_at: conv.lastPublished || new Date().toISOString(), ${MARKER}
|
||||
in_reply_to_id: null, ${MARKER}
|
||||
in_reply_to_account_id: null, ${MARKER}
|
||||
sensitive: false, ${MARKER}
|
||||
spoiler_text: "", ${MARKER}
|
||||
visibility: "direct", ${MARKER}
|
||||
language: null, ${MARKER}
|
||||
uri: conv.lastUid || "", ${MARKER}
|
||||
url: conv.lastUid || "", ${MARKER}
|
||||
replies_count: 0, ${MARKER}
|
||||
reblogs_count: 0, ${MARKER}
|
||||
favourites_count: 0, ${MARKER}
|
||||
edited_at: null, ${MARKER}
|
||||
favourited: false, ${MARKER}
|
||||
reblogged: false, ${MARKER}
|
||||
muted: false, ${MARKER}
|
||||
bookmarked: false, ${MARKER}
|
||||
pinned: false, ${MARKER}
|
||||
content: conv.lastContent?.html || conv.lastContent?.text || "", ${MARKER}
|
||||
filtered: null, ${MARKER}
|
||||
reblog: null, ${MARKER}
|
||||
application: null, ${MARKER}
|
||||
account: serializeAccount( ${MARKER}
|
||||
{ ${MARKER}
|
||||
name: conv.actorName, ${MARKER}
|
||||
url: conv.actorUrl, ${MARKER}
|
||||
photo: conv.actorPhoto, ${MARKER}
|
||||
handle: conv.actorHandle, ${MARKER}
|
||||
}, ${MARKER}
|
||||
{ baseUrl }, ${MARKER}
|
||||
), ${MARKER}
|
||||
media_attachments: [], ${MARKER}
|
||||
mentions: [], ${MARKER}
|
||||
tags: [], ${MARKER}
|
||||
emojis: [], ${MARKER}
|
||||
card: null, ${MARKER}
|
||||
poll: null, ${MARKER}
|
||||
}; ${MARKER}
|
||||
${MARKER}
|
||||
return { ${MARKER}
|
||||
id: convId, ${MARKER}
|
||||
unread: conv.unreadCount > 0, ${MARKER}
|
||||
last_status: lastStatus, ${MARKER}
|
||||
accounts: [ ${MARKER}
|
||||
serializeAccount( ${MARKER}
|
||||
{ ${MARKER}
|
||||
name: conv.actorName, ${MARKER}
|
||||
url: conv.actorUrl, ${MARKER}
|
||||
photo: conv.actorPhoto, ${MARKER}
|
||||
handle: conv.actorHandle, ${MARKER}
|
||||
}, ${MARKER}
|
||||
{ baseUrl }, ${MARKER}
|
||||
), ${MARKER}
|
||||
], ${MARKER}
|
||||
}; ${MARKER}
|
||||
}); ${MARKER}
|
||||
${MARKER}
|
||||
// Set Link header for pagination ${MARKER}
|
||||
if (result.length === limit && conversations.length > 0) { ${MARKER}
|
||||
const lastConv = conversations[conversations.length - 1]; ${MARKER}
|
||||
const maxId = remoteActorId(lastConv._id || lastConv.actorUrl); ${MARKER}
|
||||
res.set("Link", \`<\${baseUrl}/api/v1/conversations?max_id=\${maxId}>; rel="next"\`); ${MARKER}
|
||||
} ${MARKER}
|
||||
${MARKER}
|
||||
res.json(result); ${MARKER}
|
||||
} catch (error) { ${MARKER}
|
||||
next(error); ${MARKER}
|
||||
} ${MARKER}
|
||||
}); ${MARKER}
|
||||
|
||||
// Mark conversation as read ${MARKER}
|
||||
router.post("/api/v1/conversations/:id/read", async (req, res, next) => { ${MARKER}
|
||||
try { ${MARKER}
|
||||
const collections = req.app.locals.mastodonCollections; ${MARKER}
|
||||
const baseUrl = \`\${req.protocol}://\${req.get("host")}\`; ${MARKER}
|
||||
const { serializeAccount } = await import("../entities/account.js"); ${MARKER}
|
||||
const { remoteActorId } = await import("../helpers/id-mapping.js"); ${MARKER}
|
||||
${MARKER}
|
||||
if (!collections?.ap_messages) { ${MARKER}
|
||||
return res.status(404).json({ error: "Not found" }); ${MARKER}
|
||||
} ${MARKER}
|
||||
${MARKER}
|
||||
// Find the conversation partner whose hashed actorUrl matches the :id ${MARKER}
|
||||
const allPartners = await collections.ap_messages.aggregate([ ${MARKER}
|
||||
{ $group: { _id: "$conversationId" } }, ${MARKER}
|
||||
]).toArray(); ${MARKER}
|
||||
${MARKER}
|
||||
const partner = allPartners.find( ${MARKER}
|
||||
(p) => remoteActorId(p._id) === req.params.id ${MARKER}
|
||||
); ${MARKER}
|
||||
${MARKER}
|
||||
if (!partner) { ${MARKER}
|
||||
return res.status(404).json({ error: "Conversation not found" }); ${MARKER}
|
||||
} ${MARKER}
|
||||
${MARKER}
|
||||
// Mark all messages from this partner as read ${MARKER}
|
||||
await collections.ap_messages.updateMany( ${MARKER}
|
||||
{ conversationId: partner._id, read: false }, ${MARKER}
|
||||
{ $set: { read: true } }, ${MARKER}
|
||||
); ${MARKER}
|
||||
${MARKER}
|
||||
// Return the updated conversation ${MARKER}
|
||||
const lastMsg = await collections.ap_messages ${MARKER}
|
||||
.findOne({ conversationId: partner._id }, { sort: { published: -1 } }); ${MARKER}
|
||||
${MARKER}
|
||||
if (!lastMsg) { ${MARKER}
|
||||
return res.status(404).json({ error: "No messages" }); ${MARKER}
|
||||
} ${MARKER}
|
||||
${MARKER}
|
||||
const convId = remoteActorId(partner._id); ${MARKER}
|
||||
const account = serializeAccount( ${MARKER}
|
||||
{ ${MARKER}
|
||||
name: lastMsg.actorName, ${MARKER}
|
||||
url: lastMsg.actorUrl, ${MARKER}
|
||||
photo: lastMsg.actorPhoto, ${MARKER}
|
||||
handle: lastMsg.actorHandle, ${MARKER}
|
||||
}, ${MARKER}
|
||||
{ baseUrl }, ${MARKER}
|
||||
); ${MARKER}
|
||||
${MARKER}
|
||||
res.json({ ${MARKER}
|
||||
id: convId, ${MARKER}
|
||||
unread: false, ${MARKER}
|
||||
last_status: { ${MARKER}
|
||||
id: lastMsg._id.toString(), ${MARKER}
|
||||
created_at: lastMsg.published || new Date().toISOString(), ${MARKER}
|
||||
in_reply_to_id: null, ${MARKER}
|
||||
in_reply_to_account_id: null, ${MARKER}
|
||||
sensitive: false, ${MARKER}
|
||||
spoiler_text: "", ${MARKER}
|
||||
visibility: "direct", ${MARKER}
|
||||
language: null, ${MARKER}
|
||||
uri: lastMsg.uid || "", ${MARKER}
|
||||
url: lastMsg.uid || "", ${MARKER}
|
||||
replies_count: 0, ${MARKER}
|
||||
reblogs_count: 0, ${MARKER}
|
||||
favourites_count: 0, ${MARKER}
|
||||
edited_at: null, ${MARKER}
|
||||
favourited: false, ${MARKER}
|
||||
reblogged: false, ${MARKER}
|
||||
muted: false, ${MARKER}
|
||||
bookmarked: false, ${MARKER}
|
||||
pinned: false, ${MARKER}
|
||||
content: lastMsg.content?.html || lastMsg.content?.text || "", ${MARKER}
|
||||
filtered: null, ${MARKER}
|
||||
reblog: null, ${MARKER}
|
||||
application: null, ${MARKER}
|
||||
account, ${MARKER}
|
||||
media_attachments: [], ${MARKER}
|
||||
mentions: [], ${MARKER}
|
||||
tags: [], ${MARKER}
|
||||
emojis: [], ${MARKER}
|
||||
card: null, ${MARKER}
|
||||
poll: null, ${MARKER}
|
||||
}, ${MARKER}
|
||||
accounts: [account], ${MARKER}
|
||||
}); ${MARKER}
|
||||
} catch (error) { ${MARKER}
|
||||
next(error); ${MARKER}
|
||||
} ${MARKER}
|
||||
}); ${MARKER}`;
|
||||
|
||||
let stubsDone = false;
|
||||
for (const f of STUBS_CANDIDATES) {
|
||||
if (!(await fileExists(f))) continue;
|
||||
const src = await readFile(f, "utf8");
|
||||
if (src.includes(MARKER)) {
|
||||
console.log(`[postinstall] ${SCRIPT}: conversations endpoint already applied in ${f}`);
|
||||
stubsDone = true; break;
|
||||
}
|
||||
if (!src.includes(CONVERSATIONS_STUB)) {
|
||||
console.warn(`[postinstall] ${SCRIPT}: conversations stub not found in ${f}`);
|
||||
continue;
|
||||
}
|
||||
const updated = src.replace(CONVERSATIONS_STUB, CONVERSATIONS_IMPL);
|
||||
await writeFile(f, updated, "utf8");
|
||||
console.log(`[postinstall] ${SCRIPT}: applied conversations endpoint to ${f}`);
|
||||
total++; stubsDone = true; break;
|
||||
}
|
||||
if (!stubsDone) console.log(`[postinstall] ${SCRIPT}: conversations stub — no target file found or no changes`);
|
||||
|
||||
// ── Run backfill ────────────────────────────────────────────────────────────
|
||||
await backfillMessages();
|
||||
|
||||
console.log(`[postinstall] ${SCRIPT}: done (${total} patch(es) applied)`);
|
||||
@@ -1,273 +0,0 @@
|
||||
/**
|
||||
* Consolidated patch: AP federation infrastructure.
|
||||
*
|
||||
* Absorbs:
|
||||
* - patch-ap-federation-bridge-base-url (federation-bridge.js + index.js)
|
||||
* - patch-ap-signature-host-header (federation-bridge.js)
|
||||
* - patch-ap-inbox-delivery-debug (federation-setup.js + federation-bridge.js)
|
||||
* - patch-ap-inbox-publication-url (federation-setup.js + inbox-handlers.js)
|
||||
* - patch-ap-webfinger-before-auth (index.js)
|
||||
* - patch-ap-mastodon-delete-fix Change A only (index.js)
|
||||
*
|
||||
* ORDERING within federation-bridge.js entries:
|
||||
* federation-bridge-base-url patches MUST come before inbox-delivery-debug
|
||||
* because delivery-debug Fix B anchors on the "// ap-base-url patch" comment
|
||||
* injected by the base-url patch.
|
||||
*/
|
||||
|
||||
import { access, readFile, writeFile } from "node:fs/promises";
|
||||
|
||||
const AP_BASE = "@rmdes/indiekit-endpoint-activitypub";
|
||||
const AP_ROOTS = [
|
||||
`node_modules/${AP_BASE}`,
|
||||
`node_modules/@indiekit/indiekit/node_modules/${AP_BASE}`,
|
||||
];
|
||||
|
||||
function apPath(rel) {
|
||||
return AP_ROOTS.map(r => `${r}/${rel}`);
|
||||
}
|
||||
|
||||
async function fileExists(p) {
|
||||
try { await access(p); return true; } catch { return false; }
|
||||
}
|
||||
|
||||
async function applyPatch(filePath, marker, oldSnippet, newSnippet) {
|
||||
if (!(await fileExists(filePath))) return "file_not_found";
|
||||
const src = await readFile(filePath, "utf8");
|
||||
if (src.includes(marker)) return "already_applied";
|
||||
if (!src.includes(oldSnippet)) return "snippet_not_found";
|
||||
await writeFile(filePath, src.replace(oldSnippet, newSnippet), "utf8");
|
||||
return "applied";
|
||||
}
|
||||
|
||||
const SCRIPT = "patch-ap-federation-infra";
|
||||
|
||||
const PATCHES = [
|
||||
// ── federation-bridge-base-url: fromExpressRequest signature ────────────────
|
||||
// ORDERING: must come before inbox-delivery-debug entries
|
||||
{
|
||||
name: "bridge-base-url/fromExpressRequest-sig",
|
||||
files: apPath("lib/federation-bridge.js"),
|
||||
marker: "// ap-base-url patch",
|
||||
oldSnippet: `export function fromExpressRequest(req) {
|
||||
const url = \`\${req.protocol}://\${req.get("host")}\${req.originalUrl}\`;`,
|
||||
newSnippet: `export function fromExpressRequest(req, baseUrl) { // ap-base-url patch
|
||||
const url = baseUrl
|
||||
? \`\${baseUrl.replace(/\\/$/, "")}\${req.originalUrl}\` // ap-base-url patch
|
||||
: \`\${req.protocol}://\${req.get("host")}\${req.originalUrl}\`;`,
|
||||
},
|
||||
|
||||
// ── federation-bridge-base-url: createFedifyMiddleware signature ─────────────
|
||||
{
|
||||
name: "bridge-base-url/createFedifyMiddleware-sig",
|
||||
files: apPath("lib/federation-bridge.js"),
|
||||
marker: "// ap-base-url patch",
|
||||
oldSnippet: `export function createFedifyMiddleware(federation, contextDataFactory) {`,
|
||||
newSnippet: `export function createFedifyMiddleware(federation, contextDataFactory, publicationUrl) { // ap-base-url patch`,
|
||||
},
|
||||
|
||||
// ── federation-bridge-base-url: fromExpressRequest call ──────────────────────
|
||||
{
|
||||
name: "bridge-base-url/fromExpressRequest-call",
|
||||
files: apPath("lib/federation-bridge.js"),
|
||||
marker: "// ap-base-url patch",
|
||||
oldSnippet: ` const request = fromExpressRequest(req);`,
|
||||
newSnippet: ` const request = fromExpressRequest(req, publicationUrl); // ap-base-url patch`,
|
||||
},
|
||||
|
||||
// ── federation-bridge-base-url: index.js createFedifyMiddleware call ─────────
|
||||
{
|
||||
name: "bridge-base-url/index-createFedifyMiddleware-call",
|
||||
files: apPath("index.js"),
|
||||
marker: "// ap-base-url patch",
|
||||
oldSnippet: ` this._fedifyMiddleware = createFedifyMiddleware(federation, () => ({}));`,
|
||||
newSnippet: ` this._fedifyMiddleware = createFedifyMiddleware(federation, () => ({}), this._publicationUrl); // ap-base-url patch`,
|
||||
},
|
||||
|
||||
// ── signature-host-header: normalise "host" header in fromExpressRequest ─────
|
||||
{
|
||||
name: "signature-host-header/normalise-host",
|
||||
files: apPath("lib/federation-bridge.js"),
|
||||
marker: "// [patch] ap-signature-host-header",
|
||||
oldSnippet: ` for (const [key, value] of Object.entries(req.headers)) {
|
||||
if (Array.isArray(value)) {
|
||||
for (const v of value) headers.append(key, v);
|
||||
} else if (typeof value === "string") {
|
||||
headers.append(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
let body;`,
|
||||
newSnippet: ` for (const [key, value] of Object.entries(req.headers)) {
|
||||
if (Array.isArray(value)) {
|
||||
for (const v of value) headers.append(key, v);
|
||||
} else if (typeof value === "string") {
|
||||
headers.append(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Normalise "host" to the public hostname so Fedify's HTTP Signature
|
||||
// verifier reconstructs the same signed-string the remote server created.
|
||||
// Without this, nginx may forward an internal Host (e.g. "10.100.0.20")
|
||||
// which doesn't match what the sender signed, causing every inbox POST
|
||||
// to fail with "Failed to verify the request's HTTP Signatures". // [patch] ap-signature-host-header
|
||||
if (baseUrl) {
|
||||
try {
|
||||
const _canonicalHost = new URL(baseUrl).host; // e.g. "blog.giersig.eu"
|
||||
headers.set("host", _canonicalHost);
|
||||
} catch { /* invalid baseUrl — leave header as-is */ }
|
||||
}
|
||||
|
||||
let body;`,
|
||||
},
|
||||
|
||||
// ── inbox-delivery-debug Fix A: federation-setup.js — inbox logger level ─────
|
||||
{
|
||||
name: "inbox-delivery-debug/inbox-logger-level",
|
||||
files: apPath("lib/federation-setup.js"),
|
||||
marker: "// [patch] ap-inbox-delivery-debug-A-fatal",
|
||||
oldSnippet: ` {
|
||||
// Surfacing real verification failures (wrong key, clock skew, digest
|
||||
// mismatch) at "error" level while keeping high-volume key-fetch
|
||||
// 404/410 warnings from deleted actors silent. // [patch] ap-inbox-delivery-debug-A
|
||||
category: ["fedify", "federation", "inbox"],
|
||||
sinks: ["console"],
|
||||
lowestLevel: "error",
|
||||
},`,
|
||||
newSnippet: ` {
|
||||
// Noise guard: HTTP Signature verification failures are expected for
|
||||
// incoming activities from servers with expired/gone keys (e.g. deleted
|
||||
// actors, migrated servers). These produce high log volume with no
|
||||
// actionable signal — suppress everything below fatal. // [patch] ap-inbox-delivery-debug-A-fatal
|
||||
category: ["fedify", "federation", "inbox"],
|
||||
sinks: ["console"],
|
||||
lowestLevel: "fatal",
|
||||
},`,
|
||||
},
|
||||
|
||||
// ── inbox-delivery-debug Fix B: federation-bridge.js — request-level logging ─
|
||||
// ORDERING: must come AFTER bridge-base-url patches (anchors on "// ap-base-url patch")
|
||||
{
|
||||
name: "inbox-delivery-debug/bridge-request-log",
|
||||
files: apPath("lib/federation-bridge.js"),
|
||||
marker: "// [patch] ap-inbox-delivery-debug-B",
|
||||
oldSnippet: ` const request = fromExpressRequest(req, publicationUrl); // ap-base-url patch`,
|
||||
newSnippet: ` // Log incoming inbox POSTs before Fedify signature check. // [patch] ap-inbox-delivery-debug-B
|
||||
// Enabled by AP_LOG_LEVEL=debug or AP_DEBUG=1.
|
||||
if (
|
||||
(process.env.AP_LOG_LEVEL === "debug" || process.env.AP_DEBUG === "1") &&
|
||||
req.method === "POST" &&
|
||||
(req.path.includes("/inbox") || req.path.includes("/users/"))
|
||||
) {
|
||||
const _bct = (req.headers["content-type"] || "").split(";")[0].trim();
|
||||
const _bsz = req._rawBody?.length ?? (req.body ? "pre-parsed" : "none");
|
||||
console.info(\`[AP-inbox] POST \${req.path} ct=\${_bct} body=\${_bsz}B\`);
|
||||
}
|
||||
const request = fromExpressRequest(req, publicationUrl); // ap-base-url patch`,
|
||||
},
|
||||
|
||||
// ── inbox-publication-url Fix A: federation-setup.js — set _publicationUrl ───
|
||||
{
|
||||
name: "inbox-publication-url/set-publicationUrl",
|
||||
files: apPath("lib/federation-setup.js"),
|
||||
marker: "// [patch] ap-inbox-publication-url",
|
||||
oldSnippet: ` registerInboxListeners(inboxChain, {
|
||||
collections,
|
||||
handle,
|
||||
storeRawActivities,
|
||||
});`,
|
||||
newSnippet: ` // Expose publicationUrl on collections so inbox handlers can gate // [patch] ap-inbox-publication-url
|
||||
// notifications/timeline-storage to our own content only.
|
||||
collections._publicationUrl = publicationUrl;
|
||||
registerInboxListeners(inboxChain, {
|
||||
collections,
|
||||
handle,
|
||||
storeRawActivities,
|
||||
});`,
|
||||
},
|
||||
|
||||
// ── inbox-publication-url Fix B: inbox-handlers.js — store reply from non-follower
|
||||
{
|
||||
name: "inbox-publication-url/store-reply-non-follower",
|
||||
files: apPath("lib/inbox-handlers.js"),
|
||||
marker: "// [patch] ap-inbox-publication-url",
|
||||
oldSnippet: ` } else if (collections.ap_followed_tags) {
|
||||
// Not a followed account — check if the post's hashtags match any followed tags`,
|
||||
newSnippet: ` } else if (pubUrl && inReplyTo && inReplyTo.startsWith(pubUrl)) {
|
||||
// Reply to our post from a non-followed account — store in timeline // [patch] ap-inbox-publication-url
|
||||
// so it appears in the Mastodon client API's conversation/notification view.
|
||||
try {
|
||||
const timelineItem = await extractObjectData(object, {
|
||||
actorFallback: actorObj,
|
||||
documentLoader: authLoader,
|
||||
});
|
||||
timelineItem.visibility = computeVisibility(object);
|
||||
await addTimelineItem(collections, timelineItem);
|
||||
} catch (error) {
|
||||
console.error("[inbox-handlers] Failed to store reply timeline item:", error.message);
|
||||
}
|
||||
} else if (collections.ap_followed_tags) {
|
||||
// Not a followed account — check if the post's hashtags match any followed tags`,
|
||||
},
|
||||
|
||||
// ── webfinger-before-auth: extend Fedify delegation to /.well-known/ ─────────
|
||||
{
|
||||
name: "webfinger-before-auth/extend-discovery-routes",
|
||||
files: apPath("index.js"),
|
||||
marker: "// ap-webfinger-before-auth patch",
|
||||
oldSnippet: ` if (!self._fedifyMiddleware) return next();
|
||||
if (req.method !== "GET" && req.method !== "HEAD") return next();
|
||||
// Only delegate to Fedify for NodeInfo data endpoint (/nodeinfo/2.1).
|
||||
// All other paths in this root-mounted router are handled by the
|
||||
// content negotiation catch-all below. Passing arbitrary paths like
|
||||
// /notes/... to Fedify causes harmless but noisy 404 warnings.
|
||||
if (!req.path.startsWith("/nodeinfo/")) return next();
|
||||
return self._fedifyMiddleware(req, res, next);`,
|
||||
newSnippet: ` if (!self._fedifyMiddleware) return next();
|
||||
if (req.method !== "GET" && req.method !== "HEAD") return next();
|
||||
// Delegate to Fedify for discovery endpoints:
|
||||
// /.well-known/webfinger — actor/resource identity resolution
|
||||
// /.well-known/nodeinfo — server capabilities advertised to the fediverse
|
||||
// /nodeinfo/2.1 — NodeInfo data document
|
||||
// This router is mounted at "/" so req.url retains the full path, allowing
|
||||
// Fedify to match its internal routes correctly. (routesWellKnown strips
|
||||
// the /.well-known/ prefix, causing Fedify to miss the webfinger route.)
|
||||
// ap-webfinger-before-auth patch
|
||||
const isDiscoveryRoute =
|
||||
req.path.startsWith("/nodeinfo/") ||
|
||||
req.path.startsWith("/.well-known/");
|
||||
if (!isDiscoveryRoute) return next();
|
||||
return self._fedifyMiddleware(req, res, next);`,
|
||||
},
|
||||
|
||||
// ── mastodon-delete-fix Change A: expose broadcastDelete in pluginOptions ─────
|
||||
{
|
||||
name: "mastodon-delete-fix/broadcastDelete-in-pluginOptions",
|
||||
files: apPath("index.js"),
|
||||
marker: "// [patch] ap-mastodon-delete-fix",
|
||||
oldSnippet: ` loadRsaKey: () => pluginRef._loadRsaPrivateKey(),
|
||||
broadcastActorUpdate: () => pluginRef.broadcastActorUpdate(),`,
|
||||
newSnippet: ` loadRsaKey: () => pluginRef._loadRsaPrivateKey(),
|
||||
broadcastActorUpdate: () => pluginRef.broadcastActorUpdate(),
|
||||
broadcastDelete: (url) => pluginRef.broadcastDelete(url), // [patch] ap-mastodon-delete-fix`,
|
||||
},
|
||||
];
|
||||
|
||||
let total = 0;
|
||||
for (const p of PATCHES) {
|
||||
let done = false;
|
||||
for (const f of p.files) {
|
||||
const r = await applyPatch(f, p.marker, p.oldSnippet, p.newSnippet);
|
||||
if (r === "applied") {
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ${p.name} to ${f}`);
|
||||
total++; done = true; break;
|
||||
} else if (r === "already_applied") {
|
||||
console.log(`[postinstall] ${SCRIPT}: ${p.name} already applied in ${f}`);
|
||||
done = true; break;
|
||||
} else if (r === "snippet_not_found") {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ${p.name} — snippet not found in ${f}, skipping`);
|
||||
}
|
||||
}
|
||||
if (!done) console.log(`[postinstall] ${SCRIPT}: ${p.name} — no target file found`);
|
||||
}
|
||||
console.log(`[postinstall] ${SCRIPT}: done (${total} patch(es) applied)`);
|
||||
@@ -1,355 +0,0 @@
|
||||
/**
|
||||
* Patch: AP OG image generation in jf2-to-as2.js.
|
||||
*
|
||||
* Adds `image` property to ActivityStreams objects using:
|
||||
* - post photo attachment (if present), or
|
||||
* - generated OG image at /og/{slug}.png
|
||||
*
|
||||
* Note: All other patches previously in this file have been integrated
|
||||
* into the @rmdes/indiekit-endpoint-activitypub source directly:
|
||||
* - patch-ap-compose-default-checked
|
||||
* - patch-ap-repost-announce-fix
|
||||
* - patch-ap-interactions-send-guard
|
||||
* - patch-ap-interactions-cleanup-preserve
|
||||
* - patch-ap-interactions-accounts-uid
|
||||
* - patch-inbox-ignore-view-activity
|
||||
* - patch-inbox-skip-view-activity-parse (raw body fixes)
|
||||
*/
|
||||
|
||||
import { access, readFile, writeFile } from "node:fs/promises";
|
||||
|
||||
const AP_BASE = "@rmdes/indiekit-endpoint-activitypub";
|
||||
const AP_ROOTS = [
|
||||
`node_modules/${AP_BASE}`,
|
||||
`node_modules/@indiekit/indiekit/node_modules/${AP_BASE}`,
|
||||
];
|
||||
|
||||
function apPath(rel) {
|
||||
return AP_ROOTS.map(r => `${r}/${rel}`);
|
||||
}
|
||||
|
||||
async function fileExists(p) {
|
||||
try { await access(p); return true; } catch { return false; }
|
||||
}
|
||||
|
||||
const SCRIPT = "patch-ap-mastodon-misc";
|
||||
|
||||
// ── patch-ap-og-image: regex-based, two replacements per file ─────────────────
|
||||
|
||||
const OG_MARKER = "// og-image-v2";
|
||||
const OG_CANDIDATES = apPath("lib/jf2-to-as2.js");
|
||||
|
||||
const CN_BLOCK_RE =
|
||||
/ const og(?:Slug|Match) = postUrl && postUrl\.match\([^\n]+\n if \(og(?:Slug|Match)\) \{[\s\S]*?\n \}/;
|
||||
|
||||
const AS2_BLOCK_RE =
|
||||
/ const og(?:SlugF|MatchF) = postUrl && postUrl\.match\([^\n]+\n if \(og(?:SlugF|MatchF)\) \{[\s\S]*?\n \}/;
|
||||
|
||||
const NEW_CN = ` const _ogPhoto = properties.photo && asArray(properties.photo)[0]; // og-image-v2
|
||||
const _ogPhotoUrl = _ogPhoto && (typeof _ogPhoto === "string" ? _ogPhoto : _ogPhoto.url); // og-image-v2
|
||||
const ogSlug = postUrl && postUrl.match(/\\/([\\\w-]+)\\/?$/)?.[1]; // og-image-v2
|
||||
const _ogUrl = _ogPhotoUrl
|
||||
? resolveMediaUrl(_ogPhotoUrl, publicationUrl) // og-image-v2
|
||||
: ogSlug ? \`\${publicationUrl.replace(/\\/$/, "")}/og/\${ogSlug}.png\` : null; // og-image-v2
|
||||
if (_ogUrl) { // og-image-v2
|
||||
object.image = {
|
||||
type: "Image",
|
||||
url: _ogUrl, // og-image-v2
|
||||
mediaType: _ogPhotoUrl ? guessMediaType(_ogUrl) : "image/png", // og-image-v2
|
||||
};
|
||||
}`;
|
||||
|
||||
const NEW_AS2 = ` const _ogPhotoF = properties.photo && asArray(properties.photo)[0]; // og-image-v2
|
||||
const _ogPhotoUrlF = _ogPhotoF && (typeof _ogPhotoF === "string" ? _ogPhotoF : _ogPhotoF.url); // og-image-v2
|
||||
const ogSlugF = postUrl && postUrl.match(/\\/([\\\w-]+)\\/?$/)?.[1]; // og-image-v2
|
||||
const _ogUrlF = _ogPhotoUrlF
|
||||
? resolveMediaUrl(_ogPhotoUrlF, publicationUrl) // og-image-v2
|
||||
: ogSlugF ? \`\${publicationUrl.replace(/\\/$/, "")}/og/\${ogSlugF}.png\` : null; // og-image-v2
|
||||
if (_ogUrlF) { // og-image-v2
|
||||
noteOptions.image = new Image({
|
||||
url: new URL(_ogUrlF), // og-image-v2
|
||||
mediaType: _ogPhotoUrlF ? guessMediaType(_ogUrlF) : "image/png", // og-image-v2
|
||||
});
|
||||
}`;
|
||||
|
||||
let total = 0;
|
||||
let ogDone = false;
|
||||
for (const f of OG_CANDIDATES) {
|
||||
if (!(await fileExists(f))) continue;
|
||||
const src = await readFile(f, "utf8");
|
||||
if (src.includes(OG_MARKER)) {
|
||||
console.log(`[postinstall] ${SCRIPT}: ap-og-image already applied in ${f}`);
|
||||
ogDone = true; break;
|
||||
}
|
||||
let updated = src;
|
||||
let changed = false;
|
||||
if (CN_BLOCK_RE.test(updated)) {
|
||||
updated = updated.replace(CN_BLOCK_RE, NEW_CN);
|
||||
changed = true;
|
||||
} else {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ap-og-image — jf2ToActivityStreams OG block not found in ${f}`);
|
||||
}
|
||||
if (AS2_BLOCK_RE.test(updated)) {
|
||||
updated = updated.replace(AS2_BLOCK_RE, NEW_AS2);
|
||||
changed = true;
|
||||
} else {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ap-og-image — jf2ToAS2Activity OG block not found in ${f}`);
|
||||
}
|
||||
if (changed && updated !== src) {
|
||||
await writeFile(f, updated, "utf8");
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ap-og-image to ${f}`);
|
||||
total++; ogDone = true; break;
|
||||
}
|
||||
}
|
||||
if (!ogDone) console.log(`[postinstall] ${SCRIPT}: ap-og-image — no target file found or no changes`);
|
||||
|
||||
// ── patch-ap-accounts-id-hash: use URL-hash ID instead of MongoDB _id ─────────
|
||||
// GET /api/v1/accounts/:id, /followers, /following all checked profile._id.toString()
|
||||
// against the client-provided id, but verify_credentials returns remoteActorId(profile.url)
|
||||
// (sha256 hash). The ObjectId and the hash never match → followers/following always [].
|
||||
|
||||
const HASH_MARKER = "// [patch] ap-accounts-id-hash";
|
||||
const ACCOUNTS_CANDIDATES = apPath("lib/mastodon/routes/accounts.js");
|
||||
|
||||
const HASH_PATCHES = [
|
||||
{
|
||||
old: ` if (profile && profile._id.toString() === id) {`,
|
||||
new: ` if (profile && remoteActorId(profile.url) === id) { ${HASH_MARKER}`,
|
||||
},
|
||||
{
|
||||
old: ` // Only serve followers for the local account\n if (!profile || profile._id.toString() !== id) {`,
|
||||
new: ` // Only serve followers for the local account\n if (!profile || remoteActorId(profile.url) !== id) { ${HASH_MARKER}`,
|
||||
},
|
||||
{
|
||||
old: ` // Only serve following for the local account\n if (!profile || profile._id.toString() !== id) {`,
|
||||
new: ` // Only serve following for the local account\n if (!profile || remoteActorId(profile.url) !== id) { ${HASH_MARKER}`,
|
||||
},
|
||||
];
|
||||
|
||||
let hashDone = false;
|
||||
for (const f of ACCOUNTS_CANDIDATES) {
|
||||
if (!(await fileExists(f))) continue;
|
||||
const src = await readFile(f, "utf8");
|
||||
if (src.includes(HASH_MARKER)) {
|
||||
console.log(`[postinstall] ${SCRIPT}: ap-accounts-id-hash already applied in ${f}`);
|
||||
hashDone = true; break;
|
||||
}
|
||||
let updated = src;
|
||||
let changed = false;
|
||||
for (const { old, new: replacement } of HASH_PATCHES) {
|
||||
if (updated.includes(old)) {
|
||||
updated = updated.replace(old, replacement);
|
||||
changed = true;
|
||||
} else {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ap-accounts-id-hash — snippet not found in ${f}: ${old.slice(0, 60)}...`);
|
||||
}
|
||||
}
|
||||
if (changed && updated !== src) {
|
||||
await writeFile(f, updated, "utf8");
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ap-accounts-id-hash to ${f}`);
|
||||
total++; hashDone = true; break;
|
||||
}
|
||||
}
|
||||
if (!hashDone) console.log(`[postinstall] ${SCRIPT}: ap-accounts-id-hash — no target file found or no changes`);
|
||||
|
||||
// ── patch-ap-search-url-resolve: resolve posts by URL when resolve=true ────────
|
||||
// Phanpy calls GET /api/v2/search?q=<post_url>&resolve=true&type=statuses before
|
||||
// liking/boosting a remote post. The search endpoint only did content-text regex
|
||||
// search — never found posts by URL → statuses:[] → "Failed to load post" error.
|
||||
// Fix: when resolve=true and query is a URL, look up ap_timeline by uid/url first.
|
||||
// Two targeted replacements:
|
||||
// A) inject URL-resolve block before the content search (unique anchor: "results.statuses = items.map")
|
||||
// B) change assignment to push so existing array stays intact
|
||||
|
||||
const SEARCH_MARKER = "// [patch] ap-search-url-resolve";
|
||||
const SEARCH_CANDIDATES = apPath("lib/mastodon/routes/search.js");
|
||||
|
||||
// Replacement A: inject URL-resolve block + switch assignment to push
|
||||
// Anchor chosen because it is unique in search.js.
|
||||
const SEARCH_OLD_A = " results.statuses = items.map((item) =>";
|
||||
const SEARCH_NEW_A = [
|
||||
" // URL resolve: find post by AP URL before content search. " + SEARCH_MARKER,
|
||||
" if (resolve && query.startsWith(\"http\")) { " + SEARCH_MARKER,
|
||||
" const resolvedItem = await collections.ap_timeline.findOne({ " + SEARCH_MARKER,
|
||||
" isContext: { $ne: true }, " + SEARCH_MARKER,
|
||||
" $or: [{ uid: query }, { url: query }], " + SEARCH_MARKER,
|
||||
" }); " + SEARCH_MARKER,
|
||||
" if (resolvedItem) { " + SEARCH_MARKER,
|
||||
" results.statuses.push(serializeStatus(resolvedItem, { " + SEARCH_MARKER,
|
||||
" baseUrl, favouritedIds: new Set(), rebloggedIds: new Set(), " + SEARCH_MARKER,
|
||||
" bookmarkedIds: new Set(), pinnedIds: new Set(), " + SEARCH_MARKER,
|
||||
" })); " + SEARCH_MARKER,
|
||||
" } " + SEARCH_MARKER,
|
||||
" } " + SEARCH_MARKER,
|
||||
" results.statuses.push(...items.map((item) =>",
|
||||
].join("\n");
|
||||
|
||||
// Replacement B: close the push call (was ` );` → ` ));`)
|
||||
// Unique because it is immediately followed by the closing brace + Hashtag comment.
|
||||
const SEARCH_OLD_B = " }),\n );\n }\n\n // ─── Hashtag";
|
||||
const SEARCH_NEW_B = " }),\n ));\n }\n\n // ─── Hashtag";
|
||||
|
||||
let searchDone = false;
|
||||
for (const f of SEARCH_CANDIDATES) {
|
||||
if (!(await fileExists(f))) continue;
|
||||
const src = await readFile(f, "utf8");
|
||||
if (src.includes(SEARCH_MARKER)) {
|
||||
console.log(`[postinstall] ${SCRIPT}: ap-search-url-resolve already applied in ${f}`);
|
||||
searchDone = true; break;
|
||||
}
|
||||
let updated = src;
|
||||
let changed = false;
|
||||
if (updated.includes(SEARCH_OLD_A)) {
|
||||
updated = updated.replace(SEARCH_OLD_A, SEARCH_NEW_A);
|
||||
changed = true;
|
||||
} else {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ap-search-url-resolve A — anchor not found in ${f}`);
|
||||
}
|
||||
if (updated.includes(SEARCH_OLD_B)) {
|
||||
updated = updated.replace(SEARCH_OLD_B, SEARCH_NEW_B);
|
||||
} else {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ap-search-url-resolve B — closing anchor not found in ${f}`);
|
||||
changed = false; // both must succeed
|
||||
}
|
||||
if (changed && updated !== src) {
|
||||
await writeFile(f, updated, "utf8");
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ap-search-url-resolve to ${f}`);
|
||||
total++; searchDone = true; break;
|
||||
}
|
||||
}
|
||||
if (!searchDone) console.log(`[postinstall] ${SCRIPT}: ap-search-url-resolve — no target file found or no changes`);
|
||||
|
||||
// ── patch-ap-search-url-resolve-remote: fetch AP object when not in local timeline ──
|
||||
// The v1 patch only looked up posts already in ap_timeline. Posts from Threads
|
||||
// (or any instance not yet federated to this server) were never stored there, so
|
||||
// the resolve block returned nothing. This patch adds an `else if` that fetches
|
||||
// the AP object remotely via lookupWithSecurity, stores it via extractObjectData +
|
||||
// addTimelineItem, then returns it. This makes like/boost work on any public post,
|
||||
// not just ones already in the local timeline.
|
||||
|
||||
const SEARCH_REMOTE_MARKER = "// [patch] ap-search-url-resolve-remote";
|
||||
// Anchor: the two consecutive closing braces at the end of the v1 URL-resolve block.
|
||||
// They appear exactly once in the patched search.js.
|
||||
const SEARCH_REMOTE_OLD = ` } // [patch] ap-search-url-resolve
|
||||
} // [patch] ap-search-url-resolve`;
|
||||
const SEARCH_REMOTE_NEW = [
|
||||
` } else if (pluginOptions.federation) { ${SEARCH_REMOTE_MARKER}`,
|
||||
` try { ${SEARCH_REMOTE_MARKER}`,
|
||||
` const { lookupWithSecurity } = await import("../../lookup-helpers.js"); ${SEARCH_REMOTE_MARKER}`,
|
||||
` const { extractObjectData } = await import("../../timeline-store.js"); ${SEARCH_REMOTE_MARKER}`,
|
||||
` const { addTimelineItem } = await import("../../storage/timeline.js"); ${SEARCH_REMOTE_MARKER}`,
|
||||
` const _rCtx = pluginOptions.federation.createContext(new URL(pluginOptions.publicationUrl), { handle: pluginOptions.handle, publicationUrl: pluginOptions.publicationUrl }); ${SEARCH_REMOTE_MARKER}`,
|
||||
` const _rDl = await _rCtx.getDocumentLoader({ identifier: pluginOptions.handle }); ${SEARCH_REMOTE_MARKER}`,
|
||||
` const _rObj = await lookupWithSecurity(_rCtx, new URL(query), { documentLoader: _rDl }); ${SEARCH_REMOTE_MARKER}`,
|
||||
` if (_rObj) { ${SEARCH_REMOTE_MARKER}`,
|
||||
` const _rData = await extractObjectData(_rObj, { documentLoader: _rDl }); ${SEARCH_REMOTE_MARKER}`,
|
||||
` const _rStored = await addTimelineItem(collections, _rData); ${SEARCH_REMOTE_MARKER}`,
|
||||
` if (_rStored) { ${SEARCH_REMOTE_MARKER}`,
|
||||
` results.statuses.push(serializeStatus(_rStored, { ${SEARCH_REMOTE_MARKER}`,
|
||||
` baseUrl, favouritedIds: new Set(), rebloggedIds: new Set(), ${SEARCH_REMOTE_MARKER}`,
|
||||
` bookmarkedIds: new Set(), pinnedIds: new Set(), ${SEARCH_REMOTE_MARKER}`,
|
||||
` })); ${SEARCH_REMOTE_MARKER}`,
|
||||
` } ${SEARCH_REMOTE_MARKER}`,
|
||||
` } ${SEARCH_REMOTE_MARKER}`,
|
||||
` } catch (_rErr) { ${SEARCH_REMOTE_MARKER}`,
|
||||
` console.warn(\`[Mastodon API] search resolve remote fetch failed for \${query}: \${_rErr.message}\`); ${SEARCH_REMOTE_MARKER}`,
|
||||
` } ${SEARCH_REMOTE_MARKER}`,
|
||||
` } // [patch] ap-search-url-resolve`,
|
||||
` } // [patch] ap-search-url-resolve`,
|
||||
].join("\n");
|
||||
|
||||
// ── patch-ap-search-remote-uid-guard: skip AP objects with no uid ─────────────
|
||||
// When extractObjectData fails to resolve the AP object's id (e.g. network
|
||||
// error or malformed response), it returns uid:"". addTimelineItem({uid:""})
|
||||
// then matches/creates a single corrupted document shared by ALL failed lookups.
|
||||
// Any client that receives this garbage status will get a 500 when liking it
|
||||
// (likePost calls new URL("") which throws). Guard: reject rData with empty uid.
|
||||
|
||||
const UID_GUARD_MARKER = "// [patch] ap-search-remote-uid-guard";
|
||||
// Anchor must be the two consecutive remote-marker lines that exist only here.
|
||||
const UID_GUARD_OLD =
|
||||
` const _rData = await extractObjectData(_rObj, { documentLoader: _rDl }); ${SEARCH_REMOTE_MARKER}\n` +
|
||||
` const _rStored = await addTimelineItem(collections, _rData); ${SEARCH_REMOTE_MARKER}`;
|
||||
const UID_GUARD_NEW =
|
||||
` const _rData = await extractObjectData(_rObj, { documentLoader: _rDl }); ${SEARCH_REMOTE_MARKER}\n` +
|
||||
` if (!_rData?.uid) throw new Error("remote AP object has no uid"); ${UID_GUARD_MARKER}\n` +
|
||||
` const _rStored = await addTimelineItem(collections, _rData); ${SEARCH_REMOTE_MARKER}`;
|
||||
|
||||
let searchRemoteDone = false;
|
||||
for (const f of SEARCH_CANDIDATES) {
|
||||
if (!(await fileExists(f))) continue;
|
||||
const src = await readFile(f, "utf8");
|
||||
if (src.includes(SEARCH_REMOTE_MARKER)) {
|
||||
console.log(`[postinstall] ${SCRIPT}: ap-search-url-resolve-remote already applied in ${f}`);
|
||||
searchRemoteDone = true; break;
|
||||
}
|
||||
if (!src.includes(SEARCH_REMOTE_OLD)) {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ap-search-url-resolve-remote — anchor not found in ${f} (v1 patch not applied?)`);
|
||||
continue;
|
||||
}
|
||||
await writeFile(f, src.replace(SEARCH_REMOTE_OLD, SEARCH_REMOTE_NEW), "utf8");
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ap-search-url-resolve-remote to ${f}`);
|
||||
total++; searchRemoteDone = true; break;
|
||||
}
|
||||
if (!searchRemoteDone) console.log(`[postinstall] ${SCRIPT}: ap-search-url-resolve-remote — no target file found or no changes`);
|
||||
|
||||
// uid-guard must run AFTER ap-search-url-resolve-remote (depends on its markers)
|
||||
let uidGuardDone = false;
|
||||
for (const f of SEARCH_CANDIDATES) {
|
||||
if (!(await fileExists(f))) continue;
|
||||
const src = await readFile(f, "utf8");
|
||||
if (src.includes(UID_GUARD_MARKER)) {
|
||||
console.log(`[postinstall] ${SCRIPT}: ap-search-remote-uid-guard already applied in ${f}`);
|
||||
uidGuardDone = true; break;
|
||||
}
|
||||
if (!src.includes(UID_GUARD_OLD)) {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ap-search-remote-uid-guard — anchor not found in ${f}`);
|
||||
continue;
|
||||
}
|
||||
await writeFile(f, src.replace(UID_GUARD_OLD, UID_GUARD_NEW), "utf8");
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ap-search-remote-uid-guard to ${f}`);
|
||||
total++; uidGuardDone = true; break;
|
||||
}
|
||||
if (!uidGuardDone) console.log(`[postinstall] ${SCRIPT}: ap-search-remote-uid-guard — no target file found`);
|
||||
|
||||
// ── patch-ap-resolve-actor-timeout: cap actor lookup at 8 s + log failures ───
|
||||
// When Mastodon client clicks a @mention handle, /api/v1/accounts/lookup calls
|
||||
// resolveRemoteAccount → lookupWithSecurity → ctx.lookupObject (no timeout).
|
||||
// If the remote server hangs, the request hangs indefinitely. Even when it fails
|
||||
// fast, we get 404 with no log entry explaining why.
|
||||
// Fix: wrap lookupWithSecurity in an 8 s timeout; log on failure/null result.
|
||||
|
||||
const ACTOR_TIMEOUT_MARKER = "// [patch] ap-resolve-actor-timeout";
|
||||
const RESOLVE_ACCOUNT_CANDIDATES = apPath("lib/mastodon/helpers/resolve-account.js");
|
||||
|
||||
const ACTOR_TIMEOUT_OLD = ` // Use signed→unsigned fallback so servers rejecting signed GETs still resolve
|
||||
const documentLoader = await ctx.getDocumentLoader({ identifier: handle });
|
||||
const actor = await lookupWithSecurity(ctx, actorUri, { documentLoader });
|
||||
if (!actor) return null;`;
|
||||
|
||||
const ACTOR_TIMEOUT_NEW = ` // Use signed→unsigned fallback so servers rejecting signed GETs still resolve
|
||||
const documentLoader = await ctx.getDocumentLoader({ identifier: handle });
|
||||
// Timeout guard: cap actor fetch at 8 s so hung lookups fail fast. ${ACTOR_TIMEOUT_MARKER}
|
||||
const _aLookupTimeout = (p, ms = 8000) => { const t = new Promise((_, rej) => setTimeout(() => rej(new Error("actor lookup timeout")), ms)); p.catch(() => {}); return Promise.race([p, t]); }; ${ACTOR_TIMEOUT_MARKER}
|
||||
const actor = await _aLookupTimeout(lookupWithSecurity(ctx, actorUri, { documentLoader })).catch(err => { console.warn(\`[Mastodon API] Actor lookup failed for \${acct}: \${err.message}\`); return null; }); ${ACTOR_TIMEOUT_MARKER}
|
||||
if (!actor) { console.warn(\`[Mastodon API] lookupWithSecurity returned null for \${acct}\`); return null; } ${ACTOR_TIMEOUT_MARKER}`;
|
||||
|
||||
let actorTimeoutDone = false;
|
||||
for (const f of RESOLVE_ACCOUNT_CANDIDATES) {
|
||||
if (!(await fileExists(f))) continue;
|
||||
const src = await readFile(f, "utf8");
|
||||
if (src.includes(ACTOR_TIMEOUT_MARKER)) {
|
||||
console.log(`[postinstall] ${SCRIPT}: ap-resolve-actor-timeout already applied in ${f}`);
|
||||
actorTimeoutDone = true; break;
|
||||
}
|
||||
if (!src.includes(ACTOR_TIMEOUT_OLD)) {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ap-resolve-actor-timeout — anchor not found in ${f}`);
|
||||
continue;
|
||||
}
|
||||
await writeFile(f, src.replace(ACTOR_TIMEOUT_OLD, ACTOR_TIMEOUT_NEW), "utf8");
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ap-resolve-actor-timeout to ${f}`);
|
||||
total++; actorTimeoutDone = true; break;
|
||||
}
|
||||
if (!actorTimeoutDone) console.log(`[postinstall] ${SCRIPT}: ap-resolve-actor-timeout — no target file found or no changes`);
|
||||
|
||||
console.log(`[postinstall] ${SCRIPT}: done (${total} patch(es) applied)`);
|
||||
@@ -1,292 +0,0 @@
|
||||
/**
|
||||
* Consolidated patch for lib/mastodon/routes/statuses.js.
|
||||
*
|
||||
* Absorbs:
|
||||
* - patch-ap-mastodon-reply-threading (eager ap_timeline insert on POST /statuses)
|
||||
* - patch-ap-mastodon-status-id (return addTimelineItem _id as status ID)
|
||||
* - patch-ap-mastodon-delete-fix (Change C only — broadcastDelete after timeline removal)
|
||||
* - patch-ap-status-reply-id (Change B only — store inReplyToId in timeline insert)
|
||||
* - patch-ap-interactions-context-state (load real interaction state for thread context)
|
||||
*
|
||||
* Note: patch-ap-mastodon-delete-fix Change A (index.js) is handled separately.
|
||||
* patch-ap-status-reply-id Change A (status.js entity) is upstream-fixed — omitted.
|
||||
*
|
||||
* Patches are applied in order. Patches 2 and 4 depend on markers written by patch 1,
|
||||
* so each patch in the PATCHES array is applied sequentially per file.
|
||||
*/
|
||||
|
||||
import { access, readFile, writeFile } from "node:fs/promises";
|
||||
|
||||
const AP_BASE = "@rmdes/indiekit-endpoint-activitypub";
|
||||
const AP_ROOTS = [
|
||||
`node_modules/${AP_BASE}`,
|
||||
`node_modules/@indiekit/indiekit/node_modules/${AP_BASE}`,
|
||||
];
|
||||
|
||||
function apPath(rel) {
|
||||
return AP_ROOTS.map(r => `${r}/${rel}`);
|
||||
}
|
||||
|
||||
async function fileExists(p) {
|
||||
try { await access(p); return true; } catch { return false; }
|
||||
}
|
||||
|
||||
async function applyPatch(filePath, marker, oldSnippet, newSnippet) {
|
||||
if (!(await fileExists(filePath))) return "file_not_found";
|
||||
const src = await readFile(filePath, "utf8");
|
||||
if (src.includes(marker)) return "already_applied";
|
||||
if (!src.includes(oldSnippet)) return "snippet_not_found";
|
||||
await writeFile(filePath, src.replace(oldSnippet, newSnippet), "utf8");
|
||||
return "applied";
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply multiple replacements to a single file in one read/write cycle.
|
||||
* Returns "already_applied" if marker is found, "applied" if changes were made,
|
||||
* "snippet_not_found" if any required snippet was missing, "file_not_found" if absent.
|
||||
*/
|
||||
async function applyMultiPatch(filePath, marker, replacements) {
|
||||
if (!(await fileExists(filePath))) return "file_not_found";
|
||||
let src = await readFile(filePath, "utf8");
|
||||
if (src.includes(marker)) return "already_applied";
|
||||
|
||||
let updated = src;
|
||||
for (const { oldSnippet, newSnippet, label } of replacements) {
|
||||
if (!updated.includes(oldSnippet)) {
|
||||
console.warn(`[postinstall] patch-ap-mastodon-statuses: snippet "${label}" not found in ${filePath}`);
|
||||
return "snippet_not_found";
|
||||
}
|
||||
updated = updated.replace(oldSnippet, newSnippet);
|
||||
}
|
||||
|
||||
await writeFile(filePath, updated, "utf8");
|
||||
return "applied";
|
||||
}
|
||||
|
||||
const SCRIPT = "patch-ap-mastodon-statuses";
|
||||
|
||||
// ── Patch 1: ap-mastodon-reply-threading ────────────────────────────────────
|
||||
|
||||
const MARKER_THREADING = "// [patch] ap-mastodon-reply-threading";
|
||||
|
||||
const OLD_THREADING = ` // Return a minimal status to the Mastodon client.
|
||||
// No timeline entry is created here — the post will appear in the timeline
|
||||
// after the normal flow: Eleventy rebuild → syndication webhook → AP delivery.
|
||||
const profile = await collections.ap_profile.findOne({});
|
||||
const handle = pluginOptions.handle || "user";`;
|
||||
|
||||
const NEW_THREADING = ` // Return a minimal status to the Mastodon client. ${MARKER_THREADING}
|
||||
// Eagerly insert own post into ap_timeline so the Mastodon client can resolve ${MARKER_THREADING}
|
||||
// in_reply_to_id for this post immediately, without waiting for the build webhook. ${MARKER_THREADING}
|
||||
// The AP syndicator will upsert the same uid later via $setOnInsert (no-op). ${MARKER_THREADING}
|
||||
const profile = await collections.ap_profile.findOne({});
|
||||
const handle = pluginOptions.handle || "user";
|
||||
try { ${MARKER_THREADING}
|
||||
const _ph = (() => { try { return new URL(publicationUrl).hostname; } catch { return ""; } })(); ${MARKER_THREADING}
|
||||
await addTimelineItem(collections, { ${MARKER_THREADING}
|
||||
uid: postUrl, ${MARKER_THREADING}
|
||||
url: postUrl, ${MARKER_THREADING}
|
||||
type: data.properties["post-type"] || "note", ${MARKER_THREADING}
|
||||
content: { text: contentText, html: \`<p>\${contentHtml}</p>\` }, ${MARKER_THREADING}
|
||||
author: { ${MARKER_THREADING}
|
||||
name: profile?.name || handle, ${MARKER_THREADING}
|
||||
url: profile?.url || publicationUrl, ${MARKER_THREADING}
|
||||
photo: profile?.icon || "", ${MARKER_THREADING}
|
||||
handle: \`@\${handle}@\${_ph}\`, ${MARKER_THREADING}
|
||||
emojis: [], ${MARKER_THREADING}
|
||||
bot: false, ${MARKER_THREADING}
|
||||
}, ${MARKER_THREADING}
|
||||
published: data.properties.published || new Date().toISOString(), ${MARKER_THREADING}
|
||||
createdAt: new Date().toISOString(), ${MARKER_THREADING}
|
||||
inReplyTo: inReplyTo || null, ${MARKER_THREADING}
|
||||
visibility: jf2.visibility || "public", ${MARKER_THREADING}
|
||||
sensitive: jf2.sensitive === "true", ${MARKER_THREADING}
|
||||
category: [], ${MARKER_THREADING}
|
||||
counts: { likes: 0, boosts: 0, replies: 0 }, ${MARKER_THREADING}
|
||||
}); ${MARKER_THREADING}
|
||||
} catch (tlErr) { ${MARKER_THREADING}
|
||||
console.warn(\`[Mastodon API] Failed to pre-insert own post into timeline: \${tlErr.message}\`); ${MARKER_THREADING}
|
||||
} ${MARKER_THREADING}`;
|
||||
|
||||
// ── Patch 2: ap-mastodon-status-id (3 replacements, depends on patch 1 markers) ─
|
||||
|
||||
const MARKER_STATUS_ID = "// [patch] ap-mastodon-status-id";
|
||||
|
||||
const STATUS_ID_REPLACEMENTS = [
|
||||
{
|
||||
label: "declare _tlItem before try",
|
||||
oldSnippet: ` try { // [patch] ap-mastodon-reply-threading`,
|
||||
newSnippet: ` let _tlItem = null; ${MARKER_STATUS_ID}
|
||||
try { // [patch] ap-mastodon-reply-threading`,
|
||||
},
|
||||
{
|
||||
label: "capture addTimelineItem return value",
|
||||
oldSnippet: ` await addTimelineItem(collections, { // [patch] ap-mastodon-reply-threading`,
|
||||
newSnippet: ` _tlItem = await addTimelineItem(collections, { // [patch] ap-mastodon-reply-threading ${MARKER_STATUS_ID}`,
|
||||
},
|
||||
{
|
||||
label: "use _tlItem._id as status response ID",
|
||||
oldSnippet: ` id: String(Date.now()),`,
|
||||
newSnippet: ` id: _tlItem?._id?.toString() || String(Date.now()), ${MARKER_STATUS_ID}`,
|
||||
},
|
||||
];
|
||||
|
||||
// ── Patch 3: ap-mastodon-delete-fix Change C (broadcastDelete call) ──────────
|
||||
|
||||
const MARKER_DELETE_FIX = "// [patch] ap-mastodon-delete-fix";
|
||||
|
||||
const OLD_AFTER_DELETE = ` // Delete from timeline
|
||||
await collections.ap_timeline.deleteOne({ _id: item._id });
|
||||
|
||||
// Clean up interactions`;
|
||||
|
||||
const NEW_AFTER_DELETE = ` // Delete from timeline
|
||||
await collections.ap_timeline.deleteOne({ _id: item._id }); ${MARKER_DELETE_FIX}
|
||||
|
||||
// Broadcast AP Delete activity to followers ${MARKER_DELETE_FIX}
|
||||
const _pluginOpts = req.app.locals.mastodonPluginOptions || {};
|
||||
if (_pluginOpts.broadcastDelete && postUrl) {
|
||||
_pluginOpts.broadcastDelete(postUrl).catch((err) =>
|
||||
console.warn(\`[Mastodon API] broadcastDelete failed for \${postUrl}: \${err.message}\`),
|
||||
);
|
||||
}
|
||||
|
||||
// Clean up interactions`;
|
||||
|
||||
// ── Patch 4: ap-status-reply-id Change B (store inReplyToId in timeline insert) ─
|
||||
|
||||
const MARKER_REPLY_ID = "// [patch] ap-status-reply-id";
|
||||
|
||||
const OLD_REPLY_INSERT = ` inReplyTo: inReplyTo || null, // [patch] ap-mastodon-reply-threading`;
|
||||
const NEW_REPLY_INSERT = ` inReplyTo: inReplyTo || null, // [patch] ap-mastodon-reply-threading
|
||||
inReplyToId: inReplyToId || null, ${MARKER_REPLY_ID}`;
|
||||
|
||||
// ── Patch 5: ap-interactions-context-state ───────────────────────────────────
|
||||
|
||||
const MARKER_CTX_STATE = "// [patch] ap-interactions-context-state";
|
||||
|
||||
const OLD_CTX_STATE = ` // Serialize all items
|
||||
const emptyInteractions = {
|
||||
favouritedIds: new Set(),
|
||||
rebloggedIds: new Set(),
|
||||
bookmarkedIds: new Set(),
|
||||
pinnedIds: new Set(),
|
||||
};
|
||||
|
||||
const allItems = [...ancestors, ...descendants];
|
||||
const { replyIdMap, replyAccountIdMap } = await resolveReplyIds(collections.ap_timeline, allItems);
|
||||
const serializeOpts = { baseUrl, ...emptyInteractions, replyIdMap, replyAccountIdMap };`;
|
||||
|
||||
const NEW_CTX_STATE = ` // Serialize all items
|
||||
const allItems = [...ancestors, ...descendants];
|
||||
const { replyIdMap, replyAccountIdMap } = await resolveReplyIds(collections.ap_timeline, allItems);
|
||||
|
||||
// Load real interaction state for thread context ${MARKER_CTX_STATE}
|
||||
const ctxFavouritedIds = new Set();
|
||||
const ctxRebloggedIds = new Set();
|
||||
const ctxBookmarkedIds = new Set();
|
||||
if (allItems.length > 0 && collections.ap_interactions) {
|
||||
const ctxUrlToUid = new Map();
|
||||
for (const ci of allItems) {
|
||||
if (ci.uid) { ctxUrlToUid.set(ci.uid, ci.uid); }
|
||||
if (ci.url && ci.url !== ci.uid) { ctxUrlToUid.set(ci.url, ci.uid || ci.url); }
|
||||
}
|
||||
const ctxLookupUrls = [...ctxUrlToUid.keys()];
|
||||
if (ctxLookupUrls.length > 0) {
|
||||
const ctxInteractions = await collections.ap_interactions
|
||||
.find({ objectUrl: { $in: ctxLookupUrls } })
|
||||
.toArray();
|
||||
for (const ci of ctxInteractions) {
|
||||
const uid = ctxUrlToUid.get(ci.objectUrl) || ci.objectUrl;
|
||||
if (ci.type === "like") ctxFavouritedIds.add(uid);
|
||||
else if (ci.type === "boost") ctxRebloggedIds.add(uid);
|
||||
else if (ci.type === "bookmark") ctxBookmarkedIds.add(uid);
|
||||
}
|
||||
}
|
||||
}
|
||||
const serializeOpts = { baseUrl, favouritedIds: ctxFavouritedIds, rebloggedIds: ctxRebloggedIds, bookmarkedIds: ctxBookmarkedIds, pinnedIds: new Set(), replyIdMap, replyAccountIdMap };`;
|
||||
|
||||
// ── Patch 6: ap-status-not-found-log ────────────────────────────────────────
|
||||
// Add warn logging to findTimelineItemById so 404s on /favourite etc. produce
|
||||
// a log line showing the requested id and whether it was a bad ObjectId or a
|
||||
// genuine missing document. Helps diagnose "Record not found" recurrences.
|
||||
|
||||
const MARKER_NOT_FOUND_LOG = "// [patch] ap-status-not-found-log";
|
||||
|
||||
const OLD_FIND_BY_ID = `async function findTimelineItemById(collection, id) {
|
||||
try {
|
||||
return await collection.findOne({ _id: new ObjectId(id) });
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}`;
|
||||
|
||||
const NEW_FIND_BY_ID = `async function findTimelineItemById(collection, id) { ${MARKER_NOT_FOUND_LOG}
|
||||
try { ${MARKER_NOT_FOUND_LOG}
|
||||
const _oid = new ObjectId(id); ${MARKER_NOT_FOUND_LOG}
|
||||
const _doc = await collection.findOne({ _id: _oid }); ${MARKER_NOT_FOUND_LOG}
|
||||
if (!_doc) console.warn(\`[Mastodon API] findTimelineItemById: no item for id=\${id}\`); ${MARKER_NOT_FOUND_LOG}
|
||||
return _doc; ${MARKER_NOT_FOUND_LOG}
|
||||
} catch (_fErr) { ${MARKER_NOT_FOUND_LOG}
|
||||
console.warn(\`[Mastodon API] findTimelineItemById: invalid id=\${id}: \${_fErr.message}\`); ${MARKER_NOT_FOUND_LOG}
|
||||
return null; ${MARKER_NOT_FOUND_LOG}
|
||||
} ${MARKER_NOT_FOUND_LOG}
|
||||
}`;
|
||||
|
||||
// ── Runner ───────────────────────────────────────────────────────────────────
|
||||
|
||||
const FILES = apPath("lib/mastodon/routes/statuses.js");
|
||||
|
||||
// Patches that use a single replacement (applyPatch)
|
||||
const SINGLE_PATCHES = [
|
||||
{ name: "reply-threading", marker: MARKER_THREADING, oldSnippet: OLD_THREADING, newSnippet: NEW_THREADING },
|
||||
{ name: "delete-fix-C", marker: MARKER_DELETE_FIX, oldSnippet: OLD_AFTER_DELETE, newSnippet: NEW_AFTER_DELETE },
|
||||
{ name: "status-reply-id-B", marker: MARKER_REPLY_ID, oldSnippet: OLD_REPLY_INSERT, newSnippet: NEW_REPLY_INSERT },
|
||||
{ name: "interactions-context", marker: MARKER_CTX_STATE, oldSnippet: OLD_CTX_STATE, newSnippet: NEW_CTX_STATE },
|
||||
{ name: "not-found-log", marker: MARKER_NOT_FOUND_LOG, oldSnippet: OLD_FIND_BY_ID, newSnippet: NEW_FIND_BY_ID },
|
||||
];
|
||||
|
||||
// Patches that need multiple replacements in one pass (applyMultiPatch)
|
||||
const MULTI_PATCHES = [
|
||||
{ name: "status-id", marker: MARKER_STATUS_ID, replacements: STATUS_ID_REPLACEMENTS },
|
||||
];
|
||||
|
||||
let total = 0;
|
||||
|
||||
// Apply single patches
|
||||
for (const p of SINGLE_PATCHES) {
|
||||
let done = false;
|
||||
for (const f of FILES) {
|
||||
const r = await applyPatch(f, p.marker, p.oldSnippet, p.newSnippet);
|
||||
if (r === "applied") {
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ${p.name} to ${f}`);
|
||||
total++; done = true; break;
|
||||
} else if (r === "already_applied") {
|
||||
console.log(`[postinstall] ${SCRIPT}: ${p.name} already applied in ${f}`);
|
||||
done = true; break;
|
||||
} else if (r === "snippet_not_found") {
|
||||
console.warn(`[postinstall] ${SCRIPT}: ${p.name} — snippet not found in ${f}, skipping`);
|
||||
}
|
||||
}
|
||||
if (!done) console.log(`[postinstall] ${SCRIPT}: ${p.name} — no target file found`);
|
||||
}
|
||||
|
||||
// Apply multi-replacement patches
|
||||
for (const p of MULTI_PATCHES) {
|
||||
let done = false;
|
||||
for (const f of FILES) {
|
||||
const r = await applyMultiPatch(f, p.marker, p.replacements);
|
||||
if (r === "applied") {
|
||||
console.log(`[postinstall] ${SCRIPT}: applied ${p.name} to ${f}`);
|
||||
total++; done = true; break;
|
||||
} else if (r === "already_applied") {
|
||||
console.log(`[postinstall] ${SCRIPT}: ${p.name} already applied in ${f}`);
|
||||
done = true; break;
|
||||
} else if (r === "snippet_not_found") {
|
||||
// warning already printed inside applyMultiPatch
|
||||
}
|
||||
}
|
||||
if (!done) console.log(`[postinstall] ${SCRIPT}: ${p.name} — no target file found`);
|
||||
}
|
||||
|
||||
console.log(`[postinstall] ${SCRIPT}: done (${total} patch(es) applied)`);
|
||||
@@ -1,82 +0,0 @@
|
||||
/**
 * patch-ap-oauth-token-expiry
 *
 * Patch: Clear expiresAt when exchanging auth code for access token.
 *
 * Bug: The authorization code is created with expiresAt = now + 10 minutes.
 * When the code is exchanged for an access token, the updateOne only $set's
 * accessToken/refreshToken/refreshExpiresAt but does NOT $unset expiresAt.
 * The token-required middleware checks expiresAt and rejects the token after
 * 10 minutes, forcing Mastodon clients (Mona) to re-authenticate constantly.
 *
 * The refresh_token flow already does $unset: { expiresAt: "" } correctly.
 * This patch adds the same $unset to the authorization_code exchange.
 *
 * Access tokens should not expire (matching Mastodon behavior — valid until
 * revoked), as the code comments already state.
 */

import { readFile, writeFile } from "node:fs/promises";

// Candidate install locations of the ActivityPub endpoint package:
// hoisted at the project root, or nested under @indiekit/indiekit.
const AP_BASE = "@rmdes/indiekit-endpoint-activitypub";
const AP_ROOTS = [
  `node_modules/${AP_BASE}`,
  `node_modules/@indiekit/indiekit/node_modules/${AP_BASE}`,
];

const SCRIPT = "patch-ap-oauth-token-expiry";
// Idempotency marker: its presence in the target file means the patch
// has already been applied, so the script can be re-run safely.
const MARKER = "// patched: unset-expiresAt-on-code-exchange";
const TARGET = "lib/mastodon/routes/oauth.js";

// Anchor snippet in the authorization_code exchange. Must match the target
// file byte-for-byte or the patch is skipped with a warning.
const OLD = ` $set: {
accessToken,
refreshToken,
refreshExpiresAt: new Date(Date.now() + REFRESH_TOKEN_TTL),
},
},
);

res.json({
access_token: accessToken,`;

// Same snippet with the $unset added (marker appended for idempotency).
const NEW = ` $set: {
accessToken,
refreshToken,
refreshExpiresAt: new Date(Date.now() + REFRESH_TOKEN_TTL),
},
$unset: { expiresAt: "" }, ${MARKER}
},
);

res.json({
access_token: accessToken,`;

let patched = false;

for (const root of AP_ROOTS) {
  const filePath = `${root}/${TARGET}`;
  let content;
  try {
    content = await readFile(filePath, "utf-8");
  } catch {
    // This root doesn't exist in this install layout — try the next one.
    continue;
  }

  if (content.includes(MARKER)) {
    console.log(`[${SCRIPT}] ${filePath}: already patched, skipping`);
    patched = true;
    continue;
  }

  if (!content.includes(OLD)) {
    console.warn(`[${SCRIPT}] ${filePath}: target string not found, skipping`);
    continue;
  }

  // Use a replacer function so String.replace cannot misinterpret
  // `$`-sequences in the replacement text ($$, $&, $`, $') as special
  // replacement patterns — the snippets are full of `$set`/`$unset` text.
  const updated = content.replace(OLD, () => NEW);
  await writeFile(filePath, updated, "utf-8");
  console.log(`[${SCRIPT}] ${filePath}: patched successfully`);
  patched = true;
}

if (!patched) {
  console.warn(`[${SCRIPT}] WARNING: no files were patched`);
}
|
||||
@@ -1,84 +0,0 @@
|
||||
/**
 * patch-ap-self-follow-guard
 *
 * Patch: Prevent self-follows in the inbox Follow handler.
 *
 * When the server follows itself (e.g. via Mona), the self-entry in
 * ap_followers causes Fedify to attempt delivery to the local shared
 * inbox on every sendActivity("followers") call. Since the node jail
 * has no outbound internet, this produces infinite ECONNRESET retries.
 *
 * This patch adds an early return in the Follow listener when the
 * follower's actor URL matches the server's own publicationUrl.
 */

import { access, readFile, writeFile } from "node:fs/promises";

// Candidate install locations of the ActivityPub endpoint package.
const AP_BASE = "@rmdes/indiekit-endpoint-activitypub";
const AP_ROOTS = [
  `node_modules/${AP_BASE}`,
  `node_modules/@indiekit/indiekit/node_modules/${AP_BASE}`,
];

/**
 * Expand a package-relative path to every candidate install root.
 * @param {string} rel - Path relative to the package root
 * @returns {string[]} One candidate path per entry in AP_ROOTS
 */
function apPath(rel) {
  return AP_ROOTS.map(r => `${r}/${rel}`);
}

/** True when `p` exists and is accessible; never throws. */
async function fileExists(p) {
  try { await access(p); return true; } catch { return false; }
}

/**
 * Apply a single marker-guarded string replacement to a file.
 * @returns {Promise<"applied"|"already_applied"|"snippet_not_found"|"file_not_found">}
 */
async function applyPatch(filePath, marker, oldSnippet, newSnippet) {
  if (!(await fileExists(filePath))) return "file_not_found";
  const src = await readFile(filePath, "utf8");
  if (src.includes(marker)) return "already_applied";
  if (!src.includes(oldSnippet)) return "snippet_not_found";
  // Replacer function: prevents String.replace from treating `$`-sequences
  // in newSnippet ($$, $&, $`, $') as special replacement patterns, which
  // would silently corrupt the patched output.
  await writeFile(filePath, src.replace(oldSnippet, () => newSnippet), "utf8");
  return "applied";
}

const SCRIPT = "patch-ap-self-follow-guard";
const MARKER = "// [patch] ap-self-follow-guard";

const PATCHES = [
  {
    name: "self-follow-guard",
    files: apPath("lib/inbox-listeners.js"),
    marker: MARKER,
    // Anchor in the Follow listener — must match the target byte-for-byte.
    oldSnippet: ` const actorUrl = follow.actorId?.href || "";
if (await isServerBlocked(actorUrl, collections)) return;
await touchKeyFreshness(collections, actorUrl);
await resetDeliveryStrikes(collections, actorUrl);`,
    newSnippet: ` const actorUrl = follow.actorId?.href || "";
if (await isServerBlocked(actorUrl, collections)) return;

// Reject self-follows: if the follower is our own actor, skip. // [patch] ap-self-follow-guard
// Self-follows cause infinite delivery retries because Fedify
// tries to POST to our own shared inbox, which is unreachable
// from within the jail (no outbound internet).
if (collections._publicationUrl && actorUrl.startsWith(collections._publicationUrl)) {
console.info(\`[ActivityPub] Ignoring self-follow from \${actorUrl}\`);
return;
}

await touchKeyFreshness(collections, actorUrl);
await resetDeliveryStrikes(collections, actorUrl);`,
  },
];

// Runner: for each patch, try every candidate path and stop at the first
// one where the patch applies (or is already applied).
let total = 0;
for (const p of PATCHES) {
  let done = false;
  for (const f of p.files) {
    const r = await applyPatch(f, p.marker, p.oldSnippet, p.newSnippet);
    if (r === "applied") {
      console.log(`[postinstall] ${SCRIPT}: applied ${p.name} to ${f}`);
      total++; done = true; break;
    } else if (r === "already_applied") {
      console.log(`[postinstall] ${SCRIPT}: ${p.name} already applied in ${f}`);
      done = true; break;
    } else if (r === "snippet_not_found") {
      console.warn(`[postinstall] ${SCRIPT}: ${p.name} — snippet not found in ${f}, skipping`);
    }
  }
  if (!done) console.log(`[postinstall] ${SCRIPT}: ${p.name} — no target file found`);
}
console.log(`[postinstall] ${SCRIPT}: done (${total} patch(es) applied)`);
|
||||
@@ -1,98 +0,0 @@
|
||||
/**
 * patch-ap-startup-gate-bypass
 *
 * @rmdes/indiekit-startup-gate polls for /app/data/.indiekit-ready before
 * starting background tasks (inbox processor, key refresh, batch refollow).
 * That file is a Cloudron platform convention — on this FreeBSD server it
 * never gets created, so the inbox queue processor would never start.
 *
 * This patch replaces the module with a no-op shim that calls the callback
 * immediately. The preflight-startup-gate.mjs workaround is kept for safety
 * but this patch makes it irrelevant.
 */

import { readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";

// Idempotency marker: appended to the shim so re-runs are no-ops.
const MARKER = "// [patch] ap-startup-gate-bypass";

// Candidate install locations of the startup-gate package
// (hoisted at project root, or nested under @indiekit/indiekit).
const PATHS = [
"node_modules/@rmdes/indiekit-startup-gate/index.js",
"node_modules/@indiekit/indiekit/node_modules/@rmdes/indiekit-startup-gate/index.js",
];

// The ENTIRE current module source of the package. Must match the installed
// file byte-for-byte; if the package is updated upstream this anchor will
// miss and the script warns instead of patching.
const OLD_SNIPPET = `import { existsSync } from "node:fs";

const SIGNAL_PATH = "/app/data/.indiekit-ready";
const POLL_INTERVAL = 5000;

/**
* Wait for the readiness signal, then invoke the callback.
* If the signal file already exists (hot restart, dev mode), fires immediately.
* @param {Function} callback - Function to call when ready
* @param {object} [options]
* @param {string} [options.label] - Plugin name for log messages
* @returns {Function} stop - Call to cancel polling (for plugin destroy lifecycle)
*/
export function waitForReady(callback, options = {}) {
const label = options.label || "plugin";

if (existsSync(SIGNAL_PATH)) {
console.info(\`[startup-gate] \${label}: ready (signal file exists)\`);
callback();
return () => {};
}

console.info(\`[startup-gate] \${label}: waiting for readiness signal...\`);

const timer = setInterval(() => {
if (existsSync(SIGNAL_PATH)) {
clearInterval(timer);
console.info(\`[startup-gate] \${label}: ready — starting deferred tasks\`);
callback();
}
}, POLL_INTERVAL);

timer.unref();

return () => {
clearInterval(timer);
};
}`;

// Replacement shim: same exported interface (waitForReady(callback, options)
// returning a stop function), but fires the callback immediately.
const NEW_SNIPPET = `// [patch] ap-startup-gate-bypass — Cloudron signal file not used on this server
export function waitForReady(callback, options = {}) {
callback();
return () => {};
} ${MARKER}`;

let patched = 0;

for (const relPath of PATHS) {
const filePath = resolve(relPath);
let src;
try {
src = readFileSync(filePath, "utf8");
} catch {
continue; // path doesn't exist in this install
}

if (src.includes(MARKER)) {
console.log(`[patch] ap-startup-gate-bypass: already applied to ${relPath}`);
// NOTE: "already applied" counts toward `patched` so the final
// warning only fires when NO candidate path was usable at all.
patched++;
continue;
}

if (!src.includes(OLD_SNIPPET)) {
console.warn(`[patch] ap-startup-gate-bypass: target snippet not found in ${relPath} (package updated?)`);
continue;
}

writeFileSync(filePath, src.replace(OLD_SNIPPET, NEW_SNIPPET), "utf8");
console.log(`[patch] ap-startup-gate-bypass: applied to ${relPath}`);
patched++;
}

if (patched === 0) {
console.warn("[patch] ap-startup-gate-bypass: no target files found — skipping");
}
|
||||
@@ -1,131 +0,0 @@
|
||||
/**
 * patch-ap-syndication
 *
 * Consolidated patch: AP syndication guards in syndicator.js.
 *
 * Absorbs:
 * - patch-ap-syndicate-skip-checkin (skip location checkins)
 * - patch-ap-syndicate-skip-draft (skip draft posts)
 * - patch-ap-syndicate-skip-unlisted (skip unlisted posts)
 *
 * Note: patch-ap-syndicate-dedup integrated into source directly.
 *
 * ORDER MATTERS: checkin → draft → unlisted
 * Each newSnippet is the next oldSnippet.
 */

import { access, readFile, writeFile } from "node:fs/promises";

// Candidate install locations of the ActivityPub endpoint package.
const AP_BASE = "@rmdes/indiekit-endpoint-activitypub";
const AP_ROOTS = [
`node_modules/${AP_BASE}`,
`node_modules/@indiekit/indiekit/node_modules/${AP_BASE}`,
];

// Expand a package-relative path to every candidate install root.
function apPath(rel) {
return AP_ROOTS.map(r => `${r}/${rel}`);
}

// True when `p` exists and is accessible; never throws.
async function fileExists(p) {
try { await access(p); return true; } catch { return false; }
}

/**
* Apply a single patch to a file.
* Returns: "applied" | "already_applied" | "snippet_not_found" | "file_not_found"
*/
async function applyPatch(filePath, marker, oldSnippet, newSnippet) {
if (!(await fileExists(filePath))) return "file_not_found";
const src = await readFile(filePath, "utf8");
if (src.includes(marker)) return "already_applied";
if (!src.includes(oldSnippet)) return "snippet_not_found";
await writeFile(filePath, src.replace(oldSnippet, newSnippet), "utf8");
return "applied";
}

const SCRIPT = "patch-ap-syndication";

// Each patch's oldSnippet anchors on the OUTPUT of the previous patch, so
// the array order below is load-bearing — do not reorder entries.
const PATCHES = [
// ORDER: checkin → draft → unlisted (each builds on prior output)

{
name: "skip-checkin",
files: apPath("lib/syndicator.js"),
marker: "// [patch] ap-syndicate-skip-checkin",
oldSnippet: ` async syndicate(properties) {
if (!plugin._federation) {
return undefined;
}`,
newSnippet: ` async syndicate(properties) {
if (!plugin._federation) {
return undefined;
}

// Skip location checkins — they have a JF2 \`location\` property. // [patch] ap-syndicate-skip-checkin
if (properties.location) {
console.info(\`[ActivityPub] Skipping syndication for location checkin: \${properties.url}\`);
return undefined;
}`,
},

{
name: "skip-draft",
files: apPath("lib/syndicator.js"),
marker: "// [patch] ap-syndicate-skip-draft",
oldSnippet: ` // Skip location checkins — they have a JF2 \`location\` property. // [patch] ap-syndicate-skip-checkin
if (properties.location) {
console.info(\`[ActivityPub] Skipping syndication for location checkin: \${properties.url}\`);
return undefined;
}`,
newSnippet: ` // Skip location checkins — they have a JF2 \`location\` property. // [patch] ap-syndicate-skip-checkin
if (properties.location) {
console.info(\`[ActivityPub] Skipping syndication for location checkin: \${properties.url}\`);
return undefined;
}

// Skip draft posts — they should not be federated to followers. // [patch] ap-syndicate-skip-draft
if (properties["post-status"] === "draft") {
console.info(\`[ActivityPub] Skipping syndication for draft post: \${properties.url}\`);
return undefined;
}`,
},

{
name: "skip-unlisted",
files: apPath("lib/syndicator.js"),
marker: "// [patch] ap-syndicate-skip-unlisted",
oldSnippet: ` // Skip draft posts — they should not be federated to followers. // [patch] ap-syndicate-skip-draft
if (properties["post-status"] === "draft") {
console.info(\`[ActivityPub] Skipping syndication for draft post: \${properties.url}\`);
return undefined;
}`,
newSnippet: ` // Skip draft posts — they should not be federated to followers. // [patch] ap-syndicate-skip-draft
if (properties["post-status"] === "draft") {
console.info(\`[ActivityPub] Skipping syndication for draft post: \${properties.url}\`);
return undefined;
}

// Skip unlisted posts — they should not be federated to followers. // [patch] ap-syndicate-skip-unlisted
if (properties.visibility === "unlisted") {
console.info(\`[ActivityPub] Skipping syndication for unlisted post: \${properties.url}\`);
return undefined;
}`,
},
];

// Runner: for each patch, try every candidate path; stop at the first
// where the patch lands or was already applied.
let total = 0;
for (const p of PATCHES) {
let done = false;
for (const f of p.files) {
const r = await applyPatch(f, p.marker, p.oldSnippet, p.newSnippet);
if (r === "applied") {
console.log(`[postinstall] ${SCRIPT}: applied ${p.name} to ${f}`);
total++; done = true; break;
} else if (r === "already_applied") {
console.log(`[postinstall] ${SCRIPT}: ${p.name} already applied in ${f}`);
done = true; break;
} else if (r === "snippet_not_found") {
console.warn(`[postinstall] ${SCRIPT}: ${p.name} — snippet not found in ${f}, skipping`);
}
}
if (!done) console.log(`[postinstall] ${SCRIPT}: ${p.name} — no target file found`);
}
console.log(`[postinstall] ${SCRIPT}: done (${total} patch(es) applied)`);
|
||||
@@ -1,159 +0,0 @@
|
||||
/**
 * patch-ap-unify-dm-visibility
 *
 * Patch: Unify isDirectMessage() and computeVisibility() in inbox-handlers.js.
 *
 * Problem: Two overlapping functions for DM detection. computeVisibility()
 * returns "private" for DMs because it lacks actor context. isDirectMessage()
 * correctly detects DMs but is separate.
 *
 * Fix: Extend computeVisibility() to accept optional {ourActorUrl, followersUrl}.
 * When provided and DM conditions are met, return "direct" instead of "private".
 * Replace the standalone isDirectMessage() guard with computeVisibility() check.
 * Update all call sites inside handleCreate() to pass actor context.
 */

import { access, readFile, writeFile } from "node:fs/promises";

// Candidate install locations of the ActivityPub endpoint package.
const AP_BASE = "@rmdes/indiekit-endpoint-activitypub";
const AP_ROOTS = [
`node_modules/${AP_BASE}`,
`node_modules/@indiekit/indiekit/node_modules/${AP_BASE}`,
];

// Expand a package-relative path to every candidate install root.
function apPath(rel) {
return AP_ROOTS.map(r => `${r}/${rel}`);
}

// True when `p` exists and is accessible; never throws.
async function fileExists(p) {
try { await access(p); return true; } catch { return false; }
}

const SCRIPT = "patch-ap-unify-dm-visibility";
// Idempotency marker; also appended to every rewritten line so the
// marker-count diff below can count how many call sites were updated.
const MARKER = "// [patch] unify-dm-visibility";
const CANDIDATES = apPath("lib/inbox-handlers.js");
let total = 0;

// ── Patch 1: Replace computeVisibility to accept actor context ──────────────

const OLD_COMPUTE_VISIBILITY = `function computeVisibility(object) {
const to = new Set((object.toIds || []).map((u) => u.href));
const cc = new Set((object.ccIds || []).map((u) => u.href));

if (to.has(PUBLIC)) return "public";
if (cc.has(PUBLIC)) return "unlisted";
// Without knowing the remote actor's followers URL, we can't distinguish
// "private" (followers-only) from "direct". Both are non-public.
if (to.size > 0 || cc.size > 0) return "private";
return "direct";
}`;

const NEW_COMPUTE_VISIBILITY = `function computeVisibility(object, actorContext) { ${MARKER}
const to = new Set((object.toIds || []).map((u) => u.href)); ${MARKER}
const cc = new Set((object.ccIds || []).map((u) => u.href)); ${MARKER}
${MARKER}
if (to.has(PUBLIC)) return "public"; ${MARKER}
if (cc.has(PUBLIC)) return "unlisted"; ${MARKER}
${MARKER}
// When actor context is available, use isDirectMessage logic to distinguish ${MARKER}
// "direct" (addressed to specific actors only) from "private" (followers-only). ${MARKER}
if (actorContext?.ourActorUrl) { ${MARKER}
const allAddressed = [ ${MARKER}
...to, ...cc, ${MARKER}
...(object.btoIds || []).map((u) => u.href), ${MARKER}
...(object.bccIds || []).map((u) => u.href), ${MARKER}
]; ${MARKER}
const hasPublic = allAddressed.some((u) => u === PUBLIC || u === "as:Public"); ${MARKER}
const hasFollowers = actorContext.followersUrl && allAddressed.includes(actorContext.followersUrl); ${MARKER}
if (!hasPublic && !hasFollowers && allAddressed.includes(actorContext.ourActorUrl)) { ${MARKER}
return "direct"; ${MARKER}
} ${MARKER}
} ${MARKER}
${MARKER}
// Without actor context, can't distinguish "private" from "direct". ${MARKER}
if (to.size > 0 || cc.size > 0) return "private"; ${MARKER}
return "direct"; ${MARKER}
}`;

// ── Patch 2: Replace isDirectMessage() guard with computeVisibility() check ─

const OLD_DM_GUARD = ` if (isDirectMessage(object, ourActorUrl, followersUrl)) {`;

const NEW_DM_GUARD = ` if (computeVisibility(object, { ourActorUrl, followersUrl }) === "direct") { ${MARKER}`;

// ── Patch 3: Update computeVisibility call sites inside handleCreate ────────
// These are after the DM guard, so the object is NOT a DM. But passing context
// lets computeVisibility correctly return "private" vs "direct" for edge cases.
// The 4 call sites all follow the pattern: timelineItem.visibility = computeVisibility(object);
// We need to add actor context only to those inside handleCreate (lines 812, 842, 866)
// and handleAnnounce (line 586). The fetchReplyChain call (line 212) has no context.
// NOTE(review): those line numbers refer to the unpatched upstream
// inbox-handlers.js — verify against the installed package version.

// For handleCreate calls — ourActorUrl and followersUrl are in scope
const OLD_VISIBILITY_CALL = `timelineItem.visibility = computeVisibility(object);`;
const NEW_VISIBILITY_CALL_WITH_CTX = `timelineItem.visibility = computeVisibility(object, { ourActorUrl, followersUrl }); ${MARKER}`;

for (const f of CANDIDATES) {
if (!(await fileExists(f))) continue;
const src = await readFile(f, "utf8");
if (src.includes(MARKER)) {
console.log(`[postinstall] ${SCRIPT}: already applied in ${f}`);
// NOTE(review): `break` (not `continue`) stops after the first existing
// candidate — assumes only one install root is active at a time.
break;
}

let updated = src;
// `changed` is only set by Patch 1; if its anchor is missing we `continue`
// below before Patches 2/3 run, so nothing is written for this file.
let changed = false;

// Patch 1: Replace computeVisibility function
if (updated.includes(OLD_COMPUTE_VISIBILITY)) {
updated = updated.replace(OLD_COMPUTE_VISIBILITY, NEW_COMPUTE_VISIBILITY);
changed = true;
console.log(`[postinstall] ${SCRIPT}: replaced computeVisibility()`);
} else {
console.warn(`[postinstall] ${SCRIPT}: computeVisibility() anchor not found in ${f}`);
continue;
}

// Patch 2: Replace isDirectMessage guard
if (updated.includes(OLD_DM_GUARD)) {
updated = updated.replace(OLD_DM_GUARD, NEW_DM_GUARD);
console.log(`[postinstall] ${SCRIPT}: replaced isDirectMessage() guard`);
} else {
console.warn(`[postinstall] ${SCRIPT}: isDirectMessage() guard not found in ${f}`);
}

// Patch 3: Update computeVisibility call sites with actor context
// We need to be selective — only replace calls inside handleCreate/handleAnnounce
// where ourActorUrl and followersUrl are in scope.
// The fetchReplyChain call (line ~212) does NOT have these in scope — leave it.
//
// Strategy: split file at the handleCreate function boundary and only replace
// within that section. handleAnnounce also has the vars via ctx but through a
// different scope — safer to leave those unchanged since boosts aren't DMs.

// Find the handleCreate function start
const handleCreateStart = updated.indexOf("export async function handleCreate(");
if (handleCreateStart > -1) {
const before = updated.substring(0, handleCreateStart);
const after = updated.substring(handleCreateStart);

// Replace all computeVisibility(object) calls within handleCreate
const afterPatched = after.replaceAll(OLD_VISIBILITY_CALL, NEW_VISIBILITY_CALL_WITH_CTX);
if (afterPatched !== after) {
updated = before + afterPatched;
// Count replacements by diffing marker occurrences before/after; each
// rewritten call site carries exactly one marker substring.
const count = (afterPatched.match(/unify-dm-visibility/g) || []).length -
(after.match(/unify-dm-visibility/g) || []).length;
console.log(`[postinstall] ${SCRIPT}: updated ${count} computeVisibility() call site(s) in handleCreate`);
}
}

if (changed && updated !== src) {
await writeFile(f, updated, "utf8");
console.log(`[postinstall] ${SCRIPT}: applied to ${f}`);
total++;
break;
}
}

if (total === 0) {
console.log(`[postinstall] ${SCRIPT}: no target file found or no changes needed`);
}

console.log(`[postinstall] ${SCRIPT}: done (${total} patch(es) applied)`);
|
||||
Reference in New Issue
Block a user