fix: sort Mastodon API timeline by published date instead of ObjectId
The Mastodon API timeline sorted by MongoDB _id (insertion order), not by published date. This caused chronological jumps — backfilled or syndicated posts got ObjectIds at import time, interleaving them incorrectly with federation-received posts.

Changes:
- Pagination cursors now use the published date (encoded as ms-since-epoch) instead of ObjectId. Mastodon clients pass these as opaque max_id/min_id/since_id values, and they sort correctly.
- Status and notification IDs are now encodeCursor(published) so the cursor round-trips through client pagination.
- Status lookups (GET/DELETE /statuses/:id, context, interactions) use findTimelineItemById(), which tries a published-based lookup first, then falls back to ObjectId for backwards compatibility.
- Link pagination headers emit published-based cursors.

This matches the native reader's sort (storage/timeline.js), which has always sorted by published: -1.
This commit is contained in:
@@ -13,6 +13,7 @@
|
||||
*/
|
||||
import { serializeAccount } from "./account.js";
|
||||
import { serializeStatus } from "./status.js";
|
||||
import { encodeCursor } from "../helpers/pagination.js";
|
||||
|
||||
/**
|
||||
* Map internal notification types to Mastodon API types.
|
||||
@@ -55,9 +56,17 @@ export function serializeNotification(notif, { baseUrl, statusMap, interactionSt
|
||||
);
|
||||
|
||||
// Resolve the associated status (for favourite, reblog, mention types)
|
||||
// For mention types, prefer the triggering post (notif.url) over the target post (notif.targetUrl)
|
||||
// because targetUrl for replies points to the user's OWN post being replied to
|
||||
let status = null;
|
||||
if (notif.targetUrl && statusMap) {
|
||||
const timelineItem = statusMap.get(notif.targetUrl);
|
||||
if (statusMap) {
|
||||
const isMentionType = mastodonType === "mention";
|
||||
const lookupUrl = isMentionType
|
||||
? (notif.url || notif.targetUrl)
|
||||
: (notif.targetUrl || notif.url);
|
||||
|
||||
if (lookupUrl) {
|
||||
const timelineItem = statusMap.get(lookupUrl);
|
||||
if (timelineItem) {
|
||||
status = serializeStatus(timelineItem, {
|
||||
baseUrl,
|
||||
@@ -68,6 +77,7 @@ export function serializeNotification(notif, { baseUrl, statusMap, interactionSt
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For mentions/replies that don't have a matching timeline item,
|
||||
// construct a minimal status from the notification content
|
||||
@@ -82,7 +92,7 @@ export function serializeNotification(notif, { baseUrl, statusMap, interactionSt
|
||||
visibility: notif.type === "dm" ? "direct" : "public",
|
||||
language: null,
|
||||
uri: notif.uid || "",
|
||||
url: notif.targetUrl || notif.uid || "",
|
||||
url: notif.url || notif.targetUrl || notif.uid || "",
|
||||
replies_count: 0,
|
||||
reblogs_count: 0,
|
||||
favourites_count: 0,
|
||||
@@ -106,12 +116,14 @@ export function serializeNotification(notif, { baseUrl, statusMap, interactionSt
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
id: notif._id.toString(),
|
||||
type: mastodonType,
|
||||
created_at: notif.published instanceof Date
|
||||
const createdAt = notif.published instanceof Date
|
||||
? notif.published.toISOString()
|
||||
: notif.published || notif.createdAt || new Date().toISOString(),
|
||||
: notif.published || notif.createdAt || new Date().toISOString();
|
||||
|
||||
return {
|
||||
id: encodeCursor(createdAt) || notif._id.toString(),
|
||||
type: mastodonType,
|
||||
created_at: createdAt,
|
||||
account,
|
||||
status,
|
||||
};
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
import { serializeAccount } from "./account.js";
|
||||
import { sanitizeHtml } from "./sanitize.js";
|
||||
import { encodeCursor } from "../helpers/pagination.js";
|
||||
|
||||
// Module-level defaults set once at startup via setLocalIdentity()
|
||||
let _localPublicationUrl = "";
|
||||
@@ -46,7 +47,10 @@ export function setLocalIdentity(publicationUrl, handle) {
|
||||
export function serializeStatus(item, { baseUrl, favouritedIds, rebloggedIds, bookmarkedIds, pinnedIds }) {
|
||||
if (!item) return null;
|
||||
|
||||
const id = item._id.toString();
|
||||
// Use published-based cursor as the status ID so pagination cursors
|
||||
// (max_id/min_id) sort chronologically, not by insertion order.
|
||||
const published = item.published || item.createdAt || item.boostedAt;
|
||||
const id = encodeCursor(published) || item._id.toString();
|
||||
const uid = item.uid || "";
|
||||
const url = item.url || uid;
|
||||
|
||||
|
||||
@@ -1,14 +1,50 @@
|
||||
/**
|
||||
* Mastodon-compatible cursor pagination helpers.
|
||||
*
|
||||
* Uses MongoDB ObjectId as cursor (chronologically ordered).
|
||||
* Uses `published` date as cursor (chronologically correct) instead of
|
||||
* MongoDB ObjectId. ObjectId reflects insertion order, not publication
|
||||
* order — backfilled or syndicated posts get new ObjectIds at import
|
||||
* time, breaking chronological sort. The `published` field matches the
|
||||
* native reader's sort and produces a correct timeline.
|
||||
*
|
||||
* Cursor values are `published` ISO strings, but Mastodon clients pass
|
||||
* them as opaque `max_id`/`min_id`/`since_id` strings. We encode the
|
||||
* published date as a Mastodon-style snowflake-ish ID (milliseconds
|
||||
* since epoch) so clients treat them as comparable integers.
|
||||
*
|
||||
* Emits RFC 8288 Link headers that masto.js / Phanpy parse.
|
||||
*/
|
||||
import { ObjectId } from "mongodb";
|
||||
|
||||
const DEFAULT_LIMIT = 20;
|
||||
const MAX_LIMIT = 40;
|
||||
|
||||
/**
|
||||
* Encode a published date string as a numeric cursor ID.
|
||||
* Mastodon clients expect IDs to be numeric strings that sort chronologically.
|
||||
* We use milliseconds since epoch — monotonic and comparable.
|
||||
*
|
||||
* @param {string|Date} published - ISO date string or Date object
|
||||
* @returns {string} Numeric string (ms since epoch)
|
||||
*/
|
||||
/**
 * Encode a published date as a numeric, chronologically sortable cursor ID.
 * Mastodon clients expect IDs to be numeric strings that sort in time order;
 * milliseconds since the Unix epoch satisfy that.
 *
 * @param {string|Date} published - ISO date string or Date object
 * @returns {string} Numeric string (ms since epoch); "0" for missing/invalid input
 */
export function encodeCursor(published) {
  if (!published) {
    return "0";
  }
  const epochMs = new Date(published).getTime();
  if (!Number.isFinite(epochMs)) {
    // Unparseable date — emit the sentinel cursor.
    return "0";
  }
  return String(epochMs);
}
|
||||
|
||||
/**
|
||||
* Decode a numeric cursor ID back to an ISO date string.
|
||||
*
|
||||
* @param {string} cursor - Numeric cursor from client
|
||||
* @returns {string|null} ISO date string, or null if invalid
|
||||
*/
|
||||
/**
 * Decode a numeric cursor ID (ms since epoch) back to an ISO date string.
 *
 * @param {string} cursor - Numeric cursor from client
 * @returns {string|null} ISO date string, or null if missing/invalid
 */
export function decodeCursor(cursor) {
  if (!cursor) {
    return null;
  }
  const epochMs = Number.parseInt(cursor, 10);
  // Reject NaN and non-positive values ("0" is the sentinel cursor).
  const isValid = Number.isFinite(epochMs) && epochMs > 0;
  return isValid ? new Date(epochMs).toISOString() : null;
}
|
||||
|
||||
/**
|
||||
* Parse and clamp the limit parameter.
|
||||
*
|
||||
@@ -24,48 +60,45 @@ export function parseLimit(raw) {
|
||||
/**
|
||||
* Build a MongoDB filter object for cursor-based pagination.
|
||||
*
|
||||
* Mastodon cursor params (all optional, applied to `_id`):
|
||||
* max_id — return items older than this ID (exclusive)
|
||||
* min_id — return items newer than this ID (exclusive), closest first
|
||||
* since_id — return items newer than this ID (exclusive), most recent first
|
||||
* Mastodon cursor params (all optional, applied to `published`):
|
||||
* max_id — return items older than this cursor (exclusive)
|
||||
* min_id — return items newer than this cursor (exclusive), closest first
|
||||
* since_id — return items newer than this cursor (exclusive), most recent first
|
||||
*
|
||||
* @param {object} baseFilter - Existing MongoDB filter to extend
|
||||
* @param {object} cursors
|
||||
* @param {string} [cursors.max_id]
|
||||
* @param {string} [cursors.min_id]
|
||||
* @param {string} [cursors.since_id]
|
||||
* @param {string} [cursors.max_id] - Numeric cursor (ms since epoch)
|
||||
* @param {string} [cursors.min_id] - Numeric cursor (ms since epoch)
|
||||
* @param {string} [cursors.since_id] - Numeric cursor (ms since epoch)
|
||||
* @returns {{ filter: object, sort: object, reverse: boolean }}
|
||||
*/
|
||||
export function buildPaginationQuery(baseFilter, { max_id, min_id, since_id } = {}) {
|
||||
const filter = { ...baseFilter };
|
||||
let sort = { _id: -1 }; // newest first (default)
|
||||
let sort = { published: -1 }; // newest first (default)
|
||||
let reverse = false;
|
||||
|
||||
if (max_id) {
|
||||
try {
|
||||
filter._id = { ...filter._id, $lt: new ObjectId(max_id) };
|
||||
} catch {
|
||||
// Invalid ObjectId — ignore
|
||||
const date = decodeCursor(max_id);
|
||||
if (date) {
|
||||
filter.published = { ...filter.published, $lt: date };
|
||||
}
|
||||
}
|
||||
|
||||
if (since_id) {
|
||||
try {
|
||||
filter._id = { ...filter._id, $gt: new ObjectId(since_id) };
|
||||
} catch {
|
||||
// Invalid ObjectId — ignore
|
||||
const date = decodeCursor(since_id);
|
||||
if (date) {
|
||||
filter.published = { ...filter.published, $gt: date };
|
||||
}
|
||||
}
|
||||
|
||||
if (min_id) {
|
||||
try {
|
||||
filter._id = { ...filter._id, $gt: new ObjectId(min_id) };
|
||||
const date = decodeCursor(min_id);
|
||||
if (date) {
|
||||
filter.published = { ...filter.published, $gt: date };
|
||||
// min_id returns results closest to the cursor, so sort ascending
|
||||
// then reverse the results before returning
|
||||
sort = { _id: 1 };
|
||||
sort = { published: 1 };
|
||||
reverse = true;
|
||||
} catch {
|
||||
// Invalid ObjectId — ignore
|
||||
}
|
||||
}
|
||||
|
||||
@@ -77,7 +110,7 @@ export function buildPaginationQuery(baseFilter, { max_id, min_id, since_id } =
|
||||
*
|
||||
* @param {object} res - Express response object
|
||||
* @param {object} req - Express request object (for building URLs)
|
||||
* @param {Array} items - Result items (must have `_id` or `id`)
|
||||
* @param {Array} items - Result items (must have `published`)
|
||||
* @param {number} limit - The limit used for the query
|
||||
*/
|
||||
export function setPaginationHeaders(res, req, items, limit) {
|
||||
@@ -86,10 +119,10 @@ export function setPaginationHeaders(res, req, items, limit) {
|
||||
// Only emit Link if we got a full page (may have more)
|
||||
if (items.length < limit) return;
|
||||
|
||||
const firstId = itemId(items[0]);
|
||||
const lastId = itemId(items[items.length - 1]);
|
||||
const firstCursor = encodeCursor(items[0].published);
|
||||
const lastCursor = encodeCursor(items[items.length - 1].published);
|
||||
|
||||
if (!firstId || !lastId) return;
|
||||
if (firstCursor === "0" || lastCursor === "0") return;
|
||||
|
||||
const baseUrl = `${req.protocol}://${req.get("host")}${req.path}`;
|
||||
|
||||
@@ -106,25 +139,15 @@ export function setPaginationHeaders(res, req, items, limit) {
|
||||
|
||||
const links = [];
|
||||
|
||||
// rel="next" — older items (max_id = last item's ID)
|
||||
// rel="next" — older items (max_id = last item's cursor)
|
||||
const nextParams = new URLSearchParams(existingParams);
|
||||
nextParams.set("max_id", lastId);
|
||||
nextParams.set("max_id", lastCursor);
|
||||
links.push(`<${baseUrl}?${nextParams.toString()}>; rel="next"`);
|
||||
|
||||
// rel="prev" — newer items (min_id = first item's ID)
|
||||
// rel="prev" — newer items (min_id = first item's cursor)
|
||||
const prevParams = new URLSearchParams(existingParams);
|
||||
prevParams.set("min_id", firstId);
|
||||
prevParams.set("min_id", firstCursor);
|
||||
links.push(`<${baseUrl}?${prevParams.toString()}>; rel="prev"`);
|
||||
|
||||
res.set("Link", links.join(", "));
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the string ID from an item.
|
||||
*/
|
||||
/**
 * Extract the string ID from an item, preferring the MongoDB `_id` over
 * a plain `id` property.
 *
 * @param {object|null} item - Result item
 * @returns {string|null} String ID, or null if neither field is set
 */
function itemId(item) {
  if (item?._id) {
    return item._id.toString();
  }
  if (item?.id) {
    return String(item.id);
  }
  return null;
}
|
||||
|
||||
+113
-145
@@ -3,6 +3,8 @@
|
||||
*
|
||||
* GET /api/v1/statuses/:id — single status
|
||||
* GET /api/v1/statuses/:id/context — thread context (ancestors + descendants)
|
||||
* POST /api/v1/statuses — create post via Micropub pipeline
|
||||
* DELETE /api/v1/statuses/:id — delete post via Micropub pipeline
|
||||
* POST /api/v1/statuses/:id/favourite — like a post
|
||||
* POST /api/v1/statuses/:id/unfavourite — unlike a post
|
||||
* POST /api/v1/statuses/:id/reblog — boost a post
|
||||
@@ -13,12 +15,13 @@
|
||||
import express from "express";
|
||||
import { ObjectId } from "mongodb";
|
||||
import { serializeStatus } from "../entities/status.js";
|
||||
import { serializeAccount } from "../entities/account.js";
|
||||
import { decodeCursor } from "../helpers/pagination.js";
|
||||
import {
|
||||
likePost, unlikePost,
|
||||
boostPost, unboostPost,
|
||||
bookmarkPost, unbookmarkPost,
|
||||
} from "../helpers/interactions.js";
|
||||
import { addTimelineItem } from "../../storage/timeline.js";
|
||||
|
||||
const router = express.Router(); // eslint-disable-line new-cap
|
||||
|
||||
@@ -30,14 +33,7 @@ router.get("/api/v1/statuses/:id", async (req, res, next) => {
|
||||
const collections = req.app.locals.mastodonCollections;
|
||||
const baseUrl = `${req.protocol}://${req.get("host")}`;
|
||||
|
||||
let objectId;
|
||||
try {
|
||||
objectId = new ObjectId(id);
|
||||
} catch {
|
||||
return res.status(404).json({ error: "Record not found" });
|
||||
}
|
||||
|
||||
const item = await collections.ap_timeline.findOne({ _id: objectId });
|
||||
const item = await findTimelineItemById(collections.ap_timeline, id);
|
||||
if (!item) {
|
||||
return res.status(404).json({ error: "Record not found" });
|
||||
}
|
||||
@@ -65,14 +61,7 @@ router.get("/api/v1/statuses/:id/context", async (req, res, next) => {
|
||||
const collections = req.app.locals.mastodonCollections;
|
||||
const baseUrl = `${req.protocol}://${req.get("host")}`;
|
||||
|
||||
let objectId;
|
||||
try {
|
||||
objectId = new ObjectId(id);
|
||||
} catch {
|
||||
return res.status(404).json({ error: "Record not found" });
|
||||
}
|
||||
|
||||
const item = await collections.ap_timeline.findOne({ _id: objectId });
|
||||
const item = await findTimelineItemById(collections.ap_timeline, id);
|
||||
if (!item) {
|
||||
return res.status(404).json({ error: "Record not found" });
|
||||
}
|
||||
@@ -142,6 +131,8 @@ router.get("/api/v1/statuses/:id/context", async (req, res, next) => {
|
||||
});
|
||||
|
||||
// ─── POST /api/v1/statuses ───────────────────────────────────────────────────
|
||||
// Creates a post via the Micropub pipeline so it goes through the full flow:
|
||||
// Micropub → content file → Eleventy build → syndication → AP federation.
|
||||
|
||||
router.post("/api/v1/statuses", async (req, res, next) => {
|
||||
try {
|
||||
@@ -150,6 +141,7 @@ router.post("/api/v1/statuses", async (req, res, next) => {
|
||||
return res.status(401).json({ error: "The access token is invalid" });
|
||||
}
|
||||
|
||||
const { application, publication } = req.app.locals;
|
||||
const collections = req.app.locals.mastodonCollections;
|
||||
const pluginOptions = req.app.locals.mastodonPluginOptions || {};
|
||||
const baseUrl = `${req.protocol}://${req.get("host")}`;
|
||||
@@ -168,48 +160,77 @@ router.post("/api/v1/statuses", async (req, res, next) => {
|
||||
return res.status(422).json({ error: "Validation failed: Text content is required" });
|
||||
}
|
||||
|
||||
// Resolve in_reply_to if provided
|
||||
// Resolve in_reply_to URL from status ID (cursor or ObjectId)
|
||||
let inReplyTo = null;
|
||||
if (inReplyToId) {
|
||||
try {
|
||||
const replyItem = await collections.ap_timeline.findOne({
|
||||
_id: new ObjectId(inReplyToId),
|
||||
});
|
||||
const replyItem = await findTimelineItemById(collections.ap_timeline, inReplyToId);
|
||||
if (replyItem) {
|
||||
inReplyTo = replyItem.uid || replyItem.url;
|
||||
}
|
||||
} catch {
|
||||
// Invalid ObjectId — ignore
|
||||
}
|
||||
}
|
||||
|
||||
// Load local profile for the author field
|
||||
// Build JF2 properties for the Micropub pipeline
|
||||
const jf2 = {
|
||||
type: "entry",
|
||||
content: statusText || "",
|
||||
};
|
||||
|
||||
if (inReplyTo) {
|
||||
jf2["in-reply-to"] = inReplyTo;
|
||||
}
|
||||
|
||||
if (spoilerText) {
|
||||
jf2.summary = spoilerText;
|
||||
}
|
||||
|
||||
if (sensitive === true || sensitive === "true") {
|
||||
jf2.sensitive = "true";
|
||||
}
|
||||
|
||||
if (visibility && visibility !== "public") {
|
||||
jf2.visibility = visibility;
|
||||
}
|
||||
|
||||
if (language) {
|
||||
jf2["mp-language"] = language;
|
||||
}
|
||||
|
||||
// Syndicate to AP only — posts from Mastodon clients belong to the fediverse.
|
||||
// Never cross-post to Bluesky (conversations stay in their protocol).
|
||||
// The publication URL is the AP syndicator's uid.
|
||||
const publicationUrl = pluginOptions.publicationUrl || baseUrl;
|
||||
jf2["mp-syndicate-to"] = [publicationUrl.replace(/\/$/, "") + "/"];
|
||||
|
||||
// Create post via Micropub pipeline (same functions the Micropub endpoint uses)
|
||||
// postData.create() handles: normalization, post type detection, path rendering,
|
||||
// mp-syndicate-to validated against configured syndicators, MongoDB posts collection
|
||||
const { postData } = await import("@indiekit/endpoint-micropub/lib/post-data.js");
|
||||
const { postContent } = await import("@indiekit/endpoint-micropub/lib/post-content.js");
|
||||
|
||||
const data = await postData.create(application, publication, jf2);
|
||||
// postContent.create() handles: template rendering, file creation in store
|
||||
await postContent.create(publication, data);
|
||||
|
||||
const postUrl = data.properties.url;
|
||||
console.info(`[Mastodon API] Created post via Micropub: ${postUrl}`);
|
||||
|
||||
// Add to ap_timeline so the post is visible in the Mastodon Client API
|
||||
const profile = await collections.ap_profile.findOne({});
|
||||
const handle = pluginOptions.handle || "user";
|
||||
const publicationUrl = pluginOptions.publicationUrl || baseUrl;
|
||||
const actorUrl = profile?.url || `${publicationUrl}/users/${handle}`;
|
||||
|
||||
// Generate post ID and URL
|
||||
const postId = crypto.randomUUID();
|
||||
const postUrl = `${publicationUrl.replace(/\/$/, "")}/posts/${postId}`;
|
||||
const uid = postUrl;
|
||||
|
||||
// Build the timeline item
|
||||
const now = new Date().toISOString();
|
||||
const timelineItem = {
|
||||
uid,
|
||||
const timelineItem = await addTimelineItem(collections, {
|
||||
uid: postUrl,
|
||||
url: postUrl,
|
||||
type: "note",
|
||||
content: {
|
||||
text: statusText || "",
|
||||
html: linkifyAndParagraph(statusText || ""),
|
||||
},
|
||||
type: data.properties["post-type"] || "note",
|
||||
content: data.properties.content || { text: statusText || "", html: "" },
|
||||
summary: spoilerText || "",
|
||||
sensitive: sensitive === true || sensitive === "true",
|
||||
visibility: visibility || "public",
|
||||
language: language || null,
|
||||
inReplyTo,
|
||||
published: now,
|
||||
published: data.properties.published || now,
|
||||
createdAt: now,
|
||||
author: {
|
||||
name: profile?.name || handle,
|
||||
@@ -219,26 +240,15 @@ router.post("/api/v1/statuses", async (req, res, next) => {
|
||||
emojis: [],
|
||||
bot: false,
|
||||
},
|
||||
photo: [],
|
||||
video: [],
|
||||
audio: [],
|
||||
category: extractHashtags(statusText || ""),
|
||||
photo: data.properties.photo || [],
|
||||
video: data.properties.video || [],
|
||||
audio: data.properties.audio || [],
|
||||
category: data.properties.category || [],
|
||||
counts: { replies: 0, boosts: 0, likes: 0 },
|
||||
linkPreviews: [],
|
||||
mentions: [],
|
||||
emojis: [],
|
||||
};
|
||||
|
||||
// Insert into timeline
|
||||
const result = await collections.ap_timeline.insertOne(timelineItem);
|
||||
timelineItem._id = result.insertedId;
|
||||
|
||||
// Trigger federation asynchronously (don't block the response)
|
||||
if (pluginOptions.federation) {
|
||||
federatePost(timelineItem, pluginOptions).catch((err) => {
|
||||
console.error("[Mastodon API] Federation failed:", err.message);
|
||||
});
|
||||
}
|
||||
|
||||
// Serialize and return
|
||||
const serialized = serializeStatus(timelineItem, {
|
||||
@@ -256,6 +266,8 @@ router.post("/api/v1/statuses", async (req, res, next) => {
|
||||
});
|
||||
|
||||
// ─── DELETE /api/v1/statuses/:id ────────────────────────────────────────────
|
||||
// Deletes via Micropub pipeline (removes content file + MongoDB post) and
|
||||
// cleans up the ap_timeline entry.
|
||||
|
||||
router.delete("/api/v1/statuses/:id", async (req, res, next) => {
|
||||
try {
|
||||
@@ -264,18 +276,12 @@ router.delete("/api/v1/statuses/:id", async (req, res, next) => {
|
||||
return res.status(401).json({ error: "The access token is invalid" });
|
||||
}
|
||||
|
||||
const { application, publication } = req.app.locals;
|
||||
const { id } = req.params;
|
||||
const collections = req.app.locals.mastodonCollections;
|
||||
const baseUrl = `${req.protocol}://${req.get("host")}`;
|
||||
|
||||
let objectId;
|
||||
try {
|
||||
objectId = new ObjectId(id);
|
||||
} catch {
|
||||
return res.status(404).json({ error: "Record not found" });
|
||||
}
|
||||
|
||||
const item = await collections.ap_timeline.findOne({ _id: objectId });
|
||||
const item = await findTimelineItemById(collections.ap_timeline, id);
|
||||
if (!item) {
|
||||
return res.status(404).json({ error: "Record not found" });
|
||||
}
|
||||
@@ -296,6 +302,23 @@ router.delete("/api/v1/statuses/:id", async (req, res, next) => {
|
||||
});
|
||||
serialized.text = item.content?.text || "";
|
||||
|
||||
// Delete via Micropub pipeline (removes content file from store + MongoDB posts)
|
||||
const postUrl = item.uid || item.url;
|
||||
try {
|
||||
const { postData } = await import("@indiekit/endpoint-micropub/lib/post-data.js");
|
||||
const { postContent } = await import("@indiekit/endpoint-micropub/lib/post-content.js");
|
||||
|
||||
const existingPost = await postData.read(application, postUrl);
|
||||
if (existingPost) {
|
||||
const deletedData = await postData.delete(application, postUrl);
|
||||
await postContent.delete(publication, deletedData);
|
||||
console.info(`[Mastodon API] Deleted post via Micropub: ${postUrl}`);
|
||||
}
|
||||
} catch (err) {
|
||||
// Log but don't block — the post may not exist in Micropub (e.g. old pre-pipeline posts)
|
||||
console.warn(`[Mastodon API] Micropub delete failed for ${postUrl}: ${err.message}`);
|
||||
}
|
||||
|
||||
// Delete from timeline
|
||||
await collections.ap_timeline.deleteOne({ _id: objectId });
|
||||
|
||||
@@ -304,8 +327,6 @@ router.delete("/api/v1/statuses/:id", async (req, res, next) => {
|
||||
await collections.ap_interactions.deleteMany({ objectUrl: item.uid });
|
||||
}
|
||||
|
||||
// TODO: Broadcast Delete activity via federation
|
||||
|
||||
res.json(serialized);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
@@ -505,6 +526,31 @@ router.post("/api/v1/statuses/:id/unbookmark", async (req, res, next) => {
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Find a timeline item by cursor ID (published-based) or ObjectId (legacy).
|
||||
* Status IDs are now encodeCursor(published) — milliseconds since epoch.
|
||||
* Falls back to ObjectId lookup for backwards compatibility.
|
||||
*
|
||||
* @param {object} collection - ap_timeline collection
|
||||
* @param {string} id - Status ID from client
|
||||
* @returns {Promise<object|null>} Timeline document or null
|
||||
*/
|
||||
/**
 * Resolve a timeline document from a client-supplied status ID.
 * New-style IDs are published-date cursors (ms since epoch); legacy IDs are
 * raw ObjectId hex strings. Try the cursor form first, then fall back to an
 * ObjectId lookup for backwards compatibility.
 *
 * NOTE(review): the cursor path does an exact string match on `published`,
 * which assumes stored values round-trip through toISOString — confirm.
 *
 * @param {object} collection - ap_timeline collection
 * @param {string} id - Status ID from client
 * @returns {Promise<object|null>} Timeline document or null
 */
async function findTimelineItemById(collection, id) {
  // New-style IDs decode to a published ISO date string.
  const published = decodeCursor(id);
  if (published) {
    const match = await collection.findOne({ published });
    if (match) {
      return match;
    }
  }

  // Legacy path: treat the ID as an ObjectId hex string. An unparseable
  // ID (or a failed lookup) simply means "not found".
  try {
    return await collection.findOne({ _id: new ObjectId(id) });
  } catch {
    return null;
  }
}
|
||||
|
||||
/**
|
||||
* Resolve a timeline item from the :id param, plus common context.
|
||||
*/
|
||||
@@ -512,14 +558,7 @@ async function resolveStatusForInteraction(req) {
|
||||
const collections = req.app.locals.mastodonCollections;
|
||||
const baseUrl = `${req.protocol}://${req.get("host")}`;
|
||||
|
||||
let objectId;
|
||||
try {
|
||||
objectId = new ObjectId(req.params.id);
|
||||
} catch {
|
||||
return { item: null, collections, baseUrl };
|
||||
}
|
||||
|
||||
const item = await collections.ap_timeline.findOne({ _id: objectId });
|
||||
const item = await findTimelineItemById(collections.ap_timeline, req.params.id);
|
||||
return { item, collections, baseUrl };
|
||||
}
|
||||
|
||||
@@ -560,75 +599,4 @@ async function loadItemInteractions(collections, item) {
|
||||
return { favouritedIds, rebloggedIds, bookmarkedIds };
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert plain text to basic HTML (paragraphs + linkified URLs).
|
||||
*/
|
||||
/**
 * Convert plain text to basic HTML: blank-line-separated paragraphs,
 * single newlines as <br>, and bare http(s) URLs wrapped in anchors.
 *
 * Fix: URLs are linkified per line BEFORE <br> tags are inserted.
 * Previously the linkifier ran after the newline→<br> replacement, so a
 * URL at the start of a continuation line was preceded by ">" (from the
 * inserted "<br>") and the (?<![=">]) guard — meant to skip URLs already
 * inside markup — wrongly suppressed the link.
 *
 * @param {string} text - Plain text (may be empty or null)
 * @returns {string} HTML string, or "" for empty input
 */
function linkifyAndParagraph(text) {
  if (!text) return "";

  // Wrap bare URLs in anchors; the lookbehind skips URLs that already
  // appear inside markup (preceded by =, " or > within the same line).
  const linkifyLine = (line) =>
    line.replace(
      /(?<![=">])(https?:\/\/[^\s<"]+)/g,
      '<a href="$1">$1</a>',
    );

  return text
    .split(/\n\n+/)
    .filter(Boolean)
    .map((p) => `<p>${p.split("\n").map(linkifyLine).join("<br>")}</p>`)
    .join("");
}
|
||||
|
||||
/**
|
||||
* Extract #hashtags from text content.
|
||||
*/
|
||||
/**
 * Extract unique #hashtags from text content, in order of first occurrence.
 *
 * @param {string} text - Plain text (may be empty or null)
 * @returns {string[]} Hashtag names without the leading "#"
 */
function extractHashtags(text) {
  if (!text) return [];
  const unique = new Set();
  for (const match of text.matchAll(/#(\w+)/g)) {
    unique.add(match[1]);
  }
  return [...unique];
}
|
||||
|
||||
/**
|
||||
* Federate a newly created post via ActivityPub.
|
||||
* Runs asynchronously — errors logged, don't block API response.
|
||||
*/
|
||||
/**
 * Federate a newly created post via ActivityPub.
 * Runs asynchronously — the caller catches and logs errors so the API
 * response is never blocked on delivery.
 *
 * @param {object} item - Timeline item to federate (url, content, summary, …)
 * @param {object} pluginOptions - Plugin options (handle, publicationUrl, federation)
 */
async function federatePost(item, pluginOptions) {
  const { jf2ToAS2Activity } = await import("../../jf2-to-as2.js");

  const federation = pluginOptions.federation;
  const publicationUrl = pluginOptions.publicationUrl;
  const handle = pluginOptions.handle || "user";
  const actorUrl = `${publicationUrl.replace(/\/$/, "")}/users/${handle}`;

  const ctx = federation.createContext(new URL(publicationUrl), {
    handle,
    publicationUrl,
  });

  // JF2 properties mirroring the timeline item; empty summary/reply
  // fields become undefined so they are omitted from the activity.
  const jf2Properties = {
    "post-type": "note",
    url: item.url,
    content: item.content,
    summary: item.summary || undefined,
    "in-reply-to": item.inReplyTo || undefined,
    category: item.category,
    visibility: item.visibility,
  };

  const activity = jf2ToAS2Activity(jf2Properties, actorUrl, publicationUrl, {
    visibility: item.visibility,
  });
  if (!activity) {
    return;
  }

  await ctx.sendActivity({ identifier: handle }, "followers", activity, {
    preferSharedInbox: true,
  });
  console.info(`[Mastodon API] Federated post: ${item.url}`);
}
|
||||
|
||||
export default router;
|
||||
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@rmdes/indiekit-endpoint-activitypub",
|
||||
"version": "3.5.9",
|
||||
"version": "3.6.0",
|
||||
"description": "ActivityPub federation endpoint for Indiekit via Fedify. Adds full fediverse support: actor, inbox, outbox, followers, following, syndication, and Mastodon migration.",
|
||||
"keywords": [
|
||||
"indiekit",
|
||||
|
||||
Reference in New Issue
Block a user