fix: parse CSV client-side to avoid 413 payload too large

Express's app-level body parser enforces its 100KB default limit
before any route-level override can run, so a ~3K-line CSV at
~113KB is rejected with 413. Instead of sending the raw CSV, the
client now extracts the handles (first CSV column only) and sends
just that array — typically under 90KB for 3,000 accounts.
This commit is contained in:
Ricardo
2026-02-19 10:41:20 +01:00
parent d159e79998
commit c522989d38
4 changed files with 40 additions and 21 deletions
+1 -1
View File
@@ -180,7 +180,7 @@ export default class ActivityPubEndpoint {
router.get("/admin/activities", activitiesController(mp)); router.get("/admin/activities", activitiesController(mp));
router.get("/admin/migrate", migrateGetController(mp, this.options)); router.get("/admin/migrate", migrateGetController(mp, this.options));
router.post("/admin/migrate", migratePostController(mp, this.options)); router.post("/admin/migrate", migratePostController(mp, this.options));
router.post("/admin/migrate/import", express.json({ limit: "5mb" }), migrateImportController(mp, this.options)); router.post("/admin/migrate/import", migrateImportController(mp, this.options));
return router; return router;
} }
+9 -12
View File
@@ -7,8 +7,6 @@
*/ */
import { import {
parseMastodonFollowingCsv,
parseMastodonFollowersList,
bulkImportFollowing, bulkImportFollowing,
bulkImportFollowers, bulkImportFollowers,
} from "../migration.js"; } from "../migration.js";
@@ -56,19 +54,20 @@ export function migratePostController(mountPath, pluginOptions) {
} }
/** /**
* JSON endpoint for CSV import — receives { csvContent, importTypes } * JSON endpoint for import — receives { handles, importTypes }.
* via fetch() to bypass Express's app-level urlencoded body size limit. * CSV is parsed client-side to extract handles only, keeping the
* JSON payload small enough for Express's default body parser limit.
*/ */
export function migrateImportController(mountPath, pluginOptions) { export function migrateImportController(mountPath, pluginOptions) {
return async (request, response, next) => { return async (request, response, next) => {
try { try {
const { application } = request.app.locals; const { application } = request.app.locals;
const { csvContent, importTypes } = request.body; const { handles, importTypes } = request.body;
if (!csvContent?.trim()) { if (!Array.isArray(handles) || handles.length === 0) {
return response.status(400).json({ return response.status(400).json({
type: "error", type: "error",
text: "No CSV content provided.", text: "No handles provided.",
}); });
} }
@@ -84,15 +83,13 @@ export function migrateImportController(mountPath, pluginOptions) {
let followersResult = { imported: 0, failed: 0, errors: [] }; let followersResult = { imported: 0, failed: 0, errors: [] };
if (importFollowing && followingCollection) { if (importFollowing && followingCollection) {
const handles = parseMastodonFollowingCsv(csvContent); console.log(`[ActivityPub] Migration: importing ${handles.length} following handles`);
console.log(`[ActivityPub] Migration: parsed ${handles.length} following handles from CSV`);
followingResult = await bulkImportFollowing(handles, followingCollection); followingResult = await bulkImportFollowing(handles, followingCollection);
} }
if (importFollowers && followersCollection) { if (importFollowers && followersCollection) {
const entries = parseMastodonFollowersList(csvContent); console.log(`[ActivityPub] Migration: importing ${handles.length} follower entries`);
console.log(`[ActivityPub] Migration: parsed ${entries.length} follower entries from CSV`); followersResult = await bulkImportFollowers(handles, followersCollection);
followersResult = await bulkImportFollowers(entries, followersCollection);
} }
const totalFailed = followingResult.failed + followersResult.failed; const totalFailed = followingResult.failed + followersResult.failed;
+1 -1
View File
@@ -1,6 +1,6 @@
{ {
"name": "@rmdes/indiekit-endpoint-activitypub", "name": "@rmdes/indiekit-endpoint-activitypub",
"version": "0.1.9", "version": "0.1.10",
"description": "ActivityPub federation endpoint for Indiekit via Fedify. Adds full fediverse support: actor, inbox, outbox, followers, following, syndication, and Mastodon migration.", "description": "ActivityPub federation endpoint for Indiekit via Fedify. Adds full fediverse support: actor, inbox, outbox, followers, following, syndication, and Mastodon migration.",
"keywords": [ "keywords": [
"indiekit", "indiekit",
+29 -7
View File
@@ -76,7 +76,7 @@
@change="readFile($event)"> @change="readFile($event)">
<template x-if="fileName"> <template x-if="fileName">
<p class="hint" style="margin-top: 0.5em"> <p class="hint" style="margin-top: 0.5em">
<strong x-text="fileName"></strong> — <span x-text="lineCount + ' lines'"></span> <strong x-text="fileName"></strong> — <span x-text="handles.length + ' accounts found'"></span>
</p> </p>
</template> </template>
<template x-if="fileError"> <template x-if="fileError">
@@ -85,7 +85,7 @@
</div> </div>
<button class="button" type="button" <button class="button" type="button"
:disabled="importing || !csvContent" :disabled="importing || handles.length === 0"
@click="startImport()"> @click="startImport()">
<span x-show="!importing">{{ __("activitypub.migrate.importButton") }}</span> <span x-show="!importing">{{ __("activitypub.migrate.importButton") }}</span>
<span x-show="importing" x-text="statusText"></span> <span x-show="importing" x-text="statusText"></span>
@@ -119,7 +119,7 @@
<script> <script>
function csvImport(mountPath) { function csvImport(mountPath) {
return { return {
csvContent: '', handles: [],
fileName: '', fileName: '',
lineCount: 0, lineCount: 0,
fileError: '', fileError: '',
@@ -129,9 +129,14 @@
resultText: '', resultText: '',
resultErrors: [], resultErrors: [],
/**
* Parse CSV client-side — extract handles (first column) only.
* This keeps the JSON payload small (handles only, no raw CSV),
* avoiding Express's default 100KB body parser limit.
*/
readFile(event) { readFile(event) {
var self = this; var self = this;
self.csvContent = ''; self.handles = [];
self.fileName = ''; self.fileName = '';
self.lineCount = 0; self.lineCount = 0;
self.fileError = ''; self.fileError = '';
@@ -151,9 +156,19 @@
var reader = new FileReader(); var reader = new FileReader();
reader.onload = function(e) { reader.onload = function(e) {
var text = e.target.result; var text = e.target.result;
self.csvContent = text; var lines = text.split('\n').filter(function(l) { return l.trim(); });
self.fileName = file.name; self.fileName = file.name;
self.lineCount = text.split('\n').filter(function(l) { return l.trim(); }).length; self.lineCount = lines.length;
// Extract handles: skip header, take first CSV column, keep only valid handles
var parsed = [];
for (var i = 1; i < lines.length; i++) {
var handle = lines[i].split(',')[0].trim();
if (handle && handle.indexOf('@') !== -1) {
parsed.push(handle);
}
}
self.handles = parsed;
}; };
reader.onerror = function() { reader.onerror = function() {
self.fileError = 'Could not read file'; self.fileError = 'Could not read file';
@@ -183,12 +198,19 @@
return; return;
} }
if (self.handles.length === 0) {
self.importing = false;
self.resultType = 'error';
self.resultText = 'No valid handles found in the CSV file.';
return;
}
try { try {
var res = await fetch(mountPath + '/admin/migrate/import', { var res = await fetch(mountPath + '/admin/migrate/import', {
method: 'POST', method: 'POST',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ body: JSON.stringify({
csvContent: self.csvContent, handles: self.handles,
importTypes: importTypes importTypes: importTypes
}) })
}); });