diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 537af50..7ec7e4a 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -159,6 +159,7 @@ jobs:
           GITEA_ORG: giersig.eu
           OG_CACHE_DIR: /usr/local/git/.cache/og
           UNFURL_CACHE_DIR: /usr/local/git/.cache/unfurl
+          UNFURL_TIMEOUT_MS: 5000
 
       - name: Save eleventy-fetch cache
         run: |
diff --git a/lib/unfurl-shortcode.js b/lib/unfurl-shortcode.js
index 7ceeb0b..958449f 100644
--- a/lib/unfurl-shortcode.js
+++ b/lib/unfurl-shortcode.js
@@ -7,6 +7,9 @@ const CACHE_DIR = process.env.UNFURL_CACHE_DIR || resolve(import.meta.dirname, "
 const CACHE_DURATION_MS = 7 * 24 * 60 * 60 * 1000; // 1 week
 const FAILURE_CACHE_MS = 7 * 24 * 60 * 60 * 1000; // 1 week — YouTube/bot-blocked sites never succeed, no point retrying daily
 const USER_AGENT = "Mozilla/5.0 (compatible; Indiekit/1.0; +https://getindiekit.com)";
+// CI sets UNFURL_TIMEOUT_MS=5000 to fail fast on uncached URLs rather than hanging 22s each
+const UNFURL_TIMEOUT_MS = Number.parseInt(process.env.UNFURL_TIMEOUT_MS ?? "", 10) || 22000;
+const UNFURL_HTTP_TIMEOUT_MS = Math.max(1000, UNFURL_TIMEOUT_MS - 4000);
 
 // Concurrency limiter — prevents overwhelming outbound network
 let activeRequests = 0;
@@ -135,10 +138,10 @@ export async function prefetchUrl(url) {
     // Hard outer deadline guards against TCP-level hangs that bypass unfurl's
     // own timeout (which only covers HTTP read, not connection establishment).
     const deadline = new Promise((_, reject) =>
-      setTimeout(() => reject(new Error("hard deadline 22s")), 22000)
+      setTimeout(() => reject(new Error(`hard deadline ${UNFURL_TIMEOUT_MS}ms`)), UNFURL_TIMEOUT_MS)
     );
     return await Promise.race([
-      unfurl(url, { timeout: 18000, headers: { "User-Agent": USER_AGENT } }),
+      unfurl(url, { timeout: UNFURL_HTTP_TIMEOUT_MS, headers: { "User-Agent": USER_AGENT } }),
       deadline,
     ]);
   } catch (err) {