aboutsummaryrefslogtreecommitdiff
path: root/apps/proxy/src/index.js
diff options
context:
space:
mode:
authorFuwn <[email protected]>2026-03-27 08:28:30 +0000
committerFuwn <[email protected]>2026-03-27 08:45:37 +0000
commit7e447fd8f478fd3f980f9b44ace29abc7fdffb04 (patch)
tree31cd06a778ece94b14590ecef88c9f08ac456732 /apps/proxy/src/index.js
parentchore(apps): Remove placeholder README (diff)
downloaddue.moe-7e447fd8f478fd3f980f9b44ace29abc7fdffb04.tar.xz
due.moe-7e447fd8f478fd3f980f9b44ace29abc7fdffb04.zip
refactor(proxy): move manga chapter counts behind indexed cache
Diffstat (limited to 'apps/proxy/src/index.js')
-rw-r--r--apps/proxy/src/index.js390
1 file changed, 301 insertions, 89 deletions
diff --git a/apps/proxy/src/index.js b/apps/proxy/src/index.js
index f2a37111..f90adfa7 100644
--- a/apps/proxy/src/index.js
+++ b/apps/proxy/src/index.js
@@ -1,113 +1,325 @@
-const handleRequest = async (request) => {
+import { bootstrapManga, syncMangadexIndex } from "./mangadex.js";
+import {
+ deleteMangadexFailureRows,
+ getMangadexFailureRowsByAniListIds,
+ getMangadexRowsByAniListIds,
+ hasSupabaseConfig,
+ upsertMangadexRows,
+ upsertMangadexFailureRows,
+} from "./supabase.js";
+
+const DEFAULT_ALLOWED_ORIGIN = "https://due.moe";
+const DEFAULT_BOOTSTRAP_RETRY_MINUTES = 360;
+const DEFAULT_PENDING_RETRY_MS = 750;
+const bootstrapInFlight = new Map();
+
+// True when the hostname is loopback, `.local`, or an RFC 1918 private range
+// (10/8, 192.168/16, 172.16/12) — used to relax CORS and sync auth locally.
+const isPrivateHostname = (hostname) =>
+ hostname === "localhost" ||
+ hostname === "127.0.0.1" ||
+ hostname.endsWith(".local") ||
+ /^10\./.test(hostname) ||
+ /^192\.168\./.test(hostname) ||
+ /^172\.(1[6-9]|2\d|3[0-1])\./.test(hostname);
+
+// Resolves the value for Access-Control-Allow-Origin: echoes the request's
+// Origin when it is due.moe, a *.due.moe subdomain, or a private/local host;
+// otherwise (including a missing or unparsable Origin) falls back to
+// DEFAULT_ALLOWED_ORIGIN.
+const accessControlOrigin = (request) => {
+ const origin = request.headers.get("Origin");
+
+ if (!origin) return DEFAULT_ALLOWED_ORIGIN;
+
 try {
- const url = new URL(request.url);
- let query;
- let dropHeaders = false;
+ const url = new URL(origin);
- if (url.search.includes('&dh')) {
- url.search = url.search.replace('&dh', '');
- dropHeaders = true;
- }
+ if (
+ url.hostname === "due.moe" ||
+ url.hostname.endsWith(".due.moe") ||
+ isPrivateHostname(url.hostname)
+ )
+ return origin;
+ // A malformed Origin header falls through to the default below.
+ } catch {}
- if (url.search.includes('?q=')) {
- query = url.search.split('?q=')[1];
- } else if (url.search.includes('?d=')) {
- query = atob(url.search.split('?d=')[1]);
- } else if (url.search.includes('?d2=')) {
- const fullEncodedURL = url.search.split('?d2=')[1];
- const key = parseInt(fullEncodedURL.slice(-2));
-
- query = atob(fullEncodedURL.slice(0, -2))
- .split(':')
- .map((char) => String.fromCharCode(char - key))
- .join('');
- } else {
- return new Response(null, {
- status: 400,
- statusText: 'Bad Request',
- });
- }
+ return DEFAULT_ALLOWED_ORIGIN;
+};
- request = new Request(query, request);
+// Sets the CORS response headers on `headers` (a new Headers when omitted)
+// and returns it. `Vary: Origin` is appended because the allowed origin is
+// computed per-request by accessControlOrigin.
+const appendCorsHeaders = (request, headers = new Headers()) => {
+ headers.set("Access-Control-Allow-Origin", accessControlOrigin(request));
+ headers.set("Access-Control-Allow-Methods", "GET, HEAD, POST, OPTIONS");
+ headers.set("Access-Control-Allow-Headers", "Authorization, Content-Type");
+ headers.append("Vary", "Origin");
- request.headers.set('Host', new URL(query).origin);
- request.headers.set('Referrer', new URL(query));
- request.headers.set('Origin', new URL(query));
- request.headers.set('Access-Control-Allow-Credentials', 'true');
- request.headers.delete('X-Content-Type-Options');
+ return headers;
+};
- let response = await fetch(request);
+// Builds a JSON Response with CORS headers applied; `init` (status, etc.) is
+// forwarded to the Response constructor, with its headers merged in first.
+const jsonResponse = (request, body, init = {}) => {
+ const headers = appendCorsHeaders(request, new Headers(init.headers));
- let response = await fetch(request);
+ headers.set("Content-Type", "application/json");
- response = new Response(response.body, response);
+ return new Response(JSON.stringify(body), {
+ ...init,
+ headers,
+ });
+};
- response.headers.set('Access-Control-Allow-Origin', 'https://due.moe');
- response.headers.append('Vary', 'Origin');
- response.headers.set('Cache-Control', 'max-age=300');
+// Builds a plain (non-JSON) Response with CORS headers; body may be null for
+// status-only replies such as 400/405.
+const textResponse = (request, body, init = {}) =>
+ new Response(body, {
+ ...init,
+ headers: appendCorsHeaders(request, new Headers(init.headers)),
+ });
- return response;
- } catch {
- return new Response(null, {
- status: 400,
- statusText: 'Bad Request',
- });
+// Extracts the proxied target URL from the query string. Three encodings:
+// `?q=` plain URL, `?d=` base64, `?d2=` base64 of colon-separated char codes
+// shifted by a two-digit key appended to the payload. Returns null when none
+// of the markers is present.
+const decodeProxyTarget = (url) => {
+ if (url.search.includes("?q=")) return url.search.split("?q=")[1];
+ if (url.search.includes("?d=")) return atob(url.search.split("?d=")[1]);
+ if (url.search.includes("?d2=")) {
+ const fullEncodedUrl = url.search.split("?d2=")[1];
+ // Last two characters carry the numeric shift key.
+ const key = Number.parseInt(fullEncodedUrl.slice(-2), 10);
+
+ return atob(fullEncodedUrl.slice(0, -2))
+ .split(":")
+ .map((char) => String.fromCharCode(Number(char) - key))
+ .join("");
 }
+
+ return null;
};
-const handleOptions = async (request) => {
- if (
- request.headers.get('Origin') !== null &&
- request.headers.get('Access-Control-Request-Method') !== null &&
- request.headers.get('Access-Control-Request-Headers') !== null
- ) {
- return new Response(null, {
- headers: {
- 'Access-Control-Allow-Origin': '*',
- 'Access-Control-Allow-Methods': 'GET, HEAD, POST, OPTIONS',
- 'Access-Control-Allow-Headers': '*',
- },
+// Core pass-through proxy: decodes the target from the query string, forwards
+// the request with rewritten Host/Referrer/Origin headers, then re-wraps the
+// upstream response so its headers are mutable. The `&dh` flag strips every
+// upstream response header before CORS headers are re-applied.
+const forwardProxyRequest = async (request) => {
+ const url = new URL(request.url);
+ const dropHeaders = url.search.includes("&dh");
+
+ if (dropHeaders) url.search = url.search.replace("&dh", "");
+
+ const target = decodeProxyTarget(url);
+
+ if (!target)
+ return textResponse(request, null, {
+ status: 400,
+ statusText: "Bad Request",
 });
- } else {
- return new Response(null, {
- headers: {
- Allow: 'GET, HEAD, POST, OPTIONS',
+
+ const targetUrl = new URL(target);
+ const proxiedRequest = new Request(target, request);
+
+ proxiedRequest.headers.set("Host", targetUrl.hostname);
+ // NOTE(review): header name is "Referrer" — the standard HTTP header is
+ // spelled "Referer"; carried over from the previous implementation.
+ proxiedRequest.headers.set("Referrer", targetUrl.toString());
+ proxiedRequest.headers.set("Origin", targetUrl.origin);
+ proxiedRequest.headers.delete("X-Content-Type-Options");
+
+ let response = await fetch(proxiedRequest);
+
+ // Clone into a fresh Response: fetch() responses have immutable headers.
+ response = new Response(response.body, response);
+
+ if (dropHeaders)
+ // Snapshot keys first — deleting while iterating a live Headers is unsafe.
+ for (const key of [...response.headers.keys()])
+ response.headers.delete(key);
+
+ appendCorsHeaders(request, response.headers);
+ response.headers.set("Cache-Control", "max-age=300");
+
+ return response;
+};
+
+// CORS preflight: an empty 200 carrying only the CORS headers.
+const handleOptions = (request) =>
+ new Response(null, {
+ headers: appendCorsHeaders(request),
+ });
+
+// Detects the benign unique-constraint violation raised when two concurrent
+// bootstraps upsert the same MangaDex id; matched by constraint name.
+const isMangadexIdConstraintConflict = (error) =>
+ error instanceof Error &&
+ error.message.includes("mangadex_manga_index_mangadex_id_key");
+
+// Parses and normalises the POST body's `manga` array. Malformed JSON or a
+// missing/non-array `manga` yields []. Entries without a finite numeric
+// anilistId are dropped; optional fields default to null / "".
+const parseMangaPayload = async (request) => {
+ const body = await request.json().catch(() => null);
+ const manga = Array.isArray(body?.manga) ? body.manga : [];
+
+ return manga
+ .map((entry) => ({
+ anilistId: Number(entry?.anilistId),
+ status: String(entry?.status || ""),
+ startYear: entry?.startYear ? Number(entry.startYear) : null,
+ nativeTitle: entry?.nativeTitle || null,
+ englishTitle: entry?.englishTitle || null,
+ romajiTitle: entry?.romajiTitle || null,
+ }))
+ .filter((entry) => Number.isFinite(entry.anilistId));
+};
+
+// Minutes to wait before retrying a failed bootstrap; read from the
+// MANGADEX_BOOTSTRAP_RETRY_MINUTES env var, defaulting to 360 when unset,
+// non-numeric, or not strictly positive.
+const bootstrapRetryMinutes = (env) => {
+ const minutes = Number.parseInt(env.MANGADEX_BOOTSTRAP_RETRY_MINUTES || "", 10);
+
+ return Number.isFinite(minutes) && minutes > 0
+ ? minutes
+ : DEFAULT_BOOTSTRAP_RETRY_MINUTES;
+};
+
+// Milliseconds the client should wait before re-polling pending chapter
+// counts; from MANGA_CHAPTER_COUNTS_RETRY_MS, defaulting to 750 when unset,
+// non-numeric, or not strictly positive.
+const pendingRetryMs = (env) => {
+ const milliseconds = Number.parseInt(env.MANGA_CHAPTER_COUNTS_RETRY_MS || "", 10);
+
+ return Number.isFinite(milliseconds) && milliseconds > 0
+ ? milliseconds
+ : DEFAULT_PENDING_RETRY_MS;
+};
+
+// True when the failure row's last attempt (falling back to updated_at) is
+// within the retry window, i.e. the entry should not be re-bootstrapped yet.
+const isRecentFailure = (row, retryMinutes) =>
+ Date.now() - new Date(row.last_attempted_at || row.updated_at).getTime() <
+ retryMinutes * 60 * 1000;
+
+// Starts (or reuses) one bootstrap promise per entry, deduplicated through
+// the module-level bootstrapInFlight map so concurrent requests for the same
+// AniList id share a single in-flight bootstrap. On success the row is
+// upserted and any failure marker deleted; on a null result a failure row is
+// recorded instead. Returns the array of in-flight promises.
+const queueBootstrap = (env, manga) =>
+ manga
+ .map((entry) => {
+ const existing = bootstrapInFlight.get(entry.anilistId);
+
+ if (existing) return existing;
+
+ const promise = (async () => {
+ const row = await bootstrapManga(env, entry);
+
+ if (row) {
+ try {
+ await upsertMangadexRows(env, [row]);
+ } catch (error) {
+ // A concurrent bootstrap already inserted this MangaDex id.
+ if (!isMangadexIdConstraintConflict(error)) throw error;
+ }
+
+ await deleteMangadexFailureRows(env, [row.anilist_id]);
+
+ return;
+ }
+
+ await upsertMangadexFailureRows(env, [entry.anilistId]);
+ })().finally(() => {
+ // Always clear the de-dup slot, even when the bootstrap rejected.
+ bootstrapInFlight.delete(entry.anilistId);
+ });
+
+ bootstrapInFlight.set(entry.anilistId, promise);
+
+ return promise;
+ })
+ .filter(Boolean);
+
+// POST /manga/chapter-counts — looks up cached MangaDex chapter counts for
+// the requested AniList ids. Ids not yet indexed (and not recently failed)
+// are bootstrapped in the background via ctx.waitUntil; the response lists
+// them under `pending` with a `retryAfterMs` hint so the client can re-poll.
+const handleMangaChapterCounts = async (request, env, ctx) => {
+ if (!hasSupabaseConfig(env))
+ return jsonResponse(
+ request,
+ { error: "Supabase is not configured for the proxy worker." },
+ { status: 500 },
+ );
+
+ const manga = await parseMangaPayload(request);
+
+ if (!manga.length) return jsonResponse(request, { data: {} });
+
+ const anilistIds = manga.map((entry) => entry.anilistId);
+ const [existingRows, failureRows] = await Promise.all([
+ getMangadexRowsByAniListIds(env, anilistIds),
+ getMangadexFailureRowsByAniListIds(env, anilistIds),
+ ]);
+ const existingIds = new Set(existingRows.map((row) => row.anilist_id));
+ // Failures inside the retry window are skipped entirely (neither queued
+ // nor reported as pending).
+ const recentFailures = new Set(
+ failureRows
+ .filter((row) => isRecentFailure(row, bootstrapRetryMinutes(env)))
+ .map((row) => row.anilist_id),
+ );
+ const missingRows = manga.filter(
+ (entry) =>
+ !existingIds.has(entry.anilistId) && !recentFailures.has(entry.anilistId),
+ );
+ const pendingRows = missingRows.filter((entry) =>
+ bootstrapInFlight.has(entry.anilistId),
+ );
+ const queueableRows = missingRows.filter(
+ (entry) => !bootstrapInFlight.has(entry.anilistId),
+ );
+
+ if (queueableRows.length)
+ ctx.waitUntil(
+ Promise.all(queueBootstrap(env, queueableRows)).catch((error) => {
+ // Duplicate-id conflicts are expected under concurrency; rethrow
+ // anything else so waitUntil surfaces it.
+ if (!isMangadexIdConstraintConflict(error)) throw error;
+ }),
+ );
+
+ // Map of anilistId -> { chapter, volumeText? }; volumeText is omitted
+ // (not null) when absent.
+ const data = Object.fromEntries(
+ existingRows.map((row) => [
+ String(row.anilist_id),
+ {
+ chapter: row.latest_en_chapter_number,
+ ...(row.latest_en_volume_text === null
+ ? {}
+ : { volumeText: row.latest_en_volume_text }),
 },
- });
- }
+ ]),
+ );
+ const pending = [...new Set([...pendingRows, ...queueableRows].map((entry) => entry.anilistId))];
+
+ return jsonResponse(request, {
+ data,
+ ...(pending.length
+ ? {
+ pending,
+ retryAfterMs: pendingRetryMs(env),
+ }
+ : {}),
+ });
};
-addEventListener('fetch', (event) => {
- const request = event.request;
+// Authorises /manga/sync: requires `Authorization: Bearer <token>` matching
+// MANGADEX_SYNC_TOKEN; with no token configured, only private/local hosts
+// may trigger a sync. NOTE(review): the token comparison uses ===, which is
+// not timing-safe — acceptable here only if the token is high-entropy.
+const isAuthorisedSyncRequest = (request, env) => {
+ const token = env.MANGADEX_SYNC_TOKEN;
- try {
- switch (request.method) {
- case 'OPTIONS':
- event.respondWith(handleOptions(request));
+ if (!token) return isPrivateHostname(new URL(request.url).hostname);
+
+ return request.headers.get("Authorization") === `Bearer ${token}`;
+};
+
+// POST /manga/sync — guarded full re-sync of the MangaDex index; returns the
+// sync result as JSON, 403 when unauthorised, 500 when Supabase is missing.
+const handleMangaSync = async (request, env) => {
+ if (!hasSupabaseConfig(env))
+ return jsonResponse(
+ request,
+ { error: "Supabase is not configured for the proxy worker." },
+ { status: 500 },
+ );
+
+ if (!isAuthorisedSyncRequest(request, env))
+ return jsonResponse(request, { error: "Forbidden" }, { status: 403 });
+
+ const result = await syncMangadexIndex(env);
+
+ return jsonResponse(request, { data: result });
+};
+
+// Worker entry points (ES-module format, replacing the old addEventListener
+// style): `fetch` routes preflight, the two /manga endpoints, and otherwise
+// falls through to the generic proxy; `scheduled` runs the periodic index
+// sync. Any thrown error is reported as a 400 JSON body.
+export default {
+ async fetch(request, env, ctx) {
+ try {
+ const url = new URL(request.url);
- break;
+ if (request.method === "OPTIONS") return handleOptions(request);
- case 'GET':
- case 'HEAD':
- case 'POST':
- event.respondWith(handleRequest(request));
+ if (
+ url.pathname === "/manga/chapter-counts" &&
+ request.method === "POST"
+ )
+ return handleMangaChapterCounts(request, env, ctx);
- break;
+ if (url.pathname === "/manga/sync" && request.method === "POST")
+ return handleMangaSync(request, env);
- default:
- event.respondWith(async () => {
- return new Response(null, {
- status: 405,
- statusText: 'Method Not Allowed',
- });
- });
+ if (["GET", "HEAD", "POST"].includes(request.method))
+ return forwardProxyRequest(request);
- break;
+ return textResponse(request, null, {
+ status: 405,
+ statusText: "Method Not Allowed",
+ });
+ } catch (error) {
+ // NOTE(review): upstream/handler failures are mapped to 400 even when a
+ // 5xx might be more accurate; matches the previous catch-all behavior.
+ return jsonResponse(
+ request,
+ { error: error instanceof Error ? error.message : "Bad Request" },
+ { status: 400 },
+ );
 }
- } catch {
- return new Response(null, {
- status: 400,
- statusText: 'Bad Request',
- });
- }
-});
+ },
+
+ async scheduled(_controller, env, ctx) {
+ // Cron trigger: skip silently when Supabase is not configured.
+ if (!hasSupabaseConfig(env)) return;
+
+ ctx.waitUntil(syncMangadexIndex(env));
+ },
+};