add lazyload and move readme to func, add cache for readme and css
All checks were successful
Code quality checks / biome (push) Successful in 10s
parent f6bda95f02
commit 10416dbff0
5 changed files with 116 additions and 112 deletions
The SteamGridDB icon route:

```diff
@@ -5,6 +5,7 @@ const routeDef: RouteDef = {
 	method: "GET",
 	accepts: "*/*",
 	returns: "application/json",
 	log: false,
 };
 
 async function fetchSteamGridIcon(gameName: string): Promise<string | null> {
@@ -53,7 +54,10 @@ async function fetchSteamGridIcon(gameName: string): Promise<string | null> {
 async function handler(request: ExtendedRequest): Promise<Response> {
 	if (!steamGridDbKey) {
 		return Response.json(
-			{ status: 503, error: "Route disabled due to missing SteamGridDB key" },
+			{
+				status: 503,
+				error: "Route disabled due to missing SteamGridDB key",
+			},
+			{ status: 503 },
 		);
 	}
```
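The reshaped call matters because `Response.json(body)` alone replies with HTTP 200: a `status` field inside the JSON payload is just data. A minimal standalone sketch (not code from this commit) of the difference:

```ts
// The second ResponseInit argument is what sets the real HTTP status.
const before = Response.json({ status: 503, error: "disabled" });
console.log(before.status); // 200 — "status" in the body is only payload

const after = Response.json(
	{ status: 503, error: "disabled" },
	{ status: 503 }, // ResponseInit, as added in this commit
);
console.log(after.status); // 503
```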
The CSS proxy route — the fetch, size check, and sanitization move into a cached helper:

```diff
@@ -1,4 +1,6 @@
+import { redisTtl } from "@config/environment";
 import { fetch } from "bun";
+import { redis } from "bun";
 
 const routeDef: RouteDef = {
 	method: "GET",
@@ -7,6 +9,37 @@ const routeDef: RouteDef = {
 	log: false,
 };
 
+async function fetchAndCacheCss(url: string): Promise<string | null> {
+	const cacheKey = `css:${url}`;
+	const cached = await redis.get(cacheKey);
+	if (cached) return cached;
+
+	const res = await fetch(url, {
+		headers: {
+			Accept: "text/css",
+		},
+	});
+
+	if (!res.ok) return null;
+
+	if (res.headers.has("content-length")) {
+		const size = Number.parseInt(res.headers.get("content-length") || "0", 10);
+		if (size > 1024 * 50) return null;
+	}
+
+	const text = await res.text();
+	if (!text || text.length < 5) return null;
+
+	const sanitized = text
+		.replace(/<script[\s\S]*?>[\s\S]*?<\/script>/gi, "")
+		.replace(/@import\s+url\(['"]?(.*?)['"]?\);?/gi, "");
+
+	await redis.set(cacheKey, sanitized);
+	await redis.expire(cacheKey, redisTtl);
+
+	return sanitized;
+}
+
 async function handler(request: ExtendedRequest): Promise<Response> {
 	const { url } = request.query;
 
@@ -23,59 +56,21 @@ async function handler(request: ExtendedRequest): Promise<Response> {
 		);
 	}
 
-	const res = await fetch(url, {
-		headers: {
-			Accept: "text/css",
-		},
-	});
+	const sanitized = await fetchAndCacheCss(url);
 
-	if (!res.ok) {
+	if (!sanitized) {
 		return Response.json(
 			{
 				success: false,
 				error: {
 					code: "FETCH_FAILED",
-					message: "Failed to fetch CSS file",
+					message: "Failed to fetch or sanitize CSS",
 				},
 			},
 			{ status: 400 },
 		);
 	}
 
-	if (res.headers.has("content-length")) {
-		const size = Number.parseInt(res.headers.get("content-length") || "0", 10);
-		if (size > 1024 * 50) {
-			return Response.json(
-				{
-					success: false,
-					error: {
-						code: "FILE_TOO_LARGE",
-						message: "CSS file exceeds 50KB limit",
-					},
-				},
-				{ status: 400 },
-			);
-		}
-	}
-
-	const text = await res.text();
-	if (!text || text.length < 5) {
-		return Response.json(
-			{
-				success: false,
-				error: {
-					code: "INVALID_CONTENT",
-					message: "CSS content is too small or invalid",
-				},
-			},
-			{ status: 400 },
-		);
-	}
-
-	const sanitized = text
-		.replace(/<script[\s\S]*?>[\s\S]*?<\/script>/gi, "")
-		.replace(/@import\s+url\(['"]?(.*?)['"]?\);?/gi, "");
-
 	return new Response(sanitized, {
 		headers: {
 			"Content-Type": "text/css",
```
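Both new helpers, `fetchAndCacheCss` here and `fetchAndCacheReadme` below, follow the same cache-aside shape: try Redis, fall back to the network, validate and sanitize, store the result, and bound it with a TTL. A condensed sketch of that pattern; the `redisTtl` constant and the `produce` callback are illustrative assumptions, not code from the commit:

```ts
import { redis } from "bun";

const redisTtl = 3600; // assumed TTL in seconds; the real value comes from @config/environment

// Generic cache-aside helper mirroring the commit's fetchAndCache* flow.
async function cacheAside(
	key: string,
	produce: () => Promise<string | null>,
): Promise<string | null> {
	const hit = await redis.get(key);
	if (hit) return hit; // cache hit: skip the upstream fetch entirely

	const value = await produce(); // fetch + validate + sanitize upstream content
	if (value) {
		await redis.set(key, value); // store the already-sanitized result
		await redis.expire(key, redisTtl); // bound staleness with a TTL
	}
	return value;
}

// Hypothetical usage: the first call populates Redis, repeats hit the cache.
const css = await cacheAside("css:https://example.com/a.css", async () => "body{margin:0}");
```

Note that failures return `null` and are never cached, so a broken upstream URL is re-fetched on every request, while successes are stored post-sanitization and the regex work runs at most once per TTL window.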
The readme route — the same refactor, plus the new lazy-loading rewrite:

```diff
@@ -1,5 +1,8 @@
+import { redisTtl } from "@config/environment";
 import { fetch } from "bun";
+import { redis } from "bun";
 import DOMPurify from "isomorphic-dompurify";
+import { parseHTML } from "linkedom";
 import { marked } from "marked";
 
 const routeDef: RouteDef = {
@@ -9,6 +12,50 @@ const routeDef: RouteDef = {
 	log: false,
 };
 
+async function fetchAndCacheReadme(url: string): Promise<string | null> {
+	const cacheKey = `readme:${url}`;
+	const cached = await redis.get(cacheKey);
+	if (cached) return cached;
+
+	const res = await fetch(url, {
+		headers: {
+			Accept: "text/markdown",
+		},
+	});
+
+	if (!res.ok) return null;
+
+	if (res.headers.has("content-length")) {
+		const size = Number.parseInt(res.headers.get("content-length") || "0", 10);
+		if (size > 1024 * 100) return null;
+	}
+
+	const text = await res.text();
+	if (!text || text.length < 10) return null;
+
+	let html: string;
+	if (/\.(html?|htm)$/i.test(url)) {
+		html = text;
+	} else {
+		html = await marked.parse(text);
+	}
+
+	const { document } = parseHTML(html);
+	for (const img of Array.from(document.querySelectorAll("img"))) {
+		if (!img.hasAttribute("loading")) {
+			img.setAttribute("loading", "lazy");
+		}
+	}
+
+	const dirtyHtml = document.toString();
+	const safe = DOMPurify.sanitize(dirtyHtml) || "";
+
+	await redis.set(cacheKey, safe);
+	await redis.expire(cacheKey, redisTtl);
+
+	return safe;
+}
+
 async function handler(request: ExtendedRequest): Promise<Response> {
 	const { url } = request.query;
 
@@ -29,68 +76,21 @@ async function handler(request: ExtendedRequest): Promise<Response> {
 		);
 	}
 
-	const res = await fetch(url, {
-		headers: {
-			Accept: "text/markdown",
-		},
-	});
+	const safe = await fetchAndCacheReadme(url);
 
-	if (!res.ok) {
+	if (!safe) {
 		return Response.json(
 			{
 				success: false,
 				error: {
 					code: "FETCH_FAILED",
-					message: "Failed to fetch the file",
+					message: "Failed to fetch or process file",
 				},
 			},
 			{ status: 400 },
 		);
 	}
 
-	if (res.headers.has("content-length")) {
-		const size = Number.parseInt(res.headers.get("content-length") || "0", 10);
-		if (size > 1024 * 100) {
-			return Response.json(
-				{
-					success: false,
-					error: {
-						code: "FILE_TOO_LARGE",
-						message: "File size exceeds 100KB limit",
-					},
-				},
-				{ status: 400 },
-			);
-		}
-	}
-
-	const text = await res.text();
-	if (!text || text.length < 10) {
-		return Response.json(
-			{
-				success: false,
-				error: {
-					code: "INVALID_CONTENT",
-					message: "File is too small or invalid",
-				},
-			},
-			{ status: 400 },
-		);
-	}
-
-	let html: string;
-
-	if (
-		url.toLowerCase().endsWith(".html") ||
-		url.toLowerCase().endsWith(".htm")
-	) {
-		html = text;
-	} else {
-		html = await marked.parse(text);
-	}
-
-	const safe = DOMPurify.sanitize(html) || "";
-
 	return new Response(safe, {
 		headers: {
 			"Content-Type": "text/html; charset=utf-8",
```
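The lazy-loading half of the commit is easiest to read in isolation: render the markdown, rewrite every `<img>` to load lazily, and only then sanitize, so DOMPurify vets the final markup (the commit also condenses the old `.html`/`.htm` `endsWith` pair into one regex). A standalone sketch with a hypothetical input:

```ts
import DOMPurify from "isomorphic-dompurify";
import { parseHTML } from "linkedom";
import { marked } from "marked";

const markdown = "![logo](https://example.com/logo.png)"; // hypothetical input
const html = await marked.parse(markdown);

// Rewrite the rendered HTML: tag every image for native lazy loading.
const { document } = parseHTML(html);
for (const img of Array.from(document.querySelectorAll("img"))) {
	if (!img.hasAttribute("loading")) img.setAttribute("loading", "lazy");
}

// Sanitize last, so the attribute rewrite is covered by the sanitizer too;
// recent DOMPurify keeps loading="lazy" in its default attribute allow-list.
console.log(DOMPurify.sanitize(document.toString()));
```

Caching the post-sanitization HTML in Redis also means the linkedom parse and the DOMPurify pass are skipped entirely on cache hits.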