forked from creations/profilePage
add lazyload and move readme to func, add cache for readme and css
This commit is contained in:
parent f6bda95f02
commit 10416dbff0
5 changed files with 116 additions and 112 deletions
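The heart of the change is a cache-aside pattern around Bun's built-in Redis client: look the rendered README up by URL, and only on a miss fetch, render, and store it with a TTL. A minimal standalone sketch of that pattern, assuming `redis` from `bun` behaves as the diff uses it; `loadValue` is a hypothetical stand-in for the fetch-and-render step, and the TTL value is an assumption (the diff reads it from `@config/environment`):

import { redis } from "bun";

const redisTtl = 3600; // assumption: TTL in seconds; the diff reads this from @config/environment

// Cache-aside: return the cached value if present, otherwise compute and cache it.
async function cached(
    key: string,
    loadValue: () => Promise<string | null>, // hypothetical loader, e.g. fetch + marked + DOMPurify
): Promise<string | null> {
    const hit = await redis.get(key);
    if (hit) return hit; // hit: skip network and rendering entirely

    const value = await loadValue();
    if (value === null) return null; // failures are never cached

    await redis.set(key, value);
    await redis.expire(key, redisTtl); // evict after the configured TTL
    return value;
}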
@@ -1,5 +1,8 @@
+import { redisTtl } from "@config/environment";
 import { fetch } from "bun";
+import { redis } from "bun";
 import DOMPurify from "isomorphic-dompurify";
+import { parseHTML } from "linkedom";
 import { marked } from "marked";
 
 const routeDef: RouteDef = {
@@ -9,6 +12,50 @@ const routeDef: RouteDef = {
     log: false,
 };
 
+async function fetchAndCacheReadme(url: string): Promise<string | null> {
+    const cacheKey = `readme:${url}`;
+    const cached = await redis.get(cacheKey);
+    if (cached) return cached;
+
+    const res = await fetch(url, {
+        headers: {
+            Accept: "text/markdown",
+        },
+    });
+
+    if (!res.ok) return null;
+
+    if (res.headers.has("content-length")) {
+        const size = Number.parseInt(res.headers.get("content-length") || "0", 10);
+        if (size > 1024 * 100) return null;
+    }
+
+    const text = await res.text();
+    if (!text || text.length < 10) return null;
+
+    let html: string;
+    if (/\.(html?|htm)$/i.test(url)) {
+        html = text;
+    } else {
+        html = await marked.parse(text);
+    }
+
+    const { document } = parseHTML(html);
+    for (const img of Array.from(document.querySelectorAll("img"))) {
+        if (!img.hasAttribute("loading")) {
+            img.setAttribute("loading", "lazy");
+        }
+    }
+
+    const dirtyHtml = document.toString();
+    const safe = DOMPurify.sanitize(dirtyHtml) || "";
+
+    await redis.set(cacheKey, safe);
+    await redis.expire(cacheKey, redisTtl);
+
+    return safe;
+}
+
 async function handler(request: ExtendedRequest): Promise<Response> {
     const { url } = request.query;
 
@@ -29,68 +76,21 @@ async function handler(request: ExtendedRequest): Promise<Response> {
         );
     }
 
-    const res = await fetch(url, {
-        headers: {
-            Accept: "text/markdown",
-        },
-    });
+    const safe = await fetchAndCacheReadme(url);
 
-    if (!res.ok) {
+    if (!safe) {
         return Response.json(
             {
                 success: false,
                 error: {
                     code: "FETCH_FAILED",
-                    message: "Failed to fetch the file",
+                    message: "Failed to fetch or process file",
                 },
             },
             { status: 400 },
         );
     }
 
-    if (res.headers.has("content-length")) {
-        const size = Number.parseInt(res.headers.get("content-length") || "0", 10);
-        if (size > 1024 * 100) {
-            return Response.json(
-                {
-                    success: false,
-                    error: {
-                        code: "FILE_TOO_LARGE",
-                        message: "File size exceeds 100KB limit",
-                    },
-                },
-                { status: 400 },
-            );
-        }
-    }
-
-    const text = await res.text();
-    if (!text || text.length < 10) {
-        return Response.json(
-            {
-                success: false,
-                error: {
-                    code: "INVALID_CONTENT",
-                    message: "File is too small or invalid",
-                },
-            },
-            { status: 400 },
-        );
-    }
-
-    let html: string;
-
-    if (
-        url.toLowerCase().endsWith(".html") ||
-        url.toLowerCase().endsWith(".htm")
-    ) {
-        html = text;
-    } else {
-        html = await marked.parse(text);
-    }
-
-    const safe = DOMPurify.sanitize(html) || "";
-
     return new Response(safe, {
         headers: {
             "Content-Type": "text/html; charset=utf-8",
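The lazy-loading half of the commit rewrites every <img> in the rendered README before it is sanitized and cached, so browsers defer off-screen image fetches. A quick standalone check of that rewrite with linkedom; the serialized output shown is an assumption about linkedom's printer, not taken from the diff:

import { parseHTML } from "linkedom";

const { document } = parseHTML('<p><img src="chart.png"></p>');
for (const img of Array.from(document.querySelectorAll("img"))) {
    if (!img.hasAttribute("loading")) {
        img.setAttribute("loading", "lazy"); // leave any explicit loading attribute alone
    }
}
console.log(document.toString()); // expected: <p><img src="chart.png" loading="lazy"></p>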
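Because DOMPurify.sanitize runs before redis.set, the cache only ever holds cleaned HTML, so poisoned markup cannot be replayed from a cache hit. A small illustration of what sanitization strips; the exact output reflects DOMPurify's usual defaults and is stated here as an assumption:

import DOMPurify from "isomorphic-dompurify";

const dirty = '<img src="x.png" onerror="alert(1)"><script>alert(2)</script>';
console.log(DOMPurify.sanitize(dirty));
// typically: <img src="x.png"> (inline event handlers and script tags are removed)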