From c4c4cae944f04efb9d60ae386e86e0aa0a26ef64 Mon Sep 17 00:00:00 2001
From: dusk
Date: Thu, 16 Jan 2025 20:39:55 +0300
Subject: [PATCH] fix: reject load if no user agent is provided

---
 src/lib/robots.ts            | 1 +
 src/lib/visits.ts            | 2 +-
 src/routes/+layout.server.ts | 2 +-
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/lib/robots.ts b/src/lib/robots.ts
index 4578f01..0805cf8 100644
--- a/src/lib/robots.ts
+++ b/src/lib/robots.ts
@@ -44,6 +44,7 @@ export const getRobotsTxt = async () => {
 }
 
 export const testUa = async (url: string, ua: string) => {
+	if (ua.length === 0) return false
 	let parsedRobots = get(cachedParsedRobots)
 	if (parsedRobots === null) {
 		parsedRobots = robotsParser(`${PUBLIC_BASE_URL}/robots.txt`, await getRobotsTxt())
diff --git a/src/lib/visits.ts b/src/lib/visits.ts
index 8e75f8b..d7c40bd 100644
--- a/src/lib/visits.ts
+++ b/src/lib/visits.ts
@@ -10,7 +10,7 @@ const visitCount = writable(parseInt(existsSync(visitCountFile) ? readFileSync(v
 type Visitor = { visits: number[] }
 const lastVisitors = writable<Map<string, Visitor>>(new Map())
 
-const VISITOR_EXPIRY_SECONDS = 60 * 30 // half an hour seems reasonable
+const VISITOR_EXPIRY_SECONDS = 60 * 60 // an hour seems reasonable
 
 export const incrementVisitCount = (request: Request, cookies: Cookies) => {
 	let currentVisitCount = get(visitCount)
diff --git a/src/routes/+layout.server.ts b/src/routes/+layout.server.ts
index ebb0025..b6e943c 100644
--- a/src/routes/+layout.server.ts
+++ b/src/routes/+layout.server.ts
@@ -11,7 +11,7 @@ export async function load({ request, cookies, url }) {
 	notifyDarkVisitors(url, request) // no await so it doesnt block load
 
 	// block any requests if the user agent is disallowed by our robots txt
-	if (await testUa(url.toString(), request.headers.get('user-agent') ?? "unknown user agent") === false) {
+	if (await testUa(url.toString(), request.headers.get('user-agent') ?? "") === false) {
 		throw error(403, "get a better user agent silly")
 	}
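
For illustration only, a minimal sketch of the guarded check, not part of the patch to apply. It assumes the robots-parser npm package and its isAllowed(url, ua) call, and it swaps the repo's svelte-store cache (cachedParsedRobots) and internal getRobotsTxt fetch for a plain module variable and an explicit robotsTxt parameter; ROBOTS_URL stands in for `${PUBLIC_BASE_URL}/robots.txt`.

    import robotsParser from 'robots-parser'

    // Stand-in for `${PUBLIC_BASE_URL}/robots.txt` in the real module.
    const ROBOTS_URL = 'https://example.com/robots.txt'

    // Plain variable standing in for the cachedParsedRobots svelte store.
    let cachedParsedRobots: ReturnType<typeof robotsParser> | null = null

    export const testUa = async (url: string, ua: string, robotsTxt: string): Promise<boolean> => {
    	// The guard added by this patch: an empty user agent is rejected before
    	// robots.txt is consulted, so the `?? ""` fallback in load() now maps a
    	// missing User-Agent header straight to a 403 instead of matching the
    	// placeholder "unknown user agent" against the robots rules.
    	if (ua.length === 0) return false

    	if (cachedParsedRobots === null) {
    		cachedParsedRobots = robotsParser(ROBOTS_URL, robotsTxt)
    	}
    	// isAllowed() may return undefined for out-of-scope URLs; only an explicit
    	// false counts as a block, mirroring the `=== false` check in the caller.
    	return cachedParsedRobots.isAllowed(url, ua) !== false
    }

Under these assumptions, `await testUa(url.toString(), request.headers.get('user-agent') ?? '', robotsTxt)` returns false for UA-less requests, which is the behaviour the +layout.server.ts hunk relies on to throw the 403.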