diff --git a/src/routes/+layout.server.ts b/src/routes/+layout.server.ts
index 734299f..10dd732 100644
--- a/src/routes/+layout.server.ts
+++ b/src/routes/+layout.server.ts
@@ -21,7 +21,13 @@ export async function load({ request, cookies, url, setHeaders, fetch }) {
             request_headers: request.headers,
         })
     }).catch((why) => {
-        console.log("failed sending dark visitors analytics: ", why)
+        console.log("failed sending dark visitors analytics:", why)
+        return null
+    }).then(async (resp) => {
+        if (resp !== null) {
+            const msg = await resp.json()
+            console.log(`sent visitor analytic to dark visitors: ${resp.statusText}; ${msg.message}`)
+        }
     })
 
     let currentVisitCount = get(visitCount)
diff --git a/src/routes/robots.txt/+server.ts b/src/routes/robots.txt/+server.ts
new file mode 100644
index 0000000..2a18f2d
--- /dev/null
+++ b/src/routes/robots.txt/+server.ts
@@ -0,0 +1,42 @@
+import { env } from '$env/dynamic/private';
+import { get, writable } from 'svelte/store';
+
+// Module-level cache for the generated robots.txt and when it was last fetched.
+const cachedRobots = writable("")
+const lastFetched = writable(Date.now())
+
+// Ask the Dark Visitors API for a robots.txt that disallows the listed agent types.
+const fetchRobotsTxt = async () => {
+    const robotsTxtResp = await fetch(
+        "https://api.darkvisitors.com/robots-txts",
+        {
+            method: "POST",
+            headers: {
+                "Authorization": `Bearer ${env.DARK_VISITORS_TOKEN}`,
+                "Content-Type": "application/json"
+            },
+            body: JSON.stringify({
+                agent_types: [
+                    "AI Assistant",
+                    "AI Data Scraper",
+                    "AI Search Crawler",
+                    "Undocumented AI Agent",
+                ],
+                disallow: "/"
+            })
+        }
+    )
+    const robotsTxt = await robotsTxtResp.text()
+    lastFetched.set(Date.now())
+    return robotsTxt
+}
+
+export const GET = async ({ }) => {
+    let robotsTxt = get(cachedRobots)
+    // Refetch when the cache is empty or older than 24 hours.
+    if (robotsTxt.length === 0 || Date.now() - get(lastFetched) > 1000 * 60 * 60 * 24) {
+        robotsTxt = await fetchRobotsTxt()
+        cachedRobots.set(robotsTxt)
+    }
+    return new Response(robotsTxt)
+}
\ No newline at end of file
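
A note on the cache: svelte/store works here, but it is only being used as a module-scoped box of mutable state, and outside a component plain variables behave identically (one copy per server process). A minimal sketch of the same 24-hour cache without stores, assuming the fetchRobotsTxt helper from the diff (minus its lastFetched.set call), plus an explicit Content-Type header that the version above omits:

// Module-scoped cache: one copy per server process, refreshed at most once a day.
const TTL_MS = 1000 * 60 * 60 * 24

let cachedRobots = ""
let lastFetched = 0 // epoch ms; 0 guarantees a fetch on the first request

export const GET = async () => {
    if (cachedRobots.length === 0 || Date.now() - lastFetched > TTL_MS) {
        cachedRobots = await fetchRobotsTxt() // helper as defined in the diff
        lastFetched = Date.now()
    }
    // robots.txt is plain text; setting the header makes that explicit.
    return new Response(cachedRobots, { headers: { "Content-Type": "text/plain" } })
}

Either way, the cache lives per server instance, so on a serverless deploy it resets whenever the instance is recycled; for a robots.txt refreshed at most daily, that is harmless.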