feat: add robots.txt

commit 6041a544d6 (parent 8ede972994)
dusk authored 2024-10-31 16:28:03 +03:00
Signed by: dusk (SSH key fingerprint: SHA256:Abmvag+juovVufZTxyWY8KcVgrznxvBjQpJesv071Aw)
2 changed files with 46 additions and 1 deletion


@@ -22,6 +22,12 @@ export async function load({ request, cookies, url, setHeaders, fetch }) {
         })
     }).catch((why) => {
         console.log("failed sending dark visitors analytics:", why)
+        return null
+    }).then(async (resp) => {
+        if (resp !== null) {
+            const msg = await resp.json()
+            console.log(`sent visitor analytic to dark visitors: ${resp.statusText}; ${msg.message}`)
+        }
     })

     let currentVisitCount = get(visitCount)
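
For context, this change relies on a `.catch` handler that returns a value: the following `.then` receives either the `Response` or `null`, so a failed analytics POST is logged once and never rejects the chain. A minimal standalone sketch of the same pattern, with a hypothetical endpoint:

// Sketch of the catch-then pattern above (the URL is hypothetical):
// a failed fetch resolves to null instead of rejecting, so the .then
// callback can distinguish success from failure.
fetch("https://example.com/api")
    .catch((why) => {
        console.log("request failed:", why)
        return null
    })
    .then(async (resp) => {
        if (resp !== null) {
            const msg = await resp.json()
            console.log("request succeeded:", msg)
        }
    })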


@@ -0,0 +1,39 @@
+import { env } from '$env/dynamic/private';
+import { get, writable } from 'svelte/store';
+
+const cachedRobots = writable<string>("")
+const lastFetched = writable<number>(Date.now())
+
+const fetchRobotsTxt = async () => {
+    const robotsTxtResp = await fetch(
+        "https://api.darkvisitors.com/robots-txts",
+        {
+            method: "POST",
+            headers: {
+                "Authorization": `Bearer ${env.DARK_VISITORS_TOKEN}`,
+                "Content-Type": "application/json"
+            },
+            body: JSON.stringify({
+                agent_types: [
+                    "AI Assistant",
+                    "AI Data Scraper",
+                    "AI Search Crawler",
+                    "Undocumented AI Agent",
+                ],
+                disallow: "/"
+            })
+        }
+    )
+    const robotsTxt = await robotsTxtResp.text()
+    lastFetched.set(Date.now())
+    return robotsTxt
+}
+
+export const GET = async ({ }) => {
+    let robotsTxt = get(cachedRobots)
+    if (robotsTxt.length === 0 || Date.now() - get(lastFetched) > 1000 * 60 * 60 * 24) {
+        robotsTxt = await fetchRobotsTxt()
+        cachedRobots.set(robotsTxt)
+    }
+    return new Response(robotsTxt)
+}
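
Taken together, the new route caches the generated robots.txt in module-level stores (an in-memory, per-server-instance cache) and refetches it from Dark Visitors at most once per 24 hours; until the first successful fetch, the cache is the empty string, so the first request always hits the API. As a rough illustration of what a client would see (the origin is hypothetical, and the actual agent list is decided by the Dark Visitors API):

// Hypothetical client request against a deployed site:
const resp = await fetch("https://example.com/robots.txt")
console.log(await resp.text())
// Illustrative output only; the real set of user agents comes from
// the Dark Visitors API:
//   User-agent: GPTBot
//   Disallow: /
//
//   User-agent: PerplexityBot
//   Disallow: /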