diff --git a/src/app.js b/src/app.js
index b1f939f..5eb0a35 100644
--- a/src/app.js
+++ b/src/app.js
@@ -81,7 +81,7 @@ app.get('/recent', t(async (req, res) => {
 app.get('/robots.txt', (req, res) => {
   // Search engines should not serve the proxied pages as if they are wikipedia
   res.send(`User-agent: *
-Disallow: /wiki/*
+Disallow: /wiki/
 `)
 })
 
@@ -92,6 +92,12 @@ app.use(serveStatic(fileURLToPath(new URL('../public', import.meta.url))))
  */
 app.get('/wiki/:page', t(async (req, res) => {
+  res.header('X-Robots-Tag', 'noindex,nofollow')
+  if (req.headers.authorization !== 'wikibattle.me client') {
+    res.status(404).end()
+    return
+  }
+
   const body = await wiki.get(req.params.page)
   res.end(body.content)
 }))
 
diff --git a/src/client/load-page.js b/src/client/load-page.js
index 0784f2f..14af010 100644
--- a/src/client/load-page.js
+++ b/src/client/load-page.js
@@ -1,6 +1,12 @@
 const cache = {}
 
+const fetchOpts = {
+  headers: {
+    authorization: 'wikibattle.me client'
+  }
+}
+
 export default function load (page, cb) {
-  cache[page] ??= fetch(`./wiki/${page}`).then((response) => response.text())
+  cache[page] ??= fetch(`./wiki/${page}`, fetchOpts).then((response) => response.text())
   cache[page].then((result) => cb(null, result), cb)
 }