diff --git a/scripts/algolia-reindex.ts b/scripts/algolia-reindex.ts
new file mode 100644
index 00000000..a5971172
--- /dev/null
+++ b/scripts/algolia-reindex.ts
@@ -0,0 +1,38 @@
+const algoliasearch = require('algoliasearch');
+const dotenv = require('dotenv');
+const fs = require('fs');
+
+dotenv.config();
+
+// see https://github.com/algolia-samples/api-clients-quickstarts/blob/master/javascript/indexing.js
+(async () => {
+  try {
+    console.log('Starting Algolia update...');
+
+    // Algolia client credentials
+    const { ALGOLIA_APP_ID } = process.env;
+    const { ALGOLIA_API_KEY } = process.env;
+    const { ALGOLIA_INDEX_NAME } = process.env;
+
+    // Initialize the client
+    // https://www.algolia.com/doc/api-client/getting-started/instantiate-client-index/
+    const client = algoliasearch(ALGOLIA_APP_ID, ALGOLIA_API_KEY);
+
+    // Initialize an index
+    // https://www.algolia.com/doc/api-client/getting-started/instantiate-client-index/#initialize-an-index
+    const index = client.initIndex(ALGOLIA_INDEX_NAME);
+
+    const data = fs.readFileSync('./public/index.json');
+    const objects = JSON.parse(data);
+
+    // Save objects: Add multiple objects to an index
+    // https://www.algolia.com/doc/api-reference/api-methods/add-objects/?client=javascript
+    console.log('Save objects - Updating index now...');
+    await index.saveObjects(objects).wait();
+
+    const result = await index.search('');
+    console.log('Current objects in the index:', result.hits.length);
+  } catch (error) {
+    console.error(error);
+  }
+})();
diff --git a/scripts/pagefind.mjs b/scripts/pagefind.mjs
index 52e02a21..5f30f964 100644
--- a/scripts/pagefind.mjs
+++ b/scripts/pagefind.mjs
@@ -25,3 +25,86 @@ async function buildPagefindIndex() {
 
 await buildPagefindIndex();
 await pagefind.close();
+
+
+
+
+
+
+
+
+
+// #!/usr/bin/env node
+
+// import { execSync, spawnSync } from 'child_process';
+// import fs from 'fs';
+// import path from 'path';
+// import { fileURLToPath } from 'url';
+// import dotenv from 'dotenv';
+
+// // current working directory and file paths
+// const __filename = fileURLToPath(import.meta.url);
+// const __dirname = path.dirname(__filename);
+// const CURPATH = process.cwd();
+
+// /**
+//  * Check if required tools are available.
+//  * Exits the process if any tool is missing.
+//  */
+// function checkRequiredTools() {
+//   const tools = ['hugo', 'git', 'npm'];
+//   const missingTools = tools.filter(tool => {
+//     try {
+//       execSync(`command -v ${tool}`);
+//       return false;
+//     } catch {
+//       return true;
+//     }
+//   });
+
+//   if (missingTools.length > 0) {
+//     console.error(`Missing required tools: ${missingTools.join(', ')}`);
+//     process.exit(1);
+//   }
+// }
+
+// /**
+//  * Load environment variables from a `.env` file if it exists.
+//  */
+// function loadEnvVariables() {
+//   const envFile = path.join(CURPATH, '.env');
+//   if (fs.existsSync(envFile)) {
+//     dotenv.config({ path: envFile });
+//   }
+// }
+
+// /**
+//  * Pagefind index for search functionality.
+//  */
+// async function buildPagefindIndex() {
+//   const { index } = await pagefind.createIndex({
+//     rootSelector: "html",
+//     verbose: true,
+//     logfile: "debug.log"
+//   });
+
+//   if (index) {
+//     await index.addDirectory({
+//       path: "public"
+//     });
+//     await index.writeFiles({
+//       outputPath: "public/search"
+//     });
+//   }
+// }
+
+// /**
+//  * Main function to orchestrate the steps.
+//  */
+// async function main() {
+//   checkRequiredTools();
+//   loadEnvVariables();
+//   await buildPagefindIndex();
+// }
+
+// main();
diff --git a/scripts/purge-cloudflare-cache.mjs b/scripts/purge-cloudflare-cache.mjs
new file mode 100644
index 00000000..d733658e
--- /dev/null
+++ b/scripts/purge-cloudflare-cache.mjs
@@ -0,0 +1,33 @@
+/* eslint-disable no-console */
+import dotenv from 'dotenv';
+import fetch from 'node-fetch';
+
+dotenv.config();
+
+// eslint-disable-next-line unicorn/prefer-top-level-await
+(async () => {
+  try {
+    console.log('Clearing Cloudflare cache…');
+
+    // Cloudflare client credentials
+    const { CLOUDFLARE_TOKEN } = process.env;
+    const { CLOUDFLARE_ZONEID } = process.env;
+
+    // Cloudflare API endpoint
+    const url = `https://api.cloudflare.com/client/v4/zones/${CLOUDFLARE_ZONEID}/purge_cache`;
+
+    fetch(url, {
+      method: 'POST',
+      body: '{"purge_everything":true}',
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${CLOUDFLARE_TOKEN}`,
+      },
+    })
+      .then((response) => response.json())
+      .then(() => console.log('success'))
+      .catch((error) => console.log(error));
+  } catch (error) {
+    console.error(error);
+  }
+})();
diff --git a/scripts/screenshot-changes.mjs b/scripts/screenshot-changes.mjs
new file mode 100644
index 00000000..1af0dd18
--- /dev/null
+++ b/scripts/screenshot-changes.mjs
@@ -0,0 +1,92 @@
+#!/usr/bin/env node
+
+import fs from 'fs';
+import { chromium } from 'playwright';
+import path from 'path';
+import { PNG } from 'pngjs';
+import pixelmatch from 'pixelmatch';
+import { fileURLToPath } from 'url';
+import { dirname } from 'path';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+async function getData(screenshotDir = ".github/screenshots", retries = 3) {
+  const browser = await chromium.launch();
+  const context = await browser.newContext({
+    userAgent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36'
+  });
+  const page = await context.newPage();
+
+  await page.emulateMedia({ colorScheme: 'dark' });
+  await page.setExtraHTTPHeaders({
+    'Accept-Language': 'en-US,en;q=0.9'
+  });
+
+  for (let attempt = 1; attempt <= retries; attempt++) {
+    try {
+      await page.goto("https://kollitsch.dev/", { waitUntil: 'networkidle' });
+      await page.waitForTimeout(500);
+
+      // Ensure the directory exists
+      const screenshotPathDefault = path.join(__dirname, screenshotDir, `screenshot.png`);
+      const diffPath = path.join(__dirname, screenshotDir, 'diff.png');
+
+      const now = new Date();
+      const timestamp = now.getFullYear().toString() +
+        (now.getMonth() + 1).toString().padStart(2, '0') +
+        now.getDate().toString().padStart(2, '0') +
+        now.getHours().toString().padStart(2, '0') +
+        now.getMinutes().toString().padStart(2, '0') +
+        now.getSeconds().toString().padStart(2, '0');
+      const screenshotPathWithTimestamp = path.join(__dirname, screenshotDir, `screenshot_${timestamp}.png`);
+
+      if (!fs.existsSync(screenshotDir)) {
+        fs.mkdirSync(screenshotDir, { recursive: true });
+      }
+
+      await page.screenshot({ path: screenshotPathWithTimestamp, fullPage: true });
+      console.log(`Screenshot saved to ${screenshotPathWithTimestamp}`);
+
+      if (fs.existsSync(screenshotPathDefault) && fs.existsSync(screenshotPathWithTimestamp)) {
+        const img1 = PNG.sync.read(fs.readFileSync(screenshotPathDefault));
+        const img2 = PNG.sync.read(fs.readFileSync(screenshotPathWithTimestamp));
+        const { width, height } = img1;
+        const diff = new PNG({ width, height });
+        const numDiffPixels = pixelmatch(img1.data, img2.data, diff.data, width, height, { threshold: 0.1 });
+        fs.writeFileSync(diffPath, PNG.sync.write(diff));
+
+        if (numDiffPixels > 0) {
+          fs.copyFileSync(screenshotPathWithTimestamp, screenshotPathDefault);
+          console.log('Changes detected, updating screenshot.');
+          process.stdout.write('::set-output name=changes::true\n');
+        } else {
+          console.log('No changes detected.');
+          process.stdout.write('::set-output name=changes::false\n');
+        }
+      } else {
+        fs.copyFileSync(screenshotPathWithTimestamp, screenshotPathDefault);
+        console.log('No previous screenshot found, saving new one.');
+        process.stdout.write('::set-output name=changes::true\n');
+      }
+
+      await browser.close();
+
+      if (fs.existsSync(diffPath)) {
+        fs.unlinkSync(diffPath);
+      }
+
+      return;
+    } catch (error) {
+      console.error(`Attempt ${attempt} failed: ${error.message}`);
+      if (attempt === retries) {
+        console.error('Max retries reached. Exiting.');
+        await browser.close();
+        process.exit(1);
+      }
+      console.log('Retrying...');
+    }
+  }
+}
+
+getData();
diff --git a/scripts/site-screenshot.mjs b/scripts/site-screenshot.mjs
new file mode 100644
index 00000000..2903e307
--- /dev/null
+++ b/scripts/site-screenshot.mjs
@@ -0,0 +1,52 @@
+import puppeteer from 'puppeteer';
+import yargs from "yargs";
+import { hideBin } from 'yargs/helpers';
+import version from "../package.json" with { type: "json" };
+
+const argv = yargs(hideBin(process.argv))
+
+  .option("url", {
+    describe: "URL to capture a screenshot of",
+    demandOption: true,
+    type: "string"
+  })
+  .option("output", {
+    describe: "Output file name",
+    default: "header.jpg",
+    type: "string"
+  })
+  .option("width", {
+    describe: "Viewport width",
+    default: 1200,
+    type: "number"
+  })
+  .option("height", {
+    describe: "Viewport height",
+    default: 600,
+    type: "number"
+  })
+  .help()
+  .version(version.version)
+  .alias("help", "h").argv;
+
+export const takeScreenshot = async (url, output, width, height) => {
+
+  const browser = await puppeteer.launch({ headless: "new", defaultViewport: { width, height } });
+  const page = await browser.newPage();
+
+  try {
+    await page.goto(url, { waitUntil: 'load', timeout: 0 });
+  } catch (e) {
+    console.log('Error: ' + url + ' not available');
+    console.log(e.message);
+  }
+
+  await page.emulateMediaFeatures([{
+    name: 'prefers-color-scheme', value: 'dark'
+  }]);
+  await page.screenshot({ path: output });
+  await browser.close();
+};
+
+// @ts-ignore
+takeScreenshot(argv.url, argv.output, argv.width, argv.height);
diff --git a/scripts/update-hugo-version b/scripts/update-hugo-version
new file mode 100644
index 00000000..e95d5246
--- /dev/null
+++ b/scripts/update-hugo-version
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+REQUIRED_TOOLS=(
+  sed
+  curl
+)
+
+# check if all requirements are met
+for TOOL in "${REQUIRED_TOOLS[@]}"; do
+  if ! command -v "${TOOL}" >/dev/null; then
+    echo "${TOOL} is required... "
+    exit 1
+  fi
+done
+
+# Function to get the latest Hugo release
+get_latest_release() {
+  curl --silent "https://api.github.com/repos/gohugoio/hugo/releases/latest" |
+    grep '"tag_name":' |
+    sed -E 's/.*"v([^"]+)".*/\1/'
+}
+
+# The path to the TOML file
+TOML_FILE="netlify.toml"
+
+# Fetch the new HUGO version
+NEW_VERSION="$(get_latest_release)"
+
+# Check if TOML file exists
+if [ ! -f "$TOML_FILE" ]; then
-f "$TOML_FILE" ]; then + echo "TOML file not found: $TOML_FILE" + exit 1 +fi + +# Update the HUGO_VERSION in the TOML file +sed -i "s/^HUGO_VERSION = \".*\"/HUGO_VERSION = \"$NEW_VERSION\"/" "$TOML_FILE" + +echo "Updated HUGO_VERSION to $NEW_VERSION in $TOML_FILE"