diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 7844783b..62f53e26 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -13,8 +13,18 @@ jobs:
           bundler-cache: true
       - run: bundle exec rubocop
 
-  test:
+  verify-links:
     needs: [lint]
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: "3.3"
+      - run: bin/verify-links
+
+  test:
+    needs: [verify-links]
     strategy:
       matrix:
         ruby: ["3.2", "3.3"]
diff --git a/bin/crawl-links b/bin/crawl-links
deleted file mode 100755
index fbf70b4c..00000000
--- a/bin/crawl-links
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require "open3"
-require "net/http"
-require "logger"
-
-logger = Logger.new($stdout)
-
-search_command = "grep -R @see ./lib"
-output, = Open3.capture3(search_command)
-matches = output.split("\n")
-
-failed = false
-
-threads = matches.map do |line|
-  Thread.new do
-    url = line.match(/http.*/)[0]
-    response = Net::HTTP.get_response(URI(url))
-    case response
-    when Net::HTTPSuccess
-      logger.info("✅ #{response.code} #{url}")
-    when Net::HTTPFound
-      logger.warn("👀 #{response.code} #{url}")
-    else
-      failed = true
-      logger.error("❌ #{response.code} #{url}")
-    end
-  end
-end
-threads.each(&:join)
-
-if failed
-  logger.warn("Please fix broken documentation links.")
-  exit 1
-end
diff --git a/bin/verify-links b/bin/verify-links
new file mode 100755
index 00000000..4d63f277
--- /dev/null
+++ b/bin/verify-links
@@ -0,0 +1,50 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require "open3"
+require "net/http"
+require "logger"
+
+logger = Logger.new($stdout)
+
+# Search for URLs in the README file
+search_command = "grep -Eo 'https?://[^ ]+' README"
+output, = Open3.capture3(search_command)
+readme_matches = output.split("\n")
+
+# Search for URLs in comments in other files
+search_command = "grep -EoR '#.*https?://[^ ]+' lib"
+output, = Open3.capture3(search_command)
+lib_matches = output.split("\n")
+
+# Extract only the URLs
+matches = (readme_matches + lib_matches).map do |line|
+  line.match(%r{https?://[^ )>,\"]+})[0]
+end
+
+matches.uniq!
+
+failed = false
+
+threads = matches.map do |url|
+  Thread.new do
+    response = Net::HTTP.get_response(URI(url))
+    case response
+    when Net::HTTPSuccess
+      logger.info("✅ #{response.code} #{url}")
+    when Net::HTTPFound, Net::HTTPMovedPermanently
+      logger.warn("👀 #{response.code} #{url}")
+    else
+      failed = true
+      logger.error("❌ #{response.code} #{url}")
+    end
+  rescue URI::InvalidURIError => e
+    logger.error("❌ Invalid URL #{url}: #{e.message}")
+  end
+end
+threads.each(&:join)
+
+if failed
+  logger.warn("Please fix broken documentation links.")
+  exit 1
+end