From 7a032e715302280265f0d0aeab77ac32907a92c1 Mon Sep 17 00:00:00 2001 From: Carlgo11 Date: Wed, 17 Jul 2024 21:40:44 +0200 Subject: [PATCH 01/10] initial test rewriting --- .github/workflows/repository.yml | 92 -------------------------------- Gemfile | 16 ------ scripts/Sign.rb | 17 ------ tests/languages.js | 37 +++++++++++++ tests/quality_checks.rb | 18 ------- tests/regions.js | 39 ++++++++++++++ tests/svg_lint.rb | 47 ---------------- tests/validate_facebook.rb | 38 ------------- tests/validate_json.rb | 72 ------------------------- tests/validate_languages.rb | 54 ------------------- tests/validate_regions.rb | 52 ------------------ tests/validate_similarweb.rb | 63 ---------------------- tests/validate_twitter.rb | 32 ----------- 13 files changed, 76 insertions(+), 501 deletions(-) delete mode 100644 .github/workflows/repository.yml delete mode 100644 Gemfile delete mode 100755 scripts/Sign.rb create mode 100644 tests/languages.js delete mode 100755 tests/quality_checks.rb create mode 100644 tests/regions.js delete mode 100755 tests/svg_lint.rb delete mode 100755 tests/validate_facebook.rb delete mode 100755 tests/validate_json.rb delete mode 100755 tests/validate_languages.rb delete mode 100755 tests/validate_regions.rb delete mode 100755 tests/validate_similarweb.rb delete mode 100755 tests/validate_twitter.rb diff --git a/.github/workflows/repository.yml b/.github/workflows/repository.yml deleted file mode 100644 index 541d9dd..0000000 --- a/.github/workflows/repository.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Repository builds and tests -on: [ push, pull_request ] -jobs: - - tests: - name: Tests - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: ruby/setup-ruby@v1 - with: - bundler-cache: true - ruby-version: '3.0' - env: - BUNDLE_WITH: 'tests' - - uses: actions/cache@v3 - with: - path: | - /tmp/iso-*.txt - /tmp/similarweb - key: tmp-files-${{ hashFiles('/tmp/similarweb/*')}} - - name: Validate JSON structure - run: bundle exec ruby ./tests/validate_json.rb - - name: Validate Region codes - run: bundle exec ruby ./tests/validate_regions.rb - - name: Validate Language codes - run: bundle exec ruby ./tests/validate_languages.rb - - name: Validate file extensions and permissions - run: tests/validate_fs.sh - - name: Validate Images - run: bundle exec ruby ./tests/validate_images.rb - - name: Validate SVG - run: bundle exec ruby ./tests/svg_lint.rb - - name: Quality Checks - run: bundle exec ruby ./tests/quality_checks.rb - - publish: - name: Build and Publish files - if: > - github.event_name == 'push' && - github.ref == 'refs/heads/master' && - github.repository == '2factorauth/passkeys' - concurrency: - group: 'publish' - cancel-in-progress: true - needs: [ tests ] - permissions: - contents: read - pages: write - id-token: write - environment: production - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 2 - - uses: ruby/setup-ruby@v1 - with: - bundler-cache: true - ruby-version: '3.0' - - name: Generate API files - run: | - mkdir -p public/api/v1 - mkdir -p public/api/private - bundle exec ruby ./scripts/APIv1.rb - - name: Publish changes to Algolia - run: bundle exec ruby ./scripts/Algolia.rb - env: - ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }} - ALGOLIA_INDEX_NAME : ${{ vars.ALGOLIA_INDEX_NAME }} - ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }} - - uses: crazy-max/ghaction-import-gpg@v5 - id: pgp - with: - gpg_private_key: ${{ secrets.PGP_KEY }} - passphrase: ${{ secrets.PGP_PASSWORD }} - - 
name: Sign API files - run: bundle exec ruby scripts/Sign.rb - env: - PGP_PASSWORD: ${{ secrets.PGP_PASSWORD }} - PGP_KEY_ID: ${{ steps.pgp.outputs.keyid }} - - name: Prepare publish directory - run: mv icons/ public/icons - - uses: actions/upload-pages-artifact@v1 - with: - path: public/ - - name: Deploy to GitHub Pages - uses: actions/deploy-pages@v1 - - name: Send webhook to Cloudflare - run: curl -X POST -IL "${{ secrets.WEBHOOK }}" -o /dev/null -w '%{http_code}\n' -s diff --git a/Gemfile b/Gemfile deleted file mode 100644 index c561ab3..0000000 --- a/Gemfile +++ /dev/null @@ -1,16 +0,0 @@ -# frozen_string_literal: true - -source 'https://rubygems.org' - -group :tests, optional: true do - gem 'activesupport', '~> 7.1' - gem 'addressable', '~> 2.8' - gem 'nokogiri', '~> 1.15' - gem 'rubocop', '~> 1.57' - gem 'twitter', '~> 8.0' -end - -gem 'algolia', '~> 2.3' -gem 'dotenv', '~> 2.8' -gem 'json_schemer', '~> 2.0' -gem 'parallel', '~> 1.23' diff --git a/scripts/Sign.rb b/scripts/Sign.rb deleted file mode 100755 index 4180112..0000000 --- a/scripts/Sign.rb +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin ruby -# frozen_string_literal: true - -require 'English' -require 'parallel' -require 'fileutils' -require 'dotenv/load' - -PASSWORD = ENV['PGP_PASSWORD'] -KEY_ID = ENV['PGP_KEY_ID'] - -Parallel.each(Dir.glob('public/api/v*/*.json')) do |f| - puts "#{f}.sig" - `echo "#{PASSWORD}" | gpg --yes --passphrase --local-user "#{KEY_ID}" --output "#{f}.sig" --sign "#{f}" 2>/dev/null` - `gpg --verify "#{f}.sig" 2>/dev/null` - raise("::error f=#{f}:: File signing failed") unless $CHILD_STATUS.success? -end diff --git a/tests/languages.js b/tests/languages.js new file mode 100644 index 0000000..acca9f4 --- /dev/null +++ b/tests/languages.js @@ -0,0 +1,37 @@ +const fs = require("fs").promises; +const core = require("@actions/core"); + +const url = new URL( + "https://pkgstore.datahub.io/core/language-codes/language-codes_json/data/97607046542b532c395cf83df5185246/language-codes_json.json", +); + +async function main() { + let errors = false; + const files = process.argv.slice(2); + const res = await fetch(url, { + accept: "application/json", + "user-agent": "2factorauth/passkeys +https://2fa.directory/bots", + }); + + if (!res.ok) throw new Error("Unable to fetch language codes"); + + const data = await res.json(); + const codes = Object.values(data).map((language) => language.alpha2); + + if (files) { + for (const file of files) { + const data = await fs.readFile(file, "utf8"); + const json = await JSON.parse(data); + const entry = json[Object.keys(json)[0]]; + const language = entry.contact?.language; + + if (language && !codes.includes(language)) { + core.error(`${language} is not a valid language`, { file }); + errors = true; + } + } + } + process.exit(+errors); +} + +module.exports = main(); diff --git a/tests/quality_checks.rb b/tests/quality_checks.rb deleted file mode 100755 index 034c5df..0000000 --- a/tests/quality_checks.rb +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -# This script warns when common PR mistakes are found. - -require 'json' - -# Fetch created (but not renamed) files in entries/** -diff = `git diff --name-only --diff-filter=Ard origin/master...HEAD entries/`.split("\n") - -diff&.each do |path| - entry = JSON.parse(File.read(path)).values[0] - next unless entry.key? 
'passkeys'
-
-  # rubocop:disable Layout/LineLength
-  puts "::warning file=#{path} title=Missing Documentation:: Since there is no documentation available, please could you provide us with screenshots of the setup/login process as evidence of 2FA? Please remember to block out any personal information." unless entry['documentation']
-  # rubocop:enable Layout/LineLength
-end
diff --git a/tests/regions.js b/tests/regions.js
new file mode 100644
index 0000000..3eccdc3
--- /dev/null
+++ b/tests/regions.js
@@ -0,0 +1,39 @@
+const fs = require("fs").promises;
+const core = require("@actions/core");
+
+const url = new URL(
+  "https://raw.githubusercontent.com/stefangabos/world_countries/master/data/countries/en/world.json",
+);
+
+async function main() {
+  let errors = false;
+  const files = process.argv.slice(2);
+  const res = await fetch(url, {
+    accept: "application/json",
+    "user-agent": "2factorauth/passkeys +https://2fa.directory/bots",
+  });
+
+  if (!res.ok) throw new Error("Unable to fetch region codes");
+
+  const data = await res.json();
+  const codes = Object.values(data).map((region) => region.alpha2);
+
+  if (files) {
+    for (const file of files) {
+      const data = await fs.readFile(file, "utf8");
+      const json = await JSON.parse(data);
+      const entry = json[Object.keys(json)[0]];
+      const { regions } = entry;
+
+      for (const region of regions || []) {
+        if (!codes.includes(region)) {
+          core.error(`${region} is not a valid region code`, { file });
+          errors = true;
+        }
+      }
+    }
+  }
+  process.exit(+errors);
+}
+
+module.exports = main();
diff --git a/tests/svg_lint.rb b/tests/svg_lint.rb
deleted file mode 100755
index daefd04..0000000
--- a/tests/svg_lint.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-# This script warns when common SVG mistakes are found in a PR.
-
-require 'json'
-require 'active_support'
-require 'active_support/core_ext/hash'
-require 'nokogiri'
-
-def error(file, msg)
-  puts "::error file=#{file}:: #{msg}"
-  @status = 1
-end
-
-def warn(file, msg)
-  puts "::warning file=#{file}:: #{msg}"
-end
-
-def test(svg_content, xpath_expression, parent_element = '/svg')
-  # Parse the SVG content
-  !Nokogiri::XML(svg_content).remove_namespaces!.xpath("#{parent_element}#{xpath_expression}").empty?
-end
-
-@status = 0
-diff = `git diff --name-only --diff-filter=Ard origin/master...HEAD -- 'icons/***.svg'`.split("\n")
-diff.each do |file|
-  svg = File.read(file)
-  error(file, 'Invalid SVG file') if Nokogiri::XML(svg).errors.any?
-  error(file, 'Unnecessary processing instruction found') if svg.include? '<?xml'
-  error(file, 'Minimize file to one line') if svg.split("\n").length > 1
-  warn(file, 'Remove comments') if test(svg, '//comment()', '')
-  warn(file, 'Unusually large file size') if File.size(file) > 5 * 1024
-  warn(file, 'Unnecessary data attribute') if test(svg, '//*[starts-with(name(@*), "data-")]')
-  warn(file, 'Use viewBox instead of height/width') if test(svg, '[@width or @height]')
-  warn(file, 'Unnecessary id or class attribute in root element') if test(svg, '[@id or @class]')
-  warn(file, 'Unnecessary fill="#000" attribute') if test(svg, '//path[@fill="#000"]')
-  warn(file, 'Use Attributes instead of style elements') if test(svg, '//*[style]')
-  warn(file, 'Use hex color instead of fill-opacity') if test(svg, '//*[@fill-opacity]')
-  warn(file, 'Unnecessary XML:space declaration found') if svg.include? 
'xml:space' - if test(svg, '//*[@version or @class or @fill-rule or @script or @a or @style or @clipPath]') - warn(file, 'Unnecessary attribute(s) found') - end -end - -exit @status diff --git a/tests/validate_facebook.rb b/tests/validate_facebook.rb deleted file mode 100755 index bb2842a..0000000 --- a/tests/validate_facebook.rb +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -# This script validates Facebook handles in entries/ files. - -require 'net/http' -require 'uri' -require 'nokogiri' - -status = 0 -diff = `git diff origin/master...HEAD entries/ | sed -n 's/^+.*"facebook"[^"]*"\\(.*\\)".*/\\1/p'` - -@headers = { - 'User-Agent' => 'Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) ' \ - 'AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.3 Mobile/15E148 Safari/604.1', - 'From' => 'https://2fa.directory/' -} - -def fetch(handle) - response = Net::HTTP.get_response(URI("https://m.me/#{handle}"), @headers) - output = nil - if response.header['location'] =~ %r{^https://m\.facebook\.com/msg/(\d+|#{handle})/} - body = Net::HTTP.get_response(URI(response.header['location']), @headers).body - output = Nokogiri::HTML.parse(body).at_css('._4ag7.img')&.attr('src') - end - output -end - -diff.split("\n").each do |page| - raise("Facebook page \"#{page}\" is either private or doesn't exist.") unless fetch(page) - - puts("#{page} is valid.") -rescue StandardError => e - puts "\e[31m#{e.message}\e[39m" - status = 1 -end - -exit status diff --git a/tests/validate_json.rb b/tests/validate_json.rb deleted file mode 100755 index 41f9875..0000000 --- a/tests/validate_json.rb +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -# This script validates the JSON data of files in entries/. - -require 'json_schemer' -require 'parallel' - -status = 0 -@seen_names = [] - -def valid_schema(data) - schema = JSONSchemer.schema(File.read('tests/entry_schema.json')) - schema.valid?(data) ? Array(nil) : schema.validate(data) -end - -def valid_domain(file, data) - domain = File.basename(file, '.*') - url = data.values[0]['url'] - default_url = "https://#{domain}" - return unless !url.nil? && (url.eql?(default_url) || url.eql?("#{default_url}/")) - - raise "Defining the url property for #{domain} is not necessary - '#{default_url}' is the default value" -end - -def valid_directory(file) - folder_name = file.split('/')[1] - expected_folder_name = File.basename(file, '.*')[0] - - return if folder_name.eql? expected_folder_name - - raise "Entry should be in the subdirectory with the same name as the first letter as the domain. \n - Received: entries/#{folder_name}. Expected: entries/#{expected_folder_name}" -end - -def unique_name(data) - name = data.keys[0] - return @seen_names.push name unless @seen_names.include? name - - raise "An entry with the name '#{name}' already exists. Duplicate site names are not allowed." -end - -Parallel.each(Dir.glob('entries/*/*.json'), in_threads: 16) do |file| - data = JSON.parse(File.read(file)) - (valid_schema data)&.each do |v| - puts '' - puts "::error file=#{file}:: '#{v['type'].capitalize}' error in #{file}" - puts "- tag: #{v['data_pointer'].split('/')[2]}" if v['data_pointer'].split('/').length >= 3 - puts " data: #{v['data']}" if v['details'].nil? - puts " data: #{v['details']}" unless v['details'].nil? 
- puts " expected: #{v['schema']['pattern']}" if v['type'].eql?('pattern') - puts " expected: #{v['schema']['format']}" if v['type'].eql?('format') - puts " expected: #{v['schema']['required']}" if v['type'].eql?('required') - puts " expected: only one of 'passkeys' or 'contact'" if v['type'].eql?('oneOf') - puts " expected: 'passkeys' to contain '#{v['schema']['contains']['const']}'" if v['type'].eql?('contains') - status = 1 - end - - valid_directory file - - valid_domain file, data - - unique_name data -rescue JSON::ParserError => e - puts "::error file=#{file}:: #{e.message}" - return 1 -rescue StandardError => e - status = 1 - puts "::error file=#{file}:: #{e.message}" -end - -exit(status) diff --git a/tests/validate_languages.rb b/tests/validate_languages.rb deleted file mode 100755 index e6208ed..0000000 --- a/tests/validate_languages.rb +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -# This script validates language codes set in entries. - -require 'uri' -require 'net/http' -require 'json' -require 'parallel' - -list_url = 'https://pkgstore.datahub.io/core/language-codes/language-codes_json/data/97607046542b532c395cf83df5185246/language-codes_json.json' -code_cache = '/tmp/iso-693-1.txt' -codes = [] - -if File.exist?(code_cache) - codes = JSON.parse(File.read(code_cache.to_s)) -else - url = URI(list_url) - headers = { - 'Accept' => 'application/json', - 'User-Agent' => '2FactorAuth/LanguageValidator '\ - "(Ruby/#{RUBY_VERSION}; +https://2fa.directory/bot)", - 'From' => 'https://2fa.directory/' - } - https = Net::HTTP.new(url.host, url.port) - https.use_ssl = true - request = Net::HTTP::Get.new(url, headers) - response = https.request(request) - - raise("Request failed. Check URL & API key. (#{response.code})") unless response.code == '200' - - # Get region codes from body & store in cache file - JSON.parse(response.body).each { |v| codes.push(v['alpha2'].downcase) } - File.open(code_cache, 'w') { |file| file.write codes.to_json } -end - -status = 0 - -Parallel.each(Dir.glob('entries/*/*.json'), in_threads: 16) do |file| - website = JSON.parse(File.read(file)).values[0] - next if website['contact'].nil? || website['contact']['language'].nil? - - lang = website['contact']['language'] - next if codes.include?(lang) - - begin - raise("::error file=#{file}:: \"#{lang}\" is not a real ISO 693-1 alpha-2 code.") - rescue StandardError => e - puts e.message - status = 1 - end -end - -exit(status) diff --git a/tests/validate_regions.rb b/tests/validate_regions.rb deleted file mode 100755 index faaddef..0000000 --- a/tests/validate_regions.rb +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -# This script validates region codes set in entries. - -require 'uri' -require 'net/http' -require 'json' - -list_url = 'https://raw.githubusercontent.com/stefangabos/world_countries/master/data/countries/en/world.json' -code_cache = '/tmp/iso-3166.txt' -headers = { 'Accept' => 'application/json', - 'User-Agent' => '2FactorAuth/RegionValidator' \ - "(Ruby/#{RUBY_VERSION}; +https://2fa.directory/bot)", - 'From' => 'https://2fa.directory/' } -countries = [] -if File.exist?(code_cache) - countries = JSON.parse(File.read(code_cache.to_s)) -else - response = Net::HTTP.get_response(URI(list_url), headers) - - raise("Request failed. Check URL & API key. 
(#{response.code})") unless response.code == '200' - - # Get region codes from body & store in cache file - JSON.parse(response.body).each { |v| countries.push(v['alpha2'].downcase) } - File.open(code_cache, 'w') { |file| file.write countries.to_json } -end -res = Net::HTTP.get_response(URI('https://raw.githubusercontent.com/2factorauth/passkeys.2fa.directory/master/data/region_identifiers.json'), headers) -regions = JSON.parse(res.body) - -status = 0 - -begin - Dir.glob('entries/*/*.json') do |file| - website = JSON.parse(File.read(file)).values[0] - next if website['regions'].nil? - - website['regions'].map! { |region_code| region_code.tr('-', '') } - website['regions'].each do |region| - next if countries.include?(region) - next if regions.include?(region) - - puts "::error file=#{file}:: \"#{region}\" is not a real ISO 3166-2 code." - status = 1 - end - end -rescue StandardError => e - puts e.message - status = 1 -end - -exit(status) diff --git a/tests/validate_similarweb.rb b/tests/validate_similarweb.rb deleted file mode 100755 index 100a125..0000000 --- a/tests/validate_similarweb.rb +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -# This script validates that added entries meet the Similarweb rank criteria. - -require 'addressable' -require 'fileutils' -require 'json' -require 'net/http' -require 'uri' - -# Cache handling -module Cache - @cache_dir = '/tmp/similarweb/' - - def self.fetch(site) - path = "#{@cache_dir}#{site}" - File.read(path) if File.exist?(path) - end - - def self.store(site, rank) - FileUtils.mkdir_p(@cache_dir) - File.open("#{@cache_dir}#{site}", 'w') { |file| file.write rank } - end -end - -# Similarweb API handling -module Similarweb - def self.api_key - key = ENV['SIMILARWEB_API_KEY'] - raise('Similarweb API key not set') if key.nil? - - keys = key.split(' ') - keys[rand(0..(keys.length - 1))] - end - - def self.fetch(site) - response = Net::HTTP.get_response URI("https://api.similarweb.com/v1/similar-rank/#{site}/rank?api_key=#{api_key}") - raise("#{site} doesn't have a Similarweb ranking") if response.code.eql? '404' - raise("(#{response.code}) Request failed.") unless response.code.eql? '200' - - rank = JSON.parse(response.body)['similar_rank']['rank'] - Cache.store(site, rank) - rank - end -end - -status = 0 -# Fetch changes -diff = `git diff origin/master...HEAD entries/` -# Strip and loop through diff -diff.split("\n").each.with_index do |site, i| - sleep 2 if i.positive? - domain = Addressable::URI.parse("https://#{site}").domain - rank = Cache.fetch(domain) || Similarweb.fetch(domain) - failure = rank.to_i > 200_000 - puts "\e[#{failure ? '31' : '32'}m#{domain} - #{rank}\e[39m" - raise("Global rank #{rank} of #{domain} is above the maximum rank of 200K") if failure -rescue StandardError => e - puts "\e[31m#{e.message}\e[39m" - status = 1 -end -exit(status) diff --git a/tests/validate_twitter.rb b/tests/validate_twitter.rb deleted file mode 100755 index d211cd5..0000000 --- a/tests/validate_twitter.rb +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -# This script validates Twitter/X handles in entries. 
-
-require 'twitter'
-
-client = Twitter::REST::Client.new do |config|
-  config.consumer_key = ENV['TWITTER_CONSUMER_KEY']
-  config.consumer_secret = ENV['TWITTER_CONSUMER_SECRET']
-  config.access_token = ENV['TWITTER_ACCESS_KEY']
-  config.access_token_secret = ENV['TWITTER_ACCESS_SECRET']
-end
-
-status = 0
-diff = `git diff origin/master...HEAD entries/ | sed -n 's/^+.*"twitter"[^"]*"\\(.*\\)".*/\\1/p'`
-diff.split("\n").each do |handle|
-  begin
-    name = client.user(handle).screen_name
-    raise("Twitter handle \"#{handle}\" should be \"#{name}\".") unless handle.eql? name
-
-    puts("#{name} is valid")
-  rescue Twitter::Error => e
-    raise('Twitter API keys not found or invalid.') if e.instance_of? Twitter::Error::BadRequest
-    raise('Too many requests to Twitter.') if e.instance_of? Twitter::Error::TooManyRequests
-    raise("Twitter handle \"#{handle}\" not found.") if e.instance_of? Twitter::Error::NotFound
-  end
-rescue StandardError => e
-  puts "\e[31m#{e.message}\e[39m"
-  status = 1
-end
-exit status

From b8f74ed995b480def4180509d36342201e8a0de3 Mon Sep 17 00:00:00 2001
From: Carlgo11 
Date: Wed, 17 Jul 2024 21:44:20 +0200
Subject: [PATCH 02/10] Add category test

---
 tests/categories.js | 38 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)
 create mode 100644 tests/categories.js

diff --git a/tests/categories.js b/tests/categories.js
new file mode 100644
index 0000000..d65cbc8
--- /dev/null
+++ b/tests/categories.js
@@ -0,0 +1,38 @@
+const fs = require('fs').promises;
+const core = require('@actions/core');
+
+async function main() {
+  let errors = false;
+  const files = process.argv.slice(2);
+  const res = await fetch(
+    'https://raw.githubusercontent.com/2factorauth/passkeys.2fa.directory/master/data/categories.json',
+    {
+      accept: 'application/json',
+      'user-agent': '2factorauth/passkeys +https://2fa.directory/bots',
+    });
+
+  if (!res.ok) throw new Error('Unable to fetch categories');
+
+  const data = await res.json();
+  const allowed_categories = Object.keys(data);
+
+  if (files) {
+    for (const file of files) {
+      const data = await fs.readFile(file, 'utf8');
+      const json = await JSON.parse(data);
+      const entry = json[Object.keys(json)[0]];
+      let {categories} = entry;
+      if(typeof categories === 'string') categories = [categories];
+
+      for (const category of categories || []) {
+        if (!allowed_categories.includes(category)) {
+          core.error(`${category} is not a valid category.`, {file});
+          errors = true;
+        }
+      }
+    }
+  }
+  process.exit(+errors);
+}
+
+module.exports = main();

From b7439d8d31d43b77660be20700489624df2c2d90 Mon Sep 17 00:00:00 2001
From: Carlgo11 
Date: Wed, 17 Jul 2024 21:45:09 +0200
Subject: [PATCH 03/10] Add signing script

---
 scripts/Sign.sh | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 scripts/Sign.sh

diff --git a/scripts/Sign.sh b/scripts/Sign.sh
new file mode 100644
index 0000000..6ce673c
--- /dev/null
+++ b/scripts/Sign.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+# Create a temporary file to track the status
+STATUS_FILE=$(mktemp)
+echo 0 > "$STATUS_FILE"
+
+sign_and_verify() {
+  local f="$1"
+  echo "$f.sig"
+  echo "$PGP_PASSWORD" | gpg --batch --yes --pinentry-mode loopback --passphrase-fd 0 --local-user "$PGP_KEY_ID" --output "$f.sig" --sign "$f"
+  echo "::debug:: Signing of $f exited with status $?"
+  if ! 
gpg --verify "$f.sig" 2>/dev/null; then
+    echo "::error file=$f:: File signing failed for $f"
+    echo 1 > "$STATUS_FILE"
+  fi
+}
+
+# Iterate API files in parallel
+for f in api/v*/*.json; do
+  sign_and_verify "$f" &
+done
+
+# Wait for all background processes to complete
+wait
+
+STATUS=$(cat "$STATUS_FILE")
+rm "$STATUS_FILE"
+exit "$STATUS"

From 0f32ff3304e19cc4efdd4d52bb4c82de261471c8 Mon Sep 17 00:00:00 2001
From: Carlgo11 
Date: Fri, 19 Jul 2024 04:27:20 +0200
Subject: [PATCH 04/10] Rewrite more scripts to Node.js

---
 .github/workflows/publish.yml      |  80 +++++++++++
 .github/workflows/pull_request.yml |  75 +++++++++++
 package.json                       |  20 +++
 scripts/APIv1.js                   | 199 +++++++++++++++++++++++++++++
 scripts/APIv1.rb                   |  78 -----------
 scripts/Algolia.rb                 |  49 -------
 tests/images.js                    |  71 ++++++++++
 tests/json.js                      |  90 +++++++++++++
 tests/svg.js                       |  69 ++++++++++
 tests/validate_fs.sh               |   4 +-
 10 files changed, 606 insertions(+), 129 deletions(-)
 create mode 100644 .github/workflows/publish.yml
 create mode 100644 .github/workflows/pull_request.yml
 create mode 100644 package.json
 create mode 100644 scripts/APIv1.js
 delete mode 100755 scripts/APIv1.rb
 delete mode 100755 scripts/Algolia.rb
 create mode 100644 tests/images.js
 create mode 100644 tests/json.js
 create mode 100644 tests/svg.js

diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..ae68d95
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,80 @@
+name: Publish
+on:
+  push:
+    branches:
+      - master
+    paths:
+      - 'entries/**'
+      - 'icons/**'
+
+concurrency:
+  group: 'publish'
+  cancel-in-progress: true
+
+jobs:
+  publish:
+    name: Build and Publish files
+    if: github.repository == '2factorauth/passkeys'
+    permissions:
+      contents: read
+      pages: write
+      id-token: write
+    environment: production
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+
+      - name: Install dependencies
+        run: npm install --omit=dev
+
+      - name: Get modified files
+        id: diff
+        run: |
+          ENTRIES=$(git diff --name-only HEAD^ entries/ | tr '\n' ' ')
+          if [ -n "$ENTRIES" ]; then
+            echo "entries=${ENTRIES}" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Generate API files
+        run: node scripts/APIv*.js
+
+      - name: Publish changes to Algolia
+        if: steps.diff.outputs.entries
+        run: node scripts/Algolia.js ${{ steps.diff.outputs.entries }}
+        env:
+          ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }}
+          ALGOLIA_INDEX_NAME: ${{ vars.ALGOLIA_INDEX_NAME }}
+          ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
+
+      - uses: crazy-max/ghaction-import-gpg@v6
+        id: pgp
+        with:
+          gpg_private_key: ${{ secrets.PGP_KEY }}
+          passphrase: ${{ secrets.PGP_PASSWORD }}
+
+      - name: Sign API files
+        run: bash scripts/Sign.sh
+        env:
+          PGP_PASSWORD: ${{ secrets.PGP_PASSWORD }}
+          PGP_KEY_ID: ${{ steps.pgp.outputs.keyid }}
+
+      - name: Prepare publish directory
+        run: |
+          rsync -av icons public/
+
+      - uses: actions/upload-pages-artifact@v3
+        with:
+          path: public/
+
+      - name: Deploy to GitHub Pages
+        uses: actions/deploy-pages@v4
+
+      - name: Send webhook to Cloudflare
+        run: curl -X POST -IL "${{ secrets.WEBHOOK }}" -o /dev/null -w '%{http_code}\n' -s
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
new file mode 100644
index 0000000..bf626bc
--- /dev/null
+++ b/.github/workflows/pull_request.yml
@@ -0,0 +1,75 @@
+name: Pull Request tests
+on: pull_request
+run-name: "${{github.event.pull_request.title}} (#${{ github.event.number }})"
+concurrency:
+  group: ${{ 
github.event.number }}
+  cancel-in-progress: true
+
+jobs:
+  node-tests:
+    name: JavaScript tests
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+
+      - name: Install dependencies
+        run: npm install --omit=optional
+
+      - name: Get modified files
+        id: diff
+        run: |
+          echo "::debug:: Fetching files from ${{ github.api_url }}/repos/${{ github.repository }}/pulls/${{ github.event.number }}/files"
+          FILES=$(curl -s "${{ github.api_url }}/repos/${{ github.repository }}/pulls/${{ github.event.number }}/files" | jq -r '.[] | select(.status != "removed") | .filename' | tr '\n' ' ')
+          ENTRIES=$(echo "$FILES" | tr ' ' '\n' | grep -E '^entries/.*\.json$' | tr '\n' ' ')
+          if [ -n "$ENTRIES" ]; then
+            echo "entries=${ENTRIES}" >> $GITHUB_OUTPUT
+          fi
+          ICONS=$(echo "$FILES" | tr ' ' '\n' | grep -E '^icons/.*$' | tr '\n' ' ')
+          if [ -n "$ICONS" ]; then
+            echo "icons=${ICONS}" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Validate JSON structure
+        if: steps.diff.outputs.entries
+        run: node tests/json.js ${{ steps.diff.outputs.entries }}
+
+      - name: Validate file extensions and permissions
+        run: tests/validate-fs.sh
+
+      - name: Validate Language codes
+        if: steps.diff.outputs.entries
+        run: node tests/languages.js ${{ steps.diff.outputs.entries }}
+
+      - name: Validate Region codes
+        if: steps.diff.outputs.entries
+        run: node tests/regions.js ${{ steps.diff.outputs.entries }}
+
+      - name: Validate Categories
+        if: steps.diff.outputs.entries
+        run: node tests/categories.js ${{ steps.diff.outputs.entries }}
+
+      - name: Validate Icons
+        if: ${{ steps.diff.outputs.entries || steps.diff.outputs.icons }}
+        run: |
+          node tests/images.js
+          node tests/svg.js ${{ steps.diff.outputs.icons }}
+
+      - name: Validate URL reachability
+        if: steps.diff.outputs.entries
+        run: node tests/urls.js ${{ steps.diff.outputs.entries }}
+        continue-on-error: true
+
+  external-tests:
+    name: External Tests
+    runs-on: ubuntu-latest
+    steps:
+      - name: Call PR Validator
+        run: |
+          curl -s --fail-with-body "https://pr-validator.2fa.directory/${{ github.event.repository.name }}/${{ github.event.number }}/" \
+          -H "Content-Type: application/json"
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..dcf3171
--- /dev/null
+++ b/package.json
@@ -0,0 +1,20 @@
+{
+  "name": "passkeys",
+  "private": true,
+  "dependencies": {
+    "@actions/core": "^1.10.1",
+    "dotenv": "^16.4.5",
+    "glob": "^10.4.5"
+  },
+  "optionalDependencies": {
+    "algoliasearch": "^4.24.0",
+    "jsonschema": "^1.4.1"
+  },
+  "devDependencies": {
+    "ajv": "^8.17.1",
+    "ajv-errors": "^3.0.0",
+    "ajv-formats": "^3.0.1",
+    "@xmldom/xmldom": "^0.8.10",
+    "xpath": "^0.0.34"
+  }
+}
diff --git a/scripts/APIv1.js b/scripts/APIv1.js
new file mode 100644
index 0000000..f838d21
--- /dev/null
+++ b/scripts/APIv1.js
@@ -0,0 +1,199 @@
+#!/usr/bin/env node
+
+const fs = require('fs').promises;
+const path = require('path');
+const glob = require('glob');
+const {Validator} = require('jsonschema');
+const core = require('@actions/core');
+require("dotenv").config();
+
+const entriesGlob = 'entries/*/*.json';
+
+const readJSONFile = async (filePath) => {
+  const data = await fs.readFile(filePath, 'utf8');
+  return JSON.parse(data);
+};
+
+const writeJSONFile = async (filePath, data) => {
+  await fs.writeFile(filePath, JSON.stringify(data, null, 2));
+};
+
+const ensureDir = async (dirPath) => {
+  try {
+    await fs.mkdir(dirPath, {recursive: true});
+  } catch (error) {
+    
if (error.code !== 'EEXIST') throw error; + } +}; + +const processEntries = (entries, all) => { + for (const [key, entry] of Object.entries(entries)) { + const {'additional-domains': additionalDomains, img, categories, ...processedEntry} = entry; + + if (additionalDomains) { + additionalDomains.forEach((domain) => { + all[domain] = {...processedEntry}; + }); + } + + all[key] = {...processedEntry}; + } +}; + +const publicApi = async (allEntries) => { + const all = {}; + const passwordless = {}; + const mfa = {}; + const outputPath = 'public/v1'; + + processEntries(allEntries, all); + + // Separate mfa and passwordless entries + for (const [key, entry] of Object.entries(all)) { + if (entry['mfa']) mfa[key] = entry; + if (entry['passwordless']) passwordless[key] = entry; + } + + // Write JSON files in parallel + await ensureDir(outputPath); + await Promise.all([ + writeJSONFile(`${outputPath}/all.json`, Object.fromEntries(Object.entries(all).sort())), + writeJSONFile(`${outputPath}/mfa.json`, Object.fromEntries(Object.entries(mfa).sort())), + writeJSONFile(`${outputPath}/passwordless.json`, Object.fromEntries(Object.entries(passwordless).sort())), + writeJSONFile( + `${outputPath}/supported.json`, + Object.fromEntries(Object.entries(all).filter(([_, v]) => v['mfa'] || v['passwordless']).sort()), + ), + ]); +}; + +const privateApi = async (allEntries) => { + const all = {}; + const regions = {}; + const outputPath = 'public/private'; + + // Process entries + for (const [key, entry] of Object.entries(allEntries)) { + const {regions: entryRegions, 'additional-domains': additionalDomains, ...processedEntry} = entry; + + if (entryRegions) { + entryRegions.forEach((region) => { + if (region[0] !== '-') { + regions[region] = regions[region] || {count: 0}; + regions[region].count += 1; + } + }); + } + + processedEntry.categories = Array.isArray(entry.categories) ? + entry.categories: + entry.categories ? [entry.categories]:[]; + all[key] = processedEntry; + } + + regions.int = {count: Object.keys(all).length, selection: true}; + + // Write JSON files in parallel + await ensureDir(outputPath); + await Promise.all([ + writeJSONFile(`${outputPath}/all.json`, Object.fromEntries(Object.entries(all).sort())), + writeJSONFile( + `${outputPath}/regions.json`, + Object.fromEntries(Object.entries(regions).sort(([, a], [, b]) => b.count - a.count)), + ), + ]); +}; + +const validateSchema = async () => { + const schema = await readJSONFile('tests/api_schema.json'); + const validator = new Validator(); + + const files = glob.sync('public/v1/*.json'); + await Promise.all(files.map(async (file) => { + const data = await readJSONFile(file); + const validationResult = validator.validate(data, schema); + if (!validationResult.valid) { + validationResult.errors.forEach((error) => { + core.error(error.stack, {file}); + }); + process.exit(1); + } + })); +}; + +const fetch2FAEntries = async () => { + const privateEntries = {}; + const publicEntries = {}; + + const res = await fetch('https://api.2fa.directory/v3/all.json'); + const data = await res.json(); + for (const d of data) { + const [name, entry] = d; + if (!entry['tfa'] || !entry['tfa'].includes('u2f')) { + privateEntries[name] = { + domain: entry.domain, + contact: entry.contact, + categories: entry.keywords.length === 1 ? entry.keywords[0]:entry.keywords, + }; + if (entry.contact) { + publicEntries[entry.domain] = { + contact: entry.contact, + categories: entry.keywords.length === 1 ? 
entry.keywords[0]:entry.keywords, + }; + } else { + delete privateEntries[name].contact; + } + }else if(entry['tfa'].includes('u2f')){ + privateEntries[name] = { + domain: entry.domain, + mfa: 'allowed', + documentation: entry.documentation, + recovery: entry.recovery, + notes: entry.notes, + categories: entry.keywords, + }; + publicEntries[entry.domain] = { + mfa: 'allowed', + documentation: entry.documentation, + recovery: entry.recovery, + notes: entry.notes, + }; + } + } + return [privateEntries, publicEntries]; +}; + +const generateApis = async () => { + let publicEntries = {}; + let privateEntries = {}; + + // Parse all entries + const files = glob.sync(entriesGlob); + await Promise.all(files.map(async (file) => { + const data = await readJSONFile(file); + const key = path.basename(file, '.json'); + publicEntries[key] = data[Object.keys(data)[0]]; + privateEntries[Object.keys(data)[0]] = {...data[Object.keys(data)[0]], domain: key}; + })); + + // Fetch entries from the 2FA Directory + if (!process.env.NO_2FA_ENTRIES) { + const [priv, pub] = await fetch2FAEntries(); + publicEntries = {...pub, ...publicEntries}; + privateEntries = {...priv, ...privateEntries}; + } + + // Generate APIs + await Promise.all([ + publicApi(publicEntries), + privateApi(privateEntries), + ]); + + // Validate Public API files against JSON Schema + await validateSchema(); +}; + +generateApis().catch((err) => { + core.error(err); + process.exit(1); +}); diff --git a/scripts/APIv1.rb b/scripts/APIv1.rb deleted file mode 100755 index 2a2cf9f..0000000 --- a/scripts/APIv1.rb +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -require 'json' -require 'json_schemer' - -@entries = 'entries/*/*.json' - -# This function generates all public API files that have unnecessary data removed. -# @return [Integer] Returns exit code -def public_api - all = {} - passwordless = {} - mfa = {} - path = 'public/api/v1' - - Dir.glob(@entries) { |file| all[File.basename(file, '.*')] = JSON.parse(File.read(file)).values[0] } - all.sort.to_h.each do |_, entry| - entry['additional-domains']&.each do |domain| - all[domain] = entry.reject { |key| %w[additional-domains img categories].include? key } - end - entry.delete('additional-domains') - entry.delete('img') - entry.delete('categories') - end - - all.to_h.each do |k, v| - mfa[k] = v if v['mfa'] - passwordless[k] = v if v['passwordless'] - end - - { 'all' => all, 'mfa' => mfa, 'passwordless' => passwordless }.each do |k, v| - File.open("#{path}/#{k}.json", 'w') { |file| file.write v.sort_by { |a, _| a.downcase }.to_h.to_json } - end - - File.open("#{path}/supported.json", 'w') do |file| - file.write all.select { |_, v| v.keys.any? { |k| k.match(/mfa|passwordless/) } }.sort_by { |k, _| k.downcase }.to_h.to_json - end -end - -# This function generates API files designed for the public interface of this project. -# @return [Integer] returns exit code -def private_api - all = {} - regions = {} - path = 'public/api/private' - - Dir.glob(@entries) { |file| all[JSON.parse(File.read(file)).keys[0]] = JSON.parse(File.read(file)).values[0].merge({ 'domain' => File.basename(file, '.*') }) } - - all.sort.to_h.each do |_, entry| - entry['regions']&.each do |region| - next if region[0] == '-' - - regions[region] = {} unless regions.key? 
region - regions[region]['count'] = 1 + regions[region]['count'].to_i - end - entry['categories'] = Array(entry['categories']) if entry.key?('categories') - entry.delete 'additional-domains' - end - - regions['int'] = { 'count' => all.length, 'selection' => true } - - File.open("#{path}/all.json", 'w') do |file| - file.write all.sort_by { |a, _| a.downcase }.to_h.to_json - end - File.open("#{path}/regions.json", 'w') do |file| - file.write regions.sort_by { |_, v| v['count'] }.reverse!.to_h.to_json - end -end - -public_api -Dir.glob('public/api/v1/*.json').each do |file| - data = JSON.parse(File.read(file)) - schema = JSONSchemer.schema(File.read('tests/api_schema.json')) - schema.validate(data).each { |e| p "::error file=#{file}:: #{e['error']}" } unless schema.valid?(data) -end - -private_api diff --git a/scripts/Algolia.rb b/scripts/Algolia.rb deleted file mode 100755 index ee82356..0000000 --- a/scripts/Algolia.rb +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -require 'json' -require 'algolia' -require 'dotenv/load' - -ALGOLIA_APP_ID = ENV['ALGOLIA_APP_ID'] -ALGOLIA_API_KEY = ENV['ALGOLIA_API_KEY'] -ALGOLIA_INDEX_NAME = ENV['ALGOLIA_INDEX_NAME'] - -excludes = %w[notes documentation recovery passwordless mfa] -client = Algolia::Search::Client.create(ALGOLIA_APP_ID, ALGOLIA_API_KEY) -index = client.init_index(ALGOLIA_INDEX_NAME) -updates = [] - -diff = if ARGV[0].eql? '--all' - Dir['entries/*/*.json'] - else - `git diff --name-only #{ARGV[1] || 'HEAD^'} entries/`.split("\n") - end - -diff.each do |entry| - if File.exist? entry - name, data = JSON.parse(File.read(entry)).first - puts "Updating #{name}" - data.merge!({ 'name' => name, 'objectID' => File.basename(entry, '.*') }) - # Rename keys - data['category'] = data.delete 'categories' - - %w[mfa passwordless].each do |key| - next unless data.key?(key) - - data['supported'] ||= [] - data['supported'] << key - end - - # Remove keys that shouldn't be searchable - data.reject! { |k, _| excludes.include? k } - updates.push data - else - domain = File.basename(entry, File.extname(entry)) - puts "Removing #{domain}" - index.delete_object! domain - end -end - -res = index.save_objects(updates) -res.wait diff --git a/tests/images.js b/tests/images.js new file mode 100644 index 0000000..5882797 --- /dev/null +++ b/tests/images.js @@ -0,0 +1,71 @@ +const fs = require('fs').promises; +const core = require('@actions/core'); +const {glob} = require('glob'); +const path = require('path'); + +// Allowed image dimensions +const PNG_RES = [ + [16, 16], [32, 32], [64, 64], [128, 128]]; + +let seenImages = []; +let errors = false; + +async function main() { + const [entries, images] = await Promise.all([ + glob('entries/**/*.json'), glob('icons/*/*.*')]); + + await parseEntries(entries); + await parseImages(images); + + process.exit(+errors); +} + +async function parseEntries(entries) { + await Promise.all(entries.map(async (file) => { + const data = await fs.readFile(file, 'utf8'); + const json = await JSON.parse(data); + const entry = json[Object.keys(json)[0]]; + const {img} = entry; + const domain = path.parse(file).name; + const imgPath = `icons/${img ? 
`${img[0]}/${img}`:`${domain[0]}/${domain}.svg`}`;
+
+    try {
+      await fs.readFile(imgPath);
+    } catch (e) {
+      core.error(`Image ${imgPath} not found.`, {file});
+      errors = true;
+    }
+    seenImages.push(imgPath);
+  }));
+}
+
+async function parseImages(images) {
+  await Promise.all(images.map(async (image) => {
+    if (!seenImages.includes(image)) {
+      core.error(`Unused image`, {file: image});
+      errors = true;
+    }
+
+    if (image.endsWith('.png')) {
+      if (!dimensionsAreValid(await getPNGDimensions(image), PNG_RES)) {
+        core.error(`PNGs must be one of the following dimensions: ${PNG_RES.map((a) => a.join('x')).join(', ')}`,
+          {file: image});
+        errors = true;
+      }
+    }
+  }));
+}
+
+function dimensionsAreValid(dimensions, validSizes) {
+  return validSizes.some((size) => size[0] === dimensions[0] && size[1] === dimensions[1]);
+}
+
+async function getPNGDimensions(file) {
+  const buffer = await fs.readFile(file);
+  if (buffer.toString('ascii', 1, 4) !== 'PNG') throw new Error(`${file} is not a valid PNG file`);
+
+  // Return [width, height]
+  return [buffer.readUInt32BE(16), buffer.readUInt32BE(20)];
+}
+
+main().catch((e) => core.setFailed(e));
diff --git a/tests/json.js b/tests/json.js
new file mode 100644
index 0000000..48baa75
--- /dev/null
+++ b/tests/json.js
@@ -0,0 +1,90 @@
+const fs = require('fs').promises;
+const core = require('@actions/core');
+const Ajv = require('ajv');
+const addFormats = require('ajv-formats');
+const schema = require('./entry_schema.json');
+const {basename} = require('node:path');
+const path = require('path');
+
+const ajv = new Ajv({strict: false, allErrors: true});
+addFormats(ajv);
+require('ajv-errors')(ajv);
+
+const validate = ajv.compile(schema);
+let errors = false;
+
+/**
+ * Logs an error message and sets the errors flag to true.
+ *
+ * @param {string} message - The error message to log.
+ * @param {object} properties - Additional properties to log with the error.
+ */
+function error(message, properties) {
+  core.error(message, properties);
+  errors = true;
+}
+
+async function main() {
+  const files = process.argv.slice(2);
+
+  await Promise.all(files.map(async (file) => {
+    try {
+      const json = await JSON.parse(await fs.readFile(file, 'utf8'));
+      const entry = json[Object.keys(json)[0]];
+      validateJSONSchema(file, json);
+      validateFileContents(file, entry);
+    } catch (e) {
+      error(`Failed to process ${file}: ${e.message}`, {file});
+    }
+  }));
+
+  process.exit(+errors);
+}
+
+/**
+ * Validates the JSON schema of the provided file.
+ *
+ * @param {string} file - File path to be validated.
+ * @param {object} json - Parsed JSON content of the file.
+ */
+function validateJSONSchema(file, json) {
+  const valid = validate(json);
+  if (!valid) {
+    errors = true;
+    validate.errors.forEach((err) => {
+      const {message, instancePath, keyword: title} = err;
+      const instance = instancePath?.split('/');
+      if (message) error(`${instance[instance.length - 1]} ${message}`, {file, title}); else error(err, {file});
+    });
+  }
+}
+
+/**
+ * Validates the contents of the provided file according to custom rules.
+ *
+ * @param {string} file - File path to be validated.
+ * @param {object} entry - The main entry object within the JSON content. 
+ */ +function validateFileContents(file, entry) { + const valid_name = `${entry.domain}.json`; + + if (basename(file) !== valid_name) error(`File name should be ${valid_name}`, {file, title: 'File name'}); + + if (entry.url === `https://${entry.domain}`) error(`Unnecessary url element defined.`, {file}); + + if (entry.img === `${entry.domain}.svg`) error(`Unnecessary img element defined.`, {file}); + + if (file !== `entries/${entry.domain[0]}/${valid_name}`) error( + `Entry should be placed in entries/${entry.domain[0]}/`, {file}); + + if (entry.tfa?.includes('custom-software') && !entry['custom-software']) error('Missing custom-software element', + {file}); + + if (entry.tfa?.includes('custom-hardware') && !entry['custom-hardware']) error('Missing custom-hardware element', + {file}); + + if (entry.tfa && !entry.documentation) core.warning( + 'No documentation set. Please provide screenshots in the pull request', {file, title: 'Missing documentation'}); +} + +module.exports = main(); diff --git a/tests/svg.js b/tests/svg.js new file mode 100644 index 0000000..3475084 --- /dev/null +++ b/tests/svg.js @@ -0,0 +1,69 @@ +const fs = require('fs'); +const {DOMParser} = require('@xmldom/xmldom'); +const xpath = require('xpath'); +const core = require('@actions/core'); +let errors = false; + +// Function to test the SVG content against an XPath expression +function test(svgContent, xpathExpression) { + try { + const doc = new DOMParser().parseFromString(svgContent, 'application/xml'); + const nodes = xpath.select(xpathExpression, doc); + return nodes.length > 0; + } catch (err) { + core.error(`Failed to parse SVG content: ${err.message}`); + return false; + } +} + +// Function to handle file checking +async function main() { + const files = process.argv.slice(2); + + await Promise.allSettled(files.map(async (file) => { + const error = (msg) => { + core.error(msg, {file}); + errors = true; + }; + + const warn = (msg) => { + core.warning(msg, {file}); + }; + + const svg = fs.readFileSync(file, 'utf8'); + const doc = new DOMParser().parseFromString(svg, 'application/xml'); + const parseErrors = doc.getElementsByTagName('parsererror'); + + if (parseErrors.length > 0) + error('Invalid SVG file'); + if (svg.includes(' line.trim()).length > 1) + error('Minimize file to one line'); + if (test(svg, '//comment()')) + warn('Remove comments'); + if (fs.statSync(file).size > 5 * 1024) + warn('Unusually large file size'); + if (test(svg, '//@*[(starts-with(name(), "data-") or starts-with(name(), "class-"))]')) + warn('Unnecessary data or class attribute'); + if (test(svg, '//@width | //@height')) + warn('Use viewBox instead of height/width'); + if (test(svg, '/*/@id')) + warn('Unnecessary id attribute in root element'); + if (test(svg, '//*[@fill="#000" or @fill="#000000"]')) + warn('Unnecessary fill="#000" attribute'); + if (test(svg, '//*[@style]')) + warn('Use Attributes instead of style elements'); + if (test(svg, '//*[@fill-opacity]')) + warn('Use hex color instead of fill-opacity'); + if (svg.includes('xml:space')) + warn('Unnecessary XML:space declaration found'); + if (test(svg, '//*[@version or @fill-rule or @script or @a or @clipPath or @class]')) + warn('Unnecessary attribute(s) found'); + })); + process.exit(+errors); +} + +module.exports = main(); diff --git a/tests/validate_fs.sh b/tests/validate_fs.sh index 57d1294..ac3b343 100755 --- a/tests/validate_fs.sh +++ b/tests/validate_fs.sh @@ -34,8 +34,8 @@ function checkPerm() [ -e api ] && checkExt api json sig checkExt icons svg png checkExt 
entries json
-checkExt scripts rb
-checkExt tests rb sh json
+checkExt scripts js
+checkExt tests js sh json
 checkPerm icons 664 644
 checkPerm tests 775 755 664 644
 checkPerm entries 664 644

From 952e708fb3371bec12fc49703a2ba9b598455843 Mon Sep 17 00:00:00 2001
From: Carlgo11 
Date: Fri, 19 Jul 2024 04:57:34 +0200
Subject: [PATCH 05/10] Clean up scripts

---
 scripts/APIv1.js                         |  1 +
 scripts/Sign.sh                          |  8 +---
 tests/categories.js                      |  2 +
 tests/images.js                          |  2 +
 tests/json.js                            | 22 +++------
 tests/languages.js                       |  2 +
 tests/regions.js                         |  2 +
 tests/svg.js                             |  2 +
 tests/{validate_fs.sh => validate-fs.sh} |  3 +-
 tests/validate_images.rb                 | 57 ------------------------
 10 files changed, 21 insertions(+), 80 deletions(-)
 mode change 100644 => 100755 scripts/APIv1.js
 mode change 100644 => 100755 scripts/Sign.sh
 mode change 100644 => 100755 tests/categories.js
 mode change 100644 => 100755 tests/images.js
 mode change 100644 => 100755 tests/json.js
 mode change 100644 => 100755 tests/languages.js
 mode change 100644 => 100755 tests/regions.js
 mode change 100644 => 100755 tests/svg.js
 rename tests/{validate_fs.sh => validate-fs.sh} (95%)
 delete mode 100755 tests/validate_images.rb

diff --git a/scripts/APIv1.js b/scripts/APIv1.js
old mode 100644
new mode 100755
index f838d21..007193e
--- a/scripts/APIv1.js
+++ b/scripts/APIv1.js
@@ -15,6 +15,7 @@ const readJSONFile = async (filePath) => {
 };
 
 const writeJSONFile = async (filePath, data) => {
+  core.debug(`Writing to ${filePath}`)
   await fs.writeFile(filePath, JSON.stringify(data, null, 2));
 };
 
diff --git a/scripts/Sign.sh b/scripts/Sign.sh
old mode 100644
new mode 100755
index 6ce673c..ab2dc78
--- a/scripts/Sign.sh
+++ b/scripts/Sign.sh
@@ -6,7 +6,6 @@ echo 0 > "$STATUS_FILE"
 
 sign_and_verify() {
   local f="$1"
-  echo "$f.sig"
   echo "$PGP_PASSWORD" | gpg --batch --yes --pinentry-mode loopback --passphrase-fd 0 --local-user "$PGP_KEY_ID" --output "$f.sig" --sign "$f"
   echo "::debug:: Signing of $f exited with status $?"
   if ! gpg --verify "$f.sig" 2>/dev/null; then
@@ -16,13 +15,10 @@ sign_and_verify() {
 }
 
 # Iterate API files in parallel
-for f in api/v*/*.json; do
-  sign_and_verify "$f" &
+for f in public/v*/*.json; do
+  sign_and_verify "$f"
 done
 
-# Wait for all background processes to complete
-wait
-
 STATUS=$(cat "$STATUS_FILE")
 rm "$STATUS_FILE"
 exit "$STATUS"
diff --git a/tests/categories.js b/tests/categories.js
old mode 100644
new mode 100755
index d65cbc8..196d843
--- a/tests/categories.js
+++ b/tests/categories.js
@@ -1,3 +1,5 @@
+#!/usr/bin/env node
+
 const fs = require('fs').promises;
 const core = require('@actions/core');
 
diff --git a/tests/images.js b/tests/images.js
old mode 100644
new mode 100755
index 5882797..f58ee8f
--- a/tests/images.js
+++ b/tests/images.js
@@ -1,3 +1,5 @@
+#!/usr/bin/env node
+
 const fs = require('fs').promises;
 const core = require('@actions/core');
 const {glob} = require('glob');
diff --git a/tests/json.js b/tests/json.js
old mode 100644
new mode 100755
index 48baa75..9491716
--- a/tests/json.js
+++ b/tests/json.js
@@ -1,3 +1,5 @@
+#!/usr/bin/env node
+
 const fs = require('fs').promises;
 const core = require('@actions/core');
 const Ajv = require('ajv');
@@ -66,25 +68,15 @@ function validateJSONSchema(file, json) {
 * @param {object} entry - The main entry object within the JSON content. 
*/
 function validateFileContents(file, entry) {
-  const valid_name = `${entry.domain}.json`;
-
-  if (basename(file) !== valid_name) error(`File name should be ${valid_name}`, {file, title: 'File name'});
-
-  if (entry.url === `https://${entry.domain}`) error(`Unnecessary url element defined.`, {file});
+  const domain = basename(file, '.json');
 
-  if (entry.img === `${entry.domain}.svg`) error(`Unnecessary img element defined.`, {file});
+  if (entry['url'] === `https://${domain}`) error(`Unnecessary url element defined.`, {file});
 
-  if (file !== `entries/${entry.domain[0]}/${valid_name}`) error(
-    `Entry should be placed in entries/${entry.domain[0]}/`, {file});
+  if (entry['img'] === `${domain}.svg`) error(`Unnecessary img element defined.`, {file});
 
-  if (entry.tfa?.includes('custom-software') && !entry['custom-software']) error('Missing custom-software element',
-    {file});
+  if (file !== `entries/${domain[0]}/${domain}.json`) error(`Entry should be placed in entries/${domain[0]}/`, {file});
 
-  if (entry.tfa?.includes('custom-hardware') && !entry['custom-hardware']) error('Missing custom-hardware element',
-    {file});
-
-  if (entry.tfa && !entry.documentation) core.warning(
-    'No documentation set. Please provide screenshots in the pull request', {file, title: 'Missing documentation'});
+  if((entry['mfa'] || entry['passwordless']) && !entry['documentation']) core.warning('Since there is no documentation available, please could you provide us with screenshots of the setup/login process as evidence of 2FA? Please remember to block out any personal information.', {file, title: 'Missing Documentation'});
 }
 
 module.exports = main();
diff --git a/tests/languages.js b/tests/languages.js
old mode 100644
new mode 100755
index acca9f4..4661c76
--- a/tests/languages.js
+++ b/tests/languages.js
@@ -1,3 +1,5 @@
+#!/usr/bin/env node
+
 const fs = require("fs").promises;
 const core = require("@actions/core");
 
diff --git a/tests/regions.js b/tests/regions.js
old mode 100644
new mode 100755
index 3eccdc3..d25d813
--- a/tests/regions.js
+++ b/tests/regions.js
@@ -1,3 +1,5 @@
+#!/usr/bin/env node
+
 const fs = require("fs").promises;
 const core = require("@actions/core");
 
diff --git a/tests/svg.js b/tests/svg.js
old mode 100644
new mode 100755
index 3475084..e1f94b0
--- a/tests/svg.js
+++ b/tests/svg.js
@@ -1,3 +1,5 @@
+#!/usr/bin/env node
+
 const fs = require('fs');
 const {DOMParser} = require('@xmldom/xmldom');
 const xpath = require('xpath');
diff --git a/tests/validate_fs.sh b/tests/validate-fs.sh
similarity index 95%
rename from tests/validate_fs.sh
rename to tests/validate-fs.sh
index ac3b343..0845fa7
--- a/tests/validate_fs.sh
+++ b/tests/validate-fs.sh
@@ -34,12 +34,11 @@ function checkPerm()
 [ -e api ] && checkExt api json sig
 checkExt icons svg png
 checkExt entries json
-checkExt scripts js
+checkExt scripts js sh
 checkExt tests js sh json
 checkPerm icons 664 644
 checkPerm tests 775 755 664 644
 checkPerm entries 664 644
 checkPerm scripts 775 755
-checkPerm .circleci 664 644
 checkPerm .github 664 644
 exit $status
diff --git a/tests/validate_images.rb b/tests/validate_images.rb
deleted file mode 100755
index d514ff4..0000000
--- a/tests/validate_images.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-# This script validates common image mistakes. 
- -require 'json' -require 'net/http' -require 'uri' -require 'parallel' - -@status = 0 -PNG_SIZE = [[32, 32], [64, 64], [128, 128]].freeze -seen_sites = [] - -def error(file, msg) - puts "::error file=#{file}:: #{msg}" - @status = 1 -end - -def alternative_src(image) - res = Net::HTTP.get_response URI("https://api.2fa.directory/icons/#{image[0]}/#{image}") - res.code.eql? '200' -end - -Parallel.each(Dir.glob('entries/*/*.json'), in_threads: 16) do |file| - website = JSON.parse(File.read(file)).values[0] - domain = File.basename(file, '.*') - img = website['img'] || "#{domain}.svg" - path = "icons/#{img[0]}/#{img}" - exists_locally = File.exist?(path) - exists_remotely = alternative_src(img) - if exists_locally || exists_remotely - error(file, 'Image already exists in 2fa.directory.') unless exists_locally ^ exists_remotely - else - error(file, "Image does not exist for #{domain} - #{path} cannot be found.") - end - - if website['img'].eql?("#{domain}.svg") - error(file, "Defining the img property for #{domain} is not necessary. #{img} is the default value.") - end - seen_sites.push(path) if File.exist?(path) -end - -Dir.glob('icons/*/*') do |file| - next if file.include? '/icons/' - - error(file, 'Unused image') unless seen_sites.include? file - - if file.include? '.png' - dimensions = IO.read(file)[0x10..0x18].unpack('NN') - unless PNG_SIZE.include? dimensions - error(file, "PNGs must be one of the following sizes: #{PNG_SIZE.map { |a| a.join('x') }.join(', ')}.") - end - end -end - -exit(@status) From fa0704095dadb9c865fc824b92de2ceb12155a93 Mon Sep 17 00:00:00 2001 From: Carlgo11 Date: Fri, 19 Jul 2024 05:09:28 +0200 Subject: [PATCH 06/10] Reword warning --- tests/json.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/json.js b/tests/json.js index 9491716..1e7c1de 100755 --- a/tests/json.js +++ b/tests/json.js @@ -76,7 +76,7 @@ function validateFileContents(file, entry) { if (file !== `entries/${domain[0]}/${domain}.json`) error(`Entry should be placed in entries/${domain[0]}/`, {file}); - if((entry['mfa'] || entry['passwordless']) && !entry['documentation']) core.warning('Since there is no documentation available, please could you provide us with screenshots of the setup/login process as evidence of 2FA? 
Please remember to block out any personal information.', {file, title: 'Missing Documentation'}); + if((entry['mfa'] || entry['passwordless']) && !entry['documentation']) core.warning('Please add documentation or provide a screenshot of the passkey setup-process.', {file, title: 'Missing Documentation'}); } module.exports = main(); From f6b9fcc2bb83ad84f3751e6db712af1cd73f3e15 Mon Sep 17 00:00:00 2001 From: Carlgo11 Date: Sun, 21 Jul 2024 01:09:39 +0200 Subject: [PATCH 07/10] Clean up code --- .editorconfig | 5 +- .gitignore | 1 - scripts/APIv1.js | 8 ++- tests/categories.js | 82 ++++++++++++++++++--------- tests/images.js | 43 ++++++++------ tests/json.js | 11 ++-- tests/languages.js | 20 ++++--- tests/regions.js | 23 ++++---- tests/{ => schemas}/api_schema.json | 0 tests/{ => schemas}/entry_schema.json | 0 10 files changed, 118 insertions(+), 75 deletions(-) rename tests/{ => schemas}/api_schema.json (100%) rename tests/{ => schemas}/entry_schema.json (100%) diff --git a/.editorconfig b/.editorconfig index 0ffcb16..d1ec09a 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,15 +3,12 @@ root = true [*] charset = utf-8 end_of_line = lf -indent_size = 4 +indent_size = 2 indent_style = space insert_final_newline = true max_line_length = 120 tab_width = 4 trim_trailing_whitespace = true -[*.json] -indent_size = 2 - [*.svg] max_line_length = off \ No newline at end of file diff --git a/.gitignore b/.gitignore index 8d21961..95bca7f 100644 --- a/.gitignore +++ b/.gitignore @@ -13,7 +13,6 @@ vendor/ # NPM files node_modules/ package-lock.json -package.json # OS files *.DS_Store diff --git a/scripts/APIv1.js b/scripts/APIv1.js index 007193e..87f48cd 100755 --- a/scripts/APIv1.js +++ b/scripts/APIv1.js @@ -78,14 +78,16 @@ const privateApi = async (allEntries) => { const {regions: entryRegions, 'additional-domains': additionalDomains, ...processedEntry} = entry; if (entryRegions) { + console.log(entryRegions) entryRegions.forEach((region) => { + if (region[0] !== '-') { regions[region] = regions[region] || {count: 0}; regions[region].count += 1; } }); } - + processedEntry.regions = entryRegions processedEntry.categories = Array.isArray(entry.categories) ? entry.categories: entry.categories ? [entry.categories]:[]; @@ -106,7 +108,7 @@ const privateApi = async (allEntries) => { }; const validateSchema = async () => { - const schema = await readJSONFile('tests/api_schema.json'); + const schema = await readJSONFile('tests/schemas/api_schema.json'); const validator = new Validator(); const files = glob.sync('public/v1/*.json'); @@ -135,6 +137,7 @@ const fetch2FAEntries = async () => { domain: entry.domain, contact: entry.contact, categories: entry.keywords.length === 1 ? entry.keywords[0]:entry.keywords, + regions: entry.regions }; if (entry.contact) { publicEntries[entry.domain] = { @@ -152,6 +155,7 @@ const fetch2FAEntries = async () => { recovery: entry.recovery, notes: entry.notes, categories: entry.keywords, + regions: entry.regions }; publicEntries[entry.domain] = { mfa: 'allowed', diff --git a/tests/categories.js b/tests/categories.js index 196d843..e9d00d6 100755 --- a/tests/categories.js +++ b/tests/categories.js @@ -3,38 +3,68 @@ const fs = require('fs').promises; const core = require('@actions/core'); -async function main() { - let errors = false; - const files = process.argv.slice(2); +/** + * Fetch the categories from API repository. + * + * @returns {Promise} The parsed JSON object containing categories. + * @throws Will throw an error if the fetch operation fails. 
+ */
+async function fetchCategories() {
   const res = await fetch(
-    'https://raw.githubusercontent.com/2factorauth/passkeys.2fa.directory/master/data/categories.json',
-    {
-      accept: 'application/json',
-      'user-agent': '2factorauth/passkeys +https://2fa.directory/bots',
+    'https://raw.githubusercontent.com/2factorauth/passkeys.2fa.directory/master/data/categories.json', {
+      headers: {
+        accept: 'application/json',
+        'user-agent': '2factorauth/passkeys +https://2fa.directory/bots',
+      },
     });
 
   if (!res.ok) throw new Error('Unable to fetch categories');
-  const data = await res.json();
-  const allowed_categories = Object.keys(data);
-
-  if (files) {
-    for (const file of files) {
-      const data = await fs.readFile(file, 'utf8');
-      const json = await JSON.parse(data);
-      const entry = json[Object.keys(json)[0]];
-      let {categories} = entry;
-      if(typeof categories === 'string') categories = [categories];
-
-      for (const category of categories || []) {
-        if (!allowed_categories.includes(category)) {
-          core.error(`${category} is not a valid category.`, {file});
-          errors = true;
-        }
-      }
+  return res.json();
+}
+
+/**
+ * Validate a single file's categories against the allowed categories.
+ *
+ * @param {string} file - The path to the file to be validated.
+ * @param {Array<string>} allowedCategories - The list of allowed category names.
+ * @returns {Promise<boolean>} Returns true if an error occurred, false otherwise.
+ */
+async function validateFile(file, allowedCategories) {
+  const data = await fs.readFile(file, 'utf8');
+  const json = JSON.parse(data);
+  const entry = json[Object.keys(json)[0]];
+  let { categories } = entry;
+  if (typeof categories === 'string') categories = [categories];
+
+  for (const category of categories || []) {
+    if (!allowedCategories.includes(category)) {
+      core.error(`${category} is not a valid category.`, { file });
+      return true; // Indicates an error occurred
     }
   }
-  process.exit(+errors);
+  return false; // No errors
+}
+
+/**
+ * Main function to fetch categories and validate all provided files.
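+ * Reads entry file paths from the command-line arguments.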
+ *
+ * @returns {Promise<void>}
+ */
+async function main() {
+  // Fetch the allowed categories
+  const categoriesData = await fetchCategories();
+  const allowedCategories = Object.keys(categoriesData);
+
+  // Get the list of files from command-line arguments
+  const files = process.argv.slice(2);
+
+  // Validate each file in parallel
+  const errors = await Promise.all(files.map(file => validateFile(file, allowedCategories)));
+
+  // Exit with a status code of 1 if any errors occurred, otherwise 0
+  process.exit(+errors.some(error => error));
+}
 
-module.exports = main();
+if (require.main === module) main(); // Run directly; also exported as a module
+module.exports = main;
diff --git a/tests/images.js b/tests/images.js
index f58ee8f..276cfd7 100755
--- a/tests/images.js
+++ b/tests/images.js
@@ -6,8 +6,7 @@ const {glob} = require('glob');
 const path = require('path');
 
 // Allowed image dimensions
-const PNG_RES = [
-  [16, 16], [32, 32], [64, 64], [128, 128]];
+const PNG_RES = [[16, 16], [32, 32], [64, 64], [128, 128]];
 
 let seenImages = [];
 let errors = false;
@@ -22,23 +21,35 @@ async function main() {
   process.exit(+errors);
 }
 
+async function alternativeSource(image) {
+  const res = await fetch(`https://api.2fa.directory/${image}`, {
+    headers: {
+      'user-agent': '2factorauth/passkeys +https://2fa.directory/bots',
+    },
+  });
+  return res.ok;
+}
+
 async function parseEntries(entries) {
   await Promise.all(entries.map(async (file) => {
-      const data = await fs.readFile(file, 'utf8');
-      const json = await JSON.parse(data);
-      const entry = json[Object.keys(json)[0]];
-      const {img} = entry;
-      const domain = path.parse(file).name;
-      const imgPath = `icons/${img ? `${img[0]}/${img}`:`${domain[0]}/${domain}.svg`}`;
-
-      try {
-        await fs.readFile(imgPath);
-      } catch (e) {
-        core.error(`Image ${imgPath} not found.`, {file});
-        errors = true;
-      }
-      seenImages.push(imgPath);
-    }));
+    const data = await fs.readFile(file, 'utf8');
+    const json = JSON.parse(data);
+    const entry = json[Object.keys(json)[0]];
+    const {img} = entry;
+    const domain = path.parse(file).name;
+    const imgPath = `icons/${img ? `${img[0]}/${img}`:`${domain[0]}/${domain}.svg`}`;
+
+    try {
+      await fs.readFile(imgPath);
+      seenImages.push(imgPath);
+    } catch (e) {
+      if (!await alternativeSource(imgPath)) {
+        core.error(`Image ${imgPath} not found.`, {file});
+        errors = true;
+      }
+    }
+  }),
+  );
 }
 
 async function parseImages(images) {
diff --git a/tests/json.js b/tests/json.js
index 1e7c1de..d4d9a18 100755
--- a/tests/json.js
+++ b/tests/json.js
@@ -4,9 +4,8 @@ const fs = require('fs').promises;
 const core = require('@actions/core');
 const Ajv = require('ajv');
 const addFormats = require('ajv-formats');
-const schema = require('./entry_schema.json');
+const schema = require('./schemas/entry_schema.json');
 const {basename} = require('node:path');
-const path = require('path');
 
 const ajv = new Ajv({strict: false, allErrors: true});
 addFormats(ajv);
@@ -36,7 +35,7 @@ async function main() {
       validateJSONSchema(file, json);
       validateFileContents(file, entry);
     } catch (e) {
-      error(`Failed to process ${file}: ${err.message}`, {file});
+      error(`Failed to process ${file}: ${e.message}`, {file});
     }
   }));
 
@@ -53,10 +52,10 @@ function validateJSONSchema(file, json) {
   const valid = validate(json);
   if (!valid) {
     errors = true;
-    validate.errors.forEach((err) => {
-      const {message, instancePath, keyword: title} = err;
+    validate.errors.forEach((e) => {
+      const {message, instancePath, keyword: title} = e;
       const instance = instancePath?.split('/');
-      if (message) error(`${instance[instance.length - 1]} ${message}`, {file, title}); else error(err, {file});
+      if (message) error(`${instance[instance.length - 1]} ${message}`, {file, title}); else error(e, {file});
     });
   }
 }
diff --git a/tests/languages.js b/tests/languages.js
index 4661c76..17b2d48 100755
--- a/tests/languages.js
+++ b/tests/languages.js
@@ -1,34 +1,36 @@
 #!/usr/bin/env node
-const fs = require("fs").promises;
-const core = require("@actions/core");
+const fs = require('fs').promises;
+const core = require('@actions/core');
 
 const url = new URL(
-  "https://pkgstore.datahub.io/core/language-codes/language-codes_json/data/97607046542b532c395cf83df5185246/language-codes_json.json",
+  'https://pkgstore.datahub.io/core/language-codes/language-codes_json/data/97607046542b532c395cf83df5185246/language-codes_json.json',
 );
 
 async function main() {
   let errors = false;
   const files = process.argv.slice(2);
 
   const res = await fetch(url, {
-    accept: "application/json",
-    "user-agent": "2factorauth/passkeys +https://2fa.directory/bots",
+    headers: {
+      accept: 'application/json',
+      'user-agent': '2factorauth/passkeys +https://2fa.directory/bots',
+    },
   });
 
-  if (!res.ok) throw new Error("Unable to fetch language codes");
+  if (!res.ok) throw new Error('Unable to fetch language codes');
 
   const data = await res.json();
-  const codes = Object.values(data).map((language) => language.alpha2);
+  const codes = Object.values(data).map((language) => language['alpha2']);
 
   if (files) {
     for (const file of files) {
-      const data = await fs.readFile(file, "utf8");
+      const data = await fs.readFile(file, 'utf8');
       const json = await JSON.parse(data);
       const entry = json[Object.keys(json)[0]];
 
       const language = entry.contact?.language;
 
       if (language && !codes.includes(language)) {
-        core.error(`${language} is not a valid language`, { file });
+        core.error(`${language} is not a valid language`, {file});
         errors = true;
       }
     }
diff --git a/tests/regions.js b/tests/regions.js
index d25d813..766097b 100755
--- a/tests/regions.js
+++ b/tests/regions.js
@@ -1,35 +1,36 @@
 #!/usr/bin/env node
-const fs = require("fs").promises;
-const core = require("@actions/core");
+const fs = require('fs').promises;
+const core = require('@actions/core');
 
 const url = new URL(
-  "https://raw.githubusercontent.com/stefangabos/world_countries/master/data/countries/en/world.json",
-);
+  'https://raw.githubusercontent.com/stefangabos/world_countries/master/data/countries/en/world.json');
 
 async function main() {
   let errors = false;
   const files = process.argv.slice(2);
 
   const res = await fetch(url, {
-    accept: "application/json",
-    "user-agent": "2factorauth/passkeys +https://2fa.directory/bots",
+    headers: {
+      accept: 'application/json',
+      'user-agent': '2factorauth/passkeys +https://2fa.directory/bots',
+    },
   });
 
-  if (!res.ok) throw new Error("Unable to fetch region codes");
+  if (!res.ok) throw new Error('Unable to fetch region codes');
 
   const data = await res.json();
-  const codes = Object.values(data).map((region) => region.alpha2);
+  const codes = Object.values(data).map((region) => region['alpha2']);
 
   if (files) {
     for (const file of files) {
-      const data = await fs.readFile(file, "utf8");
+      const data = await fs.readFile(file, 'utf8');
       const json = await JSON.parse(data);
       const entry = json[Object.keys(json)[0]];
-      const { regions } = entry;
+      const {regions} = entry;
 
       for (const region of regions || []) {
         if (!codes.includes(region)) {
-          core.error(`${region} is not a valid region code`, { file });
+          core.error(`${region} is not a valid region code`, {file});
           errors = true;
         }
       }
diff --git a/tests/api_schema.json b/tests/schemas/api_schema.json
similarity index 100%
rename from tests/api_schema.json
rename to tests/schemas/api_schema.json
diff --git a/tests/entry_schema.json b/tests/schemas/entry_schema.json
similarity index 100%
rename from tests/entry_schema.json
rename to tests/schemas/entry_schema.json

From a1f1aa25b928dd795eaf234ee6d44172b9dd0131 Mon Sep 17 00:00:00 2001
From: Carlgo11
Date: Sun, 21 Jul 2024 01:10:12 +0200
Subject: [PATCH 08/10] Rename image test to icons test

---
 .github/workflows/pull_request.yml | 8 ++++++--
 tests/{images.js => icons.js}      | 0
 2 files changed, 6 insertions(+), 2 deletions(-)
 rename tests/{images.js => icons.js} (100%)

diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index bf626bc..d766e56 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -57,8 +57,12 @@ jobs:
       - name: Validate Icons
         if: ${{ steps.diff.outputs.entries || steps.diff.outputs.icons }}
         run: |
-          node tests/icons.js
-          node tests/svg.js ${{ steps.diff.outputs.icons }}
+          node tests/icons.js &
+          ICONS=$!
+          node tests/svg.js ${{ steps.diff.outputs.icons }} &
+          SVG=$!
+          wait $ICONS
+          wait $SVG
 
       - name: Validate URL reachability
         if: steps.diff.outputs.entries
diff --git a/tests/images.js b/tests/icons.js
similarity index 100%
rename from tests/images.js
rename to tests/icons.js

From d8099e75d7714835e73298de34f2aa251b43eaee Mon Sep 17 00:00:00 2001
From: Carlgo11
Date: Sun, 21 Jul 2024 03:21:36 +0200
Subject: [PATCH 09/10] Add URL reachability test

---
 tests/urls.js | 52 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)
 create mode 100644 tests/urls.js

diff --git a/tests/urls.js b/tests/urls.js
new file mode 100644
index 0000000..d66d00c
--- /dev/null
+++ b/tests/urls.js
@@ -0,0 +1,52 @@
+const fs = require('fs').promises;
+const core = require('@actions/core');
+const AbortController = require('abort-controller');
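+// fetch() and AbortController are globals on Node 18+; the abort-controller
+// package is only needed as a fallback on older runtimes.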
+const {basename} = require('path');
+
+async function checkURL(url, file) {
+  const controller = new AbortController();
+  // Abort the request if it takes longer than 2 seconds
+  const timer = setTimeout(() => controller.abort(), 2000);
+
+  try {
+    const res = await fetch(url, {
+      headers: {
+        'User-Agent':
+          '2factorauth/URLValidator (+https://2fa.directory/bots)',
+      },
+      signal: controller.signal,
+    });
+
+    if (res.ok) return true;
+    if (res.status !== 403)
+      core.warning(`Unable to fetch ${url} (${res.status})`, {file});
+  } catch (e) {
+    core.warning(`Unable to fetch ${url}`, {file});
+  } finally {
+    clearTimeout(timer);
+  }
+  return false;
+}
+
+async function main(files) {
+  await Promise.all(
+    files.map(async (file) => {
+      const json = JSON.parse(await fs.readFile(file));
+      const entry = json[Object.keys(json)[0]];
+      const domain = basename(file, '.json');
+      let urls = [entry.url ? entry.url:`https://${domain}/`];
+
+      entry['additional-domains']?.forEach((domain) =>
+        urls.push(`https://${domain}/`),
+      );
+
+      await Promise.all(urls.map((url) => checkURL(url, file)));
+    }),
+  );
+
+  return true;
+}
+
+main(process.argv.slice(2)).then(() => process.exit(0));

From 49839240d2645cf92e7112eaf08e5fb92e79c80b Mon Sep 17 00:00:00 2001
From: Carlgo11
Date: Sun, 21 Jul 2024 03:22:26 +0200
Subject: [PATCH 10/10] Add missing dependencies

---
 package.json | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/package.json b/package.json
index dcf3171..540dcd4 100644
--- a/package.json
+++ b/package.json
@@ -11,10 +11,11 @@
     "jsonschema": "^1.4.1"
   },
   "devDependencies": {
+    "@xmldom/xmldom": "^0.8.10",
+    "abort-controller": "^3.0.0",
     "ajv": "^8.17.1",
     "ajv-errors": "^3.0.0",
     "ajv-formats": "^3.0.1",
-    "@xmldom/xmldom": "^0.8.10",
     "xpath": "^0.0.34"
   }
 }