diff --git a/.github/actions/pull-ghcr/action.yml b/.github/actions/pull-ghcr/action.yml index f35e4fdab..655b3b3bd 100644 --- a/.github/actions/pull-ghcr/action.yml +++ b/.github/actions/pull-ghcr/action.yml @@ -1,12 +1,12 @@ -name: "pull-ghcr" -description: "Pull all from GHCR" +name: pull-ghcr +description: Pull all from GHCR inputs: github_token: - description: "token to access GHCR" + description: token to access GHCR required: true runs: - using: "composite" + using: composite steps: - name: Login to GitHub Container Registry @@ -21,12 +21,12 @@ runs: set -x images=( - "wikibase" - "wikibase-bundle" - "elasticsearch" - "wdqs" - "wdqs-frontend" - "wdqs-proxy" + "wikibase" + "wikibase-bundle" + "elasticsearch" + "wdqs" + "wdqs-frontend" + "wdqs-proxy" "quickstatements" ) diff --git a/.github/actions/push-ghcr/action.yml b/.github/actions/push-ghcr/action.yml index 13aa9c05e..94d56aa27 100644 --- a/.github/actions/push-ghcr/action.yml +++ b/.github/actions/push-ghcr/action.yml @@ -1,15 +1,15 @@ -name: "push-ghcr" -description: "Push to GHCR" +name: push-ghcr +description: Push to GHCR inputs: docker_image: - description: "name of the docker image to push" + description: name of the docker image to push required: true github_token: - description: "token to access GHCR" + description: token to access GHCR required: true runs: - using: "composite" + using: composite steps: - uses: docker/login-action@v3 diff --git a/.github/actions/scan-image/action.yml b/.github/actions/scan-image/action.yml index f12fd7d63..d4354981c 100644 --- a/.github/actions/scan-image/action.yml +++ b/.github/actions/scan-image/action.yml @@ -1,12 +1,12 @@ -name: "scan-image" -description: "Scan image and upload results" +name: scan-image +description: Scan image and upload results inputs: image_name: - description: "name of the docker image to work with" + description: name of the docker image to work with required: true runs: - using: "composite" + using: composite steps: - uses: 
anchore/scan-action@v3 diff --git a/.github/reporter/report.js b/.github/reporter/report.js index 00348f9d7..d9cce1266 100644 --- a/.github/reporter/report.js +++ b/.github/reporter/report.js @@ -1,49 +1,53 @@ -const fs = require( 'fs' ) -const { extractModuleLineAndColumn } = require('mocha-json-streamier-reporter/lib/parse-stack-trace') -var core = require('@actions/core'); -var { issueCommand } = require('@actions/core/lib/command'); +const fs = require( 'fs' ); +const { + extractModuleLineAndColumn +} = require( 'mocha-json-streamier-reporter/lib/parse-stack-trace' ); +const core = require( '@actions/core' ); +const { issueCommand } = require( '@actions/core/lib/command' ); -const suiteName = process.argv[2]; +const suiteName = process.argv[ 2 ]; -if( suiteName ) { - return; +if ( suiteName ) { + return; } -const filePath = `../../test/suites/${suiteName}/results/result.json`; - -var resultObject = {}; - -if (fs.existsSync(filePath)) { - resultObject = JSON.parse(fs.readFileSync(filePath, 'utf8'))[suiteName]; - - if (resultObject.fail.length != 0) { - - resultObject.fail.forEach(test => { - const error = extractModuleLineAndColumn(test.error.stack); - let filePath = ''; - if( error.file ) { - filePath = error.file.replace('/usr/src/app/', 'test/'); - } - const message = test.fullTitle + ": " + test.error.message; - - issueCommand('error', { - file: filePath, - line: error.line, - col: error.column - }, message); - }); - - } else { - resultObject.pass.forEach(test => { - core.info( 'OK: ' + test.fullTitle ); - }); - - resultObject.skip.forEach(test => { - core.warning( 'SKIP: ' + test.fullTitle ); - }); - - core.info('\u001b[1mAll good 👍') - } +const filePath = `../../test/suites/${ suiteName }/results/result.json`; + +let resultObject = {}; + +if ( fs.existsSync( filePath ) ) { + resultObject = JSON.parse( fs.readFileSync( filePath, 'utf8' ) )[ suiteName ]; + + if ( resultObject.fail.length != 0 ) { + resultObject.fail.forEach( ( test ) => { + const error = 
extractModuleLineAndColumn( test.error.stack ); + let filePath = ''; + if ( error.file ) { + filePath = error.file.replace( '/usr/src/app/', 'test/' ); + } + const message = test.fullTitle + ': ' + test.error.message; + + issueCommand( + 'error', + { + file: filePath, + line: error.line, + col: error.column + }, + message + ); + } ); + } else { + resultObject.pass.forEach( ( test ) => { + core.info( 'OK: ' + test.fullTitle ); + } ); + + resultObject.skip.forEach( ( test ) => { + core.warning( 'SKIP: ' + test.fullTitle ); + } ); + + core.info( '\u001b[1mAll good 👍' ); + } } else { - core.error('No tests executed!'); + core.error( 'No tests executed!' ); } diff --git a/.github/workflows/_build_test.yml b/.github/workflows/_build_test.yml index 8766633e7..f43f11e85 100644 --- a/.github/workflows/_build_test.yml +++ b/.github/workflows/_build_test.yml @@ -20,14 +20,12 @@ jobs: fail-fast: false matrix: component: - [ - "wikibase", - "elasticsearch", - "wdqs", - "wdqs-frontend", - "wdqs-proxy", - "quickstatements" - ] + - wikibase + - elasticsearch + - wdqs + - wdqs-frontend + - wdqs-proxy + - quickstatements steps: - uses: actions/checkout@v4 @@ -70,22 +68,18 @@ jobs: fail-fast: false matrix: suite: - [ - repo, - fedprops, - repo_client, - quickstatements, - pingback, - confirm_edit, - elasticsearch, - - base__repo, - base__repo_client, - base__pingback, - base__fedprops, - - example - ] + - repo + - fedprops + - repo_client + - quickstatements + - pingback + - confirm_edit + - elasticsearch + - base__repo + - base__repo_client + - base__pingback + - base__fedprops + - example steps: - uses: actions/checkout@v4 @@ -122,20 +116,16 @@ jobs: fail-fast: false matrix: # TODO: can we get this from the environment to not have it hardcoded in the pipeline code? 
- version: [ - # Latest 1.37 - "WMDE9", - "WMDE9_BUNDLE", - # Latest 1.38 - "WMDE12", - "WMDE12_BUNDLE", - # Latest 1.39 - "WMDE15", - "WMDE15_BUNDLE", - # Previous 1.40 - "WMDE14", - "WMDE14_BUNDLE" - ] + version: + # Latest 1.38 + - WMDE12 + - WMDE12_BUNDLE + # Latest 1.39 + - WMDE15 + - WMDE15_BUNDLE + # Latest 1.40 + - WMDE16 + - WMDE16_BUNDLE steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/build_test_main.yml b/.github/workflows/build_test_main.yml index eb9800e69..8a8fcbe17 100644 --- a/.github/workflows/build_test_main.yml +++ b/.github/workflows/build_test_main.yml @@ -3,7 +3,7 @@ name: đŸ§Ș Build and Test main on: push: branches: - - "main" + - main jobs: _: diff --git a/.github/workflows/build_test_publish_release.yml b/.github/workflows/build_test_publish_release.yml index 75dbec332..ed23714b5 100644 --- a/.github/workflows/build_test_publish_release.yml +++ b/.github/workflows/build_test_publish_release.yml @@ -56,7 +56,7 @@ jobs: - uses: docker/login-action@v3 with: # implicitly docker hub - username: "roti4wmde" # TODO: get a bot user + username: roti4wmde # TODO: get a bot user password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} - name: Push release to dockerhub @@ -69,12 +69,12 @@ jobs: docker image ls images=( - "wikibase" - "wikibase-bundle" - "elasticsearch" - "wdqs" - "wdqs-frontend" - "wdqs-proxy" + "wikibase" + "wikibase-bundle" + "elasticsearch" + "wdqs" + "wdqs-frontend" + "wdqs-proxy" "quickstatements" ) diff --git a/CHANGES.md b/CHANGES.md index e0bf7a357..2940f2d36 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,16 +1,26 @@ # Changes -This file is only intended to serve the developers of this repository and of the releases. -This provides an overview of the releases that have been made using this release pipeline. +This file is only intended to serve the developers of this repository and of the releases. This provides an overview of the releases that have been made using this release pipeline. 
+ +## March 2024: Major releases for 1.41.0 + +- [MediaWiki release announcement: 1.41.0](https://lists.wikimedia.org/hyperkitty/list/wikitech-l@lists.wikimedia.org/message/OMDFHJ2SKKJH775RW4UTC754OY4TP7UU/) +- [MediaWiki full release notes](https://www.mediawiki.org/wiki/Release_notes/1.41) +- [Wikibase Suite Release task T354644](https://phabricator.wikimedia.org/T354644) + +| Suite Version | Version Info | Date available | Release task | +| --- | --- | --- | --- | +| wmde.17 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.17/variables.env)) | MediaWiki (1.41.0, PHP 8.2.15, debian/apache 2.4.57-2), WDQS (0.3.137) | XX March 2024 | [T354644](https://phabricator.wikimedia.org/T354644) | ## February 2024: Security releases for 1.40.2 + - [MediaWiki security and maintenance release announcement: 1.35.14 / 1.39.6 / 1.40.2](https://lists.wikimedia.org/hyperkitty/list/wikitech-l@lists.wikimedia.org/message/TDBUBCCOQJUT4SCHJNPHKQNPBUUETY52/) - [MediaWiki full release notes](https://www.mediawiki.org/wiki/Release_notes/1.40) - [Release task T354645](https://phabricator.wikimedia.org/T354645) -| Suite Version | Version Info | Date available | Run number | Release task | -|---------------|----------------------|----------------|------------| --------------| -| wmde.16 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.16/variables.env)) | MediaWiki (1.40.2, PHP 8.1.27, debian/apache 2.4.57-2), WDQS (0.3.137) | 21 February 2024 | [7976683909](https://github.com/wmde/wikibase-release-pipeline/actions/runs/7976683909) | [T354645](https://phabricator.wikimedia.org/T354645) | +| Suite Version | Version Info | Date available | Run number | Release task | +| --- | --- | --- | --- | --- | +| wmde.16 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.16/variables.env)) | MediaWiki (1.40.2, PHP 8.1.27, debian/apache 2.4.57-2), WDQS (0.3.137) | 21 February 2024 | 
[7976683909](https://github.com/wmde/wikibase-release-pipeline/actions/runs/7976683909) | [T354645](https://phabricator.wikimedia.org/T354645) | ## November 2023: First 1.40 release @@ -22,9 +32,9 @@ The [standard MediaWiki upgrade process](https://www.mediawiki.org/wiki/Manual:U - [MediaWiki release notes](https://www.mediawiki.org/wiki/Release_notes/1.40) - [Wikibase release notes](https://github.com/wikimedia/Wikibase/blob/REL1_40/RELEASE-NOTES-1.40) -| Suite Version | Version Info | Date available | Run number | Release task | -|---------------|----------------------|----------------|------------| --------------| -| wmde.14 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.14/variables.env)) | MediaWiki (1.40.1, PHP 8.1.25, debian/apache 2.4.57-2), WDQS (0.3.135) | 15 November 2023 | [6928788894](https://github.com/wmde/wikibase-release-pipeline/actions/runs/6928788894) | [T340939](https://phabricator.wikimedia.org/T340939) | +| Suite Version | Version Info | Date available | Run number | Release task | +| --- | --- | --- | --- | --- | +| wmde.14 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.14/variables.env)) | MediaWiki (1.40.1, PHP 8.1.25, debian/apache 2.4.57-2), WDQS (0.3.135) | 15 November 2023 | [6928788894](https://github.com/wmde/wikibase-release-pipeline/actions/runs/6928788894) | [T340939](https://phabricator.wikimedia.org/T340939) | ## October 2023: Security releases for 1.39.5 @@ -35,20 +45,19 @@ The [standard MediaWiki upgrade process](https://www.mediawiki.org/wiki/Manual:U - [MediaWiki full release notes](https://www.mediawiki.org/wiki/Release_notes/1.39) -| Suite Version | MediaWiki release | Date available | Run number | Release task | -|---------------|-------------------------------------------------------------------------|----------------|----------------| --------------| +| Suite Version | MediaWiki release | Date available | Run number | Release task | +| --- | --- | --- | --- | --- | | wmde.13 
([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.13/versions/wmde13.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde13)) | MediaWiki (1.39.5, PHP 8.1.24, debian/apache2 2.4.57-2), WDQS (0.3.135) | October 2023 | [6637828869](https://github.com/wmde/wikibase-release-pipeline/actions/runs/6637828869) | [T348616](https://phabricator.wikimedia.org/T348616) | - ## August 2023: Security releases for 1.38.7 - [MediaWiki security and maintenance release announcement: 1.35.10 / 1.38.6 / 1.39.3](https://lists.wikimedia.org/hyperkitty/list/wikitech-l@lists.wikimedia.org/message/6UQBHI5FWLATD7QO7DI4YS54U7XSSLAN/) - [MediaWiki security and maintenance release announcement: 1.35.11 / 1.38.7 / 1.39.4](https://lists.wikimedia.org/hyperkitty/list/wikitech-l@lists.wikimedia.org/thread/HVT3U3XYY35PSCIQPHMY4VQNF3Q6MHUO/) - [MediaWiki full release notes](https://www.mediawiki.org/wiki/Release_notes/1.38) -| Suite Version | MediaWiki release | Date available | Run number | Release task | -|---------------|-------------------------------------------------------------------------|----------------|----------------| --------------| -| wmde.12 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.12/versions/wmde12.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde12)) | MediaWiki (1.38.6, PHP 8.0.29, debian/apache2 2.4.56-1~deb11u2), WDQS (0.3.118) | August 2023 | [6665463497](https://github.com/wmde/wikibase-release-pipeline/actions/runs/6665463497) | [T334778](https://phabricator.wikimedia.org/T334778) | +| Suite Version | MediaWiki release | Date available | Run number | Release task | +| --- | --- | --- | --- | --- | +| wmde.12 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.12/versions/wmde12.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde12)) | MediaWiki (1.38.6, PHP 8.0.29, debian/apache2 2.4.56-1~deb11u2), WDQS 
(0.3.118) | August 2023 | [6665463497](https://github.com/wmde/wikibase-release-pipeline/actions/runs/6665463497) | [T334778](https://phabricator.wikimedia.org/T334778) | ## March 2023: First 1.39 release @@ -58,21 +67,25 @@ The [standard MediaWiki upgrade process](https://www.mediawiki.org/wiki/Manual:U - [Wikibase release notes](https://github.com/wikimedia/Wikibase/blob/REL1_39/RELEASE-NOTES-1.39) Users that still have lines in their LocalSettings.php file that look like this: - - `require_once "$IP/extensions/Wikibase/client/WikibaseClient.php"` - - `require_once "$IP/extensions/Wikibase/repo/WikibaseRepo.php"` + +- `require_once "$IP/extensions/Wikibase/client/WikibaseClient.php"` +- `require_once "$IP/extensions/Wikibase/repo/WikibaseRepo.php"` + Will need to switch to using the new `wfLoadExtension` function instead. - - `wfLoadExtension( "WikibaseClient", "$IP/extensions/Wikibase/extension-client.json" );` - - `wfLoadExtension( "WikibaseRepo", "$IP/extensions/Wikibase/extension-repo.json" );` + +- `wfLoadExtension( "WikibaseClient", "$IP/extensions/Wikibase/extension-client.json" );` +- `wfLoadExtension( "WikibaseRepo", "$IP/extensions/Wikibase/extension-repo.json" );` + If using a modified template for LocalSettings.php, generation, you'll need lines like this instead: - - `wfLoadExtension( "WikibaseClient", "${DOLLAR}IP\/extensions\/Wikibase\/extension-client.json" );` - - `wfLoadExtension( "WikibaseRepo", "${DOLLAR}IP\/extensions\/Wikibase\/extension-repo.json" );` -Users of tarballs that also use PHP 8+ will need to set `error_reporting = E_ALL ^ E_DEPRECATED` in their `php.ini` file -OR`error_reporting(E_ALL ^ E_DEPRECATED);` in their LocalSettings.php file. 
+- `wfLoadExtension( "WikibaseClient", "${DOLLAR}IP\/extensions\/Wikibase\/extension-client.json" );` +- `wfLoadExtension( "WikibaseRepo", "${DOLLAR}IP\/extensions\/Wikibase\/extension-repo.json" );` + +Users of tarballs that also use PHP 8+ will need to set `error_reporting = E_ALL ^ E_DEPRECATED` in their `php.ini` file OR`error_reporting(E_ALL ^ E_DEPRECATED);` in their LocalSettings.php file. -| Suite Version | Version Info | Date available | Run number | Release task | -|---------------|----------------------|----------------|------------| --------------| -| wmde.11 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.11/versions/wmde11.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde11)) | MediaWiki (1.39.1, PHP 8.1.15), WDQS (0.3.121) | 16 March 2023 | [4427564194](https://github.com/wmde/wikibase-release-pipeline/actions/runs/4427564194) | [T329236](https://phabricator.wikimedia.org/T329236) | +| Suite Version | Version Info | Date available | Run number | Release task | +| --- | --- | --- | --- | --- | +| wmde.11 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.11/versions/wmde11.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde11)) | MediaWiki (1.39.1, PHP 8.1.15), WDQS (0.3.121) | 16 March 2023 | [4427564194](https://github.com/wmde/wikibase-release-pipeline/actions/runs/4427564194) | [T329236](https://phabricator.wikimedia.org/T329236) | ## January 2023: First 1.38 release @@ -81,9 +94,9 @@ The [standard MediaWiki upgrade process](https://www.mediawiki.org/wiki/Manual:U - [MediaWiki release notes](https://www.mediawiki.org/wiki/Release_notes/1.38) - [Wikibase release notes](https://github.com/wikimedia/Wikibase/blob/REL1_38/RELEASE-NOTES-1.38) -| Suite Version | Version Info | Date available | Run number | Release task | -|---------------|----------------------|----------------|------------| --------------| -| wmde.10 
([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.10/versions/wmde10.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde10)) | MediaWiki (1.38.5, PHP 8.0.26), WDQS (0.3.118) | [16 January 2023](https://lists.wikimedia.org/hyperkitty/list/wikibaseug@lists.wikimedia.org/thread/KFPAI4S3S24IPAXTSF3AKYGSFUOKTM34/) | [3925740668](https://github.com/wmde/wikibase-release-pipeline/actions/runs/3925740668) | [T322407](https://phabricator.wikimedia.org/T322407) | +| Suite Version | Version Info | Date available | Run number | Release task | +| --- | --- | --- | --- | --- | +| wmde.10 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.10/versions/wmde10.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde10)) | MediaWiki (1.38.5, PHP 8.0.26), WDQS (0.3.118) | [16 January 2023](https://lists.wikimedia.org/hyperkitty/list/wikibaseug@lists.wikimedia.org/thread/KFPAI4S3S24IPAXTSF3AKYGSFUOKTM34/) | [3925740668](https://github.com/wmde/wikibase-release-pipeline/actions/runs/3925740668) | [T322407](https://phabricator.wikimedia.org/T322407) | ## November 2022: First 1.37 release @@ -94,25 +107,25 @@ The [standard MediaWiki upgrade process](https://www.mediawiki.org/wiki/Manual:U The Query service was also updated from `0.3.97` to `0.3.115`, but no additional upgrade process is needed. 
-| Suite Version | Version Info | Date available | Run number | Release task | -|---------------|----------------------|----------------|------------| --------------| -| wmde.9 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.9/versions/wmde9.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde9)) | MediaWiki (1.37.6, PHP 7.4.30), WDQS (0.3.115) | [1 November 2022](https://lists.wikimedia.org/hyperkitty/list/wikibaseug@lists.wikimedia.org/thread/RWS6EV7SHFNOD6KKQ6JA7RUW4TEXSXN5/) | [3303724221](https://github.com/wmde/wikibase-release-pipeline/actions/runs/3303724221) | [T317890](https://phabricator.wikimedia.org/T317890) | +| Suite Version | Version Info | Date available | Run number | Release task | +| --- | --- | --- | --- | --- | +| wmde.9 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.9/versions/wmde9.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/main/versions/wmde9)) | MediaWiki (1.37.6, PHP 7.4.30), WDQS (0.3.115) | [1 November 2022](https://lists.wikimedia.org/hyperkitty/list/wikibaseug@lists.wikimedia.org/thread/RWS6EV7SHFNOD6KKQ6JA7RUW4TEXSXN5/) | [3303724221](https://github.com/wmde/wikibase-release-pipeline/actions/runs/3303724221) | [T317890](https://phabricator.wikimedia.org/T317890) | ## September 2022: Security releases -| Suite Version | MediaWiki release | Date available | Run number | Release task | -|---------------|----------------------|----------------|------------| --------------| -| wmde.8 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.8/versions/wmde8.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/ec8b9e0bb25b9524df5671ad7785a66683598920/versions/wmde8)) | 1.36.4 | September 2022 | [2971822356](https://github.com/wmde/wikibase-release-pipeline/actions/runs/2971822356) | [T316707](https://phabricator.wikimedia.org/T316707) | -| wmde.7 
([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.7/versions/wmde7.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/ec8b9e0bb25b9524df5671ad7785a66683598920/versions/wmde7)) | 1.35.7 | September 2022 | [2971057584](https://github.com/wmde/wikibase-release-pipeline/actions/runs/2971057584) | [T314881](https://phabricator.wikimedia.org/T314881) | +| Suite Version | MediaWiki release | Date available | Run number | Release task | +| --- | --- | --- | --- | --- | +| wmde.8 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.8/versions/wmde8.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/ec8b9e0bb25b9524df5671ad7785a66683598920/versions/wmde8)) | 1.36.4 | September 2022 | [2971822356](https://github.com/wmde/wikibase-release-pipeline/actions/runs/2971822356) | [T316707](https://phabricator.wikimedia.org/T316707) | +| wmde.7 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.7/versions/wmde7.env),[metadata](https://github.com/wmde/wikibase-release-pipeline/blob/ec8b9e0bb25b9524df5671ad7785a66683598920/versions/wmde7)) | 1.35.7 | September 2022 | [2971057584](https://github.com/wmde/wikibase-release-pipeline/actions/runs/2971057584) | [T314881](https://phabricator.wikimedia.org/T314881) | Releases that bring a handful of small security fixes to MediaWiki & Wikibase, along with small patches to other components. 
## February 2022: Security releases | Suite Version | Compatible MediaWiki | Date available | Run number | Release task | -|---------------|----------------------|----------------|------------| --------------| -| wmde.5 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.5/versions/wmde5.env)) | 1.36 | 10 February 2022 | [1824280943](https://github.com/wmde/wikibase-release-pipeline/actions/runs/1824280943) | [T301359](https://phabricator.wikimedia.org/T301359) | -| wmde.6 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.6/versions/wmde6.env)) | 1.35 | 22 February 2022 | [1853048237](https://github.com/wmde/wikibase-release-pipeline/actions/runs/1853048237) | [T301663](https://phabricator.wikimedia.org/T301663) | +| --- | --- | --- | --- | --- | +| wmde.5 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.5/versions/wmde5.env)) | 1.36 | 10 February 2022 | [1824280943](https://github.com/wmde/wikibase-release-pipeline/actions/runs/1824280943) | [T301359](https://phabricator.wikimedia.org/T301359) | +| wmde.6 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.6/versions/wmde6.env)) | 1.35 | 22 February 2022 | [1853048237](https://github.com/wmde/wikibase-release-pipeline/actions/runs/1853048237) | [T301663](https://phabricator.wikimedia.org/T301663) | ### Security related changes @@ -132,8 +145,8 @@ The [standard MediaWiki upgrade process](https://www.mediawiki.org/wiki/Manual:U The Query service was also updated from `0.3.40` to `0.3.97`, but no additional upgrade process is needed. 
| Suite Version | Compatible MediaWiki | Date available | Release board | -|---------------|----------------------|----------------| --------------| -| wmde.4 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.4/versions/wmde4.env)) | 1.36 | 5 January 2022 | [Board](https://phabricator.wikimedia.org/project/board/5645/query/all/) | +| --- | --- | --- | --- | +| wmde.4 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.4/versions/wmde4.env)) | 1.36 | 5 January 2022 | [Board](https://phabricator.wikimedia.org/project/board/5645/query/all/) | ## December 2021: 1.35 Security release @@ -142,31 +155,29 @@ Fix for CVE-2021-44228 [Log4Shell](https://en.wikipedia.org/wiki/Log4Shell) issu Included a MediaWiki update to 1.35.5 | Suite Version | Compatible MediaWiki | Date available | Release board | -|---------------|----------------------|----------------|--------------| -| wmde.3 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.3/versions/wmde3.env)) | 1.35 | 16 December 2021 | [Board](https://phabricator.wikimedia.org/project/board/5645/query/all/) | +| --- | --- | --- | --- | +| wmde.3 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.3/versions/wmde3.env)) | 1.35 | 16 December 2021 | [Board](https://phabricator.wikimedia.org/project/board/5645/query/all/) | ## November 2021: 1.35 Security release -Primarily for a MediaWiki update to 1.35.4 -(other components were also updated) +Primarily for a MediaWiki update to 1.35.4 (other components were also updated) | Suite Version | Compatible MediaWiki | Date available | Release board | -|---------------|----------------------|----------------|--------------| -| wmde.2 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.2/versions/wmde2.env)) | 1.35 | 30 November 2021 | [Board](https://phabricator.wikimedia.org/project/board/5622/query/all/) | +| --- | --- | --- | --- | +| wmde.2 
([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.2/versions/wmde2.env)) | 1.35 | 30 November 2021 | [Board](https://phabricator.wikimedia.org/project/board/5622/query/all/) | ## May 2021: 1.35 Security release -Primarily for a MediaWiki update to 1.35.2 -(other components were also updated) +Primarily for a MediaWiki update to 1.35.2 (other components were also updated) | Suite Version | Compatible MediaWiki | Date available | Release board | -|---------------|----------------------|----------------|--------------| -| wmde.1 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.1/versions/wmde1.env)) | 1.35 | 6 May 2021 | ??? | +| --- | --- | --- | --- | +| wmde.1 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.1/versions/wmde1.env)) | 1.35 | 6 May 2021 | ??? | ## February 2021: First 1.35 release An initial suite release using MediaWiki 1.35.0 | Suite Version | Compatible MediaWiki | Date available | Release board | -|---------------|----------------------|----------------|--------------| -| wmde.0 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.1/versions/wmde0.env)) | 1.35 | 19 February 2021 | ??? | +| --- | --- | --- | --- | +| wmde.0 ([spec](https://github.com/wmde/wikibase-release-pipeline/blob/wmde.1/versions/wmde0.env)) | 1.35 | 19 February 2021 | ??? 
| diff --git a/build/QuickStatements/README.md b/build/QuickStatements/README.md index 3f0058daa..e8a89e0b8 100644 --- a/build/QuickStatements/README.md +++ b/build/QuickStatements/README.md @@ -4,44 +4,43 @@ QuickStatements as seen at [https://github.com/magnusmanske/quickstatements](htt ### Environment variables -Variable | Default | Description --------------------------------------|--------------------------|------------ -`WB_PUBLIC_SCHEME_HOST_AND_PORT` | NONE | Host and port of Wikibase as seen by the user's browser -`QS_PUBLIC_SCHEME_HOST_AND_PORT` | NONE | Host and port of QuickStatements as seen by the user's browser -`OAUTH_CONSUMER_KEY` | NONE | OAuth consumer key (obtained from Wikibase) -`OAUTH_CONSUMER_SECRET` | NONE | OAuth consumer secret (obtained from wikibase) -`PHP_TIMEZONE` | UTC | setting of php.ini date.timezone -`MW_SITE_LANG` | "en" | Site language -`MW_SITE_NAME` | "wikibase-docker" | Site name -`WB_PROPERTY_NAMESPACE` | NONE | Wikibase Property namespace -`WB_ITEM_NAMESPACE` | NONE | Wikibase Item namespace -`WB_PROPERTY_PREFIX` | NONE | Wikibase Property prefix -`WB_ITEM_PREFIX` | NONE | Wikibase Item prefix +| Variable | Default | Description | +| --- | --- | --- | +| `WB_PUBLIC_SCHEME_HOST_AND_PORT` | NONE | Host and port of Wikibase as seen by the user's browser | +| `QS_PUBLIC_SCHEME_HOST_AND_PORT` | NONE | Host and port of QuickStatements as seen by the user's browser | +| `OAUTH_CONSUMER_KEY` | NONE | OAuth consumer key (obtained from Wikibase) | +| `OAUTH_CONSUMER_SECRET` | NONE | OAuth consumer secret (obtained from wikibase) | +| `PHP_TIMEZONE` | UTC | setting of php.ini date.timezone | +| `MW_SITE_LANG` | "en" | Site language | +| `MW_SITE_NAME` | "wikibase-docker" | Site name | +| `WB_PROPERTY_NAMESPACE` | NONE | Wikibase Property namespace | +| `WB_ITEM_NAMESPACE` | NONE | Wikibase Item namespace | +| `WB_PROPERTY_PREFIX` | NONE | Wikibase Property prefix | +| `WB_ITEM_PREFIX` | NONE | Wikibase Item prefix | ### Filesystem 
layout -Directory | Description ---------------------------------------------|------------------------------------------------------------------------------- -`/var/www/html/quickstatements` | Base QuickStatements directory -`/var/www/html/quickstatements/public_html` | The Apache root folder -`/var/www/html/magnustools` | Base magnustools directory - -File | Description -------------------------- | ------------------------------------------------------------------------------ -`/templates/config.json` | Template for QuickStatements' config.json (substituted to `/var/www/html/quickstatements/public_html/config.json` at runtime) -`/templates/oauth.ini` | Template for QuickStatements' oauth.ini (substituted to `/var/www/html/quickstatements/oauth.ini` at runtime) -`/templates/php.ini` | php config (default provided sets date.timezone to prevent php complaining substituted to `/usr/local/etc/php/conf.d/php.ini` at runtime) +| Directory | Description | +| ------------------------------------------- | ------------------------------ | +| `/var/www/html/quickstatements` | Base QuickStatements directory | +| `/var/www/html/quickstatements/public_html` | The Apache root folder | +| `/var/www/html/magnustools` | Base magnustools directory | +| File | Description | +| --- | --- | +| `/templates/config.json` | Template for QuickStatements' config.json (substituted to `/var/www/html/quickstatements/public_html/config.json` at runtime) | +| `/templates/oauth.ini` | Template for QuickStatements' oauth.ini (substituted to `/var/www/html/quickstatements/oauth.ini` at runtime) | +| `/templates/php.ini` | php config (default provided sets date.timezone to prevent php complaining substituted to `/usr/local/etc/php/conf.d/php.ini` at runtime) | ### Set up QuickStatements -In order to authorize QuickStatements against Wikibase via OAuth, this container must be available on an address on the host machine that is also visible within the Docker network. 
Set `QS_PUBLIC_SCHEME_HOST_AND_PORT` to this address. -Likewise, Wikibase needs to be able to access QuickStatements for the OAuth callback on a host-recognizable address, set using `WB_PUBLIC_SCHEME_HOST_AND_PORT`. +In order to authorize QuickStatements against Wikibase via OAuth, this container must be available on an address on the host machine that is also visible within the Docker network. Set `QS_PUBLIC_SCHEME_HOST_AND_PORT` to this address. + +Likewise, Wikibase needs to be able to access QuickStatements for the OAuth callback on a host-recognizable address, set using `WB_PUBLIC_SCHEME_HOST_AND_PORT`. Note that Docker Engine doesn't provide such addresses, so you will likely need to set up a reverse proxy (such as nginx or haproxy) alongside either public DNS entries or a local DNS server using entries that route to these container. See the Wikibase Suite example configuration for more guidance on how to set that up. -You can pass the consumer and secret token you got from your Wikibase instance to this container using the environment variables - `OAUTH_CONSUMER_KEY` and `OAUTH_CONSUMER_SECRET`. Alternatively you can let the [extra-install scripts](../WikibaseBundle/extra-install/QuickStatements.sh) supplied in the Wikibase bundle handle this for you. +You can pass the consumer and secret token you got from your Wikibase instance to this container using the environment variables `OAUTH_CONSUMER_KEY` and `OAUTH_CONSUMER_SECRET`. Alternatively you can let the [extra-install scripts](../WikibaseBundle/extra-install/QuickStatements.sh) supplied in the Wikibase bundle handle this for you. Test whether it works by navigating to `QS_PUBLIC_SCHEME_HOST_AND_PORT` and logging in. @@ -52,6 +51,7 @@ Finally you should be redirected back to QuickStatements, and you should see you Use QuickStatements as you normally would, using the Run button. The "Run in background" option is not supported by this image. 
#### Troubleshooting + If you see an error such as `mw-oauth exception` when trying to log in, check that you have passed the correct consumer token and secret token to QuickStatements. If you have changed the value of $wgSecretKey $wgOAuthSecretKey since you made the consumer, you'll need to make another new consumer or reissue the secret token for the old one. diff --git a/build/WDQS-frontend/README.md b/build/WDQS-frontend/README.md index 40c2711ea..504dd7475 100644 --- a/build/WDQS-frontend/README.md +++ b/build/WDQS-frontend/README.md @@ -4,10 +4,10 @@ Frontend for the Wikibase Query Service, as seen at [https://query.wikidata.org] ### Environment variables -Variable | Default | Description -------------------| ----------------------------| ---------- -`LANGUAGE` | "en" | Language to use in the UI -`BRAND_TITLE` | "DockerWikibaseQueryService" | Name to display on the UI -`WIKIBASE_HOST` | "wikibase.svc" | Hostname of the Wikibase host -`WDQS_HOST` | "wdqs-proxy.svc" | Hostname of the WDQS host (probably READONLY, hence use of the wdqs-proxy service) -`COPYRIGHT_URL` | "undefined" | URL for the copyright notice +| Variable | Default | Description | +| --- | --- | --- | +| `LANGUAGE` | "en" | Language to use in the UI | +| `BRAND_TITLE` | "DockerWikibaseQueryService" | Name to display on the UI | +| `WIKIBASE_HOST` | "wikibase.svc" | Hostname of the Wikibase host | +| `WDQS_HOST` | "wdqs-proxy.svc" | Hostname of the WDQS host (probably READONLY, hence use of the wdqs-proxy service) | +| `COPYRIGHT_URL` | "undefined" | URL for the copyright notice | diff --git a/build/WDQS-proxy/README.md b/build/WDQS-proxy/README.md index 2f76ecc8f..f206697e0 100644 --- a/build/WDQS-proxy/README.md +++ b/build/WDQS-proxy/README.md @@ -6,17 +6,17 @@ In order to change how this image is configured just mount over the wdqs.templat ## Environment variables -Variable | Default | Description --------------------------| ----------------------------| ---------- -`PROXY_PASS_HOST` | 
"wdqs.svc:9999" | Where to forward the requests to -`PROXY_MAX_QUERY_MILLIS` | 60000 | Timeout in milliseconds +| Variable | Default | Description | +| --- | --- | --- | +| `PROXY_PASS_HOST` | "wdqs.svc:9999" | Where to forward the requests to | +| `PROXY_MAX_QUERY_MILLIS` | 60000 | Timeout in milliseconds | ### Filesystem layout -File | Description ---------------------------------- | ------------------------------------------------------------------------------ -`/etc/nginx/conf.d/wdqs.template` | Template for the nginx config (substituted to `/etc/nginx/conf.d/default.conf` at runtime) -`/etc/nginx/conf.d/default.conf` | nginx config. To override this you must also use a custom entrypoint to avoid the file being overwritten. +| File | Description | +| --- | --- | +| `/etc/nginx/conf.d/wdqs.template` | Template for the nginx config (substituted to `/etc/nginx/conf.d/default.conf` at runtime) | +| `/etc/nginx/conf.d/default.conf` | nginx config. To override this you must also use a custom entrypoint to avoid the file being overwritten. | ### Development diff --git a/build/WDQS/README.md b/build/WDQS/README.md index e0ea8c7b2..605bf8c7a 100644 --- a/build/WDQS/README.md +++ b/build/WDQS/README.md @@ -10,8 +10,7 @@ When running WDQS in a setup without WDQS-proxy **please consider disabling thes ### Upgrading -When upgrading between WDQS versions the data stored in `/wdqs/data` may not be compatible with the newer version. -When testing the new image if no data appears to be loaded into the Query Service you will need to reload the data. +When upgrading between WDQS versions the data stored in `/wdqs/data` may not be compatible with the newer version. When testing the new image if no data appears to be loaded into the Query Service you will need to reload the data. If all changes are still in [RecentChanges] then simply removing `/wdqs/data` and restarting the service should reload all data. 
@@ -19,32 +18,32 @@ If all changes are still in [RecentChanges] then simply removing `/wdqs/data` an If you can not use [RecentChanges] then you will need to reload from an RDF dump: - - [Make an RDF dump from your Wikibase repository using the dumpRdf.php maintenance script.](https://doc.wikimedia.org/Wikibase/master/php/docs_topics_rdf-binding.html) - - [Load the RDF dump into the query service](https://github.com/wikimedia/wikidata-query-rdf/blob/master/docs/getting-started.md#load-the-dump) +- [Make an RDF dump from your Wikibase repository using the dumpRdf.php maintenance script.](https://doc.wikimedia.org/Wikibase/master/php/docs_topics_rdf-binding.html) +- [Load the RDF dump into the query service](https://github.com/wikimedia/wikidata-query-rdf/blob/master/docs/getting-started.md#load-the-dump) ### Environment variables -Variable | Default | Since | Description --------------------------| ------------------| --------| ---------- -`MEMORY` | "" | 0.2.5 | Memory limit for Blazegraph -`HEAP_SIZE` | "1g" | 0.2.5 | Heap size for Blazegraph -`WIKIBASE_HOST` | "wikibase.svc" | 0.2.5 | Hostname of the Wikibase host -`WIKIBASE_SCHEME` | "http" | 0.2.5 | Scheme of the Wikibase host -`WDQS_HOST` | "wdqs.svc" | 0.2.5 | Hostname of the WDQS host (this service) -`WDQS_PORT` | "9999" | 0.2.5 | Port of the WDQS host (this service) -`WDQS_ENTITY_NAMESPACES` | "120,122" | 0.2.5 | Wikibase Namespaces to load data from -`WIKIBASE_MAX_DAYS_BACK` | "90" | 0.3.0 | Max days updater is allowed back from now -`BLAZEGRAPH_EXTRA_OPTS` | "" | wmde.9 | Extra options to be passed to Blazegraph +| Variable | Default | Since | Description | +| --- | --- | --- | --- | +| `MEMORY` | "" | 0.2.5 | Memory limit for Blazegraph | +| `HEAP_SIZE` | "1g" | 0.2.5 | Heap size for Blazegraph | +| `WIKIBASE_HOST` | "wikibase.svc" | 0.2.5 | Hostname of the Wikibase host | +| `WIKIBASE_SCHEME` | "http" | 0.2.5 | Scheme of the Wikibase host | +| `WDQS_HOST` | "wdqs.svc" | 0.2.5 | Hostname of the WDQS host 
(this service) | +| `WDQS_PORT` | "9999" | 0.2.5 | Port of the WDQS host (this service) | +| `WDQS_ENTITY_NAMESPACES` | "120,122" | 0.2.5 | Wikibase Namespaces to load data from | +| `WIKIBASE_MAX_DAYS_BACK` | "90" | 0.3.0 | Max days updater is allowed back from now | +| `BLAZEGRAPH_EXTRA_OPTS` | "" | wmde.9 | Extra options to be passed to Blazegraph | Note on `BLAZEGRAPH_EXTRA_OPTS`: These are options that are directly passed to the Blazegraph jar. That means they must be prefixed with `-D`. Example: `-Dhttps.proxyHost=http://my.proxy.com -Dhttps.proxyPort=3128`. See [the Wikidata Query Service User Manual](https://www.mediawiki.org/wiki/Wikidata_Query_Service/User_Manual#Configurable_properties) for all available options. ### Filesystem layout -File | Description ---------------------------------- | ------------------------------------------------------------------------------ -`/wdqs/whitelist.txt` | SPARQL endpoints that are allowed for federation -`/wdqs/RWStore.properties` | Properties for the service -`/templates/mwservices.json` | Template for MediaWiki services (substituted to `/wdqs/mwservices.json` at runtime) +| File | Description | +| --- | --- | +| `/wdqs/whitelist.txt` | SPARQL endpoints that are allowed for federation | +| `/wdqs/RWStore.properties` | Properties for the service | +| `/templates/mwservices.json` | Template for MediaWiki services (substituted to `/wdqs/mwservices.json` at runtime) | ### Troubleshooting diff --git a/build/Wikibase/Dockerfile b/build/Wikibase/Dockerfile index a0816b15d..155ed2ebc 100644 --- a/build/Wikibase/Dockerfile +++ b/build/Wikibase/Dockerfile @@ -17,6 +17,10 @@ RUN git clone https://gerrit.wikimedia.org/r/mediawiki/extensions/Wikibase /tmp/ FROM ${MEDIAWIKI_IMAGE_URL} as collector COPY --from=git-checkout /tmp/wikibase /var/www/html/extensions/Wikibase +COPY ./fix-do-not-add-stdport-in-detectServer.patch /tmp/fix-do-not-add-stdport-in-detectServer.patch +WORKDIR /var/www/html +RUN patch -Np1 < 
/tmp/fix-do-not-add-stdport-in-detectServer.patch + # ########################################################################### # hadolint ignore=DL3006 FROM ${COMPOSER_IMAGE_URL} as composer diff --git a/build/Wikibase/README.md b/build/Wikibase/README.md index d92957182..dc79d2297 100644 --- a/build/Wikibase/README.md +++ b/build/Wikibase/README.md @@ -4,32 +4,32 @@ Wikibase running on Mediawiki. ### Environment variables -Variable | Default | Description --------------------------| -------------------------| ---------- -`DB_SERVER` | "mysql.svc:3306" | Hostname and port for the MySQL server to use for Mediawiki & Wikibase -`DB_USER` | "wikiuser" | Username to use for the MySQL server -`DB_PASS` | "sqlpass" | Password to use for the MySQL server -`DB_NAME` | "my_wiki" | Database name to use for the MySQL server -`MW_SITE_NAME` | "wikibase-docker" | $wgSitename to use for MediaWiki -`MW_SITE_LANG` | "en" | $wgLanguageCode to use for MediaWiki -`MW_ADMIN_NAME` | "WikibaseAdmin" | Admin username to create on MediaWiki first install -`MW_ADMIN_PASS` | "WikibaseDockerAdminPass" | Admin password to use for admin account on first install -`MW_WG_SECRET_KEY` | "secretkey" | Used as source of entropy for persistent login/Oauth etc.. 
-`MW_WG_ENABLE_UPLOADS` | "false" | $wgEnableUploads to use for MediaWiki -`MW_WG_UPLOAD_DIRECTORY` | "/var/www/html/images" | $wgUploadDirectory to use for MediaWiki -`MW_WG_JOB_RUN_RATE` | "2" | $wgJobRunRate to use for MediaWiki -`WIKIBASE_PINGBACK` | "false" | boolean for [WikibasePingback](https://doc.wikimedia.org/Wikibase/master/php/md_docs_topics_pingback.html) +| Variable | Default | Description | +| --- | --- | --- | +| `DB_SERVER` | "mysql.svc:3306" | Hostname and port for the MySQL server to use for Mediawiki & Wikibase | +| `DB_USER` | "wikiuser" | Username to use for the MySQL server | +| `DB_PASS` | "sqlpass" | Password to use for the MySQL server | +| `DB_NAME` | "my_wiki" | Database name to use for the MySQL server | +| `MW_SITE_NAME` | "wikibase-docker" | $wgSitename to use for MediaWiki | +| `MW_SITE_LANG` | "en" | $wgLanguageCode to use for MediaWiki | +| `MW_ADMIN_NAME` | "WikibaseAdmin" | Admin username to create on MediaWiki first install | +| `MW_ADMIN_PASS` | "WikibaseDockerAdminPass" | Admin password to use for admin account on first install | +| `MW_WG_SECRET_KEY` | "secretkey" | Used as source of entropy for persistent login/Oauth etc.. | +| `MW_WG_ENABLE_UPLOADS` | "false" | $wgEnableUploads to use for MediaWiki | +| `MW_WG_UPLOAD_DIRECTORY` | "/var/www/html/images" | $wgUploadDirectory to use for MediaWiki | +| `MW_WG_JOB_RUN_RATE` | "2" | $wgJobRunRate to use for MediaWiki | +| `WIKIBASE_PINGBACK` | "false" | boolean for [WikibasePingback](https://doc.wikimedia.org/Wikibase/master/php/md_docs_topics_pingback.html) | ### Filesystem layout -Directory | Description ---------------------------------- | ------------------------------------------------------------------------------ -`/var/www/html` | Base Mediawiki directory -`/var/www/html/skins` | Mediawiki skins directory -`/var/www/html/extensions` | Mediawiki extensions directory -`/var/www/html/LocalSettings.d` | LocalSettings snippet directory. 
All PHP files from here will be loaded at the end of the default `/LocalSettings.php.template` +| Directory | Description | +| --- | --- | +| `/var/www/html` | Base Mediawiki directory | +| `/var/www/html/skins` | Mediawiki skins directory | +| `/var/www/html/extensions` | Mediawiki extensions directory | +| `/var/www/html/LocalSettings.d` | LocalSettings snippet directory. All PHP files from here will be loaded at the end of the default `/LocalSettings.php.template` | -File | Description ---------------------------------- | ------------------------------------------------------------------------------ -`/LocalSettings.php.template` | Template for Mediawiki LocalSettings.php (substituted to `/var/www/html/LocalSettings.php` at runtime) -`/var/www/html/LocalSettings.php` | LocalSettings.php location, when passed in `/LocalSettings.php.template` will not be used. install.php & update.php will also not be run. +| File | Description | +| --- | --- | +| `/LocalSettings.php.template` | Template for Mediawiki LocalSettings.php (substituted to `/var/www/html/LocalSettings.php` at runtime) | +| `/var/www/html/LocalSettings.php` | LocalSettings.php location, when passed in `/LocalSettings.php.template` will not be used. install.php & update.php will also not be run. 
| diff --git a/build/Wikibase/fix-do-not-add-stdport-in-detectServer.patch b/build/Wikibase/fix-do-not-add-stdport-in-detectServer.patch new file mode 100644 index 000000000..7f185b7d5 --- /dev/null +++ b/build/Wikibase/fix-do-not-add-stdport-in-detectServer.patch @@ -0,0 +1,17 @@ +diff --git a/includes/Request/WebRequest.php b/includes/Request/WebRequest.php +index 734076449b8..78acae64100 100644 +--- a/includes/Request/WebRequest.php ++++ b/includes/Request/WebRequest.php +@@ -288,10 +288,10 @@ class WebRequest { + $port = $stdPort; + } elseif ( $parts[1] === false ) { + if ( isset( $_SERVER['SERVER_PORT'] ) ) { +- $port = $_SERVER['SERVER_PORT']; ++ $port = (int)$_SERVER['SERVER_PORT']; + } // else leave it as $stdPort + } else { +- $port = $parts[1]; ++ $port = (int)$parts[1]; + } + break; + } diff --git a/build/WikibaseBundle/README.md b/build/WikibaseBundle/README.md index 456e14778..d2ea1b8d9 100644 --- a/build/WikibaseBundle/README.md +++ b/build/WikibaseBundle/README.md @@ -25,18 +25,18 @@ see [base image](../Wikibase/README.md) ### Filesystem layout -Directory | Description ---------------------------------------|------------------------------------------------------------------------------- -`/var/www/html` | Base Mediawiki directory -`/var/www/html/skins` | Mediawiki skins directory -`/var/www/html/extensions` | Mediawiki extensions directory -`/var/www/html/LocalSettings.d` | Bundle extension configuration directory -`/extra-install` | Extra install scripts for automatic setup -`/templates/` | Directory containing templates - -File | Description ---------------------------------------|------------------------------------------------------------------------------- -`/LocalSettings.php.template` | Template for Mediawiki LocalSettings.php (substituted to `/var/www/html/LocalSettings.php` at runtime) -`/var/www/html/LocalSettings.php` | LocalSettings.php location, when passed in `/LocalSettings.php.template` will not be used. 
install.php & update.php will also not be run. -`/extra-install/ElasticSearch.sh` | Script for automatically creating Elasticsearch indices -`/extra-install/QuickStatements.sh` | Script for automatically creating OAuth consumer for Quick Statements +| Directory | Description | +| ------------------------------- | ----------------------------------------- | +| `/var/www/html` | Base Mediawiki directory | +| `/var/www/html/skins` | Mediawiki skins directory | +| `/var/www/html/extensions` | Mediawiki extensions directory | +| `/var/www/html/LocalSettings.d` | Bundle extension configuration directory | +| `/extra-install` | Extra install scripts for automatic setup | +| `/templates/` | Directory containing templates | + +| File | Description | +| --- | --- | +| `/LocalSettings.php.template` | Template for Mediawiki LocalSettings.php (substituted to `/var/www/html/LocalSettings.php` at runtime) | +| `/var/www/html/LocalSettings.php` | LocalSettings.php location, when passed in `/LocalSettings.php.template` will not be used. install.php & update.php will also not be run. 
| +| `/extra-install/ElasticSearch.sh` | Script for automatically creating Elasticsearch indices | +| `/extra-install/QuickStatements.sh` | Script for automatically creating OAuth consumer for Quick Statements | diff --git a/docs/adr/0001-docker-image-repository.md b/docs/adr/0001-docker-image-repository.md index 8101e8f26..18ae8d49e 100644 --- a/docs/adr/0001-docker-image-repository.md +++ b/docs/adr/0001-docker-image-repository.md @@ -14,14 +14,14 @@ There are a number of existing docker repositories we could consider to host our Some key properties of existing registries were considered in the table below: -| Repository | Free to WMDE | Self-Service (1) | Tooling provided for a built-in pipeline | Visibility of built images (2) | Possibility of replication to repository | Restrictions on use of non-Wikimedia images | -| ------------------------- | ------------------------------- | ----------------------- | -------------------------------------------------------------------------------------------------- | -------------------------- | ---------------------------------------- |--------------------------------------------------| -| Dockerhub | Currently (3) | Yes | Static builds from dockerfile | High | Yes | no | -| Github Container registry | Currently | Yes | Github Actions | Medium | Yes | no | -| Google Container Registry | No (pay for storage and egress) | Yes | Google Cloud | Medium | Yes | no | -| AWS Container registry | No (pay for storage and egress) | Yes | Amazon Cloud | Medium | Yes | no | -| Azure Container Registry | No (some complex structure) | Yes | Azure Container Registry Tasks | Medium | Yes | no | -| WMF Docker Registry | Yes | No (negotiation needed) | [https://wikitech.wikimedia.org/wiki/PipelineLib](https://wikitech.wikimedia.org/wiki/PipelineLib) | Low | No (probably not) | Yes (only audited versions of specific images are allowed) (4) | +| Repository | Free to WMDE | Self-Service (1) | Tooling provided for a built-in pipeline | 
Visibility of built images (2) | Possibility of replication to repository | Restrictions on use of non-Wikimedia images | +| --- | --- | --- | --- | --- | --- | --- | +| Dockerhub | Currently (3) | Yes | Static builds from dockerfile | High | Yes | no | +| Github Container registry | Currently | Yes | Github Actions | Medium | Yes | no | +| Google Container Registry | No (pay for storage and egress) | Yes | Google Cloud | Medium | Yes | no | +| AWS Container registry | No (pay for storage and egress) | Yes | Amazon Cloud | Medium | Yes | no | +| Azure Container Registry | No (some complex structure) | Yes | Azure Container Registry Tasks | Medium | Yes | no | +| WMF Docker Registry | Yes | No (negotiation needed) | [https://wikitech.wikimedia.org/wiki/PipelineLib](https://wikitech.wikimedia.org/wiki/PipelineLib) | Low | No (probably not) | Yes (only audited versions of specific images are allowed) (4) | 1. We can create new images and names without filing a ticket for speaking to people 2. Approximate fraction of the market of docker pulls that happens here @@ -30,8 +30,7 @@ Some key properties of existing registries were considered in the table below: ## Decision -We will publish Docker release images to Dockerhub. Dockerhub is the goto location for publishing Docker images. -We do not consider publishing Wikibase Docker images to WMF Docker Registry yet, as its purpose is to hold images to be used in Wikimedia production infrastructure. This might change once Wikibase (i.e. MediaWiki and extensions) will be deployed using container images - not the case as of December 2020. +We will publish Docker release images to Dockerhub. Dockerhub is the goto location for publishing Docker images. We do not consider publishing Wikibase Docker images to WMF Docker Registry yet, as its purpose is to hold images to be used in Wikimedia production infrastructure. This might change once Wikibase (i.e. 
MediaWiki and extensions) will be deployed using container images - not the case as of December 2020. ## Consequences diff --git a/docs/adr/0002-tarball-hosting.md b/docs/adr/0002-tarball-hosting.md index c004b956e..6da24edff 100644 --- a/docs/adr/0002-tarball-hosting.md +++ b/docs/adr/0002-tarball-hosting.md @@ -8,12 +8,11 @@ accepted ## Context -We need to determine a place to host our new release artifacts (tarballs). -Currently releases are being served by the Extension Distributor and the release branches of the git repositories. +We need to determine a place to host our new release artifacts (tarballs). Currently releases are being served by the Extension Distributor and the release branches of the git repositories. ## Github -#### pros: +#### pros: - The same framework we use to produce the artifacts (in the current implementation of the release pipeline) - Minimal effort required. @@ -25,7 +24,7 @@ Currently releases are being served by the Extension Distributor and the release ## releases.wikimedia.org -#### pros: +#### pros: - Unified front for releases of the Wikimedia Foundation. @@ -33,25 +32,25 @@ Currently releases are being served by the Extension Distributor and the release - Poor documentation (https://wikitech.wikimedia.org/wiki/Releases.wikimedia.org) for what we are looking for. - Seemingly bound to puppet/modules/releases repository for configuration -- No direct control, needs negotiation. +- No direct control, needs negotiation. - Does not seem to be hosting any other extensions (ExtensionDistributor seems to be the desired place for these). However, Mediawiki with bundled extensions is released there https://releases.wikimedia.org/mediawiki/1.35/ ## Wikiba.se -#### pros: +#### pros: - WMDE owned means direct control - The official site for Wikibase #### cons: -- We would need to build something that either pulls the artifacts or gets them uploaded from the pipeline. 
+- We would need to build something that either pulls the artifacts or gets them uploaded from the pipeline. - Effort required is estimated to be high. - Meant to be a marketing website ## Extension Distributor -#### pros: +#### pros: - Seems to be the "goto place" for MediaWiki extensions @@ -63,12 +62,12 @@ Currently releases are being served by the Extension Distributor and the release --- -| Repository | Service provider | Free to WMDE | Self-Service (1) | Method of publishing | Visibility/Current usage | Estimated effort | Documentation | URL | Trusted | -| ------------- |------------------- | ------------------------------ | ---------------------- | ---------------------------------------------------- | -------------- | ----------------- | ------------- | --------------------------|---------| -| Github | Github/Microsoft | Currently, Might change in the future. (2) | Yes | Github Releases / Github Action Artifact | - | Low | Good | github.com/wikimedia/ | no | -| WMF Releases | WMF | Yes | No (negotiation needed - likely only initially) | FTP/SFTP/SCP | - | High | Poor | releases.wikimedia.org | yes | -| Wikiba.se | WMDE | Yes | Yes | FTP or some kind of pulling by the server (TBD - not existing yet) | - | High | Poor (non existent) | wikiba.se, or releases.wikiba.se | yes | -| ExtensionDist | WMF/Volunteers? | Yes | No (negotiation needed - likely only initially) | Undefined, would need adaptations | Low (3) | Very High | Poor | extdist.wmflabs.org/dist/ | yes | +| Repository | Service provider | Free to WMDE | Self-Service (1) | Method of publishing | Visibility/Current usage | Estimated effort | Documentation | URL | Trusted | +| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | +| Github | Github/Microsoft | Currently, Might change in the future. 
(2) | Yes | Github Releases / Github Action Artifact | - | Low | Good | github.com/wikimedia/ | no | +| WMF Releases | WMF | Yes | No (negotiation needed - likely only initially) | FTP/SFTP/SCP | - | High | Poor | releases.wikimedia.org | yes | +| Wikiba.se | WMDE | Yes | Yes | FTP or some kind of pulling by the server (TBD - not existing yet) | - | High | Poor (non existent) | wikiba.se, or releases.wikiba.se | yes | +| ExtensionDist | WMF/Volunteers? | Yes | No (negotiation needed - likely only initially) | Undefined, would need adaptations | Low (3) | Very High | Poor | extdist.wmflabs.org/dist/ | yes | --- @@ -78,12 +77,12 @@ Currently releases are being served by the Extension Distributor and the release ## Decision -* Wikibase release artifacts will be hosted on the WMF-controlled domain https://releases.wikimedia.org/. +- Wikibase release artifacts will be hosted on the WMF-controlled domain https://releases.wikimedia.org/. ## Consequences -* A new release group called `releasers-wikibase` will be created for access to the repository -* An [SRE-Access-Request] will be created, and the hike team members will be added as initial maintainers. The group will be later on expanded to cover other relevant WMDE staff. -* Publishing step of Wikibase release pipeline will be adjusted to publish tarball release artifacts to https://releases.wikimedia.org/wikibase/ (or similar directory - final name to be defined during the implementation). +- A new release group called `releasers-wikibase` will be created for access to the repository +- An [SRE-Access-Request] will be created, and the hike team members will be added as initial maintainers. The group will be later on expanded to cover other relevant WMDE staff. +- Publishing step of Wikibase release pipeline will be adjusted to publish tarball release artifacts to https://releases.wikimedia.org/wikibase/ (or similar directory - final name to be defined during the implementation). 
[SRE-Access-Request]: https://phabricator.wikimedia.org/T268818 diff --git a/docs/adr/0003-tarball-contents.md b/docs/adr/0003-tarball-contents.md index d52e8c146..76d7501ab 100644 --- a/docs/adr/0003-tarball-contents.md +++ b/docs/adr/0003-tarball-contents.md @@ -8,19 +8,17 @@ proposed ## Context -The release pipeline of Wikibase will produce a tarball release package containing a tested version of the Wikibase extension. -The package will normally include the necessary files to run the software and sometimes documentation of changes to the software and how to use them. +The release pipeline of Wikibase will produce a tarball release package containing a tested version of the Wikibase extension. The package will normally include the necessary files to run the software and sometimes documentation of changes to the software and how to use them. In order to get a overview on the practices by the open-source php community a brief survey was done on the releases of some popular projects. - -| Name | Release notes | Historic release notes | Installation instructions | Upgrade instructions | Hidden files | Vendor folder | -| --------- | ---------------| --------------------------- | -------------------------- | ---------------------| --------------|---------------| -| mediawiki | yes | yes | yes | yes | no | yes (9) -| laravel | no (1) | no | no | no | some (6) | no -| nextcloud | no (2) | no | no | no | some (7) | yes -| joomla | no (3) | no (3) | yes (3) | no (4) | no | yes -| wordpress | no (5) | no | yes (8) | no | no | - +| Name | Release notes | Historic release notes | Installation instructions | Upgrade instructions | Hidden files | Vendor folder | +| --- | --- | --- | --- | --- | --- | --- | +| mediawiki | yes | yes | yes | yes | no | yes (9) | +| laravel | no (1) | no | no | no | some (6) | no | +| nextcloud | no (2) | no | no | no | some (7) | yes | +| joomla | no (3) | no (3) | yes (3) | no (4) | no | yes | +| wordpress | no (5) | no | yes (8) | no | no | - | 
--- @@ -38,23 +36,22 @@ In order to get a overview on the practices by the open-source php community a b MediaWiki sticks out from the crowd by including most of their release specific documentation in each of the tarball releases while all other projects in the survey do not. At first glance this feels like a undesirable pattern to adopt as it's uncommon among other projects. In reality this points out a tested and proven methodology for documenting changes and shipping release notes by the mediawiki community. Release notes are added to the release branch and refined throughout the release process and then used as a template for the documentation on mediawiki.org. -The `vendor` folder comes populated in some of the release packages for the different projects and this is also the behavior of [Extension Distributor]. -The reasons given for this behavior dates back to a [request from 2014] to include the composer dependencies in tarballs as composer could for various reasons not be available on the system the software was supposed to be installed on. +The `vendor` folder comes populated in some of the release packages for the different projects and this is also the behavior of [Extension Distributor]. The reasons given for this behavior dates back to a [request from 2014] to include the composer dependencies in tarballs as composer could for various reasons not be available on the system the software was supposed to be installed on. ## Decision Seeing that there is an already defined process for release notes by the mediawiki community it feels natural to use existing tools and adopting this process. -The `vendor` folder will be included in the tarball as this aligns with the mediawiki tarballs and the behavior of [Extension Distributor]. +The `vendor` folder will be included in the tarball as this aligns with the mediawiki tarballs and the behavior of [Extension Distributor]. -The tarball package *WILL* include apart from the source-code the following additions. 
+The tarball package _WILL_ include apart from the source-code the following additions. - `vendor` folder populated by composer - Release notes (RELEASE-NOTES-N.NN) (example RELEASE-NOTES-1.36) - Historic changes (HISTORY) (Previous release notes in one file) - git submodules -The tarball will *NOT* include. +The tarball will _NOT_ include. - `node_modules` - `build` folder @@ -69,10 +66,10 @@ The tarball will *NOT* include. ## Consequences -- Include release notes in the tarball. +- Include release notes in the tarball. - Include vendor folder populated by composer - Remove any files that are not critical to the functionality of the software. -- The installation instructions for Wikibase will need to be updated in order to describe a use-case where the `vendor` folder is already existing. +- The installation instructions for Wikibase will need to be updated in order to describe a use-case where the `vendor` folder is already existing. [request from 2014]: https://lists.wikimedia.org/pipermail/wikitech-l/2014-July/077888.html [Extension Distributor]: https://www.mediawiki.org/wiki/Extension:ExtensionDistributor diff --git a/docs/adr/0004-wdqs-tarball-content.md b/docs/adr/0004-wdqs-tarball-content.md index eef74f0a5..f1f7214fa 100644 --- a/docs/adr/0004-wdqs-tarball-content.md +++ b/docs/adr/0004-wdqs-tarball-content.md @@ -8,13 +8,11 @@ accepted ## Context -Unlike in the case of Mediawiki extensions, where the extension code is packaged as a tarball to be installed in end user's environment, there is no immediate idea of how to package the Query Service (in particular as it actually involves a number of software components). -Components included [Query Service backend] (including the WMF Blazegraph fork) and [Query Service UI]. -Should these be bundled as separate tarballs? Or as a single one? 
+Unlike in the case of Mediawiki extensions, where the extension code is packaged as a tarball to be installed in end user's environment, there is no immediate idea of how to package the Query Service (in particular as it actually involves a number of software components). Components included [Query Service backend] (including the WMF Blazegraph fork) and [Query Service UI]. Should these be bundled as separate tarballs? Or as a single one? [Query Service backend] releases is currently [built and published](https://gerrit.wikimedia.org/r/plugins/gitiles/wikidata/query/rdf/+/refs/heads/master/dist/) as a [service zip archive] which contains the necessary components including the built-in UI of blazegraph. -The [Query Service UI] has at one point been part of this build process but the two components have recently been [separated](https://phabricator.wikimedia.org/T241291). +The [Query Service UI] has at one point been part of this build process but the two components have recently been [separated](https://phabricator.wikimedia.org/T241291). ## Decision @@ -22,8 +20,8 @@ As the [Query Service backend] does not depend on the [Query Service UI] and the ## Consequences -* The [Query Service backend] will be released as a standalone component as described by the [service zip archive]. -* The [Query Service UI] will be released as a standalone component. +- The [Query Service backend] will be released as a standalone component as described by the [service zip archive]. +- The [Query Service UI] will be released as a standalone component. 
[Query Service UI]: https://gerrit.wikimedia.org/r/plugins/gitiles/wikidata/query/gui [Query Service backend]: https://gerrit.wikimedia.org/r/plugins/gitiles/wikidata/query/rdf diff --git a/docs/adr/0005-release-notes-process.md b/docs/adr/0005-release-notes-process.md index dcb75d60c..013ba8b02 100644 --- a/docs/adr/0005-release-notes-process.md +++ b/docs/adr/0005-release-notes-process.md @@ -10,10 +10,9 @@ accepted As we adopt the new release strategy we also want to determine a process for writing and maintaining release notes. -Our release process will still be closely bound to the release branches WMF are using it makes sense to inspect their process for producing and maintaining release notes within the source control system. +Our release process will still be closely bound to the release branches WMF are using, so it makes sense to inspect their process for producing and maintaining release notes within the source control system. -For mediawiki/core the release notes are maintained and worked on within the source repository. -Each release branch contains a RELEASE_NOTES-N.NN document describing changes that was made to the software up until the point the branch was cut from master. Any backports to these branches also comes with an update to the release notes document. +For mediawiki/core the release notes are maintained and worked on within the source repository. Each release branch contains a RELEASE_NOTES-N.NN document describing changes that were made to the software up until the point the branch was cut from master. Any backports to these branches also come with an update to the release notes document. As a new release branch is cut/created a new [template] release document is added to the master branch and any previous release notes are merged into a [HISTORY] document within the repository containing all previous release notes. 
diff --git a/docs/adr/0006-pipline-runner.md b/docs/adr/0006-pipline-runner.md index 7072de928..6efd65c51 100644 --- a/docs/adr/0006-pipline-runner.md +++ b/docs/adr/0006-pipline-runner.md @@ -13,6 +13,7 @@ A decision needs to be made on where the pipeline should be executed. The initia As there is a desire to run the pipeline on Wikimedia Foundation's infrastructure an investigation into the possibility to use [PipelineLib] has concluded some drawbacks that makes this a less ideal candidate. [PipelineLib] is backed by [Blubber] which is a piece of software for generating opinionated Dockerfiles that are suited to run tests on and eventually end up in a production environment. Limitations of PipelineLib and Blubber include: + - Does currently not support publishing tarballs (could potentially be done by injecting jenkins credentials that publish in a separate container) - Does not support using base images other than those on docker-registry.wikimedia.org - [Blubber] does not provide root access inside the container @@ -25,8 +26,8 @@ Another option would be running the pipeline on a WMDE controlled [Toolforge] VP Some key properties of considered options are summarized in the table below: -| Infrastructure | Owner/Provider | Cost of introduction | Cost of maintenance | Trusted and Secure? | WMDE can modify/update | Has some native tooling for docker images | Has some way to build non-docker artifacts | Restriction on the source of software run on the infrastructure? | -| -------------- | -------------- | -------------------- | ------------------- | ------------------- | ---------------------- | ----------------------------------------- | ------------------------------------------ | ---------------------------------------------------------------- | +| Infrastructure | Owner/Provider | Cost of introduction | Cost of maintenance | Trusted and Secure? 
| WMDE can modify/update | Has some native tooling for docker images | Has some way to build non-docker artifacts | Restriction on the source of software run on the infrastructure? | +| --- | --- | --- | --- | --- | --- | --- | --- | --- | | Github | Github/Microsoft | low | low | no (1) | yes | yes | yes | none, everything from the internet that can be run in a container? | | PipelineLib | WMF | medium-high (2) | medium (3) | yes | yes but with limitations (4) | yes | no (right now) | Only things hosted somewhere on WMF infrastructure (gerrit, phabricator diffusion, etc). Does not support using base images other than those on docker-registry.wikimedia.org. Mainly supporting ubuntu-flavoured image building (apt is not a thing in alpine) | | Wikimedia Jenkins | WMF | medium-high | medium | yes | yes but with limitations (4) | no | yes | Only things hosted somewhere on WMF infrastructure (gerrit, phabricator diffusion, etc) | @@ -41,15 +42,11 @@ Some key properties of considered options are summarized in the table below: ## Decision -As the infrastructure that is already existing, and has the least technical limitations, Github Actions will be used to run Wikibase release pipeline. -To minimize binding to a specific infrastructure, Wikibase release pipeline will be implemented as a set of Docker container images, intended to be run on the end infrastructure. +As the infrastructure that is already existing, and has the least technical limitations, Github Actions will be used to run Wikibase release pipeline. To minimize binding to a specific infrastructure, Wikibase release pipeline will be implemented as a set of Docker container images, intended to be run on the end infrastructure. ## Consequences -WMDE will continue running Wikibase release pipeline on Github Actions but will replace the proof of concept implementation with a container-based more abstract solution. 
-Technical solution ensuring the integrity of packages generated on the third-party infrastructure will be introduced. -Process of actual publishing release artifacts to final locations will be defined separately. It is considered that the final step might happen from WMDE infrastructure. - +WMDE will continue running Wikibase release pipeline on Github Actions but will replace the proof of concept implementation with a container-based more abstract solution. Technical solution ensuring the integrity of packages generated on the third-party infrastructure will be introduced. Process of actual publishing release artifacts to final locations will be defined separately. It is considered that the final step might happen from WMDE infrastructure. [Github]: https://docs.github.com/en/free-pro-team@latest/actions [PipelineLib]: https://wikitech.wikimedia.org/wiki/PipelineLib diff --git a/docs/adr/0007-wikibase-release-notes-publish.md b/docs/adr/0007-wikibase-release-notes-publish.md index 89f69f417..f23a3ad14 100644 --- a/docs/adr/0007-wikibase-release-notes-publish.md +++ b/docs/adr/0007-wikibase-release-notes-publish.md @@ -14,7 +14,7 @@ As our process for maintaining release notes will be closely modeled after media As described in [ADR0005], mediawiki is maintaining release notes for each specific release within the corresponding release branch. During the preparation of creating a new release from the development branch, a new release branch is cut and the [template] that has been populated with the relevant changes from the ongoing development is to serve as the base for a richer curated version on mediawiki.org [(Example: 1.35)](https://www.mediawiki.org/wiki/Release_notes/1.35). -The release notes document within the repository does not contain the full links to documentation or phabricator tickets but is supported by templates to generate these dynamically. 
+The release notes document within the repository does not contain the full links to documentation or phabricator tickets but is supported by templates to generate these dynamically. For links to phabricator the task number is used by the [PhabT] template which also supports linking to comments, this could easily be re-used by WMDE as we are also using phabricator. diff --git a/docs/adr/0008-wmde-repo-release-notes.md b/docs/adr/0008-wmde-repo-release-notes.md index 1642aaaa1..f299e8e80 100644 --- a/docs/adr/0008-wmde-repo-release-notes.md +++ b/docs/adr/0008-wmde-repo-release-notes.md @@ -8,7 +8,7 @@ accepted ## Context -In [ADR0005] we are proposing a process for maintaining release notes within the Wikibase repository. The Wikibase repository and some additional WMDE owned repositories are bound to the release branches as a [compatibility policy](https://www.mediawiki.org/wiki/Compatibility#mediawiki_extensions). +In [ADR0005] we are proposing a process for maintaining release notes within the Wikibase repository. The Wikibase repository and some additional WMDE owned repositories are bound to the release branches as a [compatibility policy](https://www.mediawiki.org/wiki/Compatibility#mediawiki_extensions). Currently only mediawiki related code (core, skins, extensions) is covered by this compatibility policy and could easily apply the same process for maintaining and publishing release notes as to that what has been proposed to be used by Wikibase ([ADR0005], [ADR0007]). @@ -28,13 +28,13 @@ The Wikidata Query GUI is the exception of the WMDE owned repositories that we i A quick review showed that the following [5 most downloaded WMF extensions](https://grafana.wikimedia.org/d/000000161/extension-distributor-downloads?orgId=1&from=now-5y&to=now&var-release=REL1_35&var-groupby=1d) do not maintain or publish release notes. 
-| Name | Release notes | Historic release notes | -| -------------------- | ---------------| --------------------------- | -| VisualEditor | no | no | -| MobileFrontend | no | no | -| LDAPAuthentication2 | no | no | -| PluggableAuth | no | no | -| TemplateStyles | no | no | +| Name | Release notes | Historic release notes | +| ------------------- | ------------- | ---------------------- | +| VisualEditor | no | no | +| MobileFrontend | no | no | +| LDAPAuthentication2 | no | no | +| PluggableAuth | no | no | +| TemplateStyles | no | no | This is of course not ideal but could be interpreted as a hint that writing and maintaining release notes for all extensions is a cumbersome task that should only be done for the repositories we deem to be our main products and bring the biggest impact. @@ -45,7 +45,7 @@ For the first release to use the our new strategy the Wikibase extension will be ## Consequences - Write release notes for the Wikibase Extension -- After the first successful release this decision will be re-evaluated and possibly applied to a selection of the repositories that support them. +- After the first successful release this decision will be re-evaluated and possibly applied to a selection of the repositories that support them. [ADR0005]: (0005-release-notes-process.md) [ADR0007]: (0007-wikibase-release-notes-publish.md) diff --git a/docs/adr/0009-non-WMDE-release-notes.md b/docs/adr/0009-non-WMDE-release-notes.md index 6fb332286..40d5a41d6 100644 --- a/docs/adr/0009-non-WMDE-release-notes.md +++ b/docs/adr/0009-non-WMDE-release-notes.md @@ -10,14 +10,11 @@ accepted We intend to package and release software that is not maintained by WMDE. For example, the Wikidata Query Service (WDQS). -This software comes from a variety of sources including software that is used and maintained by the WMF in Wikimedia -production Wikis but also some from complete third parties. 
+This software comes from a variety of sources including software that is used and maintained by the WMF in Wikimedia production Wikis but also some from complete third parties. Some of this software including ElasticSearch and WikibaseLocalMedia already have curated release notes for versions. -Other software such as WDQS and Mediawiki extensions do not have release notes. They may have notable changes documented either -in git commit messages or in phabricator tickets linked to those commits. It could be possible to computationally extract a compile these -with some effort. +Other software such as WDQS and Mediawiki extensions do not have release notes. They may have notable changes documented either in git commit messages or in phabricator tickets linked to those commits. It could be possible to computationally extract and compile these with some effort. Software such as QuickStatements may prove difficult to build release note from git. It may require inspecting the code changes by eye. @@ -30,6 +27,7 @@ We will not write custom release notes for software that we do not maintain. We will attempt to forward already curated release notes from upstream maintainers. ## Consequences + When adding components to the release pipeline that have curated release notes we should merge these release notes into the appropriate artifacts. If upstream components start providing release notes we should make changes to include them in the appropriate artifacts when possible. diff --git a/docs/adr/0010-queryservice-tarball.md b/docs/adr/0010-queryservice-tarball.md index 62d57fbd2..f20dce589 100644 --- a/docs/adr/0010-queryservice-tarball.md +++ b/docs/adr/0010-queryservice-tarball.md @@ -14,7 +14,7 @@ This package already contains all the required components to build the docker im ## Decision -Do not produce a queryservice tarball to be published. +Do not produce a queryservice tarball to be published. 
## Consequences diff --git a/docs/adr/0011-wikibase-bundle.md b/docs/adr/0011-wikibase-bundle.md index c82018f28..890305a12 100644 --- a/docs/adr/0011-wikibase-bundle.md +++ b/docs/adr/0011-wikibase-bundle.md @@ -8,8 +8,7 @@ accepted ## Context -A key part of release pipeline is to produce a bundled wikibase docker image prepared with the extensions and components known as the "wikibase suite". -This bundle image will consist of the build artifacts and as described in [ADR0001](0001-docker-image-repository.md) these images should be published to dockerhub. +A key part of release pipeline is to produce a bundled wikibase docker image prepared with the extensions and components known as the "wikibase suite". This bundle image will consist of the build artifacts and as described in [ADR0001](0001-docker-image-repository.md) these images should be published to dockerhub. In previous wikibase docker artifacts WMDE has offered a "base" and a "bundled" version of Wikibase where the base version only contain mediawiki and Wikibase. The new pipeline should still produce and publish these artifacts. @@ -17,10 +16,9 @@ As we publish our releases we also need to make a decision if we want this bundl ## Decision -The wikibase release pipeline will not produce a bundled tarball to be published. +The wikibase release pipeline will not produce a bundled tarball to be published. ## Consequences - No bundled tarball will be published - A bundled wikibase docker image and a base image will be produced - diff --git a/docs/adr/0012-versioning.md b/docs/adr/0012-versioning.md index 4b0dae477..872fd0abc 100644 --- a/docs/adr/0012-versioning.md +++ b/docs/adr/0012-versioning.md @@ -8,7 +8,7 @@ accepted ## Context -WMF does during their release process create a number of branches which are used during the release process to orchestrate the different tools to produce the correct artifacts and to support the continuous deployment to production. The release branches (ex. 
REL1_35) are cut for MediaWiki and every extension repository with the purpose of maintaining releases and the weekly deployment branches are (ex. wmf/1.36.0-wmf.13) are used to deployments to the different [deployment groups](https://versions.toolforge.org/). +WMF does during their release process create a number of branches which are used during the release process to orchestrate the different tools to produce the correct artifacts and to support the continuous deployment to production. The release branches (ex. REL1_35) are cut for MediaWiki and every extension repository with the purpose of maintaining releases and the weekly deployment branches (ex. wmf/1.36.0-wmf.13) are used for deployments to the different [deployment groups](https://versions.toolforge.org/). Both of these types of branches are maintained by the WMF and will be used by the wikibase release strategy. However we need to decide how to maintain our own intermediate releases that will be published on releases.wikimedia.org, dockerhub and tagged on git. @@ -27,7 +27,7 @@ Example: 3. WMF releases a 1.35.1 security patch release of Mediawiki 4. We release 1.35.2 which contains Mediawiki 1.35.1 (and the previous changes mentioned) -For this reason we probably want to adopt adopt another scheme in order to avoid this confusion. Looking at the MediaWiki naming convention for release candidates (`1.35.0-rc.0`) something along these lines might be sufficient to prevent any confusion. +For this reason we probably want to adopt another scheme in order to avoid this confusion. Looking at the MediaWiki naming convention for release candidates (`1.35.0-rc.0`) something along these lines might be sufficient to prevent any confusion. ## Decision @@ -39,5 +39,4 @@ For this reason we probably want to adopt adopt another scheme in order to avoid - We will not create branches of every minor version number we release. Instead we will use tagging. 
- Releases in between new release branches being cut will be versioned by appending our own version number to the mediawiki version. -Example: A Wikibase release based on 1.35.0 would be versioned as 1.35.0-wmde.0 -Example: A queryservice frontend release would be versioned wmde.0, wmde.1 etc. +Example: A Wikibase release based on 1.35.0 would be versioned as 1.35.0-wmde.0 Example: A queryservice frontend release would be versioned wmde.0, wmde.1 etc. diff --git a/docs/adr/0013-docker-latest-tag.md b/docs/adr/0013-docker-latest-tag.md index 0205c3d5b..886c01303 100644 --- a/docs/adr/0013-docker-latest-tag.md +++ b/docs/adr/0013-docker-latest-tag.md @@ -12,9 +12,9 @@ Docker allows tags to be defined for the images in a docker repository. The defa Previously the wikibase repository has shipped `wikibase/wikibase` images with three different latest tags depending on the flavor of wikibase. -* latest-bundle -* latest-base -* latest (seems to be the base) +- latest-bundle +- latest-base +- latest (seems to be the base) As we now have a separate image called `wikibase/wikibase-bundle` the need for latest-bundle / latest-base is gone and we need to decide if tagging each image with "latest" is something we still want to do for each of the images we are to publish. diff --git a/docs/adr/0014-signing-release-packages.md b/docs/adr/0014-signing-release-packages.md index e5ec065be..cec23385f 100644 --- a/docs/adr/0014-signing-release-packages.md +++ b/docs/adr/0014-signing-release-packages.md @@ -17,16 +17,18 @@ Signing the artifacts our release pipeline produces is a step that can be taken ### Signing tarball packages with GPG -Tarball files can be signed with [GPG](https://gnupg.org/gph/en/manual/x135.html) in a similar way to which MediaWiki are signing theirs. In the case of MediaWiki a private and public key-pair is generated for release engineering members and published on https://www.mediawiki.org/keys/keys.html. 
+Tarball files can be signed with [GPG](https://gnupg.org/gph/en/manual/x135.html) in a similar way to which MediaWiki are signing theirs. In the case of MediaWiki a private and public key-pair is generated for release engineering members and published on https://www.mediawiki.org/keys/keys.html. By importing the list of public keys the end-user can then verify the release tarball and it's signature by issuing the following commands. Import keys: + ```sh $ gpg --fetch-keys "https://www.mediawiki.org/keys/keys.txt" ``` Verify release package: + ```sh $ gpg --verify mediawiki-core-1.35.2.zip.sig mediawiki-core-1.35.2.zip gpg: Signature made tor 8 apr 2021 20:40:08 CEST @@ -37,14 +39,13 @@ gpg: There is no indication that the signature belongs to the owner. Primary key fingerprint: 1D98 867E 8298 2C8F E0AB C25F 9B69 B310 9D3B B7B0 ``` -### Signing docker images with Docker Content Trust +### Signing docker images with Docker Content Trust Docker images can be signed with [DCT](https://docs.docker.com/engine/security/trust/) which allows you to sign tags associated with images. -DCT works by using a set of keys to sign a specific tag in a docker repository. -Using signatures is optional and only apply to a specific tag, there can be signed and unsigned tags within the same repository. +DCT works by using a set of keys to sign a specific tag in a docker repository. Using signatures is optional and only apply to a specific tag, there can be signed and unsigned tags within the same repository. -#### Root key +#### Root key The root-key or "offline" key as named by the docker documentation is what is used to add new signers and repositories and is generally consider that it should be kept safe on offline hardware. 
@@ -67,10 +68,11 @@ Therefore the decision for the release pipeline will be not to sign any of the r However the benefits of signing are obvious and therefore it's probably a good idea to take the time and answer the following questions to have a solid foundation to stand on when doing release signing. - Which engineers are to be considered release engineers? - - Needs access to organization in docker registry - - Needs access to organization in github - - Need access to `releasers-wikibase` group - - Would need to publish and maintain a GPG key used for signing Docker and release tarballs + + - Needs access to organization in docker registry + - Needs access to organization in github + - Need access to `releasers-wikibase` group + - Would need to publish and maintain a GPG key used for signing Docker and release tarballs - How do we securely store/generate offline "organization" private keys? diff --git a/docs/adr/0015-security-fixes-non-wmde-software.md b/docs/adr/0015-security-fixes-non-wmde-software.md index d1b3dd957..c2184fcce 100644 --- a/docs/adr/0015-security-fixes-non-wmde-software.md +++ b/docs/adr/0015-security-fixes-non-wmde-software.md @@ -27,6 +27,7 @@ Applying security fixes to the running Wikibase instance (or other Wikibase rela - Don't publish the fix before it's deployed in WMF production environment ### 3. Deploy fix to WMF production environment + - [Follow deployment instructions from here](https://wikitech.wikimedia.org/wiki/How_to_deploy_code#Creating_a_Security_Patch) ### 4. Releasing @@ -72,6 +73,5 @@ For other software without a constant release cycle or even explanation as to wh - ALL MediaWiki maintenance releases should generate a new Wikibase release. - Add some kind of mechanism that regularly checks what the latest stable MediaWiki version is. If the version has changed we should do a release. 
- Add a link to the [bug reporting documentation](https://www.mediawiki.org/wiki/Reporting_security_bugs) on the [Wikibase landing page on mediawiki.org](https://www.mediawiki.org/wiki/Wikibase) - -## Consequences +## Consequences diff --git a/docs/adr/0016-updating-non-wmde-software.md b/docs/adr/0016-updating-non-wmde-software.md index f41a68566..a3cad692b 100644 --- a/docs/adr/0016-updating-non-wmde-software.md +++ b/docs/adr/0016-updating-non-wmde-software.md @@ -9,12 +9,12 @@ proposed ## Context Wikibase Suite includes a number of software components that are not maintained by Wikimedia Deutschland (WMDE). Those include (examples as of May 2021): + - Software which is maintained by the Wikimedia Foundation (WMF), e.g. Wikidata Query Service (WDQS); in case of docker images also MediaWiki software. - Broadly-used software which is maintained by non-Wikimedia parties, e.g. Elastic Search. - Wikibase/Wikidata-specific software which is maintained by Wikimedia community (volunteer) developers, e.g. Quick Statements. -All these software components receive updates from their maintainers in a way which is not necessarily synchronized with the release cycle of Wikibase Suite. -Users of Wikibase Suite might benefit from those changes, in particular from the updates which fix the incorrect functionality (bugs), and issues related to the software security. +All these software components receive updates from their maintainers in a way which is not necessarily synchronized with the release cycle of Wikibase Suite. Users of Wikibase Suite might benefit from those changes, in particular from the updates which fix the incorrect functionality (bugs), and issues related to the software security. Offering updates to software components not maintained by the WMDE that are known to compatible with the rest of the Wikibase Suite would require applying updates to versions of the components included in the Wikibase Suite, possible changes to Wikibase Suite structure (e.g. 
configuration) when applicable, and necessary testing procedures to ensure continued functionality of the Wikibase Suite with the updated components. @@ -22,7 +22,6 @@ There might also be changes desired by the users of Wikibase Suite to be made to Making changes to the software not maintained by WMDE would mean additional effort for WMDE's software developer teams, and likely require WMDE development teams gather additional expertise to be able to make good contributions to those software components. - ## Decision To ensure the security and integrity of systems running Wikibase Suite, WMDE will be releasing updated versions of the Wikibase Suite that will include fixes to significant security issues discovered in the software components not maintained by WMDE, once those fixes have been published by respective maintainers. @@ -36,4 +35,4 @@ In order to maximize the impact WMDE software development teams can bring for th 1. WMDE establishes a process of incorporating and releasing fixes to severe security issues in the components not maintained by WMDE. See more on this in [ADR 15](0015-security-fixes-non-wmde-software.md). 2. WMDE will monitor changes to the components of Wikibase Suite which are not maintained by WMDE to understand non-security-related changes happening in those components. The list of channels to follow by WMDE staff will be published for internal use. 3. WMDE will not accept feature requests and other change requests related to the software components that WMDE does not maintain. -4. WMDE will direct users of Wikibase Suite, e.g. by visibly documenting this information, on channels they should use to report problems or feature requests in the components not maintained by WMDE (it might be Phabricator for some software, and dedicated bug trackers, etc for other). +4. WMDE will direct users of Wikibase Suite, e.g. 
by visibly documenting this information, on channels they should use to report problems or feature requests in the components not maintained by WMDE (it might be Phabricator for some software, and dedicated bug trackers, etc for other). diff --git a/docs/adr/0017-using-mediawiki-docker-image.md b/docs/adr/0017-using-mediawiki-docker-image.md index c2fa23a00..c6fae6d09 100644 --- a/docs/adr/0017-using-mediawiki-docker-image.md +++ b/docs/adr/0017-using-mediawiki-docker-image.md @@ -10,19 +10,17 @@ accepted During the first iteration of development on the wikibase release pipeline, one of the goals was to build and run it against the master branches of MediaWiki, Wikibase and other bundled extensions ([T270133](https://phabricator.wikimedia.org/T270133)). -Because of the lack of such docker images at the time the team decided to create our own, inspired by the work of the official docker images. -The benefits of this decision was seen when previously untested parts of Wikibase (Multi Wiki testing client/repo ) now had some coverage on the master branch. -During the development of the pipeline several issues were detected by using these custom docker images, sometimes the pipeline would breakdown days before a bug report would appear on phabricator. +Because of the lack of such docker images at the time the team decided to create our own, inspired by the work of the official docker images. The benefits of this decision was seen when previously untested parts of Wikibase (Multi Wiki testing client/repo ) now had some coverage on the master branch. During the development of the pipeline several issues were detected by using these custom docker images, sometimes the pipeline would breakdown days before a bug report would appear on phabricator. This can be useful but also comes with some additional drawbacks that can affect the maintainability and the quality of the releases WMDE will produce. 
-- To offer the same quality and security as the official Mediawiki docker images we now also have to maintain our own rather than building upon what already exists. +- To offer the same quality and security as the official Mediawiki docker images we now also have to maintain our own rather than building upon what already exists. - Any updates or security fixes to these images are probably also more likely to be identified and patched in the official MediaWiki docker images quicker than in any image maintained by WMDE. - The MediaWiki docker images are battle proven with 10+ Million downloads, our custom images are not. As the priority of the release pipeline should be to provide stable and secure releases it could make sense to revert this initial decision of building our own image. -The decision to adopt parts of the testing done in the release pipeline for Wikibase CI is still pending. Depending on the outcome of [T282476](https://phabricator.wikimedia.org/T282476), custom images could then be required again and could serve as a base when used for testing in CI where the requirements for security or performance aren't as high ([T282479](https://phabricator.wikimedia.org/T282479)). +The decision to adopt parts of the testing done in the release pipeline for Wikibase CI is still pending. Depending on the outcome of [T282476](https://phabricator.wikimedia.org/T282476), custom images could then be required again and could serve as a base when used for testing in CI where the requirements for security or performance aren't as high ([T282479](https://phabricator.wikimedia.org/T282479)). 
## Decision diff --git a/docs/adr/index.md b/docs/adr/index.md index 4bf572615..d50ea1791 100644 --- a/docs/adr/index.md +++ b/docs/adr/index.md @@ -6,24 +6,23 @@ Architecture (understood widely) decisions should be documented in the ADR forma A template for this repository is provided here: @subpage adr_nnnn -ADRs related to possible changed, superseded or any other outdated decisions should NOT be removed from the directory. -Superseding decisions should reference ADRs they're changing or overriding. +ADRs related to possible changed, superseded or any other outdated decisions should NOT be removed from the directory. Superseding decisions should reference ADRs they're changing or overriding. Current ADRs include: -* [1 - docker image repository](0001-docker-image-repository.md) -* [2 - tarball hosting](0002-tarball-hosting.md) -* [3 - tarball contents](0003-tarball-contents.md) -* [4 - wdqs tarball contents](0004-wdqs-tarball-content.md) -* [5 - release notes process](0005-release-notes-process.md) -* [6 - pipeline runner](0006-pipline-runner.md) -* [7 - wikibase release notes publishing](0007-wikibase-release-notes-publish.md) -* [9 - non-WMDE release notes](0009-non-WMDE-release-notes.md) -* [10 - queryservice tarball](0010-queryservice-tarball.md) -* [11 - wikibase bundle](0011-wikibase-bundle.md) -* [12 - versioning](0012-versioning.md) -* [13 - docker latest tag](0013-docker-latest-tag.md) -* [14 - signing release packages](0014-signing-release-packages.md) -* [15 - security fixes non wmde software](0015-security-fixes-non-wmde-software.md) -* [16 - updating non wmde software](0016-updating-non-wmde-software.md) -* [17 - using mediawiki docker image](0017-using-mediawiki-docker-image.md) +- [1 - docker image repository](0001-docker-image-repository.md) +- [2 - tarball hosting](0002-tarball-hosting.md) +- [3 - tarball contents](0003-tarball-contents.md) +- [4 - wdqs tarball contents](0004-wdqs-tarball-content.md) +- [5 - release notes 
process](0005-release-notes-process.md) +- [6 - pipeline runner](0006-pipline-runner.md) +- [7 - wikibase release notes publishing](0007-wikibase-release-notes-publish.md) +- [9 - non-WMDE release notes](0009-non-WMDE-release-notes.md) +- [10 - queryservice tarball](0010-queryservice-tarball.md) +- [11 - wikibase bundle](0011-wikibase-bundle.md) +- [12 - versioning](0012-versioning.md) +- [13 - docker latest tag](0013-docker-latest-tag.md) +- [14 - signing release packages](0014-signing-release-packages.md) +- [15 - security fixes non wmde software](0015-security-fixes-non-wmde-software.md) +- [16 - updating non wmde software](0016-updating-non-wmde-software.md) +- [17 - using mediawiki docker image](0017-using-mediawiki-docker-image.md) diff --git a/docs/index.md b/docs/index.md index 52eaebeb4..ef02d83f1 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,6 +1,7 @@ ## Wikibase release pipeline ### Documentation + - [Overview](topics/release-overview.md) - [Security Releases](topics/security-releases.md) - [Testing](topics/testing.md) diff --git a/docs/topics/extensions.md b/docs/topics/extensions.md index 4e9a79423..11fc5e307 100644 --- a/docs/topics/extensions.md +++ b/docs/topics/extensions.md @@ -5,9 +5,10 @@ ### Creating an OAuth consumer To create an OAuth consumer + ``` # set these environment variables -MW_ADMIN_NAME=WikibaseDockerAdmin +MW_ADMIN_NAME=WikibaseDockerAdmin QS_PUBLIC_SCHEME_HOST_AND_PORT=http://wdqs.svc:9999 @@ -39,7 +40,7 @@ A successful response should return with `"created": true` flag set. ### Configure OAuth -Replace the and with the key and secret that was generated when [Creating an OAuth consumer](#creating-an-oauth-consumer). +Replace the and with the key and secret that was generated when [Creating an OAuth consumer](#creating-an-oauth-consumer). Create the following file and place it at `/quickstatements/data/oauth.ini` in the wikibase container. 
@@ -50,5 +51,3 @@ agent = 'Wikibase Docker QuickStatements' consumerKey = '' consumerSecret = '' ``` - - diff --git a/docs/topics/security-releases.md b/docs/topics/security-releases.md index e33723453..74d819228 100644 --- a/docs/topics/security-releases.md +++ b/docs/topics/security-releases.md @@ -12,13 +12,12 @@ You can find the latest base components that may need updating from these locati You can see all new commits since the commit being used using a comparison URL (see below). - You can then use a new commit hash that only includes minor fixes including security updates to packages and code. +You can then use a new commit hash that only includes minor fixes including security updates to packages and code. - `WDQSQUERYGUI_COMMIT`: https://github.com/wikimedia/wikidata-query-gui/compare/3be93201ca2efad44f36430b8cf3a2c94cb22ebf...master - `QUICKSTATEMENTS_COMMIT`: https://github.com/magnusmanske/quickstatements/compare/757d238cf6d306f9daf8276f620415cf09f4afe6...master - `MAGNUSTOOLS_COMMIT`: https://bitbucket.org/magnusmanske/magnustools/branches/compare/master..7db2f401390df471d9650010ce059e4308d0cc9e This repository includes lots of code that is not being used withing quicksttaements. Changes to this code could be ignored. - **No specific security releases** - `WDQS_VERSION`: WDQS does not currently have "security releases" so to speak. Any needed increments would need to be advised from the WMF team. You can see releases at https://archiva.wikimedia.org/repository/releases/org/wikidata/query/rdf/service/ diff --git a/docs/topics/test-system.md b/docs/topics/test-system.md index a714abdcf..db4e602da 100644 --- a/docs/topics/test-system.md +++ b/docs/topics/test-system.md @@ -2,35 +2,30 @@ The test systems are located on a CloudVPS machine at `wikibase-product-testing-2022.wikidata-dev.eqiad1.wikimedia.cloud`. -You can find all code for running test test systems in the `/opt/test-systems` directory. -This directory is owned by the `mediawiki` user. 
-Each test system is a copy of the `example` docker compose setup, with customized env vars and settings. +You can find all code for running test test systems in the `/opt/test-systems` directory. This directory is owned by the `mediawiki` user. Each test system is a copy of the `example` docker compose setup, with customized env vars and settings. -Four optional test systems are maintained. -These may or may not be running at any given time, as they are only intended for use during product verification during the release process. -Engineers can start and stop these +Four optional test systems are maintained. These may or may not be running at any given time, as they are only intended for use during product verification during the release process. Engineers can start and stop these -NOTE: Federated properties can not be enabled in wmde.9/1.37 releases. So that test system is not currently used / updated. -It can again be enabled in wmde.10/1.38 +NOTE: Federated properties can not be enabled in wmde.9/1.37 releases. So that test system is not currently used / updated. 
It can again be enabled in wmde.10/1.38 **Default Wikibase** -- Previous release (internal ports 82**) +- Previous release (internal ports 82\*\*) - https://wikibase-product-testing-previous.wmcloud.org (8280 internal) - https://wikibase-query-testing-previous.wmcloud.org (8281 internal) - https://wikibase-qs-testing-previous.wmcloud.org (8282 internal) -- Latest release (internal ports 83**) +- Latest release (internal ports 83\*\*) - https://wikibase-product-testing.wmcloud.org (8380 internal) - https://wikibase-query-testing.wmcloud.org (8381 internal) - https://wikibase-qs-testing.wmcloud.org (8382 internal) **Federated Properties** -- Previous release with fed props (internal ports 84**) +- Previous release with fed props (internal ports 84\*\*) - https://wikibase-product-testing-fedprops-previous.wmcloud.org (8480 internal) - https://wikibase-query-testing-fedprops-previous.wmcloud.org (8481 internal) - https://wikibase-qs-testing-fedprops-previous.wmcloud.org (8482 internal) -- Latest release with fed props (internal ports 85**) +- Latest release with fed props (internal ports 85\*\*) - https://wikibase-product-testing-fedprops.wmcloud.org (8580 internal) - https://wikibase-query-testing-fedprops.wmcloud.org (8581 internal) - https://wikibase-qs-testing-fedprops.wmcloud.org (8582 internal) @@ -54,13 +49,12 @@ sudo chown mediawiki:wikidev /opt/test-systems sudo chmod +775 /opt/test-systems ``` -You can create a test system using the `prepare-docker-compose-config` script from https://github.com/wmde/wikibase-suite-test-system-tools which generates the necessary docker compose configuration using parametrized input. -The input paremeters are: +You can create a test system using the `prepare-docker-compose-config` script from https://github.com/wmde/wikibase-suite-test-system-tools which generates the necessary docker compose configuration using parametrized input. The input paremeters are: - - IMAGE_PREFIX: Prefix of images to use. 
Use an empty string to use locally loaded images. - - TEST_SYSTEM: The name of the test system to create, should be either "latest" or "previous" - - EXAMPLE_HASH: Hash of the release pipeline repository to use the docker compose example from - - BUILD_NUMBER: Build of images, or tag, to use for images of the test system +- IMAGE_PREFIX: Prefix of images to use. Use an empty string to use locally loaded images. +- TEST_SYSTEM: The name of the test system to create, should be either "latest" or "previous" +- EXAMPLE_HASH: Hash of the release pipeline repository to use the docker compose example from +- BUILD_NUMBER: Build of images, or tag, to use for images of the test system For example: @@ -79,14 +73,11 @@ To start the test system: sudo docker compose -f docker-compose.yml -f docker-compose.extra.yml up -d ``` -TODO in order to keep the LocalSettings.php file between updates of the mediawiki container we want to copy it out, onto disk, and mount it in. -However that is a little dificult right now due to https://phabricator.wikimedia.org/T298632 -So Adam will write these docs once that task is merged and resolved. +TODO in order to keep the LocalSettings.php file between updates of the mediawiki container we want to copy it out, onto disk, and mount it in. However that is a little dificult right now due to https://phabricator.wikimedia.org/T298632 So Adam will write these docs once that task is merged and resolved. ## Updating -All data is stored in volumes, so the easiest way to update a test system is to turn it off, recreate it using the steps above, just with different intputs, and then run `up` again. -The one thing that needs copying over and mounting in the docker compose file is the LocalSetting.php file for MediaWiki which on initial setup is created by the wikibase container and stored in the container only. 
+All data is stored in volumes, so the easiest way to update a test system is to turn it off, recreate it using the steps above, just with different intputs, and then run `up` again. The one thing that needs copying over and mounting in the docker compose file is the LocalSetting.php file for MediaWiki which on initial setup is created by the wikibase container and stored in the container only. That would look something like this... diff --git a/docs/topics/testing.md b/docs/topics/testing.md index 344c0db54..1d328a46d 100644 --- a/docs/topics/testing.md +++ b/docs/topics/testing.md @@ -1,6 +1,6 @@ # Testing -Tests are organized in "suites" which can be found in `test/suites`. Each suite runs a series of specs (tests) found in the `test/specs` directory. Which specs run in each suite by default are specified in the `.config.ts` file in each suite directory under the `specs` key. +Tests are organized in "suites" which can be found in `test/suites`. Each suite runs a series of specs (tests) found in the `test/specs` directory. Which specs run in each suite by default are specified in the `.config.ts` file in each suite directory under the `specs` key. All test suites except `example` and `upgrade` are ran against the most recently built local Docker images, those are the images with the `:latest` tag which are also taken when no tag is specified. The `example` test suite runs against the remote Docker Images specified in the configuration in the `/example` directory. The `upgrade` suite runs the remote Docker images from the specified previous version, and tests upgrading to the latest local build. @@ -37,7 +37,7 @@ There are also a few special options which are useful when writing tests, or in # Sets test timeouts to 1 day so they don't timeout while debugging with `await browser.debug()` calls # This however can have undesirable effects during normal test runs so only use for actual debugging -# purposes. +# purposes. 
./test.sh repo --debug # `DEBUG`: Shows full Docker compose up/down progress logs for the Test Runner @@ -47,7 +47,7 @@ DEBUG=true ./test.sh repo WDIO Testrunner CLI options are also supported. See https://webdriver.io/docs/testrunner. -## Variables for testing some other instance +## Variables for testing some other instance In order to test your own instances of the services, make sure to set the following environment variables to the services that should be tested: diff --git a/docs/topics/upgrading.md b/docs/topics/upgrading.md index 42ed68c16..9dc2d03e4 100644 --- a/docs/topics/upgrading.md +++ b/docs/topics/upgrading.md @@ -5,16 +5,17 @@ This document describes the process that can be applied when backing up and upgr # Back up your data Always back up your data before attempting an upgrade! Backing up the database is **NOT** sufficient to restore a failed upgrade. Remember that any content in the containers, in particular the `/var/www/html/LocalSettings.php` file is generated at startup and is at **risk of being lost once the old containers are removed!** + ## Back up your database In all of our images we rely on a database to persist data. Normally these are stored in Docker volumes and can be seen in the mysql container in the Docker example as `mediawiki-mysql-data`. ```yml - mysql: - image: "${MARIADB_IMAGE_URL}" - restart: unless-stopped - volumes: - - mediawiki-mysql-data:/var/lib/mysql +mysql: + image: "${MARIADB_IMAGE_URL}" + restart: unless-stopped + volumes: + - mediawiki-mysql-data:/var/lib/mysql ``` Under ideal circumstances, a backup isn't necessary to upgrade to a new version; however, there is always the possibility of something going wrong, and having a backup is always a good idea. 
@@ -60,6 +61,7 @@ docker run -v wikibase_mediawiki-mysql-data:/volume -v /tmp/wikibase-data:/backu ``` ## Back up other data + ### 1.1 Copy your LocalSettings.php file If you haven't mounted your own LocalSettings.php file, located in `/var/www/html/LocalSettings.php`, you run the risk of losing this important file when upgrading. @@ -88,11 +90,13 @@ services: volumes: - /tmp/LocalSettings.php:/var/www/html/LocalSettings.php ``` + ### 2. Copy other data written inside container In some newer images, the default value of upload images is written inside the container at `/var/www/html/images`. Review your configuration and make backups of any logs or other data that you wish to save. # Do the upgrade + ## Stop the running containers Before we do the actual upgrade, we need to stop the containers and remove the volume that is shared between the `wikibase` and `wikibase-jobrunner` containers. @@ -154,5 +158,4 @@ docker exec php /var/www/html/maintenance/update.php Running this command will execute the MediaWiki Updater. After it has completed, your upgrade should be successful! - For more information on upgrading, consult addshore's [blog post](https://addshore.com/2019/01/wikibase-docker-mediawiki-wikibase-update/) describing how it was done for the [wikibase registry](https://wikibase-registry.wmflabs.org) (which has custom extensions installed). 
diff --git a/example/README.md b/example/README.md index a3d27a3d3..cefa0525f 100644 --- a/example/README.md +++ b/example/README.md @@ -2,8 +2,8 @@ The example docker compose configuration consists of two files: -* `docker-compose.yml` contains two services: wikibase and mysql -* `docker-compose.extra.yml` contains additional services such as wdqs, wdqs-frontend, elasticsearch and quickstatements +- `docker-compose.yml` contains two services: wikibase and mysql +- `docker-compose.extra.yml` contains additional services such as wdqs, wdqs-frontend, elasticsearch and quickstatements **We recommend you go through `docker-compose.extra.yml` and remove any unwanted services.** @@ -29,8 +29,7 @@ The example docker-compose.yml sets up a dedicated job runner which restarts its If you run large batches of edits, this job runner may not be able to keep up with edits. -You can speed it up by increasing the `MAX_JOBS` variable to run more jobs between restarts, if you’re okay with configuration changes not taking effect in the job runner immediately. -Alternatively, you can run several job runners in parallel by using the `--scale` option. +You can speed it up by increasing the `MAX_JOBS` variable to run more jobs between restarts, if you’re okay with configuration changes not taking effect in the job runner immediately. Alternatively, you can run several job runners in parallel by using the `--scale` option. ```sh docker compose up --scale wikibase-jobrunner=8 @@ -60,9 +59,9 @@ In the volumes section of the wikibase service in [docker-compose.extra.yml](doc Looking inside extra-install.sh, you see that it executes two scripts which set up an OAuth consumer for quickstatements and creates indices for Elasticsearch. - There are also additional environment variables passed into Wikibase to configure the Elasticsearch host and port. 
+ ```yml - MW_ELASTIC_HOST: ${MW_ELASTIC_HOST} - MW_ELASTIC_PORT: ${MW_ELASTIC_PORT} +MW_ELASTIC_HOST: ${MW_ELASTIC_HOST} +MW_ELASTIC_PORT: ${MW_ELASTIC_PORT} ``` diff --git a/example/docker-compose.extra.yml b/example/docker-compose.extra.yml index a493cd9ee..12ad7151a 100644 --- a/example/docker-compose.extra.yml +++ b/example/docker-compose.extra.yml @@ -45,6 +45,7 @@ services: environment: - WIKIBASE_HOST - WDQS_HOST=wdqs-proxy.svc + wdqs: image: "${WDQS_IMAGE_NAME}" restart: unless-stopped diff --git a/lint.sh b/lint.sh index 015dbafbd..8cdecdd4b 100755 --- a/lint.sh +++ b/lint.sh @@ -13,16 +13,29 @@ done if $SHOULD_FIX then - echo "Fixing Linting and Formatting issues in Typescript" - NPM_LINT_COMMAND="npm run lint:fix --silent" - PYTHON_FLAGS="--fix" + echo "Fixing Linting and Formatting Issues" + NPM_JS_COMMAND="lint:fix-js" + NPM_YML_COMMAND="lint:fix-yml" + NEWLINE_FLAGS="--fix" + BLACK_FLAGS="" else - NPM_LINT_COMMAND="npm run lint --silent" - PYTHON_FLAGS="" + NPM_JS_COMMAND="lint-js" + NPM_YML_COMMAND="lint-yml" + NEWLINE_FLAGS="" + BLACK_FLAGS="--check" fi -# â„č Linting Javascript test/**/*.ts -$TEST_COMPOSE run --rm --build test-runner -c "$NPM_LINT_COMMAND" +# â„č Linting Javascript test/**/*.cjs,js,json,mjs,ts +$RUN_TEST_RUNNER_CMD "npm run $NPM_JS_COMMAND" + +# â„č Linting Markdown **/*.md +if $SHOULD_FIX +then + $RUN_TEST_RUNNER_CMD "npm run lint:fix-md" +fi + +# â„č Linting YML **/*.yml +$RUN_TEST_RUNNER_CMD "npm run $NPM_YML_COMMAND" # â„č Linting Shell Scripts (**/*.sh) - https://github.com/koalaman/shellcheck#from-your-terminal find . -type d -name node_modules -prune -false -o -name "*.sh" -print0 \ @@ -33,8 +46,11 @@ docker run --rm -v "$(pwd)":/code -v "$(pwd)/.hadolint.yml":/.hadolint.yml hadol find . 
-name Dockerfile -print -o -type d -name node_modules -prune | xargs hadolint " +# â„č Formatting Python scripts +$RUN_TEST_RUNNER_CMD "python3 -m black ../ $BLACK_FLAGS" + # â„č Linting newlines across the repo MY_FILES="$(git ls-files)" $TEST_COMPOSE run --rm --build -v "$(pwd):/tmp" test-runner -c " - python3 scripts/add_newline.py /tmp '$MY_FILES' $PYTHON_FLAGS + python3 scripts/add_newline.py /tmp '$MY_FILES' $NEWLINE_FLAGS " diff --git a/test/.eslintrc.json b/test/.eslintrc.json index 7a401655c..0f2eb6152 100644 --- a/test/.eslintrc.json +++ b/test/.eslintrc.json @@ -5,7 +5,7 @@ "wikimedia/language/es2019", "@wmde/wikimedia-typescript" ], - "ignorePatterns": [ "**/results/result.json" ], + "ignorePatterns": [ "**/results/result.json", "!.github", "!example" ], "parserOptions": { "sourceType": "module", "allowImportExportEverywhere": true, @@ -17,7 +17,10 @@ "wdio/no-pause": "warn", "es-x/no-import-meta": "off", "es-x/no-nullish-coalescing-operators": "off", - "es-x/no-class-fields": "off" + "es-x/no-class-fields": "off", + "yml/block-sequence": 1, + "yml/no-empty-mapping-value": 1, + "yml/plain-scalar": 1 }, "overrides": [ { @@ -31,7 +34,7 @@ "files": [ "*.yaml", "*.yml" ], "parser": "yaml-eslint-parser", "rules": { - "yml/no-empty-mapping-value": 1 + "spaced-comment": 0 } } ] diff --git a/test/.prettierrc.json b/test/.prettierrc.json index c139cd3d5..3002b1a31 100644 --- a/test/.prettierrc.json +++ b/test/.prettierrc.json @@ -1,10 +1,11 @@ { + "proseWrap": "never", "trailingComma": "none", "useTabs": true, "plugins": [ "prettier-plugin-organize-imports" ], "overrides": [ { - "files": [ "*.mjs", "*.ts" ], + "files": [ "*.cjs", "*.js", "*.mjs", "*.ts" ], "options": { "singleQuote": true } diff --git a/test/Dockerfile b/test/Dockerfile index 78f040aee..03c799528 100644 --- a/test/Dockerfile +++ b/test/Dockerfile @@ -14,7 +14,8 @@ RUN apt-get update && \ setuptools \ requests \ bs4 \ - lxml + lxml \ + black WORKDIR /usr/src/test/ diff --git a/test/README.md 
b/test/README.md index b67f76bec..efe17b18e 100644 --- a/test/README.md +++ b/test/README.md @@ -1,6 +1,7 @@ # Test-cases ## Wikibase + - [ ] Create an item using the UI - [ ] Add a statement - [ ] Add a qualifier @@ -9,12 +10,15 @@ - [ ] add a statement - [ ] add a qualifier - [ ] add a reference + ## Wikibase & WDQS/WDQS-frontend + - [x] Create an item on Wikibase, and verify it can be found on Query Service (including the label etc) - [x] Delete an item on Wikibase, and verify that the item isn’t there on the query service any more - [x] Create an item with a statement and verify that querying using prefixes work correctly (wdt:P1 etc.. ) ## Wikibase Client & Repo + - [x] Create an item on repo with sitelink to client - [x] Create an item on repo and verify that client can reference it using wikitext - [ ] Verify that changes on repo are dispatched to client ( TODO figure out all use-cases ) @@ -24,42 +28,34 @@ - [x] Create an item on repo and verify that client can use it using Lua (Scribunto) ## Scribunto + - [x] Verify Lua module can be executed - [x] Verify Lua module can be executed within time-limit ## Wikibase Federated properties (using wikidata.org as source wiki) -- [X] Create an item on Wikibase and add a statement using a federated property - - [X] verify it is available with the correct rdf through the Entity page - - [X] verify it is available through the WDQS-frontend with the correct prefixes + +- [x] Create an item on Wikibase and add a statement using a federated property + - [x] verify it is available with the correct rdf through the Entity page + - [x] verify it is available through the WDQS-frontend with the correct prefixes ## Elasticsearch -- [X] Create item with an alias and search by item alias -- [X] Case-insensitive search should work through Wikibase + +- [x] Create item with an alias and search by item alias +- [x] Case-insensitive search should work through Wikibase ## Environment The behavior of the tests can be modified with 
several environment variables. -* `WIKIBASE_URL`: protocol, host name and port of the MediaWiki installation. - Defaults to `http://127.0.0.1:8080` (Vagrant). -* `MW_SCRIPT_PATH`: path to `index.php`, `api.php` etc. under `WIKIBASE_URL`. - Defaults to `/w`. -* `WIKIBASE_PROPERTY_STRING`, `WIKIBASE_PROPERTY_URL`, etc.: - Property ID of a property with datatype `string`, `url`, etc. – - if not set, a new property of this type will be created each time the tests are run. - (This will fail unless anonymous users are allowed to create properties on the wiki, - so setting `WIKIBASE_PROPERTY_STRING` correctly is recommended.) -* `HEADED_TESTS`: set to `true` to run tests in a headed browser. Follow the test - execution on http://localhost:7900/?autoconnect=1&resize=scale +- `WIKIBASE_URL`: protocol, host name and port of the MediaWiki installation. Defaults to `http://127.0.0.1:8080` (Vagrant). +- `MW_SCRIPT_PATH`: path to `index.php`, `api.php` etc. under `WIKIBASE_URL`. Defaults to `/w`. +- `WIKIBASE_PROPERTY_STRING`, `WIKIBASE_PROPERTY_URL`, etc.: Property ID of a property with datatype `string`, `url`, etc. – if not set, a new property of this type will be created each time the tests are run. (This will fail unless anonymous users are allowed to create properties on the wiki, so setting `WIKIBASE_PROPERTY_STRING` correctly is recommended.) +- `HEADED_TESTS`: set to `true` to run tests in a headed browser. Follow the test execution on http://localhost:7900/?autoconnect=1&resize=scale ## Write more tests -When working on the browser tests, -you’ll want to consult the documentation of the following libraries we use: +When working on the browser tests, you’ll want to consult the documentation of the following libraries we use: -* [WebdriverIO](https://webdriver.io/docs/api) for controlling the browser - (`browser`, `$`, `waitForVisible`, 
) -* [Mocha](https://mochajs.org/) as the general testing framework - (`describe`, `it`, `before`, 
) -* [`expect`](https://webdriver.io/docs/api/expect-webdriverio/) for assertions - (`toBe`, `toEqual`, 
) +- [WebdriverIO](https://webdriver.io/docs/api) for controlling the browser (`browser`, `$`, `waitForVisible`, 
) +- [Mocha](https://mochajs.org/) as the general testing framework (`describe`, `it`, `before`, 
) +- [`expect`](https://webdriver.io/docs/api/expect-webdriverio/) for assertions (`toBe`, `toEqual`, 
) diff --git a/test/helpers/default-functions.ts b/test/helpers/default-functions.ts index 716406e75..7be54e354 100644 --- a/test/helpers/default-functions.ts +++ b/test/helpers/default-functions.ts @@ -2,12 +2,12 @@ import axios, { AxiosResponse } from 'axios'; import lodash from 'lodash'; import { Context } from 'mocha'; import WikibaseApi from 'wdio-wikibase/wikibase.api.js'; -import { TestSettings } from '../types/TestSettings.js'; import Binding from '../types/binding.js'; import BotResponse from '../types/bot-response.js'; import DatabaseConfig from '../types/database-config.js'; import ExternalChange from '../types/external-change.js'; import LuaCPUValue from '../types/lua-cpu-value.js'; +import { TestSettings } from '../types/test-settings.js'; export function defaultFunctions(): void { const settings: TestSettings = testEnv.settings; @@ -175,7 +175,7 @@ export function defaultFunctions(): void { const changes = result.data.query.recentchanges; const foundResult = lodash.find( changes, expectedChange ); - expect( result.status ).toBe( 200 ); + expect( result.status ).toEqual( 200 ); if ( !foundResult ) { testEnv.testLog.error( 'Could not find:' ); diff --git a/test/helpers/json-reporter.ts b/test/helpers/json-reporter.ts index 34e3476fd..f3aea81f6 100644 --- a/test/helpers/json-reporter.ts +++ b/test/helpers/json-reporter.ts @@ -2,7 +2,7 @@ import WDIOReporter, { SuiteStats, TestStats } from '@wdio/reporter'; import { Reporters } from '@wdio/types'; import { existsSync, readFileSync, writeFileSync } from 'fs'; import { ResultType, TestResult } from '../types/test-results.js'; -import { utf8 } from './readFileEncoding.js'; +import { utf8 } from './read-file-encoding.js'; type JsonReporterOptions = { suiteName: string; diff --git a/test/helpers/readFileEncoding.ts b/test/helpers/read-file-encoding.ts similarity index 100% rename from test/helpers/readFileEncoding.ts rename to test/helpers/read-file-encoding.ts diff --git a/test/package-lock.json 
b/test/package-lock.json index 3f62f5067..76654ce43 100644 --- a/test/package-lock.json +++ b/test/package-lock.json @@ -9,7 +9,7 @@ "version": "0.1.0", "devDependencies": { "@types/mocha": "^10.0.6", - "@types/node": "^20.11.24", + "@types/node": "^20.11.28", "@wdio/cli": "^8.18.2", "@wdio/local-runner": "^8.18.2", "@wdio/mocha-framework": "^8.18.2", @@ -639,9 +639,9 @@ } }, "node_modules/@puppeteer/browsers": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.7.1.tgz", - "integrity": "sha512-nIb8SOBgDEMFY2iS2MdnUZOg2ikcYchRrBoF+wtdjieRFKR2uGRipHY/oFLo+2N6anDualyClPzGywTHRGrLfw==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.9.1.tgz", + "integrity": "sha512-PuvK6xZzGhKPvlx3fpfdM2kYY3P/hB1URtK8wA7XUJ6prn6pp22zvJHu48th0SGcHL9SutbPHrFuQgfXTFobWA==", "dev": true, "dependencies": { "debug": "4.3.4", @@ -650,7 +650,7 @@ "proxy-agent": "6.3.1", "tar-fs": "3.0.4", "unbzip2-stream": "1.4.3", - "yargs": "17.7.1" + "yargs": "17.7.2" }, "bin": { "browsers": "lib/cjs/main-cli.js" @@ -659,33 +659,6 @@ "node": ">=16.3.0" } }, - "node_modules/@puppeteer/browsers/node_modules/yargs": { - "version": "17.7.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz", - "integrity": "sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==", - "dev": true, - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@puppeteer/browsers/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "dev": true, - "engines": { - "node": ">=12" - } - 
}, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -795,9 +768,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "20.11.24", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.24.tgz", - "integrity": "sha512-Kza43ewS3xoLgCEpQrsT+xRo/EJej1y0kVYGiLFE1NEODXGzTfwiC6tXTLMQskn1X4/Rjlh0MQUvx9W+L9long==", + "version": "20.11.28", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.28.tgz", + "integrity": "sha512-M/GPWVS2wLkSkNHVeLkrF2fD5Lx5UC4PxA0uZcKc6QqbIQUJyW1jVjueJYi1z8n0I5PxYrtpnPnWglE+y9A0KA==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -2526,13 +2499,19 @@ } }, "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3262,9 +3241,9 @@ } }, "node_modules/data-uri-to-buffer": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.1.tgz", - "integrity": "sha512-MZd3VlchQkp8rdend6vrx7MmVDJzSNTBvghvKjirLkD+WTChA3KUf0jkE68Q4UyctNqI11zZO9/x2Yx+ub5Cvg==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", + "integrity": 
"sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", "dev": true, "engines": { "node": ">= 14" @@ -3409,17 +3388,20 @@ } }, "node_modules/define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-properties": { @@ -3478,9 +3460,9 @@ "dev": true }, "node_modules/diff": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz", - "integrity": "sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", "dev": true, "engines": { "node": ">=0.3.1" @@ -3788,21 +3770,6 @@ "util-deprecate": "~1.0.1" } }, - "node_modules/duplexer2/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/duplexer2/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -4119,6 +4086,27 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-set-tostringtag": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", @@ -5880,15 +5868,19 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" + "has-symbols": 
"^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6368,12 +6360,12 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.1.1" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6418,6 +6410,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/hasown": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.1.tgz", + "integrity": "sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", @@ -6702,9 +6706,9 @@ } }, "node_modules/ip": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz", - "integrity": "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg==", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.9.tgz", + "integrity": "sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ==", "dev": true }, "node_modules/is-array-buffer": { @@ -7662,21 +7666,6 @@ "util-deprecate": "~1.0.1" } }, - "node_modules/lazystream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/lazystream/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/ldjson-stream": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ldjson-stream/-/ldjson-stream-1.2.1.tgz", @@ -7998,9 +7987,9 @@ } }, "node_modules/lru-cache": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", - "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.0.tgz", + "integrity": "sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==", "dev": true, "engines": { "node": "14 || >=16.14" @@ -8421,6 +8410,15 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/mocha/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, "node_modules/mocha/node_modules/cliui": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", @@ -8450,15 +8448,6 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/mocha/node_modules/diff": { - "version": "5.0.0", - 
"resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", - "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", - "dev": true, - "engines": { - "node": ">=0.3.1" - } - }, "node_modules/mocha/node_modules/glob": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", @@ -8490,15 +8479,6 @@ "node": ">=10" } }, - "node_modules/mocha/node_modules/minimatch/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, "node_modules/mocha/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -8927,9 +8907,9 @@ } }, "node_modules/object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -9436,9 +9416,9 @@ } }, "node_modules/postcss-html/node_modules/js-tokens": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-8.0.2.tgz", - "integrity": "sha512-Olnt+V7xYdvGze9YTbGFZIfQXuGV4R3nQwwl8BrtgaPE/wq8UFpUHWuTNc05saowhSr1ZO6tx+V6RjE9D5YQog==", + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-8.0.3.tgz", + "integrity": "sha512-UfJMcSJc+SEXEl9lH/VLHSZbThQyLpw1vLO1Lb+j4RWDvG3N2f7yj3PVQA3cmkTBNldJ9eFnM+xEXxHIXrYiJw==", "dev": true }, 
"node_modules/postcss-less": { @@ -9784,27 +9764,6 @@ "node": ">= 14" } }, - "node_modules/puppeteer-core/node_modules/ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", - "dev": true, - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/puppeteer-core/node_modules/yargs": { "version": "17.7.1", "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz", @@ -9966,18 +9925,18 @@ } }, "node_modules/read-pkg-up/node_modules/json-parse-even-better-errors": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.0.tgz", - "integrity": "sha512-iZbGHafX/59r39gPwVPRBGw0QQKnA7tte5pSMrhWOW7swGsVvVTjmfyAV9pNqk8YGT7tRCdxRu8uzcgZwoDooA==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.1.tgz", + "integrity": "sha512-aatBvbL26wVUCLmbWdCpeu9iF5wOyWpagiKkInA+kfws3sWdBrTnsvN2CKcyCYyUrc7rebNBlK6+kteg7ksecg==", "dev": true, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, "node_modules/read-pkg-up/node_modules/lines-and-columns": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-2.0.3.tgz", - "integrity": "sha512-cNOjgCnLB+FnvWWtyRTzmB3POJ+cXxTA81LoW7u8JdmhfXzriropYwpjShnz1QLLWsQwY7nIxoDmcPTwphDK9w==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-2.0.4.tgz", + "integrity": "sha512-wM1+Z03eypVAVUCE7QdSqpVIvelbOakn1M0bPDoA4SGWPx3sNDVUiMo3L6To6WWGClB7VyXnhQ4Sn7gxiJbE6A==", "dev": true, "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" @@ 
-10044,9 +10003,9 @@ } }, "node_modules/read-pkg-up/node_modules/parse-json": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-7.1.0.tgz", - "integrity": "sha512-ihtdrgbqdONYD156Ap6qTcaGcGdkdAxodO1wLqQ/j7HP1u2sFYppINiq4jyC8F+Nm+4fVufylCV00QmkTHkSUg==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-7.1.1.tgz", + "integrity": "sha512-SgOTCX/EZXtZxBE5eJ97P4yGM5n37BwRU+YMsH4vNzFqJV/oWFXXCmwFlgWUM4PrakybVOueJJ6pwHqSVhTFDw==", "dev": true, "dependencies": { "@babel/code-frame": "^7.21.4", @@ -10102,9 +10061,9 @@ } }, "node_modules/read-pkg-up/node_modules/type-fest": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.4.0.tgz", - "integrity": "sha512-HT3RRs7sTfY22KuPQJkD/XjbTbxgP2Je5HPt6H6JEGvcjHd5Lqru75EbrP3tb4FYjNJ+DjLp+MNQTFQU0mhXNw==", + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.11.1.tgz", + "integrity": "sha512-MFMf6VkEVZAETidGGSYW2B1MjXbGX+sWIywn2QPEaJ3j08V+MwVRHMXtf2noB8ENJaD0LIun9wh5Z6OPNf1QzQ==", "dev": true, "engines": { "node": ">=16" @@ -10594,24 +10553,10 @@ } }, "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true }, "node_modules/safe-regex": { "version": "2.1.1", @@ -10779,6 +10724,23 @@ "randombytes": "^2.1.0" } }, + 
"node_modules/set-function-length": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", + "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.2", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.3", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/set-function-name": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", @@ -10830,14 +10792,18 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -10953,9 +10919,9 @@ } }, "node_modules/socks/node_modules/ip": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", - "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.1.tgz", + "integrity": "sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==", "dev": true }, 
"node_modules/source-map": { @@ -11083,12 +11049,12 @@ } }, "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dev": true, "dependencies": { - "safe-buffer": "~5.2.0" + "safe-buffer": "~5.1.0" } }, "node_modules/string-width": { @@ -11518,12 +11484,12 @@ } }, "node_modules/superagent/node_modules/qs": { - "version": "6.11.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.2.tgz", - "integrity": "sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.12.0.tgz", + "integrity": "sha512-trVZiI6RMOkO476zLGaBIzszOdFPnCCXHPG9kn0yuS1uz6xdVxPfZdB3vUig9pxPFDM9BRAgz/YUIVQ1/vuiUg==", "dev": true, "dependencies": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" }, "engines": { "node": ">=0.6" @@ -12103,21 +12069,6 @@ "util-deprecate": "~1.0.1" } }, - "node_modules/unzipper/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/unzipper/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/upath": { "version": "2.0.1", "resolved": 
"https://registry.npmjs.org/upath/-/upath-2.0.1.tgz", @@ -12726,9 +12677,9 @@ } }, "node_modules/ws": { - "version": "8.14.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.14.2.tgz", - "integrity": "sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==", + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", + "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", "dev": true, "engines": { "node": ">=10.0.0" diff --git a/test/package.json b/test/package.json index 68b67717b..36fa19844 100644 --- a/test/package.json +++ b/test/package.json @@ -5,7 +5,7 @@ "type": "module", "devDependencies": { "@types/mocha": "^10.0.6", - "@types/node": "^20.11.24", + "@types/node": "^20.11.28", "@wdio/cli": "^8.18.2", "@wdio/local-runner": "^8.18.2", "@wdio/mocha-framework": "^8.18.2", @@ -45,9 +45,14 @@ }, "scripts": { "test": "NODE_NO_WARNINGS=1 npx --node-options='--require ts-node/register --loader=ts-node/esm' node cli.ts", - "lint": "npx eslint . --ext .json --ext .mjs --ext .ts --ext .yml", + "lint": "npx eslint . --ext .cjs,.js,.mjs,.ts,.json,.yml", + "lint-js": "npx eslint . --ext .cjs,.js,.json,.mjs,.ts", + "lint-yml": "npx eslint .. --ext .yml --config ./.eslintrc.json", "lint:fix": "npm run lint:fix:format && npm run lint:fix:lint", - "lint:fix:format": "npx prettier *.{json,mjs,ts,yml} */*.{json,mjs,ts,yml} */*/*.{json,mjs,ts,yml} */*/*/*.{json,mjs,ts,yml} --write --log-level error", - "lint:fix:lint": "npx eslint . --ext .json --ext .mjs --ext .ts --ext .yml --fix" + "lint:fix:format": "npx prettier \"**/*.{cjs,js,mjs,ts,json,md,yml}\" --write --log-level error", + "lint:fix:lint": "npx eslint . --ext .cjs,.js,.mjs,.ts,.json,.yml --fix", + "lint:fix-js": "npx prettier \"**/*.{cjs,js,json,mjs,ts}\" --write --log-level error && npx eslint . 
--ext .cjs,.js,.json,.mjs,.ts --fix", + "lint:fix-md": "npx prettier \"../**/*.md\" --write --log-level error --config ./.prettierrc.json", + "lint:fix-yml": "npx prettier \"../**/*.yml\" --write --log-level error --config ./.prettierrc.json && npx eslint .. --ext .yml --fix --config ./.eslintrc.json" } } diff --git a/test/scripts/add_newline.py b/test/scripts/add_newline.py index 180f4177e..90dc67d94 100644 --- a/test/scripts/add_newline.py +++ b/test/scripts/add_newline.py @@ -29,10 +29,16 @@ def file_should_be_run(file: str) -> bool: return False -def add_newline(file: str, root_dir: str, should_fix: bool) -> bool: +def file_exists(file: str, root_dir: str) -> bool: file_path = os.path.join(root_dir, file) + result = os.path.isfile(file_path) + if not result: + print(f"Missing file: {file_path}") + return result + - with open(file_path, mode="r+") as current_file: +def add_newline(file: str, root_dir: str, should_fix: bool) -> bool: + with open(os.path.join(root_dir, file), mode="r+") as current_file: current_file.seek(0, os.SEEK_END) current_file.seek(current_file.tell() - 1, os.SEEK_SET) @@ -50,11 +56,11 @@ def add_newline(file: str, root_dir: str, should_fix: bool) -> bool: root = sys.argv[1] should_fix = False if len(sys.argv) > 3: - if sys.argv[3] == '--fix': + if sys.argv[3] == "--fix": should_fix = True found_errors = False for file in sys.argv[2].split("\n"): - if file_should_be_run(file): + if file_should_be_run(file) and file_exists(file, root): found_errors |= not add_newline(file, root, should_fix) if found_errors: sys.exit(2) diff --git a/test/setup/checkIfUp.ts b/test/setup/check-if-up.ts similarity index 100% rename from test/setup/checkIfUp.ts rename to test/setup/check-if-up.ts diff --git a/test/setup/loadEnvFiles.ts b/test/setup/load-env-files.ts similarity index 100% rename from test/setup/loadEnvFiles.ts rename to test/setup/load-env-files.ts diff --git a/test/setup/makeTestSettings.ts b/test/setup/make-test-settings.ts similarity index 97% 
rename from test/setup/makeTestSettings.ts rename to test/setup/make-test-settings.ts index d32cab0c9..937911850 100644 --- a/test/setup/makeTestSettings.ts +++ b/test/setup/make-test-settings.ts @@ -8,8 +8,8 @@ import TestSettings, { TestHooks, TestRunnerSettings, TestSuiteSettings -} from '../types/TestSettings.js'; -import loadEnvFiles from './loadEnvFiles.js'; +} from '../types/test-settings.js'; +import loadEnvFiles from './load-env-files.js'; export const ONE_DAY_IN_MS = 86400000; diff --git a/test/setup/TestEnv.ts b/test/setup/test-env.ts similarity index 97% rename from test/setup/TestEnv.ts rename to test/setup/test-env.ts index 93e11345a..9fe4605bb 100644 --- a/test/setup/TestEnv.ts +++ b/test/setup/test-env.ts @@ -4,9 +4,9 @@ import { spawnSync } from 'child_process'; import { mkdirSync, rmSync } from 'fs'; import * as readline from 'readline'; import { SevereServiceError } from 'webdriverio'; -import TestSettings from '../types/TestSettings.js'; -import checkIfUp from './checkIfUp.js'; -import { makeTestSettings } from './makeTestSettings.js'; +import TestSettings from '../types/test-settings.js'; +import checkIfUp from './check-if-up.js'; +import { makeTestSettings } from './make-test-settings.js'; declare global { // eslint-disable-next-line no-var, no-use-before-define diff --git a/test/setup/wdio.conf.ts b/test/setup/wdio.conf.ts index 65caa65ea..70dc950d7 100755 --- a/test/setup/wdio.conf.ts +++ b/test/setup/wdio.conf.ts @@ -4,7 +4,7 @@ import { existsSync } from 'fs'; import { dirname } from 'path'; import { fileURLToPath } from 'url'; import JsonReporter from '../helpers/json-reporter.js'; -import TestEnv from './TestEnv.js'; +import TestEnv from './test-env.js'; // eslint-disable-next-line no-underscore-dangle const __dirname = dirname( fileURLToPath( import.meta.url ) ); diff --git a/test/specs/confirm_edit/confirm-edit.ts b/test/specs/confirm_edit/confirm-edit.ts index 64af96912..d4a69b0ce 100644 --- a/test/specs/confirm_edit/confirm-edit.ts 
+++ b/test/specs/confirm_edit/confirm-edit.ts @@ -7,6 +7,6 @@ describe( 'ConfirmEdit', function () { 'paris' ); - expect( executionResult ).toBe( 'something great' ); + expect( executionResult ).toEqual( 'something great' ); } ); } ); diff --git a/test/specs/elasticsearch/elasticsearch.ts b/test/specs/elasticsearch/elasticsearch.ts index c64188ec8..54afc4a87 100644 --- a/test/specs/elasticsearch/elasticsearch.ts +++ b/test/specs/elasticsearch/elasticsearch.ts @@ -1,6 +1,7 @@ import { getTestString } from 'wdio-mediawiki/Util.js'; import WikibaseApi from 'wdio-wikibase/wikibase.api.js'; import ItemPage from '../../helpers/pages/entity/item.page.js'; +import page from '../../helpers/pages/page.js'; import SearchResult from '../../types/search-result.js'; const itemAlias: string = getTestString( 'alias' ); @@ -19,7 +20,7 @@ describe( 'ElasticSearch', function () { } ); it( 'Should be able to set alias', async function () { - await browser.url( `${ testEnv.vars.WIKIBASE_URL }/wiki/Special:SetAliases/` ); + await page.open( '/wiki/Special:SetAliases/' ); // input id await $( '#wb-modifyentity-id input' ).setValue( itemId ); @@ -37,10 +38,8 @@ describe( 'ElasticSearch', function () { let searchResult: SearchResult[]; const testLabel = 'Testitem'; - expect( itemLabel.includes( testLabel ) ).toBe( false ); - expect( itemLabel.toLowerCase().includes( testLabel.toLowerCase() ) ).toBe( - true - ); + expect( itemLabel ).not.toMatch( testLabel ); + expect( itemLabel.toLowerCase() ).toMatch( testLabel.toLowerCase() ); await browser.waitUntil( async () => { @@ -63,9 +62,9 @@ describe( 'ElasticSearch', function () { ); expect( searchResult ).toHaveLength( 1 ); - expect( searchResult[ 0 ].id ).toBe( itemId ); - expect( searchResult[ 0 ].match.type ).toBe( 'label' ); - expect( searchResult[ 0 ].match.text ).toBe( itemLabel ); + expect( searchResult[ 0 ].id ).toEqual( itemId ); + expect( searchResult[ 0 ].match.type ).toEqual( 'label' ); + expect( searchResult[ 0 ].match.text 
).toEqual( itemLabel ); } ); it( 'should be able to search via alias', async function () { @@ -92,8 +91,8 @@ describe( 'ElasticSearch', function () { ); expect( searchResult ).toHaveLength( 1 ); - expect( searchResult[ 0 ].id ).toBe( itemId ); - expect( searchResult[ 0 ].match.type ).toBe( 'alias' ); - expect( searchResult[ 0 ].match.text ).toBe( itemAlias ); + expect( searchResult[ 0 ].id ).toEqual( itemId ); + expect( searchResult[ 0 ].match.type ).toEqual( 'alias' ); + expect( searchResult[ 0 ].match.text ).toEqual( itemAlias ); } ); } ); diff --git a/test/specs/fedprops/item.ts b/test/specs/fedprops/fedprops-item.ts similarity index 86% rename from test/specs/fedprops/item.ts rename to test/specs/fedprops/fedprops-item.ts index c062f9253..94a4c359c 100644 --- a/test/specs/fedprops/item.ts +++ b/test/specs/fedprops/fedprops-item.ts @@ -17,7 +17,7 @@ describe( 'Fed props Item', function () { `${ testEnv.vars.WIKIBASE_URL }/w/api.php?action=wbsearchentities&search=ISNI&format=json&language=en&type=property` ); - expect( result.data.success ).toBe( 1 ); + expect( result.data.success ).toEqual( 1 ); expect( result.data.search.length ).toBeGreaterThan( 0 ); } ); @@ -39,9 +39,9 @@ describe( 'Fed props Item', function () { await ItemPage.open( itemId ); - await expect( - $( '.wikibase-statementgroupview-property' ) - ).toHaveTextContaining( propertyValue ); // value is the label + await expect( $( '.wikibase-statementgroupview-property' ) ).toHaveText( + new RegExp( propertyValue ) + ); // value is the label await SpecialEntityPage.addStatementLink; } ); @@ -51,7 +51,7 @@ describe( 'Fed props Item', function () { await SpecialEntityDataPage.getData( 'Q1', 'ttl' ); } catch ( error ) { expect( error ).toBeInstanceOf( AxiosError ); - expect( error.request.res.statusCode ).toBe( 500 ); + expect( error.request.res.statusCode ).toEqual( 500 ); } } ); @@ -59,7 +59,7 @@ describe( 'Fed props Item', function () { const data = await SpecialEntityDataPage.getData( 'Q1' ); expect( 
data.entities.Q1.claims[ 'http://www.wikidata.org/entity/P213' ] - ).not.toBeNull(); + ).toEqual( expect.anything() ); } ); it( 'should NOT show up in Special:EntityData with rdf', async function () { @@ -67,7 +67,7 @@ describe( 'Fed props Item', function () { await SpecialEntityDataPage.getData( 'Q1', 'rdf' ); } catch ( error ) { expect( error ).toBeInstanceOf( AxiosError ); - expect( error.request.res.statusCode ).toBe( 500 ); + expect( error.request.res.statusCode ).toEqual( 500 ); } } ); @@ -90,7 +90,7 @@ describe( 'Fed props Item', function () { `<${ testEnv.vars.WIKIBASE_URL }/entity/${ itemId }>`, propertyValue ) - ).resolves.toBe( false ); + ).resolves.toEqual( false ); } ); it( 'should NOT show up in queryservice ui after creation', async function () { @@ -103,30 +103,30 @@ describe( 'Fed props Item', function () { // Item should never have made its way into the query service, as TTL doesnt work await expect( QueryServiceUIPage.resultIncludes( 'schema:version' ) - ).resolves.toBe( false ); + ).resolves.toEqual( false ); await expect( QueryServiceUIPage.resultIncludes( 'schema:dateModified' ) - ).resolves.toBe( false ); + ).resolves.toEqual( false ); await expect( QueryServiceUIPage.resultIncludes( 'wikibase:timestamp' ) - ).resolves.toBe( false ); + ).resolves.toEqual( false ); await expect( QueryServiceUIPage.resultIncludes( 'rdfs:label', itemLabel ) - ).resolves.toBe( false ); + ).resolves.toEqual( false ); await expect( QueryServiceUIPage.resultIncludes( 'wikibase:statements', '1' ) - ).resolves.toBe( false ); + ).resolves.toEqual( false ); await expect( QueryServiceUIPage.resultIncludes( 'wikibase:sitelinks', '0' ) - ).resolves.toBe( false ); + ).resolves.toEqual( false ); await expect( QueryServiceUIPage.resultIncludes( 'wikibase:identifiers', '1' ) - ).resolves.toBe( false ); + ).resolves.toEqual( false ); - await expect( QueryServiceUIPage.resultIncludes( 'p:P213' ) ).resolves.toBe( + await expect( QueryServiceUIPage.resultIncludes( 'p:P213' ) 
).resolves.toEqual( false ); } ); diff --git a/test/specs/fedprops/prefetching.ts b/test/specs/fedprops/property-prefetching.ts similarity index 93% rename from test/specs/fedprops/prefetching.ts rename to test/specs/fedprops/property-prefetching.ts index 0ce4f1eea..31377fb26 100644 --- a/test/specs/fedprops/prefetching.ts +++ b/test/specs/fedprops/property-prefetching.ts @@ -1,6 +1,7 @@ import { getTestString } from 'wdio-mediawiki/Util.js'; import WikibaseApi from 'wdio-wikibase/wikibase.api.js'; import ItemPage from '../../helpers/pages/entity/item.page.js'; +import page from '../../helpers/pages/page.js'; describe( 'Property Prefetching', function () { let itemId: string; @@ -54,7 +55,7 @@ describe( 'Property Prefetching', function () { for ( const guid of propertyGuids ) { const response = await browser.deleteClaim( guid ); - expect( response.success ).toBe( 1 ); + expect( response.success ).toEqual( 1 ); } // Sleep for 2 seconds to ensure post edit things run @@ -73,8 +74,8 @@ describe( 'Property Prefetching', function () { } ); it( 'Should render recent changes list within threshold', async function () { - await browser.url( - `${ testEnv.vars.WIKIBASE_URL }/wiki/Special:RecentChanges?limit=50&days=7&urlversion=2&enhanced=0` + await page.open( + '/wiki/Special:RecentChanges?limit=50&days=7&urlversion=2&enhanced=0' ); await $( 'ul.special' ); diff --git a/test/specs/pingback/pingback.ts b/test/specs/pingback/pingback.ts index 4fafe1a7d..ca1924463 100644 --- a/test/specs/pingback/pingback.ts +++ b/test/specs/pingback/pingback.ts @@ -1,6 +1,8 @@ +import page from '../../helpers/pages/page.js'; + describe( 'Pingback', function () { it( 'Should ping on first page request', async function () { - await browser.url( testEnv.vars.WIKIBASE_URL + '/wiki/Main_Page' ); + await page.open( '/wiki/Main_Page' ); // eslint-disable-next-line wdio/no-pause await browser.pause( 5 * 1000 ); @@ -8,8 +10,8 @@ describe( 'Pingback', function () { const sqlResult = await 
browser.dbQuery( 'SELECT * from updatelog where ul_key LIKE "WikibasePingback%"' ); - expect( sqlResult.includes( 'WikibasePingback\t' ) ).toBe( true ); - expect( sqlResult.includes( 'WikibasePingback-1.' ) ).toBe( true ); + expect( sqlResult ).toMatch( 'WikibasePingback\t' ); + expect( sqlResult ).toMatch( 'WikibasePingback-1.' ); const result = await browser.makeRequest( 'http://mediawiki.svc' ); expect( result.data ).toHaveLength( 2 ); @@ -18,6 +20,6 @@ describe( 'Pingback', function () { Object.keys( result.data[ 0 ] )[ 0 ].replace( ';', '' ) ); - expect( requestData.schema ).toBe( 'WikibasePingback' ); + expect( requestData.schema ).toEqual( 'WikibasePingback' ); } ); } ); diff --git a/test/specs/quickstatements/quickstatements.ts b/test/specs/quickstatements/quickstatements-service.ts similarity index 92% rename from test/specs/quickstatements/quickstatements.ts rename to test/specs/quickstatements/quickstatements-service.ts index 1e4edd1c7..f4866bffc 100644 --- a/test/specs/quickstatements/quickstatements.ts +++ b/test/specs/quickstatements/quickstatements-service.ts @@ -88,7 +88,7 @@ describe( 'QuickStatements Service', function () { await browser.pause( 2 * 1000 ); // redirect back to app - await expect( $( 'nav.navbar' ) ).toHaveTextContaining( 'QuickStatements' ); + await expect( $( 'nav.navbar' ) ).toHaveText( /QuickStatements/ ); } ); it( 'Should be able to click batch button and be taken to the next page', async function () { @@ -106,8 +106,8 @@ describe( 'QuickStatements Service', function () { const responseQ1Data = await SpecialEntityDataPage.getData( 'Q1' ); const responseQ2Data = await SpecialEntityDataPage.getData( 'Q2' ); - expect( responseQ1Data.entities.Q1.id ).toBe( 'Q1' ); - expect( responseQ2Data.entities.Q2.id ).toBe( 'Q2' ); + expect( responseQ1Data.entities.Q1.id ).toEqual( 'Q1' ); + expect( responseQ2Data.entities.Q2.id ).toEqual( 'Q2' ); } ); it( 'Should be able to create item with label', async function () { @@ -116,7 +116,7 @@ 
describe( 'QuickStatements Service', function () { await browser.executeQuickStatement( 'CREATE\nLAST|Len|"Best label"' ); const responseQ3Data = await SpecialEntityDataPage.getData( 'Q3' ); - expect( responseQ3Data.entities.Q3.labels.en.value ).toBe( 'Best label' ); + expect( responseQ3Data.entities.Q3.labels.en.value ).toEqual( 'Best label' ); } ); it( 'Should be able to create an item with statement', async function () { @@ -129,10 +129,12 @@ describe( 'QuickStatements Service', function () { ); const responseQ4Data = await SpecialEntityDataPage.getData( 'Q4' ); - expect( responseQ4Data.entities.Q4.labels.en.value ).toBe( 'freshwater eel' ); + expect( responseQ4Data.entities.Q4.labels.en.value ).toEqual( + 'freshwater eel' + ); expect( responseQ4Data.entities.Q4.claims.P1[ 0 ].mainsnak.datavalue.value - ).toBe( 'slippery fish' ); + ).toEqual( 'slippery fish' ); } ); it( 'Should be able to add an alias to an item', async function () { @@ -140,7 +142,7 @@ describe( 'QuickStatements Service', function () { // go look at wikibase const responseQ1Data = await SpecialEntityDataPage.getData( 'Q1' ); - expect( lodash.isEmpty( responseQ1Data.entities.Q1.aliases ) ).toBe( false ); + expect( lodash.isEmpty( responseQ1Data.entities.Q1.aliases ) ).toEqual( false ); } ); it( 'Should be able to add a label to an item', async function () { @@ -148,7 +150,7 @@ describe( 'QuickStatements Service', function () { // go look at wikibase const responseQ1Data = await SpecialEntityDataPage.getData( 'Q1' ); - expect( lodash.isEmpty( responseQ1Data.entities.Q1.labels ) ).toBe( false ); + expect( lodash.isEmpty( responseQ1Data.entities.Q1.labels ) ).toEqual( false ); } ); it( 'Should be able to add a description to an item', async function () { @@ -156,7 +158,9 @@ describe( 'QuickStatements Service', function () { // go look at wikibase const responseQ1Data = await SpecialEntityDataPage.getData( 'Q1' ); - expect( lodash.isEmpty( responseQ1Data.entities.Q1.descriptions ) ).toBe( false ); + 
expect( lodash.isEmpty( responseQ1Data.entities.Q1.descriptions ) ).toEqual( + false + ); } ); it.skip( 'Should be able to add a sitelink to an item', async function () { @@ -164,7 +168,7 @@ describe( 'QuickStatements Service', function () { // go look at wikibase const responseQ1Data = await SpecialEntityDataPage.getData( 'Q1' ); - expect( lodash.isEmpty( responseQ1Data.entities.Q1.sitelinks ) ).toBe( false ); + expect( lodash.isEmpty( responseQ1Data.entities.Q1.sitelinks ) ).toEqual( false ); } ); it( 'Should be able to add a statement to an item', async function () { @@ -173,12 +177,12 @@ describe( 'QuickStatements Service', function () { await browser.executeQuickStatement( `Q1|${ propertyId }|"Will it blend?"` ); const responseQ1Data = await SpecialEntityDataPage.getData( 'Q1' ); - expect( responseQ1Data.entities.Q1.claims[ propertyId ][ 0 ].type ).toBe( + expect( responseQ1Data.entities.Q1.claims[ propertyId ][ 0 ].type ).toEqual( 'statement' ); expect( responseQ1Data.entities.Q1.claims[ propertyId ][ 0 ].mainsnak.datavalue.value - ).toBe( 'Will it blend?' ); + ).toEqual( 'Will it blend?' 
); } ); describe( 'Should be able to add qualifiers to statements with a range of datatypes', function () { @@ -203,7 +207,7 @@ describe( 'QuickStatements Service', function () { ); expect( getQualifierType( responseQ1, mainPropertyId, qualifierPropertyId ) - ).toBe( qualifierSnakDataType ); + ).toEqual( qualifierSnakDataType ); } ); } ); } ); @@ -217,7 +221,7 @@ describe( 'QuickStatements Service', function () { ); const responseQ1Data = await SpecialEntityDataPage.getData( 'Q1' ); - expect( responseQ1Data.entities.Q1.claims[ propertyId ][ 0 ].type ).toBe( + expect( responseQ1Data.entities.Q1.claims[ propertyId ][ 0 ].type ).toEqual( 'statement' ); } ); @@ -240,9 +244,9 @@ describe( 'QuickStatements Service', function () { propertyIdItem ); - expect( typeof refValue ).not.toBe( 'string' ); + expect( typeof refValue ).not.toEqual( 'string' ); if ( typeof refValue !== 'string' ) { - expect( refValue.id ).toBe( 'Q2' ); + expect( refValue.id ).toEqual( 'Q2' ); } } ); @@ -259,7 +263,7 @@ describe( 'QuickStatements Service', function () { const response = await browser.makeRequest( `${ testEnv.vars.WIKIBASE_URL }/w/api.php?action=wbgetclaims&format=json&entity=${ itemId }` ); - expect( getReferenceValue( response, propertyIdItem, propertyURL ) ).toBe( + expect( getReferenceValue( response, propertyIdItem, propertyURL ) ).toEqual( 'https://www.wikidata.org' ); } ); @@ -275,7 +279,7 @@ describe( 'QuickStatements Service', function () { const response = await browser.makeRequest( `${ testEnv.vars.WIKIBASE_URL }/w/api.php?action=wbgetclaims&format=json&entity=${ itemId }` ); - expect( getReferenceValue( response, propertyIdItem, propertyId ) ).toBe( + expect( getReferenceValue( response, propertyIdItem, propertyId ) ).toEqual( 'some string' ); } ); @@ -286,19 +290,25 @@ describe( 'QuickStatements Service', function () { await browser.executeQuickStatement( `${ itemId }|${ propertyIdItem }|Q1` ); let responseData = await SpecialEntityDataPage.getData( itemId ); - expect( 
propertyIdItem in responseData.entities[ itemId ].claims ).toBe( true ); + expect( propertyIdItem in responseData.entities[ itemId ].claims ).toEqual( + true + ); await browser.executeQuickStatement( `-${ itemId }|${ propertyIdItem }|Q1` ); responseData = await SpecialEntityDataPage.getData( itemId ); - expect( propertyIdItem in responseData.entities[ itemId ].claims ).toBe( false ); + expect( propertyIdItem in responseData.entities[ itemId ].claims ).toEqual( + false + ); } ); it( 'Should be able to change label', async function () { await browser.executeQuickStatement( 'Q1|LSv|"Some other label"' ); const responseQ1Data = await SpecialEntityDataPage.getData( 'Q1' ); - expect( responseQ1Data.entities.Q1.labels.sv.value ).toBe( 'Some other label' ); + expect( responseQ1Data.entities.Q1.labels.sv.value ).toEqual( + 'Some other label' + ); } ); it( 'Should be able to merge two items', async function () { @@ -307,7 +317,7 @@ describe( 'QuickStatements Service', function () { await browser.executeQuickStatement( 'MERGE|Q1|Q2' ); const responseQ2Data = await SpecialEntityDataPage.getData( 'Q2' ); - expect( responseQ2Data.entities.Q1.id ).toBe( 'Q1' ); + expect( responseQ2Data.entities.Q1.id ).toEqual( 'Q1' ); } ); it( 'Should have a Last Batches button', async function () { diff --git a/test/specs/repo/extensions/babel.ts b/test/specs/repo/extensions/babel.ts index 131344e0c..f15d4e845 100644 --- a/test/specs/repo/extensions/babel.ts +++ b/test/specs/repo/extensions/babel.ts @@ -17,13 +17,11 @@ describe( 'Babel', function () { '{{#babel: sv | en }}' ); - expect( - executionContent.includes( 'Den hĂ€r anvĂ€ndaren har svenska som modersmĂ„l.' ) - ).toBe( true ); - expect( - executionContent.includes( - 'This user has a native understanding of English.' - ) - ).toBe( true ); + expect( executionContent ).toMatch( + 'Den hĂ€r anvĂ€ndaren har svenska som modersmĂ„l.' + ); + expect( executionContent ).toMatch( + 'This user has a native understanding of English.' 
+ ); } ); } ); diff --git a/test/specs/repo/extensions/entityschema.ts b/test/specs/repo/extensions/entityschema.ts index 1e9461423..16f8dbbfc 100644 --- a/test/specs/repo/extensions/entityschema.ts +++ b/test/specs/repo/extensions/entityschema.ts @@ -1,5 +1,6 @@ import { readFile } from 'fs/promises'; -import { utf8 } from '../../../helpers/readFileEncoding.js'; +import page from '../../../helpers/pages/page.js'; +import { utf8 } from '../../../helpers/read-file-encoding.js'; describe( 'EntitySchema', function () { const testLabel = 'A label'; @@ -10,7 +11,7 @@ describe( 'EntitySchema', function () { } ); it( 'Should be able to create an EntitySchema', async function () { - await browser.url( testEnv.vars.WIKIBASE_URL + '/wiki/EntitySchema:test' ); + await page.open( '/wiki/EntitySchema:test' ); // gives the link to Special:NewEntitySchema await $( '.noarticletext a' ).click(); @@ -38,9 +39,9 @@ describe( 'EntitySchema', function () { await expect( $( '.entityschema-title-id' ) ).toHaveText( '(E1)' ); await expect( entitySchemaEl.$( 'div' ) ).toHaveElementClass( 'mw-highlight' ); - await expect( $( '.external.entityschema-check-schema' ) ).toHaveAttrContaining( + await expect( $( '.external.entityschema-check-schema' ) ).toHaveAttr( 'href', - 'http://validator.svc' + /http:\/\/validator\.svc/ ); } ); } ); diff --git a/test/specs/repo/extensions/nuke.ts b/test/specs/repo/extensions/nuke.ts index 9abf58698..6e8e7b10d 100644 --- a/test/specs/repo/extensions/nuke.ts +++ b/test/specs/repo/extensions/nuke.ts @@ -1,4 +1,5 @@ import LoginPage from 'wdio-mediawiki/LoginPage.js'; +import page from '../../../helpers/pages/page.js'; describe( 'Nuke', function () { beforeEach( async function () { @@ -19,13 +20,13 @@ describe( 'Nuke', function () { {} ); - expect( pageExistsResult.status ).toBe( 200 ); + expect( pageExistsResult.status ).toEqual( 200 ); await LoginPage.login( testEnv.vars.MW_ADMIN_NAME, testEnv.vars.MW_ADMIN_PASS ); - await browser.url( 
testEnv.vars.WIKIBASE_URL + '/wiki/Special:Nuke' ); + await page.open( '/wiki/Special:Nuke' ); await $( 'button.oo-ui-inputWidget-input' ).click(); @@ -47,6 +48,6 @@ describe( 'Nuke', function () { {} ); - expect( pageIsGoneResult.status ).toBe( 404 ); + expect( pageIsGoneResult.status ).toEqual( 404 ); } ); } ); diff --git a/test/specs/repo/extensions/scribunto.ts b/test/specs/repo/extensions/scribunto.ts index 467e8e93e..7d08dc4e3 100644 --- a/test/specs/repo/extensions/scribunto.ts +++ b/test/specs/repo/extensions/scribunto.ts @@ -1,5 +1,5 @@ import { readFile } from 'fs/promises'; -import { utf8 } from '../../../helpers/readFileEncoding.js'; +import { utf8 } from '../../../helpers/read-file-encoding.js'; describe( 'Scribunto', function () { beforeEach( async function () { @@ -25,7 +25,7 @@ describe( 'Scribunto', function () { ); // should come from executed lua script - expect( executionContent.includes( 'Hello, world!' ) ).toBe( true ); + expect( executionContent ).toMatch( 'Hello, world!' 
); } ); it( 'Should be able to execute lua module within 0.05 seconds', async function () { @@ -35,6 +35,6 @@ describe( 'Scribunto', function () { ); expect( cpuTime.value ).toBeLessThan( 0.05 ); - expect( cpuTime.scale ).toBe( 'seconds' ); + expect( cpuTime.scale ).toEqual( 'seconds' ); } ); } ); diff --git a/test/specs/repo/extensions/syntax-highlight.ts b/test/specs/repo/extensions/syntax-highlight.ts index 9cdc3f8d3..3fc89ae6b 100644 --- a/test/specs/repo/extensions/syntax-highlight.ts +++ b/test/specs/repo/extensions/syntax-highlight.ts @@ -1,5 +1,6 @@ import { readFile } from 'fs/promises'; -import { utf8 } from '../../../helpers/readFileEncoding.js'; +import page from '../../../helpers/pages/page.js'; +import { utf8 } from '../../../helpers/read-file-encoding.js'; describe( 'SyntaxHighlight', function () { beforeEach( async function () { @@ -20,7 +21,7 @@ describe( 'SyntaxHighlight', function () { fileContents ); - await browser.url( testEnv.vars.WIKIBASE_URL + '/wiki/Module:Olives' ); + await page.open( '/wiki/Module:Olives' ); // should come with highlighted lua script await $( '.mw-highlight' ); diff --git a/test/specs/repo/extensions/universal-language-selector.ts b/test/specs/repo/extensions/universal-language-selector.ts index cc9530325..0384e5307 100644 --- a/test/specs/repo/extensions/universal-language-selector.ts +++ b/test/specs/repo/extensions/universal-language-selector.ts @@ -1,13 +1,30 @@ +import page from '../../../helpers/pages/page.js'; + describe( 'UniversalLanguageSelector', function () { beforeEach( async function () { await browser.skipIfExtensionNotPresent( this, 'UniversalLanguageSelector' ); } ); it( 'Should be able to see the language selector menu', async function () { - await browser.url( testEnv.vars.WIKIBASE_URL ); - await $( '#searchInput' ).click(); - await $( '.imeselector' ).click(); + await page.open( '' ); + + await $( '#searchform input' ).click(); + + // work around lang selector not showing up the first time + // blur 
the search bar + await $( '.page-Main_Page' ).click(); + // focus search bar again, lang selector should be there now + await $( '#searchform input' ).click(); + + await $$( '.imeselector' ) + .filter( async ( selector ) => selector.isClickable() )[ 0 ] + .click(); - await expect( $( '.imeselector-menu h3' ) ).toHaveText( 'English' ); + // We need to use getHTML(). If an element isn't interactable + // getText() returns an empty string. + // https://webdriver.io/docs/api/element/getText/ + await expect( + $( 'div.imeselector-menu h3.ime-list-title' ).getHTML() + ).resolves.toMatch( /English/ ); } ); } ); diff --git a/test/specs/repo/extensions/visual-editor.ts b/test/specs/repo/extensions/visual-editor.ts index d3155235b..49274636c 100644 --- a/test/specs/repo/extensions/visual-editor.ts +++ b/test/specs/repo/extensions/visual-editor.ts @@ -1,12 +1,12 @@ +import page from '../../../helpers/pages/page.js'; + describe( 'VisualEditor', function () { beforeEach( async function () { await browser.skipIfExtensionNotPresent( this, 'VisualEditor' ); } ); it( 'Should be able to edit a page using the editor', async function () { - await browser.url( - testEnv.vars.WIKIBASE_URL + '/wiki/TestVisualEditor?veaction=edit' - ); + await page.open( '/wiki/TestVisualEditor?veaction=edit' ); // start editing await $( '.oo-ui-messageDialog-actions' ).waitForDisplayed(); diff --git a/test/specs/repo/extensions/wikibase-edtf.ts b/test/specs/repo/extensions/wikibase-edtf.ts index e3e7746c4..31b2af516 100644 --- a/test/specs/repo/extensions/wikibase-edtf.ts +++ b/test/specs/repo/extensions/wikibase-edtf.ts @@ -11,7 +11,7 @@ describe( 'WikibaseEdtf', function () { it( 'Should allow to create and use the EDTF property', async function () { // create the property const propertyId = await WikibaseApi.createProperty( 'edtf' ); - expect( propertyId.startsWith( 'P' ) ).toBe( true ); + expect( propertyId ).toMatch( /^P/ ); const rawValue = '1985-04-12T23:20:30'; @@ -36,8 +36,8 @@ describe( 
'WikibaseEdtf', function () { const responseSnak = responseData.entities[ itemId ].claims[ propertyId ][ 0 ].mainsnak; - expect( responseSnak.datavalue.value ).toBe( '1985-04-12T23:20:30' ); - expect( responseSnak.datatype ).toBe( 'edtf' ); + expect( responseSnak.datavalue.value ).toEqual( '1985-04-12T23:20:30' ); + expect( responseSnak.datatype ).toEqual( 'edtf' ); // for a pretty screenshot await ItemPage.open( itemId ); diff --git a/test/specs/repo/extensions/wikibase-local-media.ts b/test/specs/repo/extensions/wikibase-local-media.ts index bf283b77e..b0078f5c8 100644 --- a/test/specs/repo/extensions/wikibase-local-media.ts +++ b/test/specs/repo/extensions/wikibase-local-media.ts @@ -2,6 +2,7 @@ import LoginPage from 'wdio-mediawiki/LoginPage.js'; import WikibaseApi from 'wdio-wikibase/wikibase.api.js'; import ItemPage from '../../../helpers/pages/entity/item.page.js'; import PropertyPage from '../../../helpers/pages/entity/property.page.js'; +import page from '../../../helpers/pages/page.js'; import propertyIdSelector from '../../../helpers/property-id-selector.js'; import { Claim } from '../../../types/entity-data.js'; @@ -19,7 +20,7 @@ describe( 'WikibaseLocalMedia', function () { testEnv.vars.MW_ADMIN_PASS ); - await browser.url( `${ testEnv.vars.WIKIBASE_URL }/wiki/Special:Upload/` ); + await page.open( '/wiki/Special:Upload/' ); const filePath = new URL( 'image.png', import.meta.url ); await $( '#wpUploadFile' ).setValue( filePath.pathname ); @@ -31,12 +32,13 @@ describe( 'WikibaseLocalMedia', function () { it( 'Should allow to create a property with localMedia datatype', async function () { propertyId = await WikibaseApi.createProperty( 'localMedia' ); - expect( propertyId.startsWith( 'P' ) ).toBe( true ); + expect( propertyId ).toMatch( /^P\d+$/ ); await PropertyPage.open( propertyId ); propertyLabel = await $( '#firstHeading' ).getText(); - await expect( $( '#firstHeading' ) ).toHaveTextContaining( propertyId ); + // eslint-disable-next-line 
security/detect-non-literal-regexp + await expect( $( '#firstHeading' ) ).toHaveText( new RegExp( propertyId ) ); } ); it( 'Should allow to use uploaded image on statement', async function () { @@ -57,9 +59,9 @@ describe( 'WikibaseLocalMedia', function () { const itemId = await WikibaseApi.createItem( 'image-test', data ); await ItemPage.open( itemId ); - await expect( $( '.wikibase-snakview-value img' ) ).toHaveAttrContaining( + await expect( $( '.wikibase-snakview-value img' ) ).toHaveAttr( 'src', - 'Image.png' + /Image\.png/ ); } ); diff --git a/test/specs/repo/extensions/wikibase-manifest.ts b/test/specs/repo/extensions/wikibase-manifest.ts index 19bda931e..e472fb017 100644 --- a/test/specs/repo/extensions/wikibase-manifest.ts +++ b/test/specs/repo/extensions/wikibase-manifest.ts @@ -9,13 +9,13 @@ describe( 'WikibaseManifest', function () { ); const data = result.data; - expect( 'wikibase-docker' ).toBe( data.name ); + expect( data.name ).toEqual( 'wikibase-docker' ); - expect( testEnv.vars.WIKIBASE_URL + '/w/api.php' ).toBe( data.api.action ); - expect( testEnv.vars.WIKIBASE_URL + '/w/rest.php' ).toBe( data.api.rest ); + expect( data.api.action ).toEqual( testEnv.vars.WIKIBASE_URL + '/w/api.php' ); + expect( data.api.rest ).toEqual( testEnv.vars.WIKIBASE_URL + '/w/rest.php' ); - expect( + expect( data.oauth.registration_page ).toEqual( testEnv.vars.WIKIBASE_URL + '/wiki/Special:OAuthConsumerRegistration' - ).toBe( data.oauth.registration_page ); + ); } ); } ); diff --git a/test/specs/repo/property.ts b/test/specs/repo/property.ts index ad444440c..b3d5f8088 100644 --- a/test/specs/repo/property.ts +++ b/test/specs/repo/property.ts @@ -89,6 +89,7 @@ describe( 'Property', function () { it( 'Should display the added properties on the "Recent changes" page', async function () { await browser.waitForJobs(); + await $( '.vector-main-menu-dropdown' ).click(); await $( '=Recent changes' ).click(); await expect( $( `=(${ propertyId })` ) ).toExist(); await expect( $( 
`=(${ stringPropertyId })` ) ).toExist(); @@ -111,16 +112,16 @@ describe( 'Property', function () { $( 'ul.mw-contributions-list' ).$$( 'li' ) ).resolves.toHaveLength( 4 ); await expect( $( 'span.mw-tag-marker-mw-undo' ) ).toExist(); - await expect( - $( 'ul.mw-contributions-list' ).$( 'li.before' ) - ).toHaveTextContaining( undoSummaryText ); + await expect( $( 'ul.mw-contributions-list' ).$( 'li.before' ) ).toHaveText( + new RegExp( undoSummaryText ) + ); } ); it( 'Should be able to set label, description, aliases', async function () { await page.open( '/wiki/Special:SetLabelDescriptionAliases/' ); await $( 'label=ID:' ).click(); await browser.keys( propertyId.split( '' ) ); - await $( 'span=Set label, description and aliases' ).click(); + await $( 'span=Continue' ).click(); await $( 'label=Label:' ).click(); await browser.keys( `${ dataType.name } Label`.split( '' ) ); @@ -131,7 +132,7 @@ describe( 'Property', function () { `${ dataType.name } Alias A|${ dataType.name } Alias B`.split( '' ) ); - await $( 'span=Set label, description and aliases' ).click(); + await $( 'span=Save changes' ).click(); await expect( $( `span.wikibase-labelview-text=${ dataType.name } Label` ) diff --git a/test/specs/repo/queryservice.ts b/test/specs/repo/queryservice.ts index 6ef210271..1967bf174 100644 --- a/test/specs/repo/queryservice.ts +++ b/test/specs/repo/queryservice.ts @@ -3,6 +3,7 @@ import LoginPage from 'wdio-mediawiki/LoginPage.js'; import { getTestString } from 'wdio-mediawiki/Util.js'; import WikibaseApi from 'wdio-wikibase/wikibase.api.js'; import QueryServiceUIPage from '../../helpers/pages/queryservice-ui/queryservice-ui.page.js'; +import SpecialNewItemPage from '../../helpers/pages/special/new-item.page.js'; import { wikibasePropertyString } from '../../helpers/wikibase-property-types.js'; describe( 'QueryService', function () { @@ -12,14 +13,14 @@ describe( 'QueryService', function () { { validateStatus: false }, {} ); - expect( result.status ).toBe( 405 ); + 
expect( result.status ).toEqual( 405 ); } ); it( 'Should be able to get sparql endpoint', async function () { const result = await browser.makeRequest( `${ testEnv.vars.WDQS_PROXY_URL }/bigdata/namespace/wdq/sparql` ); - expect( result.status ).toBe( 200 ); + expect( result.status ).toEqual( 200 ); } ); it( 'Should not be possible to reach blazegraph ldf api that is not enabled', async function () { @@ -27,7 +28,7 @@ describe( 'QueryService', function () { `${ testEnv.vars.WDQS_PROXY_URL }/bigdata/namespace/wdq/ldf`, { validateStatus: false } ); - expect( result.status ).toBe( 404 ); + expect( result.status ).toEqual( 404 ); } ); it( 'Should not be possible to reach blazegraph ldf assets thats not enabled', async function () { @@ -35,7 +36,7 @@ describe( 'QueryService', function () { `${ testEnv.vars.WDQS_PROXY_URL }/bigdata/namespace/wdq/assets`, { validateStatus: false } ); - expect( result.status ).toBe( 404 ); + expect( result.status ).toEqual( 404 ); } ); it( 'Should not be possible to reach blazegraph workbench', async function () { @@ -43,7 +44,7 @@ describe( 'QueryService', function () { `${ testEnv.vars.WDQS_PROXY_URL }/bigdata/#query`, { validateStatus: false } ); - expect( result.status ).toBe( 404 ); + expect( result.status ).toEqual( 404 ); } ); it( 'Should show up with property in queryservice ui after creation', async function () { @@ -79,30 +80,30 @@ describe( 'QueryService', function () { await expect( QueryServiceUIPage.resultIncludes( 'schema:version' ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); await expect( QueryServiceUIPage.resultIncludes( 'schema:dateModified' ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); await expect( QueryServiceUIPage.resultIncludes( 'wikibase:timestamp' ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); // label should match on the prefix await expect( QueryServiceUIPage.resultIncludes( 'rdfs:label', itemLabel ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); // should have one 
statement await expect( QueryServiceUIPage.resultIncludes( 'wikibase:statements', '1' ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); await expect( QueryServiceUIPage.resultIncludes( 'wikibase:sitelinks', '0' ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); await expect( QueryServiceUIPage.resultIncludes( 'wikibase:identifiers', '0' ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); // property value is set with correct rdf await expect( @@ -110,7 +111,7 @@ describe( 'QueryService', function () { `<${ testEnv.vars.WIKIBASE_URL }/prop/direct/${ propertyId }>`, propertyValue ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); // query the property using wdt: prefix await QueryServiceUIPage.open( `SELECT * WHERE{ ?s wdt:${ propertyId } ?o }` ); @@ -124,13 +125,41 @@ describe( 'QueryService', function () { `<${ testEnv.vars.WIKIBASE_URL }/entity/${ itemId }>`, propertyValue ) - ).resolves.toBe( true ); + ).resolves.toEqual( true ); } ); it( 'Should not show up in queryservice ui after deletion', async function () { - // TODO make an item using the UI - const itemId = await WikibaseApi.createItem( getTestString( 'T267743-' ) ); + await SpecialNewItemPage.open(); + + await $( 'input[name="label"]' ).setValue( getTestString( 'T267743-' ) ); + await $( 'input[name="description"]' ).setValue( getTestString( 'Description' ) ); + await $( 'input[name="aliases"]' ).setValue( + `${ getTestString( 'A' ) }|${ getTestString( 'B' ) }` + ); + await SpecialNewItemPage.submit(); + + await expect( $( 'h1#firstHeading' ).$( 'span.wikibase-title-id' ) ).toHaveText( + /\(Q\d+\)/ + ); + const itemId = ( + await $( 'h1#firstHeading' ).$( 'span.wikibase-title-id' ).getText() + ).replace( /[()]/g, '' ); + + // Check it shows up after creation + await QueryServiceUIPage.open( `SELECT * WHERE{ wd:${ itemId } ?p ?o }` ); + + // wait for WDQS-updater + // eslint-disable-next-line wdio/no-pause + await browser.pause( 20 * 1000 ); + + await 
QueryServiceUIPage.submit(); + await QueryServiceUIPage.resultTable; + + await expect( + QueryServiceUIPage.resultIncludes( 'schema:version' ) + ).resolves.toBe( true ); + // Attempt to delete await LoginPage.login( testEnv.vars.MW_ADMIN_NAME, testEnv.vars.MW_ADMIN_PASS @@ -154,14 +183,14 @@ describe( 'QueryService', function () { const resultText = await QueryServiceUIPage.resultTable.getText(); // item should not be included - expect( resultText.includes( 'schema:version' ) ).toBe( false ); - expect( resultText.includes( 'schema:dateModified' ) ).toBe( false ); - expect( resultText.includes( 'wikibase:sitelinks' ) ).toBe( false ); - expect( resultText.includes( 'wikibase:identifiers' ) ).toBe( false ); - expect( resultText.includes( 'rdfs:label' ) ).toBe( false ); + expect( resultText ).not.toMatch( 'schema:version' ); + expect( resultText ).not.toMatch( 'schema:dateModified' ); + expect( resultText ).not.toMatch( 'wikibase:sitelinks' ); + expect( resultText ).not.toMatch( 'wikibase:identifiers' ); + expect( resultText ).not.toMatch( 'rdfs:label' ); // timestamp always shows - expect( resultText.includes( 'wikibase:timestamp' ) ).toBe( true ); + expect( resultText ).toMatch( 'wikibase:timestamp' ); } ); it( 'Should show results for a select query', async function () { diff --git a/test/specs/repo/search.ts b/test/specs/repo/search.ts index 781ec831b..55582d051 100644 --- a/test/specs/repo/search.ts +++ b/test/specs/repo/search.ts @@ -14,6 +14,6 @@ describe( 'Search', function () { `${ testEnv.vars.WIKIBASE_URL }/w/api.php?action=wbsearchentities&search=${ itemLabel }&format=json&errorformat=plaintext&language=en&uselang=en&type=item` ); - expect( result.data.search[ 0 ].label ).toBe( itemLabel ); + expect( result.data.search[ 0 ].label ).toEqual( itemLabel ); } ); } ); diff --git a/test/specs/repo/special-item.ts b/test/specs/repo/special-new-item.ts similarity index 100% rename from test/specs/repo/special-item.ts rename to test/specs/repo/special-new-item.ts 
diff --git a/test/specs/repo/special-property.ts b/test/specs/repo/special-new-property.ts similarity index 100% rename from test/specs/repo/special-property.ts rename to test/specs/repo/special-new-property.ts diff --git a/test/specs/repo/recent-changes.ts b/test/specs/repo/special-recent-changes.ts similarity index 77% rename from test/specs/repo/recent-changes.ts rename to test/specs/repo/special-recent-changes.ts index 5c3869239..7e9bd9e6a 100644 --- a/test/specs/repo/recent-changes.ts +++ b/test/specs/repo/special-recent-changes.ts @@ -1,42 +1,44 @@ +import page from '../../helpers/pages/page.js'; + describe( 'Special:RecentChanges', function () { beforeEach( async function () { - await browser.url( - `${ testEnv.vars.WIKIBASE_URL }/wiki/Special:RecentChanges?limit=50&days=7&urlversion=2&enhanced=0` + await page.open( + '/wiki/Special:RecentChanges?limit=50&days=7&urlversion=2&enhanced=0' ); } ); it( 'Should be able to change limit', async function () { await expect( $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ) - ).toHaveTextContaining( '50 changes' ); + ).toHaveText( /50 changes/ ); await $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ).click(); await $( 'div.mw-rcfilters-ui-changesLimitPopupWidget' ) .$( 'span=100' ) .click(); await expect( $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ) - ).toHaveTextContaining( '100 changes' ); + ).toHaveText( /100 changes/ ); } ); it( 'Should be able to change time to 2 hours', async function () { await expect( $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ) - ).toHaveTextContaining( '7 days' ); + ).toHaveText( /7 days/ ); await $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ).click(); await $( 'div.mw-rcfilters-ui-datePopupWidget-hours' ).$( 'span=2' ).click(); await expect( $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ) - ).toHaveTextContaining( '2 hours' ); + ).toHaveText( /2 hours/ ); } ); it( 'Should be able to change time to 3 days', async function 
() { await expect( $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ) - ).toHaveTextContaining( '7 days' ); + ).toHaveText( /7 days/ ); await $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ).click(); await $( 'div.mw-rcfilters-ui-datePopupWidget-days' ).$( 'span=3' ).click(); await expect( $( 'div.mw-rcfilters-ui-changesLimitAndDateButtonWidget' ) - ).toHaveTextContaining( '3 days' ); + ).toHaveText( /3 days/ ); } ); } ); diff --git a/test/specs/repo/special-version.ts b/test/specs/repo/special-version.ts index 4ac33c103..269f275f7 100644 --- a/test/specs/repo/special-version.ts +++ b/test/specs/repo/special-version.ts @@ -1,8 +1,11 @@ +import page from '../../helpers/pages/page.js'; + describe( 'Special:Version', function () { it( 'Should contain the correct MediaWiki version', async function () { - await browser.url( `${ testEnv.vars.WIKIBASE_URL }/wiki/Special:Version` ); - await expect( $( '#sv-software' ) ).toHaveTextContaining( - `MediaWiki ${ testEnv.vars.MEDIAWIKI_VERSION }` + await page.open( '/wiki/Special:Version' ); + await expect( $( '#sv-software' ) ).toHaveText( + // eslint-disable-next-line security/detect-non-literal-regexp + new RegExp( `MediaWiki ${ testEnv.vars.MEDIAWIKI_VERSION }` ) ); } ); @@ -38,7 +41,7 @@ describe( 'Special:Version', function () { it( `Should contain ${ name } extensions`, async function () { await browser.skipIfExtensionNotPresent( this, name ); - await browser.url( `${ testEnv.vars.WIKIBASE_URL }/wiki/Special:Version` ); + await page.open( '/wiki/Special:Version' ); // /wiki/Special:Version generate these for each installed extension const elementSelector = await $( diff --git a/test/specs/repo/api.ts b/test/specs/repo/wikibase-api.ts similarity index 87% rename from test/specs/repo/api.ts rename to test/specs/repo/wikibase-api.ts index 6290be1eb..d9fa7937a 100644 --- a/test/specs/repo/api.ts +++ b/test/specs/repo/wikibase-api.ts @@ -9,7 +9,7 @@ describe( 'Wikibase API', function () { // 
eslint-disable-next-line mocha/no-setup-in-describe dataTypes.forEach( ( dataType: WikibasePropertyType ) => { it( `Should be able to create many properties and items of type ${ dataType.name }`, async function () { - Array( 100 ).forEach( async () => { + for ( let i = 0; i < 100; i++ ) { const itemLabel = 'T267743-'; const propertyValue = `PropertyExample${ dataType.name }Value`; const propertyId = await WikibaseApi.createProperty( dataType.urlName ); @@ -32,9 +32,9 @@ describe( 'Wikibase API', function () { data ); - expect( itemId.startsWith( 'Q' ) ).toBe( true ); - expect( propertyId.startsWith( 'P' ) ).toBe( true ); - } ); + expect( itemId ).toMatch( /^Q\d+$/ ); + expect( propertyId ).toMatch( /^P\d+$/ ); + } } ); } ); } ); diff --git a/test/specs/repo_client/extensions/scribunto-item.ts b/test/specs/repo_client/extensions/scribunto-item.ts index b38d938c4..acf35121c 100644 --- a/test/specs/repo_client/extensions/scribunto-item.ts +++ b/test/specs/repo_client/extensions/scribunto-item.ts @@ -4,7 +4,7 @@ import LoginPage from 'wdio-mediawiki/LoginPage.js'; import { getTestString } from 'wdio-mediawiki/Util.js'; import WikibaseApi from 'wdio-wikibase/wikibase.api.js'; import ItemPage from '../../../helpers/pages/entity/item.page.js'; -import { utf8 } from '../../../helpers/readFileEncoding.js'; +import { utf8 } from '../../../helpers/read-file-encoding.js'; import ExternalChange from '../../../types/external-change.js'; const itemLabel = getTestString( 'The Item' ); @@ -66,7 +66,7 @@ describe( 'Scribunto Item', function () { ); // should come from executed lua script - expect( executionContent.includes( itemLabel ) ).toBe( true ); + expect( executionContent ).toMatch( itemLabel ); } ); // This will generate a change that will dispatch diff --git a/test/specs/repo_client/interwiki-links.ts b/test/specs/repo_client/interwiki-links.ts index dfddd0e43..4da8ffd24 100644 --- a/test/specs/repo_client/interwiki-links.ts +++ b/test/specs/repo_client/interwiki-links.ts @@ 
-1,5 +1,5 @@ import { readFile } from 'fs/promises'; -import { utf8 } from '../../helpers/readFileEncoding.js'; +import { utf8 } from '../../helpers/read-file-encoding.js'; describe( 'Interwiki links', function () { it( 'Should be able to insert interwiki links', async function () { @@ -16,9 +16,7 @@ describe( 'Interwiki links', function () { const clientWikiQueryResults = await browser.dbQuery( 'SELECT iw_url FROM interwiki WHERE iw_prefix = "client_wiki"' ); - expect( - clientWikiQueryResults.includes( testEnv.vars.WIKIBASE_CLIENT_URL ) - ).toBe( true ); + expect( clientWikiQueryResults ).toMatch( testEnv.vars.WIKIBASE_CLIENT_URL ); const config = { user: testEnv.vars.DB_USER, @@ -35,6 +33,6 @@ describe( 'Interwiki links', function () { 'SELECT iw_url FROM interwiki WHERE iw_prefix = "my_wiki"', config ); - expect( myWikiQueryResults.includes( testEnv.vars.WIKIBASE_URL ) ).toBe( true ); + expect( myWikiQueryResults ).toMatch( testEnv.vars.WIKIBASE_URL ); } ); } ); diff --git a/test/specs/repo_client/item.ts b/test/specs/repo_client/item.ts index d62eb39ad..5283454f9 100644 --- a/test/specs/repo_client/item.ts +++ b/test/specs/repo_client/item.ts @@ -3,6 +3,7 @@ import LoginPage from 'wdio-mediawiki/LoginPage.js'; import { getTestString } from 'wdio-mediawiki/Util.js'; import WikibaseApi from 'wdio-wikibase/wikibase.api.js'; import ItemPage from '../../helpers/pages/entity/item.page.js'; +import page from '../../helpers/pages/page.js'; import SpecialNewItemPage from '../../helpers/pages/special/new-item.page.js'; import ExternalChange from '../../types/external-change.js'; @@ -68,24 +69,24 @@ describe( 'Item', function () { `{{#statements:${ propertyId }|from=${ itemId }}}` ); // label should come from repo property - expect( bodyText ).toBe( propertyValue ); + expect( bodyText ).toEqual( propertyValue ); } ); // This will generate a change that will dispatch it( 'Should be able to create site-links from item to client', async function () { // Create a site-link 
on a the Main_Page - await browser.url( - `${ testEnv.vars.WIKIBASE_URL }/wiki/Special:SetSiteLink/Q1?site=client_wiki&page=${ pageTitle }` + await page.open( + `/wiki/Special:SetSiteLink/Q1?site=client_wiki&page=${ pageTitle }` ); await $( '#wb-setsitelink-submit button' ).click(); // label should come from repo property - await expect( - $( '.wikibase-sitelinklistview-listview li' ) - ).toHaveTextContaining( 'client_wiki' ); - await expect( - $( '.wikibase-sitelinklistview-listview li' ) - ).toHaveTextContaining( pageTitle ); + await expect( $( '.wikibase-sitelinklistview-listview li' ) ).toHaveText( + /client_wiki/ + ); + await expect( $( '.wikibase-sitelinklistview-listview li' ) ).toHaveText( + new RegExp( pageTitle ) + ); } ); it( 'Should be able to see site-link change is dispatched to client', async function () { diff --git a/test/specs/upgrade/pre-upgrade.ts b/test/specs/upgrade/pre-upgrade.ts index e93baef5a..9f2b2725d 100644 --- a/test/specs/upgrade/pre-upgrade.ts +++ b/test/specs/upgrade/pre-upgrade.ts @@ -27,8 +27,8 @@ describe( 'Wikibase pre upgrade', function () { data ); - expect( itemId.startsWith( 'Q' ) ).toBe( true ); - expect( propertyId.startsWith( 'P' ) ).toBe( true ); + expect( itemId ).toMatch( /^Q\d+$/ ); + expect( propertyId ).toMatch( /^P\d+$/ ); } } ); @@ -52,7 +52,7 @@ describe( 'Wikibase pre upgrade', function () { const itemId = await WikibaseApi.createItem( itemLabel, data ); - expect( itemId.startsWith( 'Q' ) ).toBe( true ); - expect( propertyId.startsWith( 'P' ) ).toBe( true ); + expect( itemId ).toMatch( /^Q\d+$/ ); + expect( propertyId ).toMatch( /^P\d+$/ ); } ); } ); diff --git a/test/specs/upgrade/queryservice-post-upgrade.ts b/test/specs/upgrade/queryservice-post-upgrade.ts index 1de526e37..89f355fb6 100644 --- a/test/specs/upgrade/queryservice-post-upgrade.ts +++ b/test/specs/upgrade/queryservice-post-upgrade.ts @@ -38,8 +38,8 @@ describe( 'Wikibase post upgrade', function () { newItemId = await WikibaseApi.createItem( 
itemLabel, data ); - expect( newItemId.startsWith( 'Q' ) ).toBe( true ); - expect( newPropertyId.startsWith( 'P' ) ).toBe( true ); + expect( newItemId ).toMatch( /^Q\d+$/ ); + expect( newPropertyId ).toMatch( /^P\d+$/ ); } ); it( 'New item should show up in Queryservice', async function () { @@ -99,7 +99,7 @@ describe( 'Wikibase post upgrade', function () { expect( timestamp ).toEqual( expect.anything() ); expect( statement ).toEqual( expect.anything() ); - expect( property.o.value ).toBe( propertyValue ); - expect( itemLabelValue.o.value ).toBe( itemLabel ); + expect( property.o.value ).toEqual( propertyValue ); + expect( itemLabelValue.o.value ).toEqual( itemLabel ); } ); } ); diff --git a/test/specs/upgrade/queryservice-pre-and-post-upgrade.ts b/test/specs/upgrade/queryservice-pre-and-post-upgrade.ts index e60246f13..08d54280a 100644 --- a/test/specs/upgrade/queryservice-pre-and-post-upgrade.ts +++ b/test/specs/upgrade/queryservice-pre-and-post-upgrade.ts @@ -23,13 +23,13 @@ describe( 'Wikibase post upgrade', function () { const result = await browser.makeRequest( `${ testEnv.vars.WIKIBASE_URL }/w/api.php?action=wbsearchentities&search=UpgradeItem&format=json&language=en&type=item` ); - const success = result.data.success; - const searchResults = result.data.search; - expect( success ).toBe( 1 ); + expect( result.data.success ).toEqual( 1 ); + + const searchResults = result.data.search; expect( searchResults ).toHaveLength( 1 ); - expect( searchResults[ 0 ].match.text ).toBe( 'UpgradeItem' ); - expect( searchResults[ 0 ].match.type ).toBe( 'label' ); + expect( searchResults[ 0 ].match.text ).toEqual( 'UpgradeItem' ); + expect( searchResults[ 0 ].match.type ).toEqual( 'label' ); oldItemID = searchResults[ 0 ].id; @@ -105,9 +105,9 @@ describe( 'Wikibase post upgrade', function () { expect( statement ).toEqual( expect.anything() ); expect( property ).toEqual( expect.anything() ); - expect( property.o.value ).toBe( 'UpgradeItemStringValue' ); + expect( 
property.o.value ).toEqual( 'UpgradeItemStringValue' ); expect( itemLabelValue ).toEqual( expect.anything() ); - expect( itemLabelValue.o.value ).toBe( 'UpgradeItem' ); + expect( itemLabelValue.o.value ).toEqual( 'UpgradeItem' ); } ); } ); diff --git a/test/specs/upgrade/upgrade.ts b/test/specs/upgrade/upgrade.ts index afef2fc07..0cef0dade 100644 --- a/test/specs/upgrade/upgrade.ts +++ b/test/specs/upgrade/upgrade.ts @@ -64,8 +64,8 @@ describe( 'Wikibase upgrade', function () { getTestString( itemLabel ), data ); - expect( itemId.startsWith( 'Q' ) ).toBe( true ); - expect( propertyId.startsWith( 'P' ) ).toBe( true ); + expect( itemId ).toMatch( /^Q\d+$/ ); + expect( propertyId ).toMatch( /^P\d+$/ ); } } ); @@ -73,13 +73,13 @@ describe( 'Wikibase upgrade', function () { const result = await browser.makeRequest( `${ testEnv.vars.WIKIBASE_URL }/w/api.php?action=wbsearchentities&search=UpgradeItem&format=json&language=en&type=item` ); - const success = result.data.success; - const searchResults = result.data.search; - expect( success ).toBe( 1 ); + expect( result.data.success ).toEqual( 1 ); + + const searchResults = result.data.search; expect( searchResults ).toHaveLength( 1 ); - expect( searchResults[ 0 ].match.text ).toBe( 'UpgradeItem' ); - expect( searchResults[ 0 ].match.type ).toBe( 'label' ); + expect( searchResults[ 0 ].match.text ).toEqual( 'UpgradeItem' ); + expect( searchResults[ 0 ].match.type ).toEqual( 'label' ); oldItemID = searchResults[ 0 ].id; diff --git a/test/suites/base__fedprops/base__fedprops.conf.ts b/test/suites/base__fedprops/base__fedprops.conf.ts index aac9508ed..5ab429609 100755 --- a/test/suites/base__fedprops/base__fedprops.conf.ts +++ b/test/suites/base__fedprops/base__fedprops.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from 
'../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/base__pingback/base__pingback.conf.ts b/test/suites/base__pingback/base__pingback.conf.ts index a04394490..a71f39f97 100755 --- a/test/suites/base__pingback/base__pingback.conf.ts +++ b/test/suites/base__pingback/base__pingback.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/base__repo/base__repo.conf.ts b/test/suites/base__repo/base__repo.conf.ts index 46067a50f..d97f4a51b 100755 --- a/test/suites/base__repo/base__repo.conf.ts +++ b/test/suites/base__repo/base__repo.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/base__repo_client/base__repo_client.conf.ts b/test/suites/base__repo_client/base__repo_client.conf.ts index 325976fdf..9a905a762 100755 --- a/test/suites/base__repo_client/base__repo_client.conf.ts +++ b/test/suites/base__repo_client/base__repo_client.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git 
a/test/suites/client-LocalSettings.php.template b/test/suites/client-LocalSettings.php.template index 38fe9b543..2bb55a7b8 100644 --- a/test/suites/client-LocalSettings.php.template +++ b/test/suites/client-LocalSettings.php.template @@ -87,6 +87,10 @@ foreach (glob("LocalSettings.d/*.php") as ${DOLLAR}filename) continue; } + if (strpos(${DOLLAR}filename, 'EntitySchema') !== false) { + continue; + } + if (strpos(${DOLLAR}filename, 'Elastic') !== false) { continue; } diff --git a/test/suites/confirm_edit/confirm_edit.conf.ts b/test/suites/confirm_edit/confirm_edit.conf.ts index 93e9127bf..eff498c4c 100755 --- a/test/suites/confirm_edit/confirm_edit.conf.ts +++ b/test/suites/confirm_edit/confirm_edit.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/elasticsearch/elasticsearch.conf.ts b/test/suites/elasticsearch/elasticsearch.conf.ts index 89990c9e3..541ace760 100755 --- a/test/suites/elasticsearch/elasticsearch.conf.ts +++ b/test/suites/elasticsearch/elasticsearch.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/example/example.conf.ts b/test/suites/example/example.conf.ts index 61546babd..7098940d5 100755 --- a/test/suites/example/example.conf.ts +++ b/test/suites/example/example.conf.ts @@ -1,4 +1,4 @@ -import TestEnv from '../../setup/TestEnv.js'; +import TestEnv from 
'../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/fedprops/fedprops.conf.ts b/test/suites/fedprops/fedprops.conf.ts index 51983a08d..916d3a6d6 100755 --- a/test/suites/fedprops/fedprops.conf.ts +++ b/test/suites/fedprops/fedprops.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/pingback/pingback.conf.ts b/test/suites/pingback/pingback.conf.ts index f8c6a4410..f2450d2e0 100755 --- a/test/suites/pingback/pingback.conf.ts +++ b/test/suites/pingback/pingback.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/quickstatements/quickstatements.conf.ts b/test/suites/quickstatements/quickstatements.conf.ts index 8610b6531..f168bd895 100755 --- a/test/suites/quickstatements/quickstatements.conf.ts +++ b/test/suites/quickstatements/quickstatements.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/repo/repo.conf.ts b/test/suites/repo/repo.conf.ts index 
f2e27bd48..8749cd1df 100755 --- a/test/suites/repo/repo.conf.ts +++ b/test/suites/repo/repo.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/repo_client/repo_client.conf.ts b/test/suites/repo_client/repo_client.conf.ts index a0077faec..c560c7666 100755 --- a/test/suites/repo_client/repo_client.conf.ts +++ b/test/suites/repo_client/repo_client.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; export const testEnv = TestEnv.createWithDefaults( { diff --git a/test/suites/upgrade/upgrade.conf.ts b/test/suites/upgrade/upgrade.conf.ts index 160aeac4d..749f0a0bb 100644 --- a/test/suites/upgrade/upgrade.conf.ts +++ b/test/suites/upgrade/upgrade.conf.ts @@ -1,5 +1,5 @@ -import TestEnv from '../../setup/TestEnv.js'; -import { defaultTestSettings } from '../../setup/makeTestSettings.js'; +import { defaultTestSettings } from '../../setup/make-test-settings.js'; +import TestEnv from '../../setup/test-env.js'; import wdioConfig from '../../setup/wdio.conf.js'; import versions from './versions.js'; diff --git a/test/suites/upgrade/versions.ts b/test/suites/upgrade/versions.ts index 00252f8a0..8531e0187 100644 --- a/test/suites/upgrade/versions.ts +++ b/test/suites/upgrade/versions.ts @@ -23,6 +23,9 @@ export const versions = { WMDE16: 'wikibase/wikibase:1.40.2-wmde.16', WMDE16_BUNDLE: 'wikibase/wikibase-bundle:1.40.2-wmde.16', + WMDE17: 'wikibase/wikibase:1.41.0-wmde.17', + 
WMDE17_BUNDLE: 'wikibase/wikibase-bundle:1.41.0-wmde.17', + LOCAL_BUILD: 'wikibase/wikibase', LOCAL_BUILD_BUNDLE: 'wikibase/wikibase-bundle' }; diff --git a/test/types/TestSettings.ts b/test/types/test-settings.ts similarity index 100% rename from test/types/TestSettings.ts rename to test/types/test-settings.ts diff --git a/update_commits.py b/update_commits.py index 467399a05..698456a23 100644 --- a/update_commits.py +++ b/update_commits.py @@ -28,7 +28,7 @@ def get_commit(variable: str, url: str, parse_commit: callable, previous_commit: def parse_gerrit_commit(response: requests.Response) -> str: """Parse webpage using BeautifulSoup""" soup = BeautifulSoup(response.content, "lxml") - return soup.find("th", text="commit").next_sibling.text + return soup.find("th", string="commit").next_sibling.text github_pattern = re.compile( diff --git a/variables.env b/variables.env index 99e00e01b..ab5d3ad6c 100644 --- a/variables.env +++ b/variables.env @@ -16,8 +16,8 @@ WMDE_RELEASE_VERSION=main # Update only patch versions for security releases. # Choose latest version for major releases. # https://hub.docker.com/_/mediawiki -MEDIAWIKI_VERSION=1.40.2 -MEDIAWIKI_IMAGE_URL=mediawiki:1.40.2 +MEDIAWIKI_VERSION=1.41.0 +MEDIAWIKI_IMAGE_URL=mediawiki:1.41.0 # ############################################################################## @@ -49,8 +49,8 @@ ELASTICSEARCH_IMAGE_URL=docker.elastic.co/elasticsearch/elasticsearch:7.10.2 # Update to latest for major releases. 
# # https://central.sonatype.com/artifact/org.wikimedia.search/extra -# https://github.com/wikimedia/search-extra/compare/extra-parent-7.10.2-wmf4...extra-parent-7.10.2-wmf10 -ELASTICSEARCH_PLUGIN_WIKIMEDIA_EXTRA=7.10.2-wmf4 +# https://github.com/wikimedia/search-extra/compare/extra-parent-7.10.2-wmf4...extra-parent-7.10.2-wmf12 +ELASTICSEARCH_PLUGIN_WIKIMEDIA_EXTRA=7.10.2-wmf12 # https://central.sonatype.com/artifact/org.wikimedia.search.highlighter/experimental-highlighter-elasticsearch-plugin ELASTICSEARCH_PLUGIN_WIKIMEDIA_HIGHLIGHTER=7.10.2 @@ -76,14 +76,15 @@ COMPOSER_IMAGE_URL=docker-registry.wikimedia.org/releng/composer-php82:0.1.0-s3 # https://mariadb.org/mariadb/all-releases/ MARIADB_IMAGE_URL=mariadb:10.11 -# https://hub.docker.com/_/nginx -NGINX_IMAGE_URL=nginx:1.25.3-bookworm +# https://hub.docker.com/_/nginx +NGINX_IMAGE_URL=nginx:1.25.4-bookworm # https://hub.docker.com/_/node NODE_IMAGE_URL=node:20.11.1-bookworm +# https://www.php.net/supported-versions.php # https://hub.docker.com/_/php -PHP_IMAGE_URL=php:8.1.27-apache-bookworm +PHP_IMAGE_URL=php:8.2.15-apache-bookworm # https://hub.docker.com/_/eclipse-temurin # https://github.com/docker-library/docs/tree/master/eclipse-temurin/README.md#supported-tags-and-respective-dockerfile-links @@ -96,6 +97,13 @@ JRE_IMAGE_URL=eclipse-temurin:8u402-b06-jre-jammy DEBIAN_IMAGE_URL=debian:bookworm-slim +# ****************************************************************************** +# ****************************************************************************** +# Versions below are automatically updated by ./build.sh update_hashes +# ****************************************************************************** +# ****************************************************************************** + + # ############################################################################## # WMF maintained extensions # ############################################################################## @@ -103,36 +111,36 @@ 
DEBIAN_IMAGE_URL=debian:bookworm-slim # Versions in REL_ branches ensure compatibility with respective mediawiki versions. # Shouldn't require much of a review. # -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Wikibase/+/refs/heads/REL1_40 -WIKIBASE_COMMIT=903c3f9d0513d4e847727d73b0ab5c742c2d2f09 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Babel/+/refs/heads/REL1_40 -BABEL_COMMIT=43e9555fb5ffc00dfc76dd32aa12febe483d39d3 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/cldr/+/refs/heads/REL1_40 -CLDR_COMMIT=ac9ad554535c7160bd982be0cce1b9a73890bc46 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/CirrusSearch/+/refs/heads/REL1_40 -CIRRUSSEARCH_COMMIT=9a51bae5915b6fc20cbaa80f5b3dbe12e738ed86 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/ConfirmEdit/+/refs/heads/REL1_40 -CONFIRMEDIT_COMMIT=d4075f7e67aaea04627703e1c4f08743196d7854 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Elastica/+/refs/heads/REL1_40 -ELASTICA_COMMIT=b3b7675e8208f28fbf0a7939c108acf3e301656e -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/EntitySchema/+/refs/heads/REL1_40 -ENTITYSCHEMA_COMMIT=597c8bbd32810605262df7f2f9d20d02d8b7552f -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Nuke/+/refs/heads/REL1_40 -NUKE_COMMIT=220878a258fbfb1a4ef48b049ea1364816ef3149 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/OAuth/+/refs/heads/REL1_40 -OAUTH_COMMIT=0b21f8acdf38d5d9b19fb42197e07211925d0541 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Scribunto/+/refs/heads/REL1_40 -SCRIBUNTO_COMMIT=89255870fd6a8f3aa98782cc5260d2f1c0b2ad4d -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/SyntaxHighlight_GeSHi/+/refs/heads/REL1_40 -SYNTAXHIGHLIGHT_GESHI_COMMIT=79787a3234b293f57482de4964a802375256403e -# 
https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/UniversalLanguageSelector/+/refs/heads/REL1_40 -UNIVERSALLANGUAGESELECTOR_COMMIT=de7670d2217316401b7d38735a7be0e4482f1b42 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/VisualEditor/+/refs/heads/REL1_40 -VISUALEDITOR_COMMIT=00b627cb83f95a6dc62c640969429ab99c8cad2e -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/WikibaseCirrusSearch/+/refs/heads/REL1_40 -WIKIBASECIRRUSSEARCH_COMMIT=c88e34b0b2f6dbeb7f69cacdd40d95e6101f9807 -# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/WikibaseManifest/+/refs/heads/REL1_40 -WIKIBASEMANIFEST_COMMIT=10f0ddcb7eac28f56999f983f169cf7b1c6e240a +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Wikibase/+/refs/heads/REL1_41 +WIKIBASE_COMMIT=c42002be38fa2756b8e475b38b1e6ab86d4f53ce +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Babel/+/refs/heads/REL1_41 +BABEL_COMMIT=e110eaa7293796ae3cf149177551997cc2009095 +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/cldr/+/refs/heads/REL1_41 +CLDR_COMMIT=cad7768c4bd092318067fe15d674bf17ff965e2a +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/CirrusSearch/+/refs/heads/REL1_41 +CIRRUSSEARCH_COMMIT=f304f9a27ee1d99d9582fc222596e6c0e49d4ef9 +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/ConfirmEdit/+/refs/heads/REL1_41 +CONFIRMEDIT_COMMIT=9fb5930d4485a72001016ad62e63e457addb91f2 +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Elastica/+/refs/heads/REL1_41 +ELASTICA_COMMIT=24f94b80b51555e5e5b30b0633d215ee001650d4 +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/EntitySchema/+/refs/heads/REL1_41 +ENTITYSCHEMA_COMMIT=85ea615968509afab9214c572ac7aedc7dda4424 +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Nuke/+/refs/heads/REL1_41 +NUKE_COMMIT=8e969d99c09faac3ec64b35ce5553e7ec8c87b28 +# 
https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/OAuth/+/refs/heads/REL1_41 +OAUTH_COMMIT=8cdf22304b8382ef0015da9ef3210f90eb7687ce +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/Scribunto/+/refs/heads/REL1_41 +SCRIBUNTO_COMMIT=825e1203b060850804885041dcd5d744f81a5df0 +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/SyntaxHighlight_GeSHi/+/refs/heads/REL1_41 +SYNTAXHIGHLIGHT_GESHI_COMMIT=40908dd4204915eb20f1e43161c0489c7571f37d +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/UniversalLanguageSelector/+/refs/heads/REL1_41 +UNIVERSALLANGUAGESELECTOR_COMMIT=47158fbee4ce6870133c29f1ed6affdb2eb31acc +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/VisualEditor/+/refs/heads/REL1_41 +VISUALEDITOR_COMMIT=753916e615161ee417bd5c6a37a19cbcffd49fb0 +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/WikibaseCirrusSearch/+/refs/heads/REL1_41 +WIKIBASECIRRUSSEARCH_COMMIT=7a4fdf2342eaecee9217e430d702c03c66dab55c +# https://gerrit.wikimedia.org/r/plugins/gitiles/mediawiki/extensions/WikibaseManifest/+/refs/heads/REL1_41 +WIKIBASEMANIFEST_COMMIT=8c7cdae4d5a1e68b19fcca8eb5c2a7b6e3dc3e55 # ############################################################################## @@ -159,7 +167,7 @@ MAGNUSTOOLS_COMMIT=5b8cea412000072a2c8753023c11472a4ac11ef5 # No versioning scheme. Review changes carefully. # # https://gerrit.wikimedia.org/r/plugins/gitiles/wikidata/query/gui/+/refs/heads/master -WDQSQUERYGUI_COMMIT=4f355e5e1ee4747471b6cedb2fc78fe29f9c8cb7 +WDQSQUERYGUI_COMMIT=ff6d1c0413dc661d45ee16b6dbb624cd82cfc174 # ##############################################################################