From 2f3a7374bf9e14b24569083235391634b8c781f7 Mon Sep 17 00:00:00 2001
From: alvicsam
Date: Wed, 30 Aug 2023 17:58:04 +0200
Subject: [PATCH 1/2] [ci] add more jobs for pipeline cancel, cleanup

---
 .gitlab-ci.yml | 45 ++
 cumulus/scripts/ci/changelog/.gitignore | 4 -
 cumulus/scripts/ci/changelog/Gemfile | 23 -
 cumulus/scripts/ci/changelog/Gemfile.lock | 84 ---
 cumulus/scripts/ci/changelog/README.md | 78 ---
 cumulus/scripts/ci/changelog/bin/changelog | 164 ------
 .../scripts/ci/changelog/digests/.gitignore | 1 -
 cumulus/scripts/ci/changelog/digests/.gitkeep | 0
 cumulus/scripts/ci/changelog/lib/changelog.rb | 32 --
 .../ci/changelog/templates/change.md.tera | 44 --
 .../ci/changelog/templates/changes.md.tera | 21 -
 .../changelog/templates/changes_api.md.tera | 19 -
 .../templates/changes_client.md.tera | 17 -
 .../changelog/templates/changes_misc.md.tera | 39 --
 .../templates/changes_runtime.md.tera | 19 -
 .../ci/changelog/templates/compiler.md.tera | 6 -
 .../ci/changelog/templates/debug.md.tera | 9 -
 .../changelog/templates/docker_image.md.tera | 11 -
 .../templates/global_priority.md.tera | 35 --
 .../changelog/templates/high_priority.md.tera | 56 --
 .../templates/host_functions.md.tera | 38 --
 .../changelog/templates/migrations-db.md.tera | 26 -
 .../templates/migrations-runtime.md.tera | 14 -
 .../changelog/templates/pre_release.md.tera | 11 -
 .../ci/changelog/templates/runtime.md.tera | 28 -
 .../ci/changelog/templates/runtimes.md.tera | 17 -
 .../ci/changelog/templates/template.md.tera | 38 --
 .../scripts/ci/changelog/test/test_basic.rb | 23 -
 cumulus/scripts/ci/common/lib.sh | 141 -----
 cumulus/scripts/ci/create-benchmark-pr.sh | 53 --
 cumulus/scripts/ci/github/check-rel-br | 127 -----
 cumulus/scripts/ci/github/check_labels.sh | 91 ----
 .../ci/github/extrinsic-ordering-filter.sh | 55 --
 cumulus/scripts/ci/github/runtime-version.rb | 10 -
 .../scripts/ci/gitlab/pipeline/benchmarks.yml | 84 ---
 cumulus/scripts/ci/gitlab/pipeline/build.yml | 138 -----
 .../ci/gitlab/pipeline/integration_tests.yml | 2 -
 .../scripts/ci/gitlab/pipeline/publish.yml | 105 ----
 .../ci/gitlab/pipeline/short-benchmarks.yml | 56 --
 cumulus/scripts/ci/gitlab/pipeline/test.yml | 111 ----
 .../scripts/ci/gitlab/pipeline/zombienet.yml | 141 -----
 cumulus/scripts/ci/gitlab/prettier.sh | 6 -
 substrate/scripts/ci/common/lib.sh | 117 -----
 substrate/scripts/ci/github/check_labels.sh | 68 ---
 .../scripts/ci/github/generate_changelog.sh | 85 ---
 .../scripts/ci/gitlab/check-each-crate.py | 57 --
 substrate/scripts/ci/gitlab/check_runtime.sh | 121 -----
 substrate/scripts/ci/gitlab/check_signed.sh | 16 -
 substrate/scripts/ci/gitlab/ensure-deps.sh | 80 ---
 .../scripts/ci/gitlab/pipeline/build.yml | 215 --------
 .../scripts/ci/gitlab/pipeline/check.yml | 78 ---
 .../scripts/ci/gitlab/pipeline/publish.yml | 270 ----------
 substrate/scripts/ci/gitlab/pipeline/test.yml | 494 ------------------
 .../scripts/ci/gitlab/pipeline/zombienet.yml | 67 ---
 substrate/scripts/ci/gitlab/prettier.sh | 6 -
 .../ci/gitlab/publish_draft_release.sh | 54 --
 56 files changed, 45 insertions(+), 3705 deletions(-)
 delete mode 100644 cumulus/scripts/ci/changelog/.gitignore
 delete mode 100644 cumulus/scripts/ci/changelog/Gemfile
 delete mode 100644 cumulus/scripts/ci/changelog/Gemfile.lock
 delete mode 100644 cumulus/scripts/ci/changelog/README.md
 delete mode 100755 cumulus/scripts/ci/changelog/bin/changelog
 delete mode 100644 cumulus/scripts/ci/changelog/digests/.gitignore
 delete mode 100644 cumulus/scripts/ci/changelog/digests/.gitkeep
 delete mode 100644 cumulus/scripts/ci/changelog/lib/changelog.rb
 delete mode 100644 cumulus/scripts/ci/changelog/templates/change.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/changes.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/changes_api.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/changes_client.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/changes_misc.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/changes_runtime.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/compiler.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/debug.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/docker_image.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/global_priority.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/high_priority.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/host_functions.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/migrations-db.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/migrations-runtime.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/pre_release.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/runtime.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/runtimes.md.tera
 delete mode 100644 cumulus/scripts/ci/changelog/templates/template.md.tera
 delete mode 100755 cumulus/scripts/ci/changelog/test/test_basic.rb
 delete mode 100644 cumulus/scripts/ci/common/lib.sh
 delete mode 100755 cumulus/scripts/ci/create-benchmark-pr.sh
 delete mode 100755 cumulus/scripts/ci/github/check-rel-br
 delete mode 100755 cumulus/scripts/ci/github/check_labels.sh
 delete mode 100755 cumulus/scripts/ci/github/extrinsic-ordering-filter.sh
 delete mode 100644 cumulus/scripts/ci/github/runtime-version.rb
 delete mode 100644 cumulus/scripts/ci/gitlab/pipeline/benchmarks.yml
 delete mode 100644 cumulus/scripts/ci/gitlab/pipeline/build.yml
 delete mode 100644 cumulus/scripts/ci/gitlab/pipeline/integration_tests.yml
 delete mode 100644 cumulus/scripts/ci/gitlab/pipeline/publish.yml
 delete mode 100644 cumulus/scripts/ci/gitlab/pipeline/short-benchmarks.yml
 delete mode 100644 cumulus/scripts/ci/gitlab/pipeline/test.yml
 delete mode 100644 cumulus/scripts/ci/gitlab/pipeline/zombienet.yml
 delete mode 100755 cumulus/scripts/ci/gitlab/prettier.sh
 delete mode 100755 substrate/scripts/ci/common/lib.sh
 delete mode 100755 substrate/scripts/ci/github/check_labels.sh
 delete mode 100755 substrate/scripts/ci/github/generate_changelog.sh
 delete mode 100755 substrate/scripts/ci/gitlab/check-each-crate.py
 delete mode 100755 substrate/scripts/ci/gitlab/check_runtime.sh
 delete mode 100755 substrate/scripts/ci/gitlab/check_signed.sh
 delete mode 100755 substrate/scripts/ci/gitlab/ensure-deps.sh
 delete mode 100644 substrate/scripts/ci/gitlab/pipeline/build.yml
 delete mode 100644 substrate/scripts/ci/gitlab/pipeline/check.yml
 delete mode 100644 substrate/scripts/ci/gitlab/pipeline/publish.yml
 delete mode 100644 substrate/scripts/ci/gitlab/pipeline/test.yml
 delete mode 100644 substrate/scripts/ci/gitlab/pipeline/zombienet.yml
 delete mode 100755 substrate/scripts/ci/gitlab/prettier.sh
 delete mode 100755 substrate/scripts/ci/gitlab/publish_draft_release.sh

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 9e7346601ba3..0e7e9956a56b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -333,3 +333,48 @@ cancel-pipeline-cargo-clippy:
   extends: .cancel-pipeline-template
   needs:
   -
job: cargo-clippy + +cancel-pipeline-build-linux-stable: + extends: .cancel-pipeline-template + needs: + - job: build-linux-stable + +cancel-pipeline-build-linux-stable-cumulus: + extends: .cancel-pipeline-template + needs: + - job: build-linux-stable-cumulus + +cancel-pipeline-build-linux-substrate: + extends: .cancel-pipeline-template + needs: + - job: build-linux-substrate + +cancel-pipeline-test-node-metrics: + extends: .cancel-pipeline-template + needs: + - job: test-node-metrics + +cancel-pipeline-test-frame-ui: + extends: .cancel-pipeline-template + needs: + - job: test-frame-ui + +cancel-pipeline-quick-benchmarks: + extends: .cancel-pipeline-template + needs: + - job: quick-benchmarks + +cancel-pipeline-check-try-runtime: + extends: .cancel-pipeline-template + needs: + - job: check-try-runtime + +cancel-pipeline-test-frame-examples-compile-to-wasm: + extends: .cancel-pipeline-template + needs: + - job: test-frame-examples-compile-to-wasm + +cancel-pipeline-build-short-benchmark: + extends: .cancel-pipeline-template + needs: + - job: build-short-benchmark diff --git a/cumulus/scripts/ci/changelog/.gitignore b/cumulus/scripts/ci/changelog/.gitignore deleted file mode 100644 index 4fbcc523b04c..000000000000 --- a/cumulus/scripts/ci/changelog/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -changelog.md -*.json -release*.md -.env diff --git a/cumulus/scripts/ci/changelog/Gemfile b/cumulus/scripts/ci/changelog/Gemfile deleted file mode 100644 index 46b058e3c500..000000000000 --- a/cumulus/scripts/ci/changelog/Gemfile +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -source 'https://rubygems.org' - -git_source(:github) { |repo_name| "https://github.com/#{repo_name}" } - -gem 'octokit', '~> 4' - -gem 'git_diff_parser', '~> 3' - -gem 'toml', '~> 0.3.0' - -gem 'rake', group: :dev - -gem 'optparse', '~> 0.1.1' - -gem 'logger', '~> 1.4' - -gem 'changelogerator', '0.10.1' - -gem 'test-unit', group: :dev - -gem 'rubocop', group: :dev, require: false diff --git a/cumulus/scripts/ci/changelog/Gemfile.lock b/cumulus/scripts/ci/changelog/Gemfile.lock deleted file mode 100644 index 893bec549195..000000000000 --- a/cumulus/scripts/ci/changelog/Gemfile.lock +++ /dev/null @@ -1,84 +0,0 @@ -GEM - remote: https://rubygems.org/ - specs: - addressable (2.8.0) - public_suffix (>= 2.0.2, < 5.0) - ast (2.4.2) - changelogerator (0.10.1) - git_diff_parser (~> 3) - octokit (~> 4) - faraday (1.8.0) - faraday-em_http (~> 1.0) - faraday-em_synchrony (~> 1.0) - faraday-excon (~> 1.1) - faraday-httpclient (~> 1.0.1) - faraday-net_http (~> 1.0) - faraday-net_http_persistent (~> 1.1) - faraday-patron (~> 1.0) - faraday-rack (~> 1.0) - multipart-post (>= 1.2, < 3) - ruby2_keywords (>= 0.0.4) - faraday-em_http (1.0.0) - faraday-em_synchrony (1.0.0) - faraday-excon (1.1.0) - faraday-httpclient (1.0.1) - faraday-net_http (1.0.1) - faraday-net_http_persistent (1.2.0) - faraday-patron (1.0.0) - faraday-rack (1.0.0) - git_diff_parser (3.2.0) - logger (1.4.4) - multipart-post (2.1.1) - octokit (4.21.0) - faraday (>= 0.9) - sawyer (~> 0.8.0, >= 0.5.3) - optparse (0.1.1) - parallel (1.21.0) - parser (3.0.2.0) - ast (~> 2.4.1) - parslet (2.0.0) - power_assert (2.0.1) - public_suffix (4.0.6) - rainbow (3.0.0) - rake (13.0.6) - regexp_parser (2.1.1) - rexml (3.2.5) - rubocop (1.23.0) - parallel (~> 1.10) - parser (>= 3.0.0.0) - rainbow (>= 2.2.2, < 4.0) - regexp_parser (>= 1.8, < 3.0) - rexml - rubocop-ast (>= 1.12.0, < 2.0) - ruby-progressbar (~> 1.7) - unicode-display_width (>= 1.4.0, < 3.0) - rubocop-ast (1.13.0) - 
parser (>= 3.0.1.1) - ruby-progressbar (1.11.0) - ruby2_keywords (0.0.5) - sawyer (0.8.2) - addressable (>= 2.3.5) - faraday (> 0.8, < 2.0) - test-unit (3.5.1) - power_assert - toml (0.3.0) - parslet (>= 1.8.0, < 3.0.0) - unicode-display_width (2.1.0) - -PLATFORMS - x86_64-darwin-20 - x86_64-darwin-22 - -DEPENDENCIES - changelogerator (= 0.10.1) - git_diff_parser (~> 3) - logger (~> 1.4) - octokit (~> 4) - optparse (~> 0.1.1) - rake - rubocop - test-unit - toml (~> 0.3.0) - -BUNDLED WITH - 2.2.22 diff --git a/cumulus/scripts/ci/changelog/README.md b/cumulus/scripts/ci/changelog/README.md deleted file mode 100644 index 478e0b56d9ca..000000000000 --- a/cumulus/scripts/ci/changelog/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# Changelog - -Currently, the changelog is built locally. It will be moved to CI once labels stabilize. - -For now, a bit of preparation is required before you can run the script: -- fetch the srtool digests -- store them under the `digests` folder as `-srtool-digest.json` -- ensure the `.env` file is up to date with correct information - -The content of the release notes is generated from the template files under the `scripts/ci/changelog/templates` folder. For readability and maintenance, the template is split into several small snippets. - -Run: -``` -./bin/changelog [=HEAD] -``` - -For instance: -``` -./bin/changelog parachains-v7.0.0-rc8 -``` - -A file called `release-notes.md` will be generated and can be used for the release. - -## ENV - -You may use the following ENV for testing: - -``` -RUSTC_STABLE="rustc 1.56.1 (59eed8a2a 2021-11-01)" -RUSTC_NIGHTLY="rustc 1.57.0-nightly (51e514c0f 2021-09-12)" -PRE_RELEASE=true -HIDE_SRTOOL_ROCOCO=true -HIDE_SRTOOL_SHELL=true -REF1=statemine-v5.0.0 -REF2=HEAD -DEBUG=1 -NO_CACHE=1 -``` - -By default, the template will include all the information, including the runtime data. -For clients releases, we don't need those and they can be skipped by setting the following env: -``` -RELEASE_TYPE=client -``` - -## Considered labels - -The following list will likely evolve over time and it will be hard to keep it in sync. -In any case, if you want to find all the labels that are used, search for `meta` in the templates. -Currently, the considered labels are: - -- Priority: C labels -- Audit: D labels -- E4 => new host function -- B0 => silent, not showing up -- B1-releasenotes (misc unless other labels) -- B5-client (client changes) -- B7-runtimenoteworthy (runtime changes) -- T6-XCM - -Note that labels with the same letter are mutually exclusive. -A PR should not have both `B0` and `B5`, or both `C1` and `C9`. In case of conflicts, the template will -decide which label will be considered. - -## Dev and debuggin - -### Hot Reload - -The following command allows **Hot Reload**: -``` -fswatch templates -e ".*\.md$" | xargs -n1 -I{} ./bin/changelog statemine-v5.0.0 -``` -### Caching - -By default, if the changelog data from Github is already present, the calls to the Github API will be skipped -and the local version of the data will be used. This is much faster. -If you know that some labels have changed in Github, you probably want to refresh the data. -You can then either delete manually the `cumulus.json` file or `export NO_CACHE=1` to force refreshing the data. 
diff --git a/cumulus/scripts/ci/changelog/bin/changelog b/cumulus/scripts/ci/changelog/bin/changelog deleted file mode 100755 index 6cd012a29edb..000000000000 --- a/cumulus/scripts/ci/changelog/bin/changelog +++ /dev/null @@ -1,164 +0,0 @@ -#!/usr/bin/env ruby - -# frozen_string_literal: true - -# call for instance as: -# ./bin/changelog statemine-v5.0.0 -# -# You may set the ENV NO_CACHE to force fetching from Github -# You should also ensure you set the ENV: GITHUB_TOKEN - -require_relative '../lib/changelog' -require 'logger' - -logger = Logger.new($stdout) -logger.level = Logger::DEBUG -logger.debug('Starting') - -changelogerator_version = `changelogerator --version` -logger.debug(changelogerator_version) - -owner = 'paritytech' -repo = 'cumulus' -ref1 = ARGV[0] -ref2 = ARGV[1] || 'HEAD' -output = ARGV[2] || 'release-notes.md' - -ENV['REF1'] = ref1 -ENV['REF2'] = ref2 - -gh_cumulus = SubRef.new(format('%s/%s', { owner: owner, repo: repo })) - -polkadot_ref1 = gh_cumulus.get_dependency_reference(ref1, 'polkadot-primitives') -polkadot_ref2 = gh_cumulus.get_dependency_reference(ref2, 'polkadot-primitives') - -substrate_ref1 = gh_cumulus.get_dependency_reference(ref1, 'sp-io') -substrate_ref2 = gh_cumulus.get_dependency_reference(ref2, 'sp-io') - -logger.debug("Cumulus from: #{ref1}") -logger.debug("Cumulus to: #{ref2}") - -logger.debug("Polkadot from: #{polkadot_ref1}") -logger.debug("Polkadot to: #{polkadot_ref2}") - -logger.debug("Substrate from: #{substrate_ref1}") -logger.debug("Substrate to: #{substrate_ref2}") - -cumulus_data = 'cumulus.json' -substrate_data = 'substrate.json' -polkadot_data = 'polkadot.json' - -logger.debug("Using CUMULUS: #{cumulus_data}") -logger.debug("Using SUBSTRATE: #{substrate_data}") -logger.debug("Using POLKADOT: #{polkadot_data}") - -logger.warn('NO_CACHE set') if ENV['NO_CACHE'] - -# This is acting as cache so we don't spend time querying while testing -if ENV['NO_CACHE'] || !File.file?(cumulus_data) - logger.debug(format('Fetching data for Cumulus into %s', cumulus_data)) - cmd = format('changelogerator %s/%s -f %s -t %s > %s', - { owner: owner, repo: repo, from: ref1, to: ref2, output: cumulus_data }) - system(cmd) -else - logger.debug("Re-using:#{cumulus_data}") -end - -if ENV['NO_CACHE'] || !File.file?(polkadot_data) - logger.debug(format('Fetching data for Polkadot into %s', polkadot_data)) - cmd = format('changelogerator %s/%s -f %s -t %s > %s', - { owner: owner, repo: 'polkadot', from: polkadot_ref1, to: polkadot_ref2, output: polkadot_data }) - system(cmd) -else - logger.debug("Re-using:#{polkadot_data}") -end - -if ENV['NO_CACHE'] || !File.file?(substrate_data) - logger.debug(format('Fetching data for Substrate into %s', substrate_data)) - cmd = format('changelogerator %s/%s -f %s -t %s > %s', - { owner: owner, repo: 'substrate', from: substrate_ref1, to: substrate_ref2, output: substrate_data }) - system(cmd) -else - logger.debug("Re-using:#{substrate_data}") -end - -POLKADOT_COLLECTIVES_DIGEST = ENV['COLLECTIVES_POLKADOT_DIGEST'] || 'digests/collectives-polkadot-srtool-digest.json' -SHELL_DIGEST = ENV['SHELL_DIGEST'] || 'digests/shell-srtool-digest.json' -ASSET_HUB_WESTEND_DIGEST = ENV['ASSET_HUB_WESTEND_DIGEST'] || 'digests/asset-hub-westend-srtool-digest.json' -ASSET_HUB_KUSAMA_DIGEST = ENV['ASSET_HUB_KUSAMA_DIGEST'] || 'digests/asset-hub-kusama-srtool-digest.json' -ASSET_HUB_POLKADOT_DIGEST = ENV['ASSET_HUB_POLKADOT_DIGEST'] || 'digests/asset-hub-westend-srtool-digest.json' -BRIDGE_HUB_ROCOCO_DIGEST = ENV['BRIDGE_HUB_ROCOCO_DIGEST'] || 
'digests/bridge-hub-rococo-srtool-digest.json' -BRIDGE_HUB_KUSAMA_DIGEST = ENV['BRIDGE_HUB_KUSAMA_DIGEST'] || 'digests/bridge-hub-kusama-srtool-digest.json' -BRIDGE_HUB_POLKADOT_DIGEST = ENV['BRIDGE_HUB_POLKADOT_DIGEST'] || 'digests/bridge-hub-polkadot-srtool-digest.json' -ROCOCO_PARA_DIGEST = ENV['ROCOCO_PARA_DIGEST'] || 'digests/rococo-parachain-srtool-digest.json' -CANVAS_KUSAMA_DIGEST = ENV['CANVAS_KUSAMA_DIGEST'] || 'digests/contracts-rococo-srtool-digest.json' - -logger.debug("Release type: #{ENV['RELEASE_TYPE']}") - -if ENV['RELEASE_TYPE'] && ENV['RELEASE_TYPE'] == 'client' - logger.debug('Building changelog without runtimes') - cmd = format('jq \ - --slurpfile cumulus %s \ - --slurpfile substrate %s \ - --slurpfile polkadot %s \ - -n \'{ - cumulus: $cumulus[0], - substrate: $substrate[0], - polkadot: $polkadot[0], - }\' > context.json', cumulus_data, substrate_data, polkadot_data, - ) -else - logger.debug('Building changelog with runtimes') - - # Here we compose all the pieces together into one - # single big json file. - cmd = format('jq \ - --slurpfile cumulus %s \ - --slurpfile substrate %s \ - --slurpfile polkadot %s \ - --slurpfile srtool_shell %s \ - --slurpfile srtool_westmint %s \ - --slurpfile srtool_statemine %s \ - --slurpfile srtool_statemint %s \ - --slurpfile srtool_rococo_parachain %s \ - --slurpfile srtool_contracts_rococo %s \ - --slurpfile srtool_polkadot_collectives %s \ - --slurpfile srtool_bridge_hub_rococo %s \ - --slurpfile srtool_bridge_hub_kusama %s \ - --slurpfile srtool_bridge_hub_polkadot %s \ - -n \'{ - cumulus: $cumulus[0], - substrate: $substrate[0], - polkadot: $polkadot[0], - srtool: [ - { order: 10, name: "asset-hub-polkadot", note: " (Former Statemint)", data: $srtool_statemint[0] }, - { order: 11, name: "bridge-hub-polkadot", data: $srtool_bridge_hub_polkadot[0] }, - { order: 20, name: "asset-hub-kusama", note: " (Former Statemine)", data: $srtool_statemine[0] }, - { order: 21, name: "bridge-hub-kusama", data: $srtool_bridge_hub_kusama[0] }, - { order: 30, name: "asset-hub-westend", note: " (Former Westmint)", data: $srtool_westmint[0] }, - { order: 40, name: "rococo", data: $srtool_rococo_parachain[0] }, - { order: 41, name: "bridge-hub-rococo", data: $srtool_bridge_hub_rococo[0] }, - { order: 50, name: "polkadot-collectives", data: $srtool_polkadot_collectives[0] }, - { order: 60, name: "contracts", data: $srtool_contracts_rococo[0] }, - { order: 90, name: "shell", data: $srtool_shell[0] } - ] }\' > context.json', - cumulus_data, - substrate_data, - polkadot_data, - SHELL_DIGEST, - ASSET_HUB_WESTEND_DIGEST, - ASSET_HUB_KUSAMA_DIGEST, - ASSET_HUB_POLKADOT_DIGEST, - ROCOCO_PARA_DIGEST, - CANVAS_KUSAMA_DIGEST, - POLKADOT_COLLECTIVES_DIGEST, - BRIDGE_HUB_ROCOCO_DIGEST, - BRIDGE_HUB_KUSAMA_DIGEST, - BRIDGE_HUB_POLKADOT_DIGEST - ) -end -system(cmd) - -cmd = format('tera --env --env-key env --include-path templates \ - --template templates/template.md.tera context.json > %s', output) -system(cmd) diff --git a/cumulus/scripts/ci/changelog/digests/.gitignore b/cumulus/scripts/ci/changelog/digests/.gitignore deleted file mode 100644 index a6c57f5fb2ff..000000000000 --- a/cumulus/scripts/ci/changelog/digests/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.json diff --git a/cumulus/scripts/ci/changelog/digests/.gitkeep b/cumulus/scripts/ci/changelog/digests/.gitkeep deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/cumulus/scripts/ci/changelog/lib/changelog.rb b/cumulus/scripts/ci/changelog/lib/changelog.rb deleted file mode 100644 index 
2d9ee29a8c89..000000000000 --- a/cumulus/scripts/ci/changelog/lib/changelog.rb +++ /dev/null @@ -1,32 +0,0 @@ -# frozen_string_literal: true - -# A Class to find Substrate references -class SubRef - require 'octokit' - require 'toml' - - attr_reader :client, :repository - - def initialize(github_repo) - @client = Octokit::Client.new( - access_token: ENV['GITHUB_TOKEN'] - ) - @repository = @client.repository(github_repo) - end - - # This function checks the Cargo.lock of a given - # Rust project, for a given package, and fetches - # the dependency git ref. - def get_dependency_reference(ref, package) - cargo = TOML::Parser.new( - Base64.decode64( - @client.contents( - @repository.full_name, - path: 'Cargo.lock', - query: { ref: ref.to_s } - ).content - ) - ).parsed - cargo['package'].find { |p| p['name'] == package }['source'].split('#').last - end -end diff --git a/cumulus/scripts/ci/changelog/templates/change.md.tera b/cumulus/scripts/ci/changelog/templates/change.md.tera deleted file mode 100644 index 609a038789ac..000000000000 --- a/cumulus/scripts/ci/changelog/templates/change.md.tera +++ /dev/null @@ -1,44 +0,0 @@ -{# This macro shows ONE change #} -{%- macro change(c, cml="[C]", dot="[P]", sub="[S]") -%} - -{%- if c.meta.C and c.meta.C.agg.max >= 5 -%} -{%- set prio = " ‼️ HIGH" -%} -{%- elif c.meta.C and c.meta.C.agg.max >= 3 -%} -{%- set prio = " ❗️ Medium" -%} -{%- elif c.meta.C and c.meta.C.agg.max < 3 -%} -{%- set prio = " Low" -%} -{%- else -%} -{%- set prio = "" -%} -{%- endif -%} - -{%- set audit = "" -%} -{# -{%- if c.meta.D and c.meta.D.D1 -%} -{%- set audit = "✅ audited " -%} -{%- elif c.meta.D and c.meta.D.D2 -%} -{%- set audit = "✅ trivial " -%} -{%- elif c.meta.D and c.meta.D.D3 -%} -{%- set audit = "✅ trivial " -%} -{%- elif c.meta.D and c.meta.D.D5 -%} -{%- set audit = "⏳ pending non-critical audit " -%} -{%- else -%} -{%- set audit = "" -%} -{%- endif -%} -#} -{%- if c.html_url is containing("polkadot") -%} -{%- set repo = dot -%} -{%- elif c.html_url is containing("cumulus") -%} -{%- set repo = cml -%} -{%- elif c.html_url is containing("substrate") -%} -{%- set repo = sub -%} -{%- else -%} -{%- set repo = " " -%} -{%- endif -%} -{# #} -{%- if c.meta.T and c.meta.T.T6 -%} -{%- set xcm = " [✉️ XCM]" -%} -{%- else -%} -{%- set xcm = "" -%} -{%- endif -%} -{{- repo }} {{ audit }}[`#{{c.number}}`]({{c.html_url}}) {{- prio }} - {{ c.title | capitalize | truncate(length=60, end="…") }}{{xcm }} -{%- endmacro change %} diff --git a/cumulus/scripts/ci/changelog/templates/changes.md.tera b/cumulus/scripts/ci/changelog/templates/changes.md.tera deleted file mode 100644 index f1704546b0a7..000000000000 --- a/cumulus/scripts/ci/changelog/templates/changes.md.tera +++ /dev/null @@ -1,21 +0,0 @@ -{# This include generates the section showing the changes #} -## Changes - -### Legend - -- {{ CML }} Cumulus -- {{ DOT }} Polkadot -- {{ SUB }} Substrate - -{% if env.RELEASE_TYPE and env.RELEASE_TYPE == "client" %} -{% include "changes_client.md.tera" %} -{% else %} -{% include "migrations-runtime.md.tera" -%} - -{% include "changes_runtime.md.tera" %} - -{% endif %} - -{% include "changes_api.md.tera" %} - -{% include "changes_misc.md.tera" %} diff --git a/cumulus/scripts/ci/changelog/templates/changes_api.md.tera b/cumulus/scripts/ci/changelog/templates/changes_api.md.tera deleted file mode 100644 index 2379c178c031..000000000000 --- a/cumulus/scripts/ci/changelog/templates/changes_api.md.tera +++ /dev/null @@ -1,19 +0,0 @@ -{%- import "change.md.tera" as m_c -%} - -### API - -{#- The 
changes are sorted by merge date -#} -{% for pr in changes | sort(attribute="merged_at") -%} - -{%- if pr.meta.B -%} -{%- if pr.meta.B.B0 -%} -{#- We skip silent ones -#} -{%- else -%} - -{%- if pr.meta.B.B1 and pr.meta.T.T2 and not pr.title is containing("ompanion") %} -- {{ m_c::change(c=pr) }} -{%- endif -%} -{%- endif -%} - -{%- endif -%} -{%- endfor %} diff --git a/cumulus/scripts/ci/changelog/templates/changes_client.md.tera b/cumulus/scripts/ci/changelog/templates/changes_client.md.tera deleted file mode 100644 index 05a521d6870b..000000000000 --- a/cumulus/scripts/ci/changelog/templates/changes_client.md.tera +++ /dev/null @@ -1,17 +0,0 @@ -{% import "change.md.tera" as m_c -%} -### Client - -{#- The changes are sorted by merge date #} -{%- for pr in changes | sort(attribute="merged_at") %} - -{%- if pr.meta.B %} - {%- if pr.meta.B.B0 %} - {#- We skip silent ones -#} - {%- else -%} - - {%- if pr.meta.B.B1 and pr.meta.T and pr.meta.T.T0 and not pr.title is containing("ompanion") %} -- {{ m_c::change(c=pr) }} - {%- endif -%} - {% endif -%} - {% endif -%} -{% endfor %} diff --git a/cumulus/scripts/ci/changelog/templates/changes_misc.md.tera b/cumulus/scripts/ci/changelog/templates/changes_misc.md.tera deleted file mode 100644 index b36595bc5d6a..000000000000 --- a/cumulus/scripts/ci/changelog/templates/changes_misc.md.tera +++ /dev/null @@ -1,39 +0,0 @@ -{%- import "change.md.tera" as m_c -%} - -{%- set_global misc_count = 0 -%} -{#- First pass to count #} -{%- for pr in changes -%} - {%- if pr.meta.B %} - {%- if pr.meta.B.B0 -%} - {#- We skip silent ones -#} - {%- else -%} - {%- if pr.meta.T and pr.meta.T.agg.max > 2 %} -{%- set_global misc_count = misc_count + 1 -%} - {%- endif -%} - {% endif -%} - {% endif -%} -{% endfor %} - -### Misc - -{% if misc_count > 10 %} -There are other misc. changes. You can expand the list below to view them all. -
Other misc. changes -{% endif -%} - -{#- The changes are sorted by merge date #} -{%- for pr in changes | sort(attribute="merged_at") %} - {%- if pr.meta.B and not pr.title is containing("ompanion") %} - {%- if pr.meta.B.B0 %} - {#- We skip silent ones -#} - {%- else -%} - {%- if pr.meta.T and pr.meta.T.agg.max > 2 %} -- {{ m_c::change(c=pr) }} - {%- endif -%} - {% endif -%} - {% endif -%} -{% endfor %} - -{% if misc_count > 10 %} -
-{% endif -%} diff --git a/cumulus/scripts/ci/changelog/templates/changes_runtime.md.tera b/cumulus/scripts/ci/changelog/templates/changes_runtime.md.tera deleted file mode 100644 index 39c272637655..000000000000 --- a/cumulus/scripts/ci/changelog/templates/changes_runtime.md.tera +++ /dev/null @@ -1,19 +0,0 @@ -{%- import "change.md.tera" as m_c -%} - -### Runtime - -{#- The changes are sorted by merge date -#} -{% for pr in changes | sort(attribute="merged_at") -%} - -{%- if pr.meta.B -%} -{%- if pr.meta.B.B0 -%} -{#- We skip silent ones -#} -{%- else -%} - -{%- if pr.meta.B.B1 and pr.meta.T.T1 and not pr.title is containing("ompanion") %} -- {{ m_c::change(c=pr) }} -{%- endif -%} -{%- endif -%} - -{%- endif -%} -{%- endfor %} diff --git a/cumulus/scripts/ci/changelog/templates/compiler.md.tera b/cumulus/scripts/ci/changelog/templates/compiler.md.tera deleted file mode 100644 index 0420a88c3965..000000000000 --- a/cumulus/scripts/ci/changelog/templates/compiler.md.tera +++ /dev/null @@ -1,6 +0,0 @@ -## Rust compiler versions - -This release was tested against the following versions of `rustc`. Other versions may work. - -- Rust Stable: `{{ env.RUSTC_STABLE }}` -- Rust Nightly: `{{ env.RUSTC_NIGHTLY }}` diff --git a/cumulus/scripts/ci/changelog/templates/debug.md.tera b/cumulus/scripts/ci/changelog/templates/debug.md.tera deleted file mode 100644 index 4f0b14c00f12..000000000000 --- a/cumulus/scripts/ci/changelog/templates/debug.md.tera +++ /dev/null @@ -1,9 +0,0 @@ -{%- set to_ignore = changes | filter(attribute="meta.B.B0") %} - diff --git a/cumulus/scripts/ci/changelog/templates/docker_image.md.tera b/cumulus/scripts/ci/changelog/templates/docker_image.md.tera deleted file mode 100644 index cb0c619f3a70..000000000000 --- a/cumulus/scripts/ci/changelog/templates/docker_image.md.tera +++ /dev/null @@ -1,11 +0,0 @@ - -## Docker images - -The docker image for this release can be found in [Docker hub](https://hub.docker.com/r/parity/polkadot-parachain/tags?page=1&ordering=last_updated). -(It will be available a few minutes after the release has been published). 
- -You may also pull it with: - -``` -docker pull parity/polkadot-parachain:latest -``` diff --git a/cumulus/scripts/ci/changelog/templates/global_priority.md.tera b/cumulus/scripts/ci/changelog/templates/global_priority.md.tera deleted file mode 100644 index 3d8a507ed1fe..000000000000 --- a/cumulus/scripts/ci/changelog/templates/global_priority.md.tera +++ /dev/null @@ -1,35 +0,0 @@ -{%- import "high_priority.md.tera" as m_p -%} -## Global Priority - -{%- set cumulus_prio = 0 -%} -{%- set polkadot_prio = 0 -%} -{%- set substrate_prio = 0 -%} - -{# We fetch the various priorities #} -{%- if cumulus.meta.C -%} - {%- set cumulus_prio = cumulus.meta.C.max -%} -{%- endif -%} -{%- if polkadot.meta.C -%} - {%- set polkadot_prio = polkadot.meta.C.max -%} -{%- endif -%} -{%- if substrate.meta.C -%} - {%- set substrate_prio = substrate.meta.C.max -%} -{%- endif -%} - -{# We compute the global priority #} -{%- set global_prio = cumulus_prio -%} -{%- if polkadot_prio > global_prio -%} - {% set global_prio = polkadot_prio -%} -{%- endif -%} -{%- if substrate_prio > global_prio -%} - {%- set global_prio = substrate_prio -%} -{%- endif %} - - - -{# We show the result #} -{{ m_p::high_priority(p=global_prio, changes=changes) }} diff --git a/cumulus/scripts/ci/changelog/templates/high_priority.md.tera b/cumulus/scripts/ci/changelog/templates/high_priority.md.tera deleted file mode 100644 index 21e331892b8f..000000000000 --- a/cumulus/scripts/ci/changelog/templates/high_priority.md.tera +++ /dev/null @@ -1,56 +0,0 @@ -{%- import "change.md.tera" as m_c -%} - -{# This macro convert a priority level into readable output #} -{%- macro high_priority(p, changes) -%} - -{# real globals don't work so we count the number of host functions here as well #} -{# unfortunately, the next snippet is duplicated in the host_functions.md.tera template #} -{# as well #} -{%- set_global host_fn_count = 0 -%} - -{# We loop first to count the number of host functions but we do not display anything yet #} -{%- for pr in changes -%} -{%- if pr.meta.B and pr.meta.B.B0 -%} -{#- We skip silent ones -#} -{%- else -%} - {%- if pr.meta.E and pr.meta.E.E4 -%} - {%- set_global host_fn_count = host_fn_count + 1 -%} - {%- endif -%} -{%- endif -%} -{%- endfor -%} - -{%- if p >= 5 or host_fn_count > 0 -%} - {%- set prio = "‼️ HIGH" -%} - {%- set text = "This is a **high priority** release and you must upgrade as as soon as possible." -%} -{%- elif p >= 3 -%} - {%- set prio = "❗️ Medium" -%} - {%- set text = "This is a medium priority release and you should upgrade in a timely manner." -%} -{%- else -%} - {%- set prio = "Low" -%} - {%- set text = "This is a low priority release and you may upgrade at your convenience." 
-%} -{%- endif -%} - - -{% if prio -%} -{{prio}}: {{text}} -{%- else -%} - -{%- endif %} - -{# We only show details if Medium or High #} -{%- if p >= 5 -%} -The changes motivating this priority level are: -{% for pr in changes | sort(attribute="merged_at") -%} - {%- if pr.meta.C -%} - {%- if pr.meta.C.agg.max >= p %} -- {{ m_c::change(c=pr) }} -{%- if pr.meta.B and pr.meta.B.B1 and pr.meta.T and pr.meta.T.T1 %} -(RUNTIME) -{% endif %} - - {%- endif -%} - {%- endif -%} -{%- endfor %} -{%- endif %} - -{%- endmacro priority -%} diff --git a/cumulus/scripts/ci/changelog/templates/host_functions.md.tera b/cumulus/scripts/ci/changelog/templates/host_functions.md.tera deleted file mode 100644 index 2a9b26e8090c..000000000000 --- a/cumulus/scripts/ci/changelog/templates/host_functions.md.tera +++ /dev/null @@ -1,38 +0,0 @@ -{%- import "change.md.tera" as m_c -%} - -{%- set_global host_fn_count = 0 -%} - -{# We loop first to count the number of host functions but we do not display anything yet #} -{%- for pr in changes -%} -{%- if pr.meta.B and pr.meta.B.B0 -%} -{#- We skip silent ones -#} -{%- else -%} - {%- if pr.meta.E and pr.meta.E.E4 -%} - {%- set_global host_fn_count = host_fn_count + 1 -%} - {% endif -%} -{%- endif -%} -{%- endfor -%} - - - -{% if host_fn_count == 0 -%} - -{%- else -%} -## Host functions - -⚠️ The runtimes in this release contain {{ host_fn_count }} new **host function{{ host_fn_count | pluralize }}**. - -⚠️ It is critical that you update your client before the chain switches to the new runtimes. - -{% for pr in changes | sort(attribute="merged_at") -%} - -{%- if pr.meta.B and pr.meta.B.B0 -%} -{#- We skip silent ones -#} -{%- else -%} - {%- if pr.meta.E and pr.meta.E.E4 -%} - - {{ m_c::change(c=pr) }} - {% endif -%} - {% endif -%} -{%- endfor -%} - -{%- endif %} diff --git a/cumulus/scripts/ci/changelog/templates/migrations-db.md.tera b/cumulus/scripts/ci/changelog/templates/migrations-db.md.tera deleted file mode 100644 index e840d991d9a8..000000000000 --- a/cumulus/scripts/ci/changelog/templates/migrations-db.md.tera +++ /dev/null @@ -1,26 +0,0 @@ -{%- import "change.md.tera" as m_c %} -{%- set_global db_migration_count = 0 -%} - -## Database Migrations - -{% for pr in changes | sort(attribute="merged_at") -%} - -{%- if pr.meta.B and pr.meta.B.B0 %} -{#- We skip silent ones -#} -{%- else -%} -{%- if pr.meta.E and pr.meta.E.E2 -%} -{%- set_global db_migration_count = db_migration_count + 1 -%} -- {{ m_c::change(c=pr) }} -{% endif -%} -{% endif -%} -{% endfor -%} - -{%- if db_migration_count == 0 -%} -No Database migration detected in this release. -{% else %} - -There is {{ db_migration_count }} database migration(s) in this release. - -Database migrations are operations bringing your database to the latest stand. -Some migrations may break compatibility and making a backup of your database is highly recommended. 
-{%- endif %} diff --git a/cumulus/scripts/ci/changelog/templates/migrations-runtime.md.tera b/cumulus/scripts/ci/changelog/templates/migrations-runtime.md.tera deleted file mode 100644 index f02499a84d74..000000000000 --- a/cumulus/scripts/ci/changelog/templates/migrations-runtime.md.tera +++ /dev/null @@ -1,14 +0,0 @@ -{%- import "change.md.tera" as m_c %} - -## Runtime Migrations - -{% for pr in changes | sort(attribute="merged_at") -%} - -{%- if pr.meta.B and pr.meta.B.B0 %} -{#- We skip silent ones -#} -{%- else -%} -{%- if pr.meta.E and pr.meta.E.E1 -%} -- {{ m_c::change(c=pr) }} -{% endif -%} -{% endif -%} -{% endfor -%} diff --git a/cumulus/scripts/ci/changelog/templates/pre_release.md.tera b/cumulus/scripts/ci/changelog/templates/pre_release.md.tera deleted file mode 100644 index 53a0e9065412..000000000000 --- a/cumulus/scripts/ci/changelog/templates/pre_release.md.tera +++ /dev/null @@ -1,11 +0,0 @@ -{%- if env.PRE_RELEASE == "true" -%} -
⚠️ This is a pre-release - -**Release candidates** are **pre-releases** may not be final. -Although they are reasonably tested, there may be additional changes or issues -before an official release is tagged. Use at your own discretion, and consider -only using published releases on critical production infrastructure. -
-{% else -%} - -{%- endif %} diff --git a/cumulus/scripts/ci/changelog/templates/runtime.md.tera b/cumulus/scripts/ci/changelog/templates/runtime.md.tera deleted file mode 100644 index d20702458385..000000000000 --- a/cumulus/scripts/ci/changelog/templates/runtime.md.tera +++ /dev/null @@ -1,28 +0,0 @@ -{# This macro shows one runtime #} -{%- macro runtime(runtime) -%} - -### {{ runtime.name | replace(from="-", to=" ") | title }} {%- if runtime.note -%} {{ runtime.note }} {%- endif -%} - -{%- if runtime.data.runtimes.compressed.subwasm.compression.compressed %} -{%- set compressed = "Yes" %} -{%- else %} -{%- set compressed = "No" %} -{%- endif %} - -{%- set comp_ratio = 100 - (runtime.data.runtimes.compressed.subwasm.compression.size_compressed / runtime.data.runtimes.compressed.subwasm.compression.size_decompressed *100) %} - - - - - - - -``` -🏋️ Runtime Size: {{ runtime.data.runtimes.compressed.subwasm.size | filesizeformat }} ({{ runtime.data.runtimes.compressed.subwasm.size }} bytes) -🔥 Core Version: {{ runtime.data.runtimes.compressed.subwasm.core_version.specName }}-{{ runtime.data.runtimes.compressed.subwasm.core_version.specVersion }} ({{ runtime.data.runtimes.compressed.subwasm.core_version.implName }}-{{ runtime.data.runtimes.compressed.subwasm.core_version.implVersion }}.tx{{ runtime.data.runtimes.compressed.subwasm.core_version.transactionVersion }}.au{{ runtime.data.runtimes.compressed.subwasm.core_version.authoringVersion }}) -🗜 Compressed: {{ compressed }}: {{ comp_ratio | round(method="ceil", precision=2) }}% -🎁 Metadata version: V{{ runtime.data.runtimes.compressed.subwasm.metadata_version }} -🗳️ Blake2-256 hash: {{ runtime.data.runtimes.compressed.subwasm.blake2_256 }} -📦 IPFS: {{ runtime.data.runtimes.compressed.subwasm.ipfs_hash }} -``` -{%- endmacro runtime %} diff --git a/cumulus/scripts/ci/changelog/templates/runtimes.md.tera b/cumulus/scripts/ci/changelog/templates/runtimes.md.tera deleted file mode 100644 index fe2e16aa9c28..000000000000 --- a/cumulus/scripts/ci/changelog/templates/runtimes.md.tera +++ /dev/null @@ -1,17 +0,0 @@ -{# This include shows the list and details of the runtimes #} -{%- import "runtime.md.tera" as m_r -%} - -## Runtimes - -{% set rtm = srtool[0] -%} - -The information about the runtimes included in this release can be found below. -The runtimes have been built using [{{ rtm.data.gen }}](https://github.com/paritytech/srtool) and `{{ rtm.data.rustc }}`. - -{%- for runtime in srtool | sort(attribute="order") %} -{%- set HIDE_VAR = "HIDE_SRTOOL_" ~ runtime.name | upper %} -{%- if not env is containing(HIDE_VAR) %} - -{{ m_r::runtime(runtime=runtime) }} -{%- endif %} -{%- endfor %} diff --git a/cumulus/scripts/ci/changelog/templates/template.md.tera b/cumulus/scripts/ci/changelog/templates/template.md.tera deleted file mode 100644 index 8b14db43fe28..000000000000 --- a/cumulus/scripts/ci/changelog/templates/template.md.tera +++ /dev/null @@ -1,38 +0,0 @@ -{# This is the entry point of the template for the parachains-* releases-#} - -{% include "pre_release.md.tera" -%} - -{% if env.PRE_RELEASE == "true" -%} -This pre-release contains the changes from `{{ env.REF1 }}` to `{{ env.REF2 }}`. -{% else -%} -This release contains the changes from `{{ env.REF1 }}` to `{{ env.REF2 }}`. 
-{% endif -%} - -{%- set changes = cumulus.changes | concat(with=substrate.changes) -%} -{%- set changes = changes | concat(with=polkadot.changes) -%} -{%- include "debug.md.tera" -%} - -{%- set CML = "[C]" -%} -{%- set DOT = "[P]" -%} -{%- set SUB = "[S]" -%} - -{# We check for host function first because no matter what the priority is, #} -{# we will force it to HIGH if at least one host function was detected. #} - -{% include "host_functions.md.tera" -%} - -{% if env.RELEASE_TYPE and env.RELEASE_TYPE == "client" -%} -{% include "global_priority.md.tera" -%} -{% include "compiler.md.tera" -%} -{% include "migrations-db.md.tera" %} - -{% else %} -{% include "migrations-runtime.md.tera" %} -{% include "runtimes.md.tera" -%} -{% endif %} - -{% include "changes.md.tera" -%} - -{% if env.RELEASE_TYPE and env.RELEASE_TYPE == "client" -%} -{% include "docker_image.md.tera" -%} -{% endif %} diff --git a/cumulus/scripts/ci/changelog/test/test_basic.rb b/cumulus/scripts/ci/changelog/test/test_basic.rb deleted file mode 100755 index d099fadca433..000000000000 --- a/cumulus/scripts/ci/changelog/test/test_basic.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -require_relative '../lib/changelog' -require 'test/unit' - -class TestChangelog < Test::Unit::TestCase - def test_get_dep_ref_polkadot - c = SubRef.new('paritytech/polkadot') - ref = '13c2695' - package = 'sc-cli' - result = c.get_dependency_reference(ref, package) - assert_equal('7db0768a85dc36a3f2a44d042b32f3715c00a90d', result) - end - - def test_get_dep_ref_invalid_ref - c = SubRef.new('paritytech/polkadot') - ref = '9999999' - package = 'sc-cli' - assert_raise do - c.get_dependency_reference(ref, package) - end - end -end diff --git a/cumulus/scripts/ci/common/lib.sh b/cumulus/scripts/ci/common/lib.sh deleted file mode 100644 index 93e0392b3e29..000000000000 --- a/cumulus/scripts/ci/common/lib.sh +++ /dev/null @@ -1,141 +0,0 @@ -#!/bin/sh - -api_base="https://api.github.com/repos" - -# Function to take 2 git tags/commits and get any lines from commit messages -# that contain something that looks like a PR reference: e.g., (#1234) -sanitised_git_logs(){ - git --no-pager log --pretty=format:"%s" "$1...$2" | - # Only find messages referencing a PR - grep -E '\(#[0-9]+\)' | - # Strip any asterisks - sed 's/^* //g' -} - -# Checks whether a tag on github has been verified -# repo: 'organization/repo' -# tagver: 'v1.2.3' -# Usage: check_tag $repo $tagver -check_tag () { - repo=$1 - tagver=$2 - if [ -n "$GITHUB_RELEASE_TOKEN" ]; then - echo '[+] Fetching tag using privileged token' - tag_out=$(curl -H "Authorization: token $GITHUB_RELEASE_TOKEN" -s "$api_base/$repo/git/refs/tags/$tagver") - else - echo '[+] Fetching tag using unprivileged token' - tag_out=$(curl -H "Authorization: token $GITHUB_PR_TOKEN" -s "$api_base/$repo/git/refs/tags/$tagver") - fi - tag_sha=$(echo "$tag_out" | jq -r .object.sha) - object_url=$(echo "$tag_out" | jq -r .object.url) - if [ "$tag_sha" = "null" ]; then - return 2 - fi - echo "[+] Tag object SHA: $tag_sha" - verified_str=$(curl -H "Authorization: token $GITHUB_RELEASE_TOKEN" -s "$object_url" | jq -r .verification.verified) - if [ "$verified_str" = "true" ]; then - # Verified, everything is good - return 0 - else - # Not verified. Bad juju. - return 1 - fi -} - -# Checks whether a given PR has a given label. 
-# repo: 'organization/repo' -# pr_id: 12345 -# label: B1-silent -# Usage: has_label $repo $pr_id $label -has_label(){ - repo="$1" - pr_id="$2" - label="$3" - - # These will exist if the function is called in Gitlab. - # If the function's called in Github, we should have GITHUB_ACCESS_TOKEN set - # already. - if [ -n "$GITHUB_RELEASE_TOKEN" ]; then - GITHUB_TOKEN="$GITHUB_RELEASE_TOKEN" - elif [ -n "$GITHUB_PR_TOKEN" ]; then - GITHUB_TOKEN="$GITHUB_PR_TOKEN" - fi - - out=$(curl -H "Authorization: token $GITHUB_TOKEN" -s "$api_base/$repo/pulls/$pr_id") - [ -n "$(echo "$out" | tr -d '\r\n' | jq ".labels | .[] | select(.name==\"$label\")")" ] -} - -github_label () { - echo - echo "# run github-api job for labeling it ${1}" - curl -sS -X POST \ - -F "token=${CI_JOB_TOKEN}" \ - -F "ref=master" \ - -F "variables[LABEL]=${1}" \ - -F "variables[PRNO]=${CI_COMMIT_REF_NAME}" \ - -F "variables[PROJECT]=paritytech/polkadot" \ - "${GITLAB_API}/projects/${GITHUB_API_PROJECT}/trigger/pipeline" -} - -# Formats a message into a JSON string for posting to Matrix -# message: 'any plaintext message' -# formatted_message: 'optional message formatted in html' -# Usage: structure_message $content $formatted_content (optional) -structure_message() { - if [ -z "$2" ]; then - body=$(jq -Rs --arg body "$1" '{"msgtype": "m.text", $body}' < /dev/null) - else - body=$(jq -Rs --arg body "$1" --arg formatted_body "$2" '{"msgtype": "m.text", $body, "format": "org.matrix.custom.html", $formatted_body}' < /dev/null) - fi - echo "$body" -} - -# Post a message to a matrix room -# body: '{body: "JSON string produced by structure_message"}' -# room_id: !fsfSRjgjBWEWffws:matrix.parity.io -# access_token: see https://matrix.org/docs/guides/client-server-api/ -# Usage: send_message $body (json formatted) $room_id $access_token -send_message() { -curl -XPOST -d "$1" "https://matrix.parity.io/_matrix/client/r0/rooms/$2/send/m.room.message?access_token=$3" -} - -# Pretty-printing functions -boldprint () { printf "|\n| \033[1m%s\033[0m\n|\n" "${@}"; } -boldcat () { printf "|\n"; while read -r l; do printf "| \033[1m%s\033[0m\n" "${l}"; done; printf "|\n" ; } - -skip_if_companion_pr() { - url="https://api.github.com/repos/paritytech/polkadot/pulls/${CI_COMMIT_REF_NAME}" - echo "[+] API URL: $url" - - pr_title=$(curl -sSL -H "Authorization: token ${GITHUB_PR_TOKEN}" "$url" | jq -r .title) - echo "[+] PR title: $pr_title" - - if echo "$pr_title" | grep -qi '^companion'; then - echo "[!] PR is a companion PR. Build is already done in substrate" - exit 0 - else - echo "[+] PR is not a companion PR. Proceeding test" - fi -} - -# Fetches the tag name of the latest release from a repository -# repo: 'organisation/repo' -# Usage: latest_release 'paritytech/polkadot' -latest_release() { - curl -s "$api_base/$1/releases/latest" | jq -r '.tag_name' -} - -# Check for runtime changes between two commits. 
This is defined as any changes -# to /primitives/src/* and any *production* chains under /runtime -has_runtime_changes() { - from=$1 - to=$2 - - if git diff --name-only "${from}...${to}" \ - | grep -q -e '^runtime/polkadot' -e '^runtime/kusama' -e '^primitives/src/' -e '^runtime/common' - then - return 0 - else - return 1 - fi -} diff --git a/cumulus/scripts/ci/create-benchmark-pr.sh b/cumulus/scripts/ci/create-benchmark-pr.sh deleted file mode 100755 index 46927f24b808..000000000000 --- a/cumulus/scripts/ci/create-benchmark-pr.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env bash - -set -Eeu -o pipefail -shopt -s inherit_errexit - -PR_TITLE="$1" -HEAD_REF="$2" - -ORG="paritytech" -REPO="$CI_PROJECT_NAME" -BASE_REF="$CI_COMMIT_BRANCH" -# Change threshold in %. Bigger values excludes the small changes. -THRESHOLD=${THRESHOLD:-30} - -WEIGHTS_COMPARISON_URL_PARTS=( - "https://weights.tasty.limo/compare?" - "repo=$REPO&" - "threshold=$THRESHOLD&" - "path_pattern=**%2Fweights%2F*.rs&" - "method=guess-worst&" - "ignore_errors=true&" - "unit=time&" - "old=$BASE_REF&" - "new=$HEAD_REF" -) -printf -v WEIGHTS_COMPARISON_URL %s "${WEIGHTS_COMPARISON_URL_PARTS[@]}" - -PAYLOAD="$(jq -n \ - --arg title "$PR_TITLE" \ - --arg body " -This PR is generated automatically by CI. - -Compare the weights with \`$BASE_REF\`: $WEIGHTS_COMPARISON_URL - -- [ ] Backport to master and node release branch once merged -" \ - --arg base "$BASE_REF" \ - --arg head "$HEAD_REF" \ - '{ - title: $title, - body: $body, - head: $head, - base: $base - }' -)" - -echo "PAYLOAD: $PAYLOAD" - -curl \ - -H "Authorization: token $GITHUB_TOKEN" \ - -X POST \ - -d "$PAYLOAD" \ - "https://api.github.com/repos/$ORG/$REPO/pulls" diff --git a/cumulus/scripts/ci/github/check-rel-br b/cumulus/scripts/ci/github/check-rel-br deleted file mode 100755 index 1b49ae621722..000000000000 --- a/cumulus/scripts/ci/github/check-rel-br +++ /dev/null @@ -1,127 +0,0 @@ -#!/usr/bin/env bash - -# This script helps running sanity checks on a release branch -# It is intended to be ran from the repo and from the release branch - -# NOTE: The diener runs do take time and are not really required because -# if we missed the diener runs, the Cargo.lock that we check won't pass -# the tests. See https://github.com/bkchr/diener/issues/17 - -grv=$(git remote --verbose | grep push) -export RUST_LOG=none -REPO=$(echo "$grv" | cut -d ' ' -f1 | cut -d$'\t' -f2 | sed 's/.*github.com\/\(.*\)/\1/g' | cut -d '/' -f2 | cut -d '.' -f1 | sort | uniq) -echo "[+] Detected repo: $REPO" - -BRANCH=$(git branch --show-current) -if ! [[ "$BRANCH" =~ ^release.*$ || "$BRANCH" =~ ^polkadot.*$ ]]; then - echo "This script is meant to run only on a RELEASE branch." - echo "Try one of the following branch:" - git branch -r --format "%(refname:short)" --sort=-committerdate | grep -Ei '/?release' | head - exit 1 -fi -echo "[+] Working on $BRANCH" - -# Tried to get the version of the release from the branch -# input: release-foo-v0.9.22 or release-bar-v9220 or release-foo-v0.9.220 -# output: 0.9.22 -get_version() { - branch=$1 - [[ $branch =~ -v(.*) ]] - version=${BASH_REMATCH[1]} - if [[ $version =~ \. ]]; then - MAJOR=$(($(echo $version | cut -d '.' -f1))) - MINOR=$(($(echo $version | cut -d '.' -f2))) - PATCH=$(($(echo $version | cut -d '.' 
-f3))) - echo $MAJOR.$MINOR.${PATCH:0:2} - else - MAJOR=$(echo $(($version / 100000))) - remainer=$(($version - $MAJOR * 100000)) - MINOR=$(echo $(($remainer / 1000))) - remainer=$(($remainer - $MINOR * 1000)) - PATCH=$(echo $(($remainer / 10))) - echo $MAJOR.$MINOR.$PATCH - fi -} - -# return the name of the release branch for a given repo and version -get_release_branch() { - repo=$1 - version=$2 - case $repo in - polkadot) - echo "release-v$version" - ;; - - substrate) - echo "polkadot-v$version" - ;; - - *) - echo "Repo $repo is not supported, exiting" - exit 1 - ;; - esac -} - -# repo = substrate / polkadot -check_release_branch_repo() { - repo=$1 - branch=$2 - - echo "[+] Checking deps for $repo=$branch" - - POSTIVE=$(cat Cargo.lock | grep "$repo?branch=$branch" | sort | uniq | wc -l) - NEGATIVE=$(cat Cargo.lock | grep "$repo?branch=" | grep -v $branch | sort | uniq | wc -l) - - if [[ $POSTIVE -eq 1 && $NEGATIVE -eq 0 ]]; then - echo -e "[+] ✅ Looking good" - cat Cargo.lock | grep "$repo?branch=" | sort | uniq | sed 's/^/\t - /' - return 0 - else - echo -e "[+] ❌ Something seems to be wrong, we want 1 unique match and 0 non match (1, 0) and we got ($(($POSTIVE)), $(($NEGATIVE)))" - cat Cargo.lock | grep "$repo?branch=" | sort | uniq | sed 's/^/\t - /' - return 1 - fi -} - -# Check a release branch -check_release_branches() { - SUBSTRATE_BRANCH=$1 - POLKADOT_BRANCH=$2 - - check_release_branch_repo substrate $SUBSTRATE_BRANCH - ret_a1=$? - - ret_b1=0 - if [ $POLKADOT_BRANCH ]; then - check_release_branch_repo polkadot $POLKADOT_BRANCH - ret_b1=$? - fi - - STATUS=$(($ret_a1 + $ret_b1)) - - return $STATUS -} - -VERSION=$(get_version $BRANCH) -echo "[+] Target version: v$VERSION" - -case $REPO in - polkadot) - substrate=$(get_release_branch substrate $VERSION) - - check_release_branches $substrate - ;; - - cumulus) - polkadot=$(get_release_branch polkadot $VERSION) - substrate=$(get_release_branch substrate $VERSION) - - check_release_branches $substrate $polkadot - ;; - - *) - echo "REPO $REPO is not supported, exiting" - exit 1 - ;; -esac diff --git a/cumulus/scripts/ci/github/check_labels.sh b/cumulus/scripts/ci/github/check_labels.sh deleted file mode 100755 index 102b1a4b0666..000000000000 --- a/cumulus/scripts/ci/github/check_labels.sh +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env bash - -#shellcheck source=../common/lib.sh -source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )/../common/lib.sh" - -repo="$GITHUB_REPOSITORY" -pr="$GITHUB_PR" - -ensure_labels() { - for label in "$@"; do - if has_label "$repo" "$pr" "$label"; then - return 0 - fi - done - return 1 -} - -# Must have one of the following labels -releasenotes_labels=( - 'B0-silent' - 'B1-note_worthy' -) - -# Must be an ordered list of priorities, lowest first -priority_labels=( - 'C1-low' - 'C3-medium' - 'C5-high' - 'C7-critical' -) - -audit_labels=( - 'D1-audited 👍' - 'D2-notlive 💤' - 'D3-trivial 🧸' - 'D5-nicetohaveaudit ⚠️' - 'D9-needsaudit 👮' -) - -x_labels=( - 'X0-node' - 'X1-runtime' - 'X2-API' - 'X9-misc' -) - -echo "[+] Checking release notes (B) labels for $CI_COMMIT_BRANCH" -if ensure_labels "${releasenotes_labels[@]}"; then - echo "[+] Release notes label detected. All is well." -else - echo "[!] Release notes label not detected. Please add one of: ${releasenotes_labels[*]}" - exit 1 -fi - -if has_label "$repo" "$pr" 'B1-note_worthy'; then - echo "[+] B1-note_worthy is chosen. 
Checking that there X-labels for $CI_COMMIT_BRANCH" - if ensure_labels "${x_labels[@]}"; then - echo "[+] X-label detected. All is well." - else - echo "[!] X-label not detected. Please add one of: ${x_labels[*]}" - exit 1 - fi -fi - -echo "[+] Checking release priority (C) labels for $CI_COMMIT_BRANCH" -if ensure_labels "${priority_labels[@]}"; then - echo "[+] Release priority label detected. All is well." -else - echo "[!] Release priority label not detected. Please add one of: ${priority_labels[*]}" - exit 1 -fi - -if has_runtime_changes "${BASE_SHA}" "${HEAD_SHA}"; then - echo "[+] Runtime changes detected. Checking audit (D) labels" - if ensure_labels "${audit_labels[@]}"; then - echo "[+] Release audit label detected. All is well." - else - echo "[!] Release audit label not detected. Please add one of: ${audit_labels[*]}" - exit 1 - fi -fi - -# If the priority is anything other than the lowest, we *must not* have a B0-silent -# label -if has_label "$repo" "$GITHUB_PR" 'B0-silent' && - ! has_label "$repo" "$GITHUB_PR" "${priority_labels[0]}"; then - echo "[!] Changes with a priority higher than C1-low *MUST* have a B- label that is not B0-Silent" - exit 1 -fi - -exit 0 diff --git a/cumulus/scripts/ci/github/extrinsic-ordering-filter.sh b/cumulus/scripts/ci/github/extrinsic-ordering-filter.sh deleted file mode 100755 index 4fd3337f64a6..000000000000 --- a/cumulus/scripts/ci/github/extrinsic-ordering-filter.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env bash -# This script is used in a Github Workflow. It helps filtering out what is interesting -# when comparing metadata and spot what would require a tx version bump. - -# shellcheck disable=SC2002,SC2086 - -FILE=$1 - -# Higlight indexes that were deleted -function find_deletions() { - echo "\n## Deletions\n" - RES=$(cat "$FILE" | grep -n '\[\-\]' | tr -s " ") - if [ "$RES" ]; then - echo "$RES" | awk '{ printf "%s\\n", $0 }' - else - echo "n/a" - fi -} - -# Highlight indexes that have been deleted -function find_index_changes() { - echo "\n## Index changes\n" - RES=$(cat "$FILE" | grep -E -n -i 'idx:\s*([0-9]+)\s*(->)\s*([0-9]+)' | tr -s " ") - if [ "$RES" ]; then - echo "$RES" | awk '{ printf "%s\\n", $0 }' - else - echo "n/a" - fi -} - -# Highlight values that decreased -function find_decreases() { - echo "\n## Decreases\n" - OUT=$(cat "$FILE" | grep -E -i -o '([0-9]+)\s*(->)\s*([0-9]+)' | awk '$1 > $3 { printf "%s;", $0 }') - IFS=$';' LIST=("$OUT") - unset RES - for line in "${LIST[@]}"; do - RES="$RES\n$(cat "$FILE" | grep -E -i -n \"$line\" | tr -s " ")" - done - - if [ "$RES" ]; then - echo "$RES" | awk '{ printf "%s\\n", $0 }' | sort -u -g | uniq - else - echo "n/a" - fi -} - -echo "\n------------------------------ SUMMARY -------------------------------" -echo "\n⚠️ This filter is here to help spotting changes that should be reviewed carefully." -echo "\n⚠️ It catches only index changes, deletions and value decreases". - -find_deletions "$FILE" -find_index_changes "$FILE" -find_decreases "$FILE" -echo "\n----------------------------------------------------------------------\n" diff --git a/cumulus/scripts/ci/github/runtime-version.rb b/cumulus/scripts/ci/github/runtime-version.rb deleted file mode 100644 index 14663acaf31a..000000000000 --- a/cumulus/scripts/ci/github/runtime-version.rb +++ /dev/null @@ -1,10 +0,0 @@ -# frozen_string_literal: true - -# Gets the runtime version for a given runtime from the filesystem. 
-# Optionally accepts a path that is the root of the project which defaults to -# the current working directory -def get_runtime(runtime: nil, path: '.', runtime_dir: 'runtime') - File.open(path + "/#{runtime_dir}/#{runtime}/src/lib.rs") do |f| - f.find { |l| l =~ /spec_version/ }.match(/[0-9]+/)[0] - end -end diff --git a/cumulus/scripts/ci/gitlab/pipeline/benchmarks.yml b/cumulus/scripts/ci/gitlab/pipeline/benchmarks.yml deleted file mode 100644 index 0cbc42aabae6..000000000000 --- a/cumulus/scripts/ci/gitlab/pipeline/benchmarks.yml +++ /dev/null @@ -1,84 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "benchmarks" stage -# Work only on release-parachains-v* branches - -benchmarks-build: - stage: benchmarks-build - extends: - - .docker-env - - .collect-artifacts - - .benchmarks-manual-refs - script: - - time cargo build --profile production --locked --features runtime-benchmarks - - mkdir -p artifacts - - cp target/production/polkadot-parachain ./artifacts/ - -benchmarks-assets: - stage: benchmarks-run - timeout: 1d - extends: - - .docker-env - - .collect-artifacts - - .benchmarks-refs - before_script: - - !reference [.docker-env, before_script] - script: - - ./scripts/benchmarks-ci.sh assets asset-hub-kusama ./artifacts - - ./scripts/benchmarks-ci.sh assets asset-hub-polkadot ./artifacts - - ./scripts/benchmarks-ci.sh assets asset-hub-westend ./artifacts - - export CURRENT_TIME=$(date '+%s') - - export BRANCHNAME="weights-asset-hub-polkadot-${CI_COMMIT_BRANCH}-${CURRENT_TIME}" - - !reference [.git-commit-push, script] - - ./scripts/ci/create-benchmark-pr.sh "[benchmarks] Update weights for asset-hub-kusama/-polkadot" "$BRANCHNAME" - - rm -f ./artifacts/polkadot-parachain - - rm -f ./artifacts/test-parachain - after_script: - - rm -rf .git/config - tags: - - weights-vm - -benchmarks-collectives: - stage: benchmarks-run - timeout: 1d - extends: - - .docker-env - - .collect-artifacts - - .benchmarks-refs - before_script: - - !reference [.docker-env, before_script] - script: - - ./scripts/benchmarks-ci.sh collectives collectives-polkadot ./artifacts - - export CURRENT_TIME=$(date '+%s') - - export BRANCHNAME="weights-collectives-${CI_COMMIT_BRANCH}-${CURRENT_TIME}" - - !reference [.git-commit-push, script] - - ./scripts/ci/create-benchmark-pr.sh "[benchmarks] Update weights for collectives" "$BRANCHNAME" - - rm -f ./artifacts/polkadot-parachain - - rm -f ./artifacts/test-parachain - after_script: - - rm -rf .git/config - tags: - - weights-vm - -benchmarks-bridge-hubs: - stage: benchmarks-run - timeout: 1d - extends: - - .docker-env - - .collect-artifacts - - .benchmarks-refs - before_script: - - !reference [.docker-env, before_script] - script: - - ./scripts/benchmarks-ci.sh bridge-hubs bridge-hub-polkadot ./artifacts - - ./scripts/benchmarks-ci.sh bridge-hubs bridge-hub-kusama ./artifacts - - ./scripts/benchmarks-ci.sh bridge-hubs bridge-hub-rococo ./artifacts - - export CURRENT_TIME=$(date '+%s') - - export BRANCHNAME="weights-bridge-hubs-${CI_COMMIT_BRANCH}-${CURRENT_TIME}" - - !reference [.git-commit-push, script] - - ./scripts/ci/create-benchmark-pr.sh "[benchmarks] Update weights for bridge-hubs" "$BRANCHNAME" - - rm -f ./artifacts/polkadot-parachain - - rm -f ./artifacts/test-parachain - after_script: - - rm -rf .git/config - tags: - - weights-vm diff --git a/cumulus/scripts/ci/gitlab/pipeline/build.yml b/cumulus/scripts/ci/gitlab/pipeline/build.yml deleted file mode 100644 index b47dd9fe30df..000000000000 --- 
a/cumulus/scripts/ci/gitlab/pipeline/build.yml +++ /dev/null @@ -1,138 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "build" stage - -build-linux-stable: - stage: build - extends: - - .docker-env - - .common-refs - - .collect-artifacts - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: check-rustdoc - artifacts: false - script: - - echo "___Building a binary, please refrain from using it in production since it is built with debug assertions.___" - - time cargo build --release --locked --bin polkadot-parachain - - echo "___Packing the artifacts___" - - mkdir -p ./artifacts - - mv ./target/release/polkadot-parachain ./artifacts/. - - echo "___The VERSION is either a tag name or the current branch if not triggered by a tag___" - - echo ${CI_COMMIT_REF_NAME} | tee ./artifacts/VERSION - -build-test-parachain: - stage: build - extends: - - .docker-env - - .common-refs - - .collect-artifacts - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: check-rustdoc - artifacts: false - script: - - echo "___Building a binary, please refrain from using it in production since it is built with debug assertions.___" - - time cargo build --release --locked --bin test-parachain - - echo "___Packing the artifacts___" - - mkdir -p ./artifacts - - mv ./target/release/test-parachain ./artifacts/. - - mkdir -p ./artifacts/zombienet - - mv ./target/release/wbuild/cumulus-test-runtime/wasm_binary_spec_version_incremented.rs.compact.compressed.wasm ./artifacts/zombienet/.
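For orientation, a minimal local approximation of the build-linux-stable step above might look like the following sketch (flags and paths are copied from the job; the fallback to the local branch name is an illustrative stand-in for ${CI_COMMIT_REF_NAME}, which only exists in CI):

# build with the same debug-assertion flags the job enables
export RUSTFLAGS="-Cdebug-assertions=y -Dwarnings"
time cargo build --release --locked --bin polkadot-parachain
# pack the artifacts the same way the job does
mkdir -p ./artifacts
cp ./target/release/polkadot-parachain ./artifacts/
echo "${CI_COMMIT_REF_NAME:-$(git rev-parse --abbrev-ref HEAD)}" | tee ./artifacts/VERSION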
- -# build runtime only if files in $RUNTIME_PATH/$RUNTIME_NAME were changed -.build-runtime-template: &build-runtime-template - stage: build - extends: - - .docker-env - - .pr-refs - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: check-rustdoc - artifacts: false - variables: - RUNTIME_PATH: "parachains/runtimes/assets" - script: - - cd ${RUNTIME_PATH} - - for directory in $(echo */); do - echo "_____Running cargo check for ${directory} ______"; - cd ${directory}; - pwd; - SKIP_WASM_BUILD=1 cargo check --locked; - cd ..; - done - -# DAG: build-runtime-assets -> build-runtime-collectives -> build-runtime-bridge-hubs -# DAG: build-runtime-assets -> build-runtime-collectives -> build-runtime-contracts -# DAG: build-runtime-assets -> build-runtime-starters -> build-runtime-testing -build-runtime-assets: - <<: *build-runtime-template - variables: - RUNTIME_PATH: "parachains/runtimes/assets" - -build-runtime-collectives: - <<: *build-runtime-template - variables: - RUNTIME_PATH: "parachains/runtimes/collectives" - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: build-runtime-assets - artifacts: false - -build-runtime-bridge-hubs: - <<: *build-runtime-template - variables: - RUNTIME_PATH: "parachains/runtimes/bridge-hubs" - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: build-runtime-collectives - artifacts: false - -build-runtime-contracts: - <<: *build-runtime-template - variables: - RUNTIME_PATH: "parachains/runtimes/contracts" - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: build-runtime-collectives - artifacts: false - -build-runtime-starters: - <<: *build-runtime-template - variables: - RUNTIME_PATH: "parachains/runtimes/starters" - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: build-runtime-assets - artifacts: false - -build-runtime-testing: - <<: *build-runtime-template - variables: - RUNTIME_PATH: "parachains/runtimes/testing" - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: build-runtime-starters - artifacts: false - -build-short-benchmark: - stage: build - extends: - - .docker-env - - .common-refs - - .collect-artifacts - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: check-rustdoc - artifacts: false - script: - - cargo build --profile release --locked --features=runtime-benchmarks --bin polkadot-parachain - - mkdir -p ./artifacts - - cp ./target/release/polkadot-parachain ./artifacts/ diff --git a/cumulus/scripts/ci/gitlab/pipeline/integration_tests.yml b/cumulus/scripts/ci/gitlab/pipeline/integration_tests.yml deleted file mode 100644 index a884361aa7cd..000000000000 --- a/cumulus/scripts/ci/gitlab/pipeline/integration_tests.yml +++ /dev/null @@ -1,2 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "integration_stage" stage diff --git a/cumulus/scripts/ci/gitlab/pipeline/publish.yml b/cumulus/scripts/ci/gitlab/pipeline/publish.yml deleted file mode 100644 index e59ff1676981..000000000000 --- a/cumulus/scripts/ci/gitlab/pipeline/publish.yml +++ /dev/null @@ -1,105 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "publish" stage - -.build-push-image: - image: $BUILDAH_IMAGE - variables: - DOCKERFILE: "" # 
docker/path-to.Dockerfile - IMAGE_NAME: "" # docker.io/paritypr/image_name - VERSION: "${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}" - script: - - test "$PARITYPR_USER" -a "$PARITYPR_PASS" || - ( echo "no docker credentials provided"; exit 1 ) - - $BUILDAH_COMMAND build - --format=docker - --build-arg VCS_REF="${CI_COMMIT_SHA}" - --build-arg BUILD_DATE="$(date -u '+%Y-%m-%dT%H:%M:%SZ')" - --build-arg IMAGE_NAME="${IMAGE_NAME}" - --tag "$IMAGE_NAME:$VERSION" - --file ${DOCKERFILE} . - - echo "$PARITYPR_PASS" | - buildah login --username "$PARITYPR_USER" --password-stdin docker.io - - $BUILDAH_COMMAND info - - $BUILDAH_COMMAND push --format=v2s2 "$IMAGE_NAME:$VERSION" - after_script: - - buildah logout --all - -build-push-image-polkadot-parachain-debug: - stage: publish - extends: - - .kubernetes-env - - .common-refs - - .build-push-image - needs: - - job: build-linux-stable - artifacts: true - variables: - DOCKERFILE: "docker/polkadot-parachain-debug_unsigned_injected.Dockerfile" - IMAGE_NAME: "docker.io/paritypr/polkadot-parachain-debug" - VERSION: "${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}" - -build-push-image-test-parachain: - stage: publish - extends: - - .kubernetes-env - - .common-refs - - .build-push-image - needs: - - job: build-test-parachain - artifacts: true - variables: - DOCKERFILE: "docker/test-parachain_injected.Dockerfile" - IMAGE_NAME: "docker.io/paritypr/test-parachain" - VERSION: "${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}" - -publish-s3: - stage: publish - extends: - - .kubernetes-env - - .publish-refs - image: paritytech/awscli:latest - needs: - - job: build-linux-stable - artifacts: true - variables: - GIT_STRATEGY: none - BUCKET: "releases.parity.io" - PREFIX: "cumulus/${ARCH}-${DOCKER_OS}" - script: - - echo "___Publishing a binary with debug assertions!___" - - echo "___VERSION = $(cat ./artifacts/VERSION) ___" - - aws s3 sync ./artifacts/ s3://${BUCKET}/${PREFIX}/$(cat ./artifacts/VERSION)/ - - echo "___Updating objects in latest path___" - - aws s3 sync s3://${BUCKET}/${PREFIX}/$(cat ./artifacts/VERSION)/ s3://${BUCKET}/${PREFIX}/latest/ - after_script: - - aws s3 ls s3://${BUCKET}/${PREFIX}/latest/ - --recursive --human-readable --summarize - -publish-benchmarks-assets-s3: &publish-benchmarks - stage: publish - extends: - - .kubernetes-env - - .benchmarks-refs - image: paritytech/awscli:latest - needs: - - job: benchmarks-assets - artifacts: true - variables: - GIT_STRATEGY: none - BUCKET: "releases.parity.io" - PREFIX: "cumulus/$CI_COMMIT_REF_NAME/benchmarks-assets" - script: - - echo "___Publishing benchmark results___" - - aws s3 sync ./artifacts/ s3://${BUCKET}/${PREFIX}/ - after_script: - - aws s3 ls s3://${BUCKET}/${PREFIX}/ --recursive --human-readable --summarize - -publish-benchmarks-collectives-s3: - <<: *publish-benchmarks - variables: - GIT_STRATEGY: none - BUCKET: "releases.parity.io" - PREFIX: "cumulus/$CI_COMMIT_REF_NAME/benchmarks-collectives" - needs: - - job: benchmarks-collectives - artifacts: true diff --git a/cumulus/scripts/ci/gitlab/pipeline/short-benchmarks.yml b/cumulus/scripts/ci/gitlab/pipeline/short-benchmarks.yml deleted file mode 100644 index f63ad1e0d045..000000000000 --- a/cumulus/scripts/ci/gitlab/pipeline/short-benchmarks.yml +++ /dev/null @@ -1,56 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "short-benchmarks" stage - -# Run all pallet benchmarks only once to check if there are any errors -.short-benchmark-template: &short-bench - stage: short-benchmarks - extends: - - 
.common-refs - - .docker-env - needs: - - job: build-short-benchmark - artifacts: true - variables: - RUNTIME_CHAIN: benchmarked-runtime-chain - script: - - ./artifacts/polkadot-parachain benchmark pallet --wasm-execution compiled --chain $RUNTIME_CHAIN --pallet "*" --extrinsic "*" --steps 2 --repeat 1 - -short-benchmark-asset-hub-polkadot: - <<: *short-bench - variables: - RUNTIME_CHAIN: asset-hub-polkadot-dev - -short-benchmark-asset-hub-kusama: - <<: *short-bench - variables: - RUNTIME_CHAIN: asset-hub-kusama-dev - -short-benchmark-asset-hub-westend: - <<: *short-bench - variables: - RUNTIME_CHAIN: asset-hub-westend-dev - -short-benchmark-bridge-hub-polkadot: - <<: *short-bench - variables: - RUNTIME_CHAIN: bridge-hub-polkadot-dev - -short-benchmark-bridge-hub-kusama: - <<: *short-bench - variables: - RUNTIME_CHAIN: bridge-hub-kusama-dev - -short-benchmark-bridge-hub-rococo: - <<: *short-bench - variables: - RUNTIME_CHAIN: bridge-hub-rococo-dev - -short-benchmark-collectives-polkadot : - <<: *short-bench - variables: - RUNTIME_CHAIN: collectives-polkadot-dev - -short-benchmark-glutton-kusama : - <<: *short-bench - variables: - RUNTIME_CHAIN: glutton-kusama-dev-1300 diff --git a/cumulus/scripts/ci/gitlab/pipeline/test.yml b/cumulus/scripts/ci/gitlab/pipeline/test.yml deleted file mode 100644 index 2d84010cb74e..000000000000 --- a/cumulus/scripts/ci/gitlab/pipeline/test.yml +++ /dev/null @@ -1,111 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "test" stage - -# It's more like a check, but we want to run this job with real tests in parallel -find-fail-ci-phrase: - stage: test - variables: - CI_IMAGE: "paritytech/tools:latest" - ASSERT_REGEX: "FAIL-CI" - GIT_DEPTH: 1 - extends: - - .kubernetes-env - script: - - set +e - - rg --line-number --hidden --type rust --glob '!{.git,target}' "$ASSERT_REGEX" .; exit_status=$? - - if [ $exit_status -eq 0 ]; then - echo "$ASSERT_REGEX was found, exiting with 1"; - exit 1; - else - echo "No $ASSERT_REGEX was found, exiting with 0"; - exit 0; - fi - -test-linux-stable: - stage: test - extends: - - .docker-env - - .common-refs - - .pipeline-stopper-artifacts - before_script: - - !reference [.docker-env, before_script] - - !reference [.pipeline-stopper-vars, before_script] - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - script: - - time cargo nextest run --all --release --locked --run-ignored all - -test-doc: - stage: test - extends: - - .docker-env - - .common-refs - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - script: - - time cargo test --doc - -check-runtime-benchmarks: - stage: test - extends: - - .docker-env - - .common-refs - script: - # Check that the node will compile with `runtime-benchmarks` feature flag. - - time cargo check --locked --all --features runtime-benchmarks - # Check that parachain-template will compile with `runtime-benchmarks` feature flag. 
- - time cargo check --locked -p parachain-template-node --features runtime-benchmarks - -cargo-check-try-runtime: - stage: test - extends: - - .docker-env - - .common-refs - variables: - RUSTFLAGS: "-D warnings" - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: check-runtime-benchmarks - artifacts: false - script: - # Check that the node will compile with `try-runtime` feature flag. - - time cargo check --locked --all --features try-runtime - # Check that parachain-template will compile with `try-runtime` feature flag. - - time cargo check --locked -p parachain-template-node --features try-runtime - -check-rustdoc: - stage: test - extends: - - .docker-env - - .common-refs - variables: - SKIP_WASM_BUILD: 1 - RUSTDOCFLAGS: "-Dwarnings" - script: - - time cargo doc --workspace --all-features --verbose --no-deps - -cargo-check-benches: - stage: test - extends: - - .docker-env - - .common-refs - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: check-rustdoc - artifacts: false - script: - - time cargo check --all --benches - -cargo-clippy: - stage: test - extends: - - .docker-env - - .common-refs - script: - - echo $RUSTFLAGS - - cargo version && cargo clippy --version - - SKIP_WASM_BUILD=1 env -u RUSTFLAGS cargo clippy --locked --all-targets --workspace diff --git a/cumulus/scripts/ci/gitlab/pipeline/zombienet.yml b/cumulus/scripts/ci/gitlab/pipeline/zombienet.yml deleted file mode 100644 index d5ab3e13d42e..000000000000 --- a/cumulus/scripts/ci/gitlab/pipeline/zombienet.yml +++ /dev/null @@ -1,141 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "zombienet" stage - -.zombienet-before-script: - before_script: - - echo "Zombie-net Tests Config" - - echo "${ZOMBIENET_IMAGE}" - - echo "${RELAY_IMAGE}" - - echo "${COL_IMAGE}" - - echo "${GH_DIR}" - - export DEBUG=zombie - - export RELAY_IMAGE=${POLKADOT_IMAGE} - - export COL_IMAGE=${COL_IMAGE} - -.zombienet-after-script: - after_script: - - mkdir -p ./zombienet-logs - - cp /tmp/zombie*/logs/* ./zombienet-logs/ - -# common settings for all zombienet jobs -.zombienet-common: - stage: zombienet - image: "${ZOMBIENET_IMAGE}" - needs: - - job: build-push-image-test-parachain - artifacts: true - variables: - POLKADOT_IMAGE: "docker.io/paritypr/polkadot-debug:master" - GH_DIR: "https://github.com/paritytech/cumulus/tree/${CI_COMMIT_SHORT_SHA}/zombienet/tests" - COL_IMAGE: "docker.io/paritypr/test-parachain:${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}" - FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1 - artifacts: - name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" - when: always - expire_in: 2 days - paths: - - ./zombienet-logs - allow_failure: false - retry: 2 - tags: - - zombienet-polkadot-integration-test - -zombienet-0001-sync_blocks_from_tip_without_connected_collator: - extends: - - .zombienet-common - - .zombienet-refs - - .zombienet-before-script - - .zombienet-after-script - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}" - --concurrency=1 - --test="0001-sync_blocks_from_tip_without_connected_collator.zndsl" - -zombienet-0002-pov_recovery: - extends: - - .zombienet-common - - .zombienet-refs - - .zombienet-before-script - - .zombienet-after-script - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}" - --concurrency=1 - --test="0002-pov_recovery.zndsl" - -zombienet-0003-full_node_catching_up: - 
extends: - - .zombienet-common - - .zombienet-refs - - .zombienet-before-script - - .zombienet-after-script - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}" - --concurrency=1 - --test="0003-full_node_catching_up.zndsl" - -zombienet-0004-runtime_upgrade: - extends: - - .zombienet-common - - .zombienet-refs - - .zombienet-before-script - - .zombienet-after-script - needs: - - !reference [.zombienet-common, needs] - - job: build-test-parachain - artifacts: true - before_script: - - ls -ltr * - - cp ./artifacts/zombienet/wasm_binary_spec_version_incremented.rs.compact.compressed.wasm /tmp/ - - ls /tmp - - !reference [.zombienet-before-script, before_script] - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}" - --concurrency=1 - --test="0004-runtime_upgrade.zndsl" - -zombienet-0005-migrate_solo_to_para: - extends: - - .zombienet-common - - .zombienet-refs - - .zombienet-before-script - - .zombienet-after-script - needs: - - !reference [.zombienet-common, needs] - - job: build-test-parachain - artifacts: true - before_script: - - ls -ltr * - - !reference [.zombienet-before-script, before_script] - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}" - --concurrency=1 - --test="0005-migrate_solo_to_para.zndsl" - -zombienet-0006-rpc_collator_builds_blocks: - extends: - - .zombienet-common - - .zombienet-refs - - .zombienet-before-script - - .zombienet-after-script - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}" - --concurrency=1 - --test="0006-rpc_collator_builds_blocks.zndsl" - -zombienet-0007-full_node_warp_sync: - extends: - - .zombienet-common - - .zombienet-refs - - .zombienet-before-script - - .zombienet-after-script - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}" - --concurrency=1 - --test="0007-full_node_warp_sync.zndsl" diff --git a/cumulus/scripts/ci/gitlab/prettier.sh b/cumulus/scripts/ci/gitlab/prettier.sh deleted file mode 100755 index 299bbee179dc..000000000000 --- a/cumulus/scripts/ci/gitlab/prettier.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -# meant to be installed via -# git config filter.ci-prettier.clean "scripts/ci/gitlab/prettier.sh" - -prettier --parser yaml diff --git a/substrate/scripts/ci/common/lib.sh b/substrate/scripts/ci/common/lib.sh deleted file mode 100755 index 08c2fe81ada0..000000000000 --- a/substrate/scripts/ci/common/lib.sh +++ /dev/null @@ -1,117 +0,0 @@ -#!/bin/sh - -api_base="https://api.github.com/repos" - -# Function to take 2 git tags/commits and get any lines from commit messages -# that contain something that looks like a PR reference: e.g., (#1234) -sanitised_git_logs(){ - git --no-pager log --pretty=format:"%s" "$1...$2" | - # Only find messages referencing a PR - grep -E '\(#[0-9]+\)' | - # Strip any asterisks - sed 's/^* //g' | - # And add them all back - sed 's/^/* /g' -} - -# Returns the last published release on github -# Note: we can't just use /latest because that ignores prereleases -# repo: 'organization/repo' -# Usage: last_github_release "$repo" -last_github_release(){ - i=0 - # Iterate over releases until we find the last release that's not just a draft - while [ $i -lt 29 ]; do - out=$(curl -H "Authorization: token $GITHUB_RELEASE_TOKEN" -s "$api_base/$1/releases" | jq ".[$i]") - echo "$out" - # Ugh when echoing to jq, we need to translate newlines 
into spaces :/ - if [ "$(echo "$out" | tr '\r\n' ' ' | jq '.draft')" = "false" ]; then - echo "$out" | tr '\r\n' ' ' | jq '.tag_name' - return - else - i=$((i + 1)) - fi - done -} - -# Checks whether a tag on github has been verified -# repo: 'organization/repo' -# tagver: 'v1.2.3' -# Usage: check_tag $repo $tagver -check_tag () { - repo=$1 - tagver=$2 - tag_out=$(curl -H "Authorization: token $GITHUB_RELEASE_TOKEN" -s "$api_base/$repo/git/refs/tags/$tagver") - tag_sha=$(echo "$tag_out" | jq -r .object.sha) - object_url=$(echo "$tag_out" | jq -r .object.url) - if [ "$tag_sha" = "null" ]; then - return 2 - fi - verified_str=$(curl -H "Authorization: token $GITHUB_RELEASE_TOKEN" -s "$object_url" | jq -r .verification.verified) - if [ "$verified_str" = "true" ]; then - # Verified, everything is good - return 0 - else - # Not verified. Bad juju. - return 1 - fi -} - -# Checks whether a given PR has a given label. -# repo: 'organization/repo' -# pr_id: 12345 -# label: B1-silent -# Usage: has_label $repo $pr_id $label -has_label(){ - repo="$1" - pr_id="$2" - label="$3" - - # These will exist if the function is called in Gitlab. - # If the function's called in Github, we should have GITHUB_ACCESS_TOKEN set - # already. - if [ -n "$GITHUB_RELEASE_TOKEN" ]; then - GITHUB_TOKEN="$GITHUB_RELEASE_TOKEN" - elif [ -n "$GITHUB_PR_TOKEN" ]; then - GITHUB_TOKEN="$GITHUB_PR_TOKEN" - fi - - out=$(curl -H "Authorization: token $GITHUB_TOKEN" -s "$api_base/$repo/pulls/$pr_id") - [ -n "$(echo "$out" | tr -d '\r\n' | jq ".labels | .[] | select(.name==\"$label\")")" ] -} - -# Formats a message into a JSON string for posting to Matrix -# message: 'any plaintext message' -# formatted_message: 'optional message formatted in html' -# Usage: structure_message $content $formatted_content (optional) -structure_message() { - if [ -z "$2" ]; then - body=$(jq -Rs --arg body "$1" '{"msgtype": "m.text", $body}' < /dev/null) - else - body=$(jq -Rs --arg body "$1" --arg formatted_body "$2" '{"msgtype": "m.text", $body, "format": "org.matrix.custom.html", $formatted_body}' < /dev/null) - fi - echo "$body" -} - -# Post a message to a matrix room -# body: '{body: "JSON string produced by structure_message"}' -# room_id: !fsfSRjgjBWEWffws:matrix.parity.io -# access_token: see https://matrix.org/docs/guides/client-server-api/ -# Usage: send_message $body (json formatted) $room_id $access_token -send_message() { - curl -XPOST -d "$1" "https://m.parity.io/_matrix/client/r0/rooms/$2/send/m.room.message?access_token=$3" -} - -# Check for runtime changes between two commits. This is defined as any changes -# to bin/node/src/runtime, frame/ and primitives/sr_* trees. 
-has_runtime_changes() { - from=$1 - to=$2 - if git diff --name-only "${from}...${to}" \ - | grep -q -e '^frame/' -e '^primitives/' - then - return 0 - else - return 1 - fi -} diff --git a/substrate/scripts/ci/github/check_labels.sh b/substrate/scripts/ci/github/check_labels.sh deleted file mode 100755 index 7b0aed9fe734..000000000000 --- a/substrate/scripts/ci/github/check_labels.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env bash -set -e - -#shellcheck source=../common/lib.sh -source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )/../common/lib.sh" - -repo="$GITHUB_REPOSITORY" -pr="$GITHUB_PR" - -ensure_labels() { - for label in "$@"; do - if has_label "$repo" "$pr" "$label"; then - return 0 - fi - done - return 1 -} - -# Must have one of the following labels -releasenotes_labels=( - 'B0-silent' - 'B3-apinoteworthy' - 'B5-clientnoteworthy' - 'B7-runtimenoteworthy' -) - -criticality_labels=( - 'C1-low 📌' - 'C3-medium 📣' - 'C7-high ❗️' - 'C9-critical ‼️' -) - -audit_labels=( - 'D1-audited 👍' - 'D2-notlive 💤' - 'D3-trivial 🧸' - 'D5-nicetohaveaudit ⚠️' - 'D9-needsaudit 👮' -) - -echo "[+] Checking release notes (B) labels" -if ensure_labels "${releasenotes_labels[@]}"; then - echo "[+] Release notes label detected. All is well." -else - echo "[!] Release notes label not detected. Please add one of: ${releasenotes_labels[*]}" - exit 1 -fi - -echo "[+] Checking release criticality (C) labels" -if ensure_labels "${criticality_labels[@]}"; then - echo "[+] Release criticality label detected. All is well." -else - echo "[!] Release criticality label not detected. Please add one of: ${criticality_labels[*]}" - exit 1 -fi - -if has_runtime_changes origin/master "${HEAD_SHA}"; then - echo "[+] Runtime changes detected. Checking audit (D) labels" - if ensure_labels "${audit_labels[@]}"; then - echo "[+] Release audit label detected. All is well." - else - echo "[!] Release audit label not detected. 
Please add one of: ${audit_labels[*]}" - exit 1 - fi -fi - -exit 0 diff --git a/substrate/scripts/ci/github/generate_changelog.sh b/substrate/scripts/ci/github/generate_changelog.sh deleted file mode 100755 index 32ac1760a611..000000000000 --- a/substrate/scripts/ci/github/generate_changelog.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env bash - -# shellcheck source=../common/lib.sh -source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )/../common/lib.sh" - -version="$2" -last_version="$1" - -all_changes="$(sanitised_git_logs "$last_version" "$version")" -runtime_changes="" -api_changes="" -client_changes="" -changes="" -migrations="" - -while IFS= read -r line; do - pr_id=$(echo "$line" | sed -E 's/.*#([0-9]+)\)$/\1/') - - # Skip if the PR has the silent label - this allows us to skip a few requests - if has_label 'paritytech/substrate' "$pr_id" 'B0-silent'; then - continue - fi - if has_label 'paritytech/substrate' "$pr_id" 'B3-apinoteworthy' ; then - api_changes="$api_changes -$line" - fi - if has_label 'paritytech/substrate' "$pr_id" 'B5-clientnoteworthy'; then - client_changes="$client_changes -$line" - fi - if has_label 'paritytech/substrate' "$pr_id" 'B7-runtimenoteworthy'; then - runtime_changes="$runtime_changes -$line" - fi - if has_label 'paritytech/substrate' "$pr_id" 'E1-runtime-migration'; then - migrations="$migrations -$line" - fi -done <<< "$all_changes" - -# Make the substrate section if there are any substrate changes -if [ -n "$runtime_changes" ] || - [ -n "$api_changes" ] || - [ -n "$client_changes" ] || - [ -n "$migrations" ]; then - changes=$(cat << EOF -Substrate changes ------------------ - -EOF -) - if [ -n "$runtime_changes" ]; then - changes="$changes - -Runtime -------- -$runtime_changes" - fi - if [ -n "$client_changes" ]; then - changes="$changes - -Client ------- -$client_changes" - fi - if [ -n "$api_changes" ]; then - changes="$changes - -API ---- -$api_changes" - fi - release_text="$release_text - -$changes" -fi -if [ -n "$migrations" ]; then - changes="$changes - -Runtime Migrations ------------------- -$migrations" -fi - -echo "$changes" diff --git a/substrate/scripts/ci/gitlab/check-each-crate.py b/substrate/scripts/ci/gitlab/check-each-crate.py deleted file mode 100755 index adad4f5bd583..000000000000 --- a/substrate/scripts/ci/gitlab/check-each-crate.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python3 - -# A script that checks each workspace crate individually. -# It's relevant to check workspace crates individually because otherwise their compilation problems -# due to feature misconfigurations won't be caught, as exemplified by -# https://github.com/paritytech/substrate/issues/12705 -# -# `check-each-crate.py target_group groups_total` -# -# - `target_group`: Integer starting from 1, the group this script should execute. -# - `groups_total`: Integer starting from 1, total number of groups. 
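For illustration only, the per-group invocation described above could be driven outside of CI roughly like this (the group count of 4 is arbitrary, and the path assumes the script is run from the repository root where it lived under scripts/ci/gitlab/):

TOTAL_GROUPS=4
for group in $(seq 1 "$TOTAL_GROUPS"); do
  # 1-based group index first, then the total number of groups
  ./scripts/ci/gitlab/check-each-crate.py "$group" "$TOTAL_GROUPS"
done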
- -import subprocess, sys - -# Get all crates -output = subprocess.check_output(["cargo", "tree", "--locked", "--workspace", "--depth", "0", "--prefix", "none"]) - -# Convert the output into a proper list -crates = [] -for line in output.splitlines(): - if line != b"": - crates.append(line.decode('utf8').split(" ")[0]) - -# Make the list unique and sorted -crates = list(set(crates)) -crates.sort() - -target_group = int(sys.argv[1]) - 1 -groups_total = int(sys.argv[2]) - -if len(crates) == 0: - print("No crates detected!", file=sys.stderr) - sys.exit(1) - -print(f"Total crates: {len(crates)}", file=sys.stderr) - -crates_per_group = len(crates) // groups_total - -# If this is the last runner, we need to take care of crates -# after the group that we lost because of the integer division. -if target_group + 1 == groups_total: - overflow_crates = len(crates) % groups_total -else: - overflow_crates = 0 - -print(f"Crates per group: {crates_per_group}", file=sys.stderr) - -# Check each crate -for i in range(0, crates_per_group + overflow_crates): - crate = crates_per_group * target_group + i - - print(f"Checking {crates[crate]}", file=sys.stderr) - - res = subprocess.run(["cargo", "check", "--locked", "-p", crates[crate]]) - - if res.returncode != 0: - sys.exit(1) diff --git a/substrate/scripts/ci/gitlab/check_runtime.sh b/substrate/scripts/ci/gitlab/check_runtime.sh deleted file mode 100755 index 71d6965ecf4f..000000000000 --- a/substrate/scripts/ci/gitlab/check_runtime.sh +++ /dev/null @@ -1,121 +0,0 @@ -#!/bin/sh -# -# -# check for any changes in the node/src/runtime, frame/ and primitives/sr_* trees. if -# there are any changes found, it should mark the PR breaksconsensus and -# "auto-fail" the PR if there isn't a change in the runtime/src/lib.rs file -# that alters the version. - -set -e # fail on any error - -#shellcheck source=../common/lib.sh -. "$(dirname "${0}")/../common/lib.sh" - -VERSIONS_FILE="bin/node/runtime/src/lib.rs" - -boldprint () { printf "|\n| \033[1m%s\033[0m\n|\n" "${@}"; } -boldcat () { printf "|\n"; while read -r l; do printf "| \033[1m%s\033[0m\n" "${l}"; done; printf "|\n" ; } - -github_label () { - echo - echo "# run github-api job for labeling it ${1}" - curl -sS -X POST \ - -F "token=${CI_JOB_TOKEN}" \ - -F "ref=master" \ - -F "variables[LABEL]=${1}" \ - -F "variables[PRNO]=${CI_COMMIT_REF_NAME}" \ - "${GITLAB_API}/projects/${GITHUB_API_PROJECT}/trigger/pipeline" -} - - -boldprint "latest 10 commits of ${CI_COMMIT_REF_NAME}" -git log --graph --oneline --decorate=short -n 10 - -boldprint "make sure the master branch and release tag are available in shallow clones" -git fetch --depth="${GIT_DEPTH:-100}" origin master -git fetch --depth="${GIT_DEPTH:-100}" origin release -git tag -f release FETCH_HEAD -git log -n1 release - - -boldprint "check if the wasm sources changed" -if ! has_runtime_changes origin/master "${CI_COMMIT_SHA}" -then - boldcat <<-EOT - - no changes to the runtime source code detected - - EOT - - exit 0 -fi - - - -# check for spec_version updates: if the spec versions changed, then there is -# consensus-critical logic that has changed. the runtime wasm blobs must be -# rebuilt. 
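# For illustration only, a hypothetical diff of ${VERSIONS_FILE} (bin/node/runtime/src/lib.rs)
# of the kind the sed expressions below are meant to match would contain lines such as:
#     -	spec_version: 268,
#     +	spec_version: 269,
# in which case sub_spec_version captures the old value (268) and add_spec_version the new one (269).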
- -add_spec_version="$(git diff tags/release ${CI_COMMIT_SHA} -- "${VERSIONS_FILE}" \ - | sed -n -r "s/^\+[[:space:]]+spec_version: +([0-9]+),$/\1/p")" -sub_spec_version="$(git diff tags/release ${CI_COMMIT_SHA} -- "${VERSIONS_FILE}" \ - | sed -n -r "s/^\-[[:space:]]+spec_version: +([0-9]+),$/\1/p")" - - - -if [ "${add_spec_version}" != "${sub_spec_version}" ] -then - - boldcat <<-EOT - - changes to the runtime sources and changes in the spec version. - - spec_version: ${sub_spec_version} -> ${add_spec_version} - - EOT - exit 0 - -else - # check for impl_version updates: if only the impl versions changed, we assume - # there is no consensus-critical logic that has changed. - - add_impl_version="$(git diff tags/release ${CI_COMMIT_SHA} -- "${VERSIONS_FILE}" \ - | sed -n -r 's/^\+[[:space:]]+impl_version: +([0-9]+),$/\1/p')" - sub_impl_version="$(git diff tags/release ${CI_COMMIT_SHA} -- "${VERSIONS_FILE}" \ - | sed -n -r 's/^\-[[:space:]]+impl_version: +([0-9]+),$/\1/p')" - - - # see if the impl version changed - if [ "${add_impl_version}" != "${sub_impl_version}" ] - then - boldcat <<-EOT - - changes to the runtime sources and changes in the impl version. - - impl_version: ${sub_impl_version} -> ${add_impl_version} - - EOT - exit 0 - fi - - - boldcat <<-EOT - - wasm source files changed but not the spec/impl version. If changes made do not alter logic, - just bump 'impl_version'. If they do change logic, bump 'spec_version'. - - source file directories: - - bin/node/src/runtime - - frame - - primitives/sr-* - - versions file: ${VERSIONS_FILE} - - EOT -fi - -# dropped through. there's something wrong; exit 1. - -exit 1 - -# vim: noexpandtab diff --git a/substrate/scripts/ci/gitlab/check_signed.sh b/substrate/scripts/ci/gitlab/check_signed.sh deleted file mode 100755 index 20d47c230476..000000000000 --- a/substrate/scripts/ci/gitlab/check_signed.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash - -# shellcheck source=../common/lib.sh -source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )/../common/lib.sh" - -version="$CI_COMMIT_TAG" - -echo '[+] Checking tag has been signed' -check_tag "paritytech/substrate" "$version" -case $? in - 0) echo '[+] Tag found and has been signed'; exit 0 - ;; - 1) echo '[!] Tag found but has not been signed. Aborting release.'; exit 1 - ;; - 2) echo '[!] Tag not found. Aborting release.'; exit 1 -esac diff --git a/substrate/scripts/ci/gitlab/ensure-deps.sh b/substrate/scripts/ci/gitlab/ensure-deps.sh deleted file mode 100755 index 7087200cef51..000000000000 --- a/substrate/scripts/ci/gitlab/ensure-deps.sh +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env bash - -# The script is meant to check if the rules regarding packages -# dependencies are satisfied. -# The general format is: -# [top-lvl-dir] MESSAGE/[other-top-dir] - -# For instance no crate within `./client` directory -# is allowed to import any crate with a directory path containing `frame`. -# Such rule is just: `client crates must not depend on anything in /frame`. - -# The script should be run from the main repo directory! 
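As a small illustration of the rule format described above, this is how a rule string is split by the cut calls used in check_rule further down (the example rule is the first MUST_NOT entry):

rule="client crates must not depend on anything in /frame"
from=$(echo $rule | cut -f1 -d\ )  # "client": the top-level directory to scan
to=$(echo $rule | cut -f2 -d\/)    # "frame": the directory that must not be referenced
echo "will scan ./$from for Cargo.toml path entries referencing ../$to"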
- -set -u - -# HARD FAILING -MUST_NOT=( - "client crates must not depend on anything in /frame" - "client crates must not depend on anything in /node" - "frame crates must not depend on anything in /node" - "frame crates must not depend on anything in /client" - "primitives crates must not depend on anything in /frame" -) - -# ONLY DISPLAYED, script still succeeds -PLEASE_DONT=( - "primitives crates should not depend on anything in /client" -) - -VIOLATIONS=() -PACKAGES=() - -function check_rule() { - rule=$1 - from=$(echo $rule | cut -f1 -d\ ) - to=$(echo $rule | cut -f2 -d\/) - - cd $from - echo "Checking rule '$rule'" - packages=$(find -name Cargo.toml | xargs grep -wn "path.*\.\.\/$to") - has_references=$(echo -n $packages | wc -c) - if [ "$has_references" != "0" ]; then - VIOLATIONS+=("$rule") - # Find packages that violate: - PACKAGES+=("$packages") - fi - cd - > /dev/null -} - -for rule in "${MUST_NOT[@]}" -do - check_rule "$rule"; -done - -# Only the MUST NOT will be counted towards failure -HARD_VIOLATIONS=${#VIOLATIONS[@]} - - -for rule in "${PLEASE_DONT[@]}" -do - check_rule "$rule"; -done - -# Display violations and fail -I=0 -for v in "${VIOLATIONS[@]}" -do - cat << EOF - -=========================================== -======= Violation of rule: $v -=========================================== -${PACKAGES[$I]} - - -EOF - I=$I+1 -done - -exit $HARD_VIOLATIONS diff --git a/substrate/scripts/ci/gitlab/pipeline/build.yml b/substrate/scripts/ci/gitlab/pipeline/build.yml deleted file mode 100644 index 8f63f6ecc391..000000000000 --- a/substrate/scripts/ci/gitlab/pipeline/build.yml +++ /dev/null @@ -1,215 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "build" stage - -# PIPELINE_SCRIPTS_TAG can be found in the project variables - -.check-dependent-project: - stage: build - # DAG: this is artificial dependency - needs: - - job: cargo-clippy - artifacts: false - extends: - - .docker-env - - .test-refs-no-trigger-prs-only - variables: - RUSTFLAGS: "-D warnings" - script: - - cargo install --locked --git https://github.com/paritytech/try-runtime-cli --rev a93c9b5abe5d31a4cf1936204f7e5c489184b521 - - git clone - --depth=1 - --branch="$PIPELINE_SCRIPTS_TAG" - https://github.com/paritytech/pipeline-scripts - - ./pipeline-scripts/check_dependent_project.sh - --org paritytech - --dependent-repo "$DEPENDENT_REPO" - --github-api-token "$GITHUB_PR_TOKEN" - --extra-dependencies "$EXTRA_DEPENDENCIES" - --companion-overrides "$COMPANION_OVERRIDES" - -.check-runtime-migration: - extends: - - .check-dependent-project - - .test-refs-no-trigger-prs-only - variables: - DEPENDENT_REPO: polkadot - COMPANION_OVERRIDES: | - substrate: polkadot-v* - polkadot: release-v* - COMPANION_CHECK_COMMAND: > - time cargo build --release -p "$NETWORK"-runtime --features try-runtime && - time try-runtime \ - --runtime ./target/release/wbuild/"$NETWORK"-runtime/target/wasm32-unknown-unknown/release/"$NETWORK"_runtime.wasm \ - on-runtime-upgrade --checks=pre-and-post live --uri wss://${NETWORK}-try-runtime-node.parity-chains.parity.io:443 - -# Individual jobs are set up for each dependent project so that they can be ran in parallel. -# Arguably we could generate a job for each companion in the PR's description using Gitlab's -# parent-child pipelines but that's more complicated. 
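For concreteness, with NETWORK=polkadot the COMPANION_CHECK_COMMAND above expands to roughly the following, run inside the checked-out dependent polkadot repository rather than in substrate itself:

time cargo build --release -p polkadot-runtime --features try-runtime
time try-runtime \
  --runtime ./target/release/wbuild/polkadot-runtime/target/wasm32-unknown-unknown/release/polkadot_runtime.wasm \
  on-runtime-upgrade --checks=pre-and-post \
  live --uri wss://polkadot-try-runtime-node.parity-chains.parity.io:443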
- -check-runtime-migration-polkadot: - extends: - - .check-runtime-migration - variables: - NETWORK: polkadot - -check-runtime-migration-kusama: - extends: .check-runtime-migration - variables: - NETWORK: kusama - -check-runtime-migration-rococo: - extends: .check-runtime-migration - variables: - NETWORK: rococo - allow_failure: true - -check-runtime-migration-westend: - extends: .check-runtime-migration - variables: - NETWORK: westend - -check-dependent-polkadot: - extends: .check-dependent-project - variables: - DEPENDENT_REPO: polkadot - COMPANION_OVERRIDES: | - substrate: polkadot-v* - polkadot: release-v* - # enable the same feature flags as polkadot's test-linux-stable - COMPANION_CHECK_COMMAND: > - cargo check --all-targets --workspace - --features=runtime-benchmarks,runtime-metrics,try-runtime - rules: - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ #PRs - -check-dependent-cumulus: - extends: .check-dependent-project - variables: - DEPENDENT_REPO: cumulus - EXTRA_DEPENDENCIES: polkadot - COMPANION_OVERRIDES: | - substrate: polkadot-v* - polkadot: release-v* - rules: - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ #PRs - -build-linux-substrate: - stage: build - extends: - - .collect-artifacts - - .docker-env - - .build-refs - variables: - # this variable gets overriden by "rusty-cachier environment inject", use the value as default - CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" - needs: - - job: test-linux-stable - artifacts: false - before_script: - - !reference [.timestamp, before_script] - - !reference [.job-switcher, before_script] - - mkdir -p ./artifacts/substrate/ - - !reference [.rusty-cachier, before_script] - # tldr: we need to checkout the branch HEAD explicitly because of our dynamic versioning approach while building the substrate binary - # see https://github.com/paritytech/ci_cd/issues/682#issuecomment-1340953589 - - git checkout -B "$CI_COMMIT_REF_NAME" "$CI_COMMIT_SHA" - script: - - rusty-cachier snapshot create - - WASM_BUILD_NO_COLOR=1 time cargo build --locked --release -p node-cli --verbose - - mv $CARGO_TARGET_DIR/release/substrate-node ./artifacts/substrate/substrate - - echo -n "Substrate version = " - - if [ "${CI_COMMIT_TAG}" ]; then - echo "${CI_COMMIT_TAG}" | tee ./artifacts/substrate/VERSION; - else - ./artifacts/substrate/substrate --version | - cut -d ' ' -f 2 | tee ./artifacts/substrate/VERSION; - fi - - sha256sum ./artifacts/substrate/substrate | tee ./artifacts/substrate/substrate.sha256 - - cp -r ./scripts/ci/docker/substrate.Dockerfile ./artifacts/substrate/ - - printf '\n# building node-template\n\n' - - ./scripts/ci/node-template-release.sh ./artifacts/substrate/substrate-node-template.tar.gz - - rusty-cachier cache upload - -.build-subkey: - stage: build - extends: - - .collect-artifacts - - .docker-env - - .publish-refs - variables: - # this variable gets overriden by "rusty-cachier environment inject", use the value as default - CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" - before_script: - - !reference [.timestamp, before_script] - - !reference [.job-switcher, before_script] - - mkdir -p ./artifacts/subkey - - !reference [.rusty-cachier, before_script] - script: - - rusty-cachier snapshot create - - cd ./bin/utils/subkey - - SKIP_WASM_BUILD=1 time cargo build --locked --release --verbose - - cd - - - mv $CARGO_TARGET_DIR/release/subkey ./artifacts/subkey/. 
- - echo -n "Subkey version = " - - ./artifacts/subkey/subkey --version | - sed -n -E 's/^subkey ([0-9.]+.*)/\1/p' | - tee ./artifacts/subkey/VERSION; - - sha256sum ./artifacts/subkey/subkey | tee ./artifacts/subkey/subkey.sha256 - - cp -r ./scripts/ci/docker/subkey.Dockerfile ./artifacts/subkey/ - - rusty-cachier cache upload - -build-subkey-linux: - extends: .build-subkey - -build-subkey-macos: - extends: .build-subkey - # duplicating before_script & script sections from .build-subkey hidden job - # to overwrite rusty-cachier integration as it doesn't work on macos - before_script: - # skip timestamp script, the osx bash doesn't support printf %()T - - !reference [.job-switcher, before_script] - - mkdir -p ./artifacts/subkey - script: - - cd ./bin/utils/subkey - - SKIP_WASM_BUILD=1 time cargo build --locked --release --verbose - - cd - - - mv ./target/release/subkey ./artifacts/subkey/. - - echo -n "Subkey version = " - - ./artifacts/subkey/subkey --version | - sed -n -E 's/^subkey ([0-9.]+.*)/\1/p' | - tee ./artifacts/subkey/VERSION; - - sha256sum ./artifacts/subkey/subkey | tee ./artifacts/subkey/subkey.sha256 - - cp -r ./scripts/ci/docker/subkey.Dockerfile ./artifacts/subkey/ - after_script: [""] - tags: - - osx - -build-rustdoc: - stage: build - extends: - - .docker-env - - .test-refs - variables: - SKIP_WASM_BUILD: 1 - DOC_INDEX_PAGE: "substrate/index.html" # default redirected page - # this variable gets overriden by "rusty-cachier environment inject", use the value as default - CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" - artifacts: - name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}-doc" - when: on_success - expire_in: 7 days - paths: - - ./crate-docs/ - # DAG: this is artificial dependency - needs: - - job: cargo-clippy - artifacts: false - script: - - rusty-cachier snapshot create - - time cargo doc --locked --workspace --all-features --verbose --no-deps - - rm -f $CARGO_TARGET_DIR/doc/.lock - - mv $CARGO_TARGET_DIR/doc ./crate-docs - # FIXME: remove me after CI image gets nonroot - - chown -R nonroot:nonroot ./crate-docs - - echo "" > ./crate-docs/index.html - - rusty-cachier cache upload diff --git a/substrate/scripts/ci/gitlab/pipeline/check.yml b/substrate/scripts/ci/gitlab/pipeline/check.yml deleted file mode 100644 index 576daec9b433..000000000000 --- a/substrate/scripts/ci/gitlab/pipeline/check.yml +++ /dev/null @@ -1,78 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "check" stage - -check-runtime: - stage: check - extends: - - .kubernetes-env - - .test-refs-no-trigger-prs-only - variables: - CI_IMAGE: "paritytech/tools:latest" - GITLAB_API: "https://gitlab.parity.io/api/v4" - GITHUB_API_PROJECT: "parity%2Finfrastructure%2Fgithub-api" - script: - - ./scripts/ci/gitlab/check_runtime.sh - allow_failure: true - -check-signed-tag: - stage: check - extends: .kubernetes-env - variables: - CI_IMAGE: "paritytech/tools:latest" - rules: - - if: $CI_COMMIT_REF_NAME =~ /^ci-release-.*$/ - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. 
v1.0, v2.1rc1 - script: - - ./scripts/ci/gitlab/check_signed.sh - -test-dependency-rules: - stage: check - extends: - - .kubernetes-env - - .test-refs-no-trigger-prs-only - variables: - CI_IMAGE: "paritytech/tools:latest" - script: - - ./scripts/ci/gitlab/ensure-deps.sh - -test-rust-features: - stage: check - extends: - - .kubernetes-env - - .test-refs-no-trigger-prs-only - script: - - git clone - --depth=1 - --branch="$PIPELINE_SCRIPTS_TAG" - https://github.com/paritytech/pipeline-scripts - - bash ./pipeline-scripts/rust-features.sh . - -test-rust-feature-propagation: - stage: check - extends: - - .kubernetes-env - - .test-refs-no-trigger-prs-only - script: - - cargo install --locked --version 0.7.4 -q -f zepter && zepter --version - - echo "👉 Hello developer! If you see this CI check failing then it means that one of the crates is missing a feature for one of its dependencies. The output below tells you which feature needs to be added for which dependency to which crate. You can do this by modifying the Cargo.toml file. For more context see the MR where this check was introduced https://github.com/paritytech/substrate/pull/14660" - - zepter lint propagate-feature --feature try-runtime --left-side-feature-missing=ignore --workspace --feature-enables-dep="try-runtime:frame-try-runtime" --locked - - zepter lint propagate-feature --feature runtime-benchmarks --left-side-feature-missing=ignore --workspace --feature-enables-dep="runtime-benchmarks:frame-benchmarking" --locked - - zepter lint propagate-feature --feature std --left-side-feature-missing=ignore --workspace --locked - allow_failure: true # Experimental - -test-prometheus-alerting-rules: - stage: check - extends: .kubernetes-env - variables: - CI_IMAGE: "paritytech/tools:latest" - rules: - - if: $CI_PIPELINE_SOURCE == "pipeline" - when: never - - if: $CI_COMMIT_BRANCH - changes: - - .gitlab-ci.yml - - ./scripts/ci/monitoring/**/* - script: - - promtool check rules ./scripts/ci/monitoring/alerting-rules/alerting-rules.yaml - - cat ./scripts/ci/monitoring/alerting-rules/alerting-rules.yaml | - promtool test rules ./scripts/ci/monitoring/alerting-rules/alerting-rule-tests.yaml diff --git a/substrate/scripts/ci/gitlab/pipeline/publish.yml b/substrate/scripts/ci/gitlab/pipeline/publish.yml deleted file mode 100644 index c90af7ba347b..000000000000 --- a/substrate/scripts/ci/gitlab/pipeline/publish.yml +++ /dev/null @@ -1,270 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "publish" stage - -.build-push-docker-image-common: - extends: - - .kubernetes-env - stage: publish - variables: - CI_IMAGE: $BUILDAH_IMAGE - GIT_STRATEGY: none - DOCKERFILE: $PRODUCT.Dockerfile - IMAGE_NAME: docker.io/$IMAGE_PATH - before_script: - - !reference [.kubernetes-env, before_script] - - cd ./artifacts/$PRODUCT/ - - VERSION="$(cat ./VERSION)" - - echo "${PRODUCT} version = ${VERSION}" - - test -z "${VERSION}" && exit 1 - script: - - test "$DOCKER_USER" -a "$DOCKER_PASS" || - ( echo "no docker credentials provided"; exit 1 ) - - $BUILDAH_COMMAND build - --format=docker - --build-arg VCS_REF="${CI_COMMIT_SHA}" - --build-arg BUILD_DATE="$(date -u '+%Y-%m-%dT%H:%M:%SZ')" - --build-arg IMAGE_NAME="${IMAGE_PATH}" - --tag "$IMAGE_NAME:$VERSION" - --tag "$IMAGE_NAME:latest" - --file "$DOCKERFILE" . 
- - echo "$DOCKER_PASS" | - buildah login --username "$DOCKER_USER" --password-stdin docker.io - - $BUILDAH_COMMAND info - - $BUILDAH_COMMAND push --format=v2s2 "$IMAGE_NAME:$VERSION" - - $BUILDAH_COMMAND push --format=v2s2 "$IMAGE_NAME:latest" - after_script: - - buildah logout --all - - echo "SUBSTRATE_IMAGE_NAME=${IMAGE_NAME}" | tee -a ./artifacts/$PRODUCT/build.env - - IMAGE_TAG="$(cat ./artifacts/$PRODUCT/VERSION)" - - echo "SUBSTRATE_IMAGE_TAG=${IMAGE_TAG}" | tee -a ./artifacts/$PRODUCT/build.env - - cat ./artifacts/$PRODUCT/build.env - -.build-push-docker-image: - extends: - - .publish-refs - - .build-push-docker-image-common - variables: - IMAGE_PATH: parity/$PRODUCT - DOCKER_USER: $Docker_Hub_User_Parity - DOCKER_PASS: $Docker_Hub_Pass_Parity - -.push-docker-image-description: - stage: publish - extends: - - .kubernetes-env - variables: - CI_IMAGE: paritytech/dockerhub-description - DOCKERHUB_REPOSITORY: parity/$PRODUCT - DOCKER_USERNAME: $Docker_Hub_User_Parity - DOCKER_PASSWORD: $Docker_Hub_Pass_Parity - README_FILEPATH: $CI_PROJECT_DIR/scripts/ci/docker/$PRODUCT.Dockerfile.README.md - rules: - - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "push" - changes: - - scripts/ci/docker/$PRODUCT.Dockerfile.README.md - before_script: - - echo - script: - - cd / && sh entrypoint.sh - -# publish image to docker.io/paritypr, (e.g. for later use in zombienet testing) -.build-push-image-temporary: - extends: - - .build-refs - - .build-push-docker-image-common - variables: - IMAGE_PATH: paritypr/$PRODUCT - DOCKER_USER: $PARITYPR_USER - DOCKER_PASS: $PARITYPR_PASS - -publish-docker-substrate: - extends: .build-push-docker-image - needs: - - job: build-linux-substrate - artifacts: true - variables: - PRODUCT: substrate - -publish-docker-description-substrate: - extends: .push-docker-image-description - variables: - PRODUCT: substrate - SHORT_DESCRIPTION: "Substrate Docker Image." - -publish-docker-substrate-temporary: - extends: .build-push-image-temporary - needs: - - job: build-linux-substrate - artifacts: true - variables: - PRODUCT: substrate - artifacts: - reports: - # this artifact is used in zombienet-tests job - # https://docs.gitlab.com/ee/ci/multi_project_pipelines.html#with-variable-inheritance - dotenv: ./artifacts/$PRODUCT/build.env - expire_in: 24h - -publish-docker-subkey: - extends: .build-push-docker-image - needs: - - job: build-subkey-linux - artifacts: true - variables: - PRODUCT: subkey - -publish-docker-description-subkey: - extends: .push-docker-image-description - variables: - PRODUCT: subkey - SHORT_DESCRIPTION: "The subkey program is a key management utility for Substrate-based blockchains." 
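As a sketch of how the build.env artifact above is consumed (the consuming job lives outside this file; the variable names come from the after_script of .build-push-docker-image-common): a downstream job that lists publish-docker-substrate-temporary in its needs receives the dotenv keys as ordinary CI variables, so it can do something like:

# hypothetical step in a dependent zombienet job
docker pull "${SUBSTRATE_IMAGE_NAME}:${SUBSTRATE_IMAGE_TAG}"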
- -publish-s3-release: - stage: publish - extends: - - .publish-refs - - .kubernetes-env - needs: - - job: build-linux-substrate - artifacts: true - - job: build-subkey-linux - artifacts: true - image: paritytech/awscli:latest - variables: - GIT_STRATEGY: none - BUCKET: "releases.parity.io" - PREFIX: "substrate/${ARCH}-${DOCKER_OS}" - script: - - aws s3 sync ./artifacts/ s3://${BUCKET}/${PREFIX}/$(cat ./artifacts/substrate/VERSION)/ - - echo "update objects in latest path" - - aws s3 sync s3://${BUCKET}/${PREFIX}/$(cat ./artifacts/substrate/VERSION)/ s3://${BUCKET}/${PREFIX}/latest/ - after_script: - - aws s3 ls s3://${BUCKET}/${PREFIX}/latest/ - --recursive --human-readable --summarize - -publish-rustdoc: - stage: publish - extends: .kubernetes-env - variables: - CI_IMAGE: node:16 - GIT_DEPTH: 100 - RUSTDOCS_DEPLOY_REFS: "master" - rules: - - if: $CI_PIPELINE_SOURCE == "pipeline" - when: never - - if: $CI_PIPELINE_SOURCE == "web" && $CI_COMMIT_REF_NAME == "master" - - if: $CI_COMMIT_REF_NAME == "master" - - if: $CI_COMMIT_REF_NAME =~ /^monthly-20[0-9]{2}-[0-9]{2}.*$/ # to support: monthly-2021-09+1 - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 - # `needs:` can be removed after CI image gets nonroot. In this case `needs:` stops other - # artifacts from being dowloaded by this job. - needs: - - job: build-rustdoc - artifacts: true - script: - # If $CI_COMMIT_REF_NAME doesn't match one of $RUSTDOCS_DEPLOY_REFS space-separated values, we - # exit immediately. - # Putting spaces at the front and back to ensure we are not matching just any substring, but the - # whole space-separated value. - - '[[ " ${RUSTDOCS_DEPLOY_REFS} " =~ " ${CI_COMMIT_REF_NAME} " ]] || exit 0' - # setup ssh - - eval $(ssh-agent) - - ssh-add - <<< ${GITHUB_SSH_PRIV_KEY} - - mkdir ~/.ssh && touch ~/.ssh/known_hosts - - ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts - # Set git config - - git config user.email "devops-team@parity.io" - - git config user.name "${GITHUB_USER}" - - git config remote.origin.url "git@github.com:/paritytech/${CI_PROJECT_NAME}.git" - - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" - - git fetch origin gh-pages - # Save README and docs - - cp -r ./crate-docs/ /tmp/doc/ - - cp README.md /tmp/doc/ - # we don't need to commit changes because we copy docs to /tmp - - git checkout gh-pages --force - # Install `index-tpl-crud` and generate index.html based on RUSTDOCS_DEPLOY_REFS - - which index-tpl-crud &> /dev/null || yarn global add @substrate/index-tpl-crud - - index-tpl-crud upsert ./index.html ${CI_COMMIT_REF_NAME} - # Ensure the destination dir doesn't exist. - - rm -rf ${CI_COMMIT_REF_NAME} - - mv -f /tmp/doc ${CI_COMMIT_REF_NAME} - # Upload files - - git add --all - # `git commit` has an exit code of > 0 if there is nothing to commit. - # This causes GitLab to exit immediately and marks this job failed. - # We don't want to mark the entire job failed if there's nothing to - # publish though, hence the `|| true`. - - git commit -m "___Updated docs for ${CI_COMMIT_REF_NAME}___" || - echo "___Nothing to commit___" - - git push origin gh-pages --force - after_script: - - rm -rf .git/ ./* - -publish-draft-release: - stage: publish - image: paritytech/tools:latest - rules: - - if: $CI_COMMIT_REF_NAME =~ /^ci-release-.*$/ - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. 
v1.0, v2.1rc1 - script: - - ./scripts/ci/gitlab/publish_draft_release.sh - allow_failure: true - -.publish-crates-template: - stage: publish - extends: - - .crates-publishing-template - - .crates-publishing-pipeline - # We don't want multiple jobs racing to publish crates as it's redundant and they might overwrite - # the releases of one another. Use resource_group to ensure that at most one instance of this job - # is running at any given time. - resource_group: crates-publishing - # crates.io currently rate limits crate publishing at 1 per minute: - # https://github.com/paritytech/release-engineering/issues/123#issuecomment-1335509748 - # Taking into account the 202 (as of Dec 07, 2022) publishable Substrate crates, in the worst - # case, due to the rate limits alone, we'd have to wait through at least 202 minutes of delay. - # Taking into account also the verification steps and extra synchronization delays after - # publishing the crate, the job needs to have a much higher timeout than average. - timeout: 9h - # A custom publishing environment is used for us to be able to set up protected secrets - # specifically for it - environment: publish-crates - script: - - rusty-cachier snapshot create - - git clone - --depth 1 - --branch "$RELENG_SCRIPTS_BRANCH" - https://github.com/paritytech/releng-scripts.git - - CRATESIO_TARGET_INSTANCE=default ./releng-scripts/publish-crates - - rusty-cachier cache upload - -publish-crates: - extends: .publish-crates-template - # publish-crates should only be run if publish-crates-locally passes - needs: - - job: check-crate-publishing - artifacts: false - -publish-crates-manual: - extends: .publish-crates-template - when: manual - interruptible: false - -check-crate-publishing: - stage: publish - extends: - - .crates-publishing-template - - .crates-publishing-pipeline - # When lots of crates are taken into account (for example on master where all crates are tested) - # the job might take a long time, as evidenced by: - # https://gitlab.parity.io/parity/mirrors/substrate/-/jobs/2269364 - timeout: 4h - script: - - rusty-cachier snapshot create - - git clone - --depth 1 - --branch "$RELENG_SCRIPTS_BRANCH" - https://github.com/paritytech/releng-scripts.git - - CRATESIO_TARGET_INSTANCE=local ./releng-scripts/publish-crates - - rusty-cachier cache upload diff --git a/substrate/scripts/ci/gitlab/pipeline/test.yml b/substrate/scripts/ci/gitlab/pipeline/test.yml deleted file mode 100644 index 9c057e8d9158..000000000000 --- a/substrate/scripts/ci/gitlab/pipeline/test.yml +++ /dev/null @@ -1,494 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "test" stage - -# It's more like a check and it belongs to the previous stage, but we want to run this job with real tests in parallel -find-fail-ci-phrase: - stage: test - variables: - CI_IMAGE: "paritytech/tools:latest" - ASSERT_REGEX: "FAIL-CI" - GIT_DEPTH: 1 - extends: - - .kubernetes-env - script: - - set +e - - rg --line-number --hidden --type rust --glob '!{.git,target}' "$ASSERT_REGEX" .; exit_status=$? 
- - if [ $exit_status -eq 0 ]; then - echo "$ASSERT_REGEX was found, exiting with 1"; - exit 1; - else - echo "No $ASSERT_REGEX was found, exiting with 0"; - exit 0; - fi - -cargo-deny-licenses: - stage: test - extends: - - .docker-env - - .test-refs - variables: - CARGO_DENY_CMD: "cargo deny --all-features check licenses -c ./scripts/ci/deny.toml" - script: - - rusty-cachier snapshot create - - $CARGO_DENY_CMD --hide-inclusion-graph - - rusty-cachier cache upload - after_script: - - !reference [.rusty-cachier, after_script] - - echo "___The complete log is in the artifacts___" - - $CARGO_DENY_CMD 2> deny.log - - if [ $CI_JOB_STATUS != 'success' ]; then - echo 'Please check license of your crate or add an exception to scripts/ci/deny.toml'; - fi - artifacts: - name: $CI_COMMIT_SHORT_SHA - expire_in: 3 days - when: always - paths: - - deny.log - -cargo-fmt: - stage: test - variables: - RUSTY_CACHIER_TOOLCHAIN: nightly - extends: - - .docker-env - - .test-refs - script: - - rusty-cachier snapshot create - - cargo +nightly fmt --all -- --check - - rusty-cachier cache upload - -cargo-fmt-manifest: - stage: test - extends: - - .docker-env - - .test-refs - script: - - cargo install zepter --locked --version 0.11.1 -q -f --no-default-features && zepter --version - - echo "👉 Hello developer! If you see this CI check failing then it means that one of the your changes in a Cargo.toml file introduced ill-formatted or unsorted features. Please take a look at 'docs/STYLE_GUIDE.md#manifest-formatting' to find out more." - - zepter format features --check - allow_failure: true # Experimental - -cargo-clippy: - stage: test - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: cargo-fmt - artifacts: false - extends: - - .docker-env - - .test-refs - script: - - echo $RUSTFLAGS - - cargo version && cargo clippy --version - - rusty-cachier snapshot create - - SKIP_WASM_BUILD=1 env -u RUSTFLAGS cargo clippy --locked --all-targets --workspace - - rusty-cachier cache upload - -cargo-check-benches: - stage: test - variables: - CI_JOB_NAME: "cargo-check-benches" - extends: - - .docker-env - - .test-refs-check-benches - - .collect-artifacts - - .pipeline-stopper-artifacts - before_script: - - !reference [.timestamp, before_script] - # perform rusty-cachier operations before any further modifications to the git repo to make cargo feel cheated not so much - - !reference [.rust-info-script, script] - - !reference [.job-switcher, before_script] - - !reference [.rusty-cachier, before_script] - - !reference [.pipeline-stopper-vars, script] - # merges in the master branch on PRs. 
skip if base is not master - - 'if [ $CI_COMMIT_REF_NAME != "master" ]; then - BASE=$(curl -s -H "Authorization: Bearer ${GITHUB_PR_TOKEN}" https://api.github.com/repos/paritytech/substrate/pulls/${CI_COMMIT_REF_NAME} | jq -r .base.ref); - printf "Merging base branch %s\n" "${BASE:=master}"; - if [ $BASE != "master" ]; then - echo "$BASE is not master, skipping merge"; - else - git config user.email "ci@gitlab.parity.io"; - git fetch origin "refs/heads/${BASE}"; - git merge --verbose --no-edit FETCH_HEAD; - fi - fi' - parallel: 2 - script: - - rusty-cachier snapshot create - - mkdir -p ./artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA - # this job is executed in parallel on two runners - - echo "___Running benchmarks___"; - - case ${CI_NODE_INDEX} in - 1) - SKIP_WASM_BUILD=1 time cargo check --locked --benches --all; - cargo run --locked --release -p node-bench -- ::trie::read::small --json - | tee ./artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA/::trie::read::small.json; - echo "___Uploading cache for rusty-cachier___"; - rusty-cachier cache upload - ;; - 2) - cargo run --locked --release -p node-bench -- ::node::import::sr25519::transfer_keep_alive::paritydb::small --json - | tee ./artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA/::node::import::sr25519::transfer_keep_alive::paritydb::small.json - ;; - esac - -node-bench-regression-guard: - # it does not belong to `build` semantically, but DAG jobs can't depend on each other - # within the single stage - https://gitlab.com/gitlab-org/gitlab/-/issues/30632 - # more: https://github.com/paritytech/substrate/pull/8519#discussion_r608012402 - stage: build - extends: - - .docker-env - - .test-refs-no-trigger-prs-only - needs: - # this is a DAG - - job: cargo-check-benches - artifacts: true - # polls artifact from master to compare with current result - # need to specify both parallel jobs from master because of the bug - # https://gitlab.com/gitlab-org/gitlab/-/issues/39063 - - project: $CI_PROJECT_PATH - job: "cargo-check-benches 1/2" - ref: master - artifacts: true - - project: $CI_PROJECT_PATH - job: "cargo-check-benches 2/2" - ref: master - artifacts: true - variables: - CI_IMAGE: "paritytech/node-bench-regression-guard:latest" - before_script: - - !reference [.timestamp, before_script] - script: - - echo "------- IMPORTANT -------" - - echo "node-bench-regression-guard depends on the results of a cargo-check-benches job" - - echo "In case of this job failure, check your pipeline's cargo-check-benches" - - "node-bench-regression-guard --reference artifacts/benches/master-* - --compare-with artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA" - after_script: [""] - -cargo-check-try-runtime-and-experimental: - stage: test - extends: - - .docker-env - - .test-refs - script: - - rusty-cachier snapshot create - - time cargo check --workspace --locked --features try-runtime,experimental - - rusty-cachier cache upload - -test-deterministic-wasm: - stage: test - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: cargo-check-try-runtime-and-experimental - artifacts: false - extends: - - .docker-env - - .test-refs - variables: - WASM_BUILD_NO_COLOR: 1 - # this variable gets overridden by "rusty-cachier environment inject", use the value as default - CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" - script: - - rusty-cachier snapshot create - # build runtime - - cargo build --locked --verbose --release -p kitchensink-runtime - # make checksum - - sha256sum
$CARGO_TARGET_DIR/release/wbuild/kitchensink-runtime/target/wasm32-unknown-unknown/release/kitchensink_runtime.wasm > checksum.sha256 - # clean up - - rm -rf $CARGO_TARGET_DIR/release/wbuild - # build again - - cargo build --locked --verbose --release -p kitchensink-runtime - # confirm checksum - - sha256sum -c ./checksum.sha256 - # clean up again, don't put release binaries into the cache - - rm -rf $CARGO_TARGET_DIR/release/wbuild - - rusty-cachier cache upload - -test-linux-stable: - stage: test - extends: - - .docker-env - - .test-refs - - .pipeline-stopper-artifacts - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-C debug-assertions -D warnings" - RUST_BACKTRACE: 1 - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-C debug-assertions -D warnings" - # Ensure we run the UI tests. - RUN_UI_TESTS: 1 - # needed for rusty-cachier to keep cache in test-linux-stable folder and not in test-linux-stable-1/3 - CI_JOB_NAME: "test-linux-stable" - parallel: 3 - script: - - rusty-cachier snapshot create - # this job runs all tests in former runtime-benchmarks, frame-staking and wasmtime tests - # tests are partitioned by nextest and executed in parallel on $CI_NODE_TOTAL runners - - echo "Node index - ${CI_NODE_INDEX}. Total amount - ${CI_NODE_TOTAL}" - - time cargo nextest run --workspace - --locked - --release - --verbose - --features runtime-benchmarks,try-runtime,experimental - --manifest-path ./bin/node/cli/Cargo.toml - --partition count:${CI_NODE_INDEX}/${CI_NODE_TOTAL} - # run runtime-api tests with `enable-staging-api` feature - - time cargo nextest run -p sp-api-test --features enable-staging-api - # we need to update cache only from one job - - if [ ${CI_NODE_INDEX} == 1 ]; then rusty-cachier cache upload; fi - # Upload tests results to Elasticsearch - - echo "Upload test results to Elasticsearch" - - cat target/nextest/default/junit.xml | xq . > target/nextest/default/junit.json - - | - curl -v -XPOST --http1.1 \ - -u ${ELASTIC_USERNAME}:${ELASTIC_PASSWORD} \ - https://elasticsearch.parity-build.parity.io/unit-tests/_doc/${CI_JOB_ID} \ - -H 'Content-Type: application/json' \ - -d @target/nextest/default/junit.json || echo "failed to upload junit report" - artifacts: - when: always - paths: - - target/nextest/default/junit.xml - reports: - junit: target/nextest/default/junit.xml - -test-frame-support: - stage: test - extends: - - .docker-env - - .test-refs - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-C debug-assertions -D warnings" - RUST_BACKTRACE: 1 - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-C debug-assertions -D warnings" - # Ensure we run the UI tests. 
- RUN_UI_TESTS: 1 - script: - - rusty-cachier snapshot create - - cat /cargo_target_dir/debug/.fingerprint/memory_units-759eddf317490d2b/lib-memory_units.json || true - - time cargo test --verbose --locked -p frame-support-test --features=frame-feature-testing,no-metadata-docs,try-runtime,experimental --manifest-path ./frame/support/test/Cargo.toml - - time cargo test --verbose --locked -p frame-support-test --features=frame-feature-testing,frame-feature-testing-2,no-metadata-docs,try-runtime,experimental --manifest-path ./frame/support/test/Cargo.toml - - SUBSTRATE_TEST_TIMEOUT=1 time cargo test -p substrate-test-utils --release --verbose --locked -- --ignored timeout - - cat /cargo_target_dir/debug/.fingerprint/memory_units-759eddf317490d2b/lib-memory_units.json || true - - rusty-cachier cache upload - -# This job runs tests that don't work with cargo-nextest in test-linux-stable -test-linux-stable-extra: - stage: test - extends: - - .docker-env - - .test-refs - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-C debug-assertions -D warnings" - RUST_BACKTRACE: 1 - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-C debug-assertions -D warnings" - # Ensure we run the UI tests. - RUN_UI_TESTS: 1 - script: - - rusty-cachier snapshot create - # Run node-cli tests - # TODO: add to test-linux-stable-nextest after fix https://github.com/paritytech/substrate/issues/11321 - - time cargo test node-cli --workspace --locked --release --verbose --features runtime-benchmarks --manifest-path ./bin/node/cli/Cargo.toml - # Run doctests - # TODO: add to test-linux-stable-nextest after fix https://github.com/nextest-rs/nextest/issues/16 - - time cargo test --doc --workspace --locked --release --verbose --features runtime-benchmarks --manifest-path ./bin/node/cli/Cargo.toml - - rusty-cachier cache upload - -# This job runs all benchmarks defined in the `/bin/node/runtime` once to check that there are no errors. -quick-benchmarks: - stage: test - extends: - - .docker-env - - .test-refs - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-C debug-assertions -D warnings" - RUST_BACKTRACE: "full" - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-C debug-assertions -D warnings" - script: - - rusty-cachier snapshot create - - time cargo run --locked --release -p node-cli --features runtime-benchmarks -- benchmark pallet --wasm-execution compiled --chain dev --pallet "*" --extrinsic "*" --steps 2 --repeat 1 - - rusty-cachier cache upload - -test-frame-examples-compile-to-wasm: - # into one job - stage: test - extends: - - .docker-env - - .test-refs - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-C debug-assertions" - RUST_BACKTRACE: 1 - script: - - rusty-cachier snapshot create - - cd ./frame/examples/offchain-worker/ - - cargo build --locked --target=wasm32-unknown-unknown --no-default-features - - cd ../basic - - cargo build --locked --target=wasm32-unknown-unknown --no-default-features - - rusty-cachier cache upload - -test-linux-stable-int: - stage: test - extends: - - .docker-env - - .test-refs - - .pipeline-stopper-artifacts - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. 
- RUSTFLAGS: "-C debug-assertions -D warnings" - RUST_BACKTRACE: 1 - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-C debug-assertions -D warnings" - # Ensure we run the UI tests. - RUN_UI_TESTS: 1 - script: - - rusty-cachier snapshot create - - WASM_BUILD_NO_COLOR=1 - RUST_LOG=sync=trace,consensus=trace,client=trace,state-db=trace,db=trace,forks=trace,state_db=trace,storage_cache=trace - time cargo test -p node-cli --release --verbose --locked -- --ignored - - rusty-cachier cache upload - -# more information about this job can be found here: -# https://github.com/paritytech/substrate/pull/6916 -check-tracing: - stage: test - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: test-linux-stable-int - artifacts: false - extends: - - .docker-env - - .test-refs - - .pipeline-stopper-artifacts - script: - - rusty-cachier snapshot create - # with-tracing must be explicitly activated, we run a test to ensure this works as expected in both cases - - time cargo test --locked --manifest-path ./primitives/tracing/Cargo.toml --no-default-features - - time cargo test --locked --manifest-path ./primitives/tracing/Cargo.toml --no-default-features --features=with-tracing - - rusty-cachier cache upload - -# more information about this job can be found here: -# https://github.com/paritytech/substrate/pull/3778 -test-full-crypto-feature: - stage: test - # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs - needs: - - job: check-tracing - artifacts: false - extends: - - .docker-env - - .test-refs - variables: - # Enable debug assertions since we are running optimized builds for testing - # but still want to have debug assertions. - RUSTFLAGS: "-C debug-assertions" - RUST_BACKTRACE: 1 - script: - - rusty-cachier snapshot create - - cd primitives/core/ - - time cargo build --locked --verbose --no-default-features --features full_crypto - - cd ../application-crypto - - time cargo build --locked --verbose --no-default-features --features full_crypto - - rusty-cachier cache upload - -check-rustdoc: - stage: test - extends: - - .docker-env - - .test-refs - variables: - SKIP_WASM_BUILD: 1 - RUSTDOCFLAGS: "-Dwarnings" - script: - - rusty-cachier snapshot create - - time cargo doc --locked --workspace --all-features --verbose --no-deps - - rusty-cachier cache upload - -cargo-check-each-crate: - stage: test - extends: - - .docker-env - - .test-refs - - .collect-artifacts - - .pipeline-stopper-artifacts - variables: - # $CI_JOB_NAME is set manually so that rusty-cachier can share the cache for all - # "cargo-check-each-crate I/N" jobs - CI_JOB_NAME: cargo-check-each-crate - script: - - rusty-cachier snapshot create - - PYTHONUNBUFFERED=x time ./scripts/ci/gitlab/check-each-crate.py "$CI_NODE_INDEX" "$CI_NODE_TOTAL" - # need to update cache only from one job - - if [ "$CI_NODE_INDEX" == 1 ]; then rusty-cachier cache upload; fi - parallel: 2 - -cargo-check-each-crate-macos: - stage: test - extends: - - .test-refs - - .collect-artifacts - - .pipeline-stopper-artifacts - before_script: - # skip timestamp script, the osx bash doesn't support printf %()T - - !reference [.job-switcher, before_script] - - !reference [.rust-info-script, script] - - !reference [.pipeline-stopper-vars, script] - variables: - SKIP_WASM_BUILD: 1 - script: - # TODO: enable rusty-cachier once it supports Mac - # TODO: use parallel jobs, as per cargo-check-each-crate, once more Mac runners are available - # - time ./scripts/ci/gitlab/check-each-crate.py 1 1 - - 
time cargo check --workspace --locked - tags: - - osx - -cargo-hfuzz: - stage: test - extends: - - .docker-env - - .test-refs - - .pipeline-stopper-artifacts - variables: - # max 10s per iteration, 60s per file - HFUZZ_RUN_ARGS: > - --exit_upon_crash - --exit_code_upon_crash 1 - --timeout 10 - --run_time 60 - # use git version of honggfuzz-rs until v0.5.56 is out, we need a few recent changes: - # https://github.com/rust-fuzz/honggfuzz-rs/pull/75 to avoid breakage on debian - # https://github.com/rust-fuzz/honggfuzz-rs/pull/81 fix to the above pr - # https://github.com/rust-fuzz/honggfuzz-rs/pull/82 fix for handling rusty-cachier's absolute CARGO_TARGET_DIR - HFUZZ_BUILD_ARGS: > - --config=patch.crates-io.honggfuzz.git="https://github.com/altaua/honggfuzz-rs" - --config=patch.crates-io.honggfuzz.rev="205f7c8c059a0d98fe1cb912cdac84f324cb6981" - artifacts: - name: "hfuzz-$CI_COMMIT_SHORT_SHA" - expire_in: 7 days - when: on_failure - paths: - - primitives/arithmetic/fuzzer/hfuzz_workspace/ - script: - - cd ./primitives/arithmetic/fuzzer - - rusty-cachier snapshot create - - cargo hfuzz build - - rusty-cachier cache upload - - for target in $(cargo read-manifest | jq -r '.targets | .[] | .name'); do - cargo hfuzz run "$target" || { printf "fuzzing failure for %s\n" "$target"; exit 1; }; done diff --git a/substrate/scripts/ci/gitlab/pipeline/zombienet.yml b/substrate/scripts/ci/gitlab/pipeline/zombienet.yml deleted file mode 100644 index 31ee51034327..000000000000 --- a/substrate/scripts/ci/gitlab/pipeline/zombienet.yml +++ /dev/null @@ -1,67 +0,0 @@ -# This file is part of .gitlab-ci.yml -# Here are all jobs that are executed during "zombienet" stage - -# common settings for all zombienet jobs -.zombienet-common: - before_script: - - echo "Zombie-net Tests Config" - - echo "${ZOMBIENET_IMAGE}" - - echo "${SUBSTRATE_IMAGE_NAME} ${SUBSTRATE_IMAGE_TAG}" - - echo "${GH_DIR}" - - export DEBUG=zombie,zombie::network-node - - export ZOMBIENET_INTEGRATION_TEST_IMAGE=${SUBSTRATE_IMAGE_NAME}:${SUBSTRATE_IMAGE_TAG} - - echo "${ZOMBIENET_INTEGRATION_TEST_IMAGE}" - stage: zombienet - image: "${ZOMBIENET_IMAGE}" - needs: - - job: publish-docker-substrate-temporary - extends: - - .kubernetes-env - - .zombienet-refs - variables: - GH_DIR: "https://github.com/paritytech/substrate/tree/${CI_COMMIT_SHA}/zombienet" - FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1 - artifacts: - name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" - when: always - expire_in: 2 days - paths: - - ./zombienet-logs - after_script: - - mkdir -p ./zombienet-logs - - cp /tmp/zombie*/logs/* ./zombienet-logs/ - retry: 2 - tags: - - zombienet-polkadot-integration-test - -zombienet-0000-block-building: - extends: - - .zombienet-common - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}/0000-block-building" - --test="block-building.zndsl" - -zombienet-0001-basic-warp-sync: - extends: - - .zombienet-common - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}/0001-basic-warp-sync" - --test="test-warp-sync.zndsl" - -zombienet-0002-validators-warp-sync: - extends: - - .zombienet-common - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}/0002-validators-warp-sync" - --test="test-validators-warp-sync.zndsl" - -zombienet-0003-block-building-warp-sync: - extends: - - .zombienet-common - script: - - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - 
--github-remote-dir="${GH_DIR}/0003-block-building-warp-sync" - --test="test-block-building-warp-sync.zndsl" diff --git a/substrate/scripts/ci/gitlab/prettier.sh b/substrate/scripts/ci/gitlab/prettier.sh deleted file mode 100755 index 299bbee179dc..000000000000 --- a/substrate/scripts/ci/gitlab/prettier.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -# meant to be installed via -# git config filter.ci-prettier.clean "scripts/ci/gitlab/prettier.sh" - -prettier --parser yaml diff --git a/substrate/scripts/ci/gitlab/publish_draft_release.sh b/substrate/scripts/ci/gitlab/publish_draft_release.sh deleted file mode 100755 index 88d1de0e04fe..000000000000 --- a/substrate/scripts/ci/gitlab/publish_draft_release.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash - -# shellcheck source=../common/lib.sh -source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )/../common/lib.sh" - -version="$CI_COMMIT_TAG" - -# Note that this is not the last *tagged* version, but the last *published* version -last_version=$(last_github_release 'paritytech/substrate') - -release_text="$(./generate_release_text.sh "$last_version" "$version")" - -echo "[+] Pushing release to github" -# Create release on github -release_name="Substrate $version" -data=$(jq -Rs --arg version "$version" \ - --arg release_name "$release_name" \ - --arg release_text "$release_text" \ -'{ - "tag_name": $version, - "target_commitish": "master", - "name": $release_name, - "body": $release_text, - "draft": true, - "prerelease": false -}' < /dev/null) - -out=$(curl -s -X POST --data "$data" -H "Authorization: token $GITHUB_RELEASE_TOKEN" "$api_base/paritytech/substrate/releases") - -html_url=$(echo "$out" | jq -r .html_url) - -if [ "$html_url" == "null" ] -then - echo "[!] Something went wrong posting:" - echo "$out" -else - echo "[+] Release draft created: $html_url" -fi - -echo '[+] Sending draft release URL to Matrix' - -msg_body=$(cat <<EOF -Release pipeline for Substrate $version complete.
-Draft release created: $html_url -EOF -) -send_message "$(structure_message "$msg_body" "$formatted_msg_body")" "!aJymqQYtCjjqImFLSb:parity.io" "$RELEASENOTES_MATRIX_V2_ACCESS_TOKEN" - -echo "[+] Done! Maybe the release worked..." From 5ada6fa5efd55c8711eaaaa36f10c1a1e9833eb7 Mon Sep 17 00:00:00 2001 From: alvicsam Date: Thu, 31 Aug 2023 09:33:31 +0200 Subject: [PATCH 2/2] add check-runtime-migration jobs for cancel --- .gitlab-ci.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 0e7e9956a56b..2e0465ba1eb1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -378,3 +378,13 @@ cancel-pipeline-build-short-benchmark: extends: .cancel-pipeline-template needs: - job: build-short-benchmark + +cancel-pipeline-check-runtime-migration-rococo: + extends: .cancel-pipeline-template + needs: + - job: check-runtime-migration-rococo + +cancel-pipeline-check-runtime-migration-westend: + extends: .cancel-pipeline-template + needs: + - job: check-runtime-migration-westend
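A note on the shape of the jobs added in this patch series: each cancel-pipeline-* job only does two things, extends: .cancel-pipeline-template and a needs: entry naming the job whose failure should stop the rest of the pipeline. The template itself is defined earlier in .gitlab-ci.yml and is not part of these hunks, so the snippet below is only a rough sketch of what such a template can look like, not the definition used by this repository. It assumes a runner image with curl available and a PIPELINE_CANCEL_TOKEN CI variable allowed to cancel pipelines; both of those names, and the use of the REST cancel endpoint, are assumptions made for illustration.

.cancel-pipeline-template:
  stage: .post
  # the concrete cancel-pipeline-* jobs override this with the job they watch
  needs: []
  rules:
    # run only when a needed job has failed
    - when: on_failure
  variables:
    GIT_STRATEGY: none
  script:
    # cancel the current pipeline via the GitLab REST API; this also cancels the job itself
    - curl --silent --request POST
      --header "PRIVATE-TOKEN: ${PIPELINE_CANCEL_TOKEN}"
      "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/cancel"

With a template of this shape, every fast-fail hook stays a three-line job, which is why this patch can add coverage for check-runtime-migration-rococo and check-runtime-migration-westend without touching anything else.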