Commit
Merge branch 'master' of https://github.com/paritytech/polkadot-sdk into HEAD
command-bot committed Jan 12, 2024
2 parents 57660f9 + f7306d3 commit 064b45f
Showing 23 changed files with 209 additions and 240 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/check-publish.yml
@@ -15,7 +15,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1

- name: Rust Cache
uses: Swatinem/rust-cache@3cf7f8cc28d1b4e7d01e3783be10a97d55d483c8 # v2.7.1
uses: Swatinem/rust-cache@a22603398250b864f7190077025cf752307154dc # v2.7.2
with:
cache-on-failure: true

2 changes: 1 addition & 1 deletion .github/workflows/claim-crates.yml
@@ -13,7 +13,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1

- name: Rust Cache
uses: Swatinem/rust-cache@3cf7f8cc28d1b4e7d01e3783be10a97d55d483c8 # v2.7.1
uses: Swatinem/rust-cache@a22603398250b864f7190077025cf752307154dc # v2.7.2
with:
cache-on-failure: true

1 change: 0 additions & 1 deletion .github/workflows/review-trigger.yml
@@ -10,7 +10,6 @@ on:
- review_request_removed
- ready_for_review
pull_request_review:
merge_group:

jobs:
trigger-review-bot:
10 changes: 0 additions & 10 deletions .gitlab-ci.yml
@@ -275,16 +275,6 @@ cancel-pipeline-test-linux-stable3:
needs:
- job: "test-linux-stable 3/3"

cancel-pipeline-test-linux-stable-additional-tests:
extends: .cancel-pipeline-template
needs:
- job: "test-linux-stable-additional-tests"

cancel-pipeline-test-linux-stable-slow:
extends: .cancel-pipeline-template
needs:
- job: "test-linux-stable-slow"

cancel-pipeline-cargo-check-benches1:
extends: .cancel-pipeline-template
needs:
3 changes: 1 addition & 2 deletions .gitlab/pipeline/publish.yml
@@ -71,8 +71,7 @@ publish-rustdoc:
IMAGE_NAME: "" # docker.io/paritypr/image_name
script:
# Dockertag should differ in a merge queue
# TODO: test this
# - if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
- if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; then export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
- $BUILDAH_COMMAND build
--format=docker
--build-arg VCS_REF="${CI_COMMIT_SHA}"
38 changes: 0 additions & 38 deletions .gitlab/pipeline/test.yml
@@ -96,44 +96,6 @@ test-linux-stable-runtime-benchmarks:
# --partition count:${CI_NODE_INDEX}/${CI_NODE_TOTAL}
# # todo: add flacky-test collector

# TODO: remove me
test-linux-stable-additional-tests:
stage: test
extends:
- .docker-env
- .common-refs
- .run-immediately
- .pipeline-stopper-artifacts
variables:
RUST_TOOLCHAIN: stable
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
script:
# tests were moved to test-linux-stable
# the jobs should be removed
- exit 0

# TODO: remove me
test-linux-stable-slow:
stage: test
# remove after cache is setup
timeout: 2h
extends:
- .docker-env
- .common-refs
- .run-immediately
- .pipeline-stopper-artifacts
variables:
RUST_TOOLCHAIN: stable
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
script:
# tests were moved to test-linux-stable
# the jobs should be removed
- exit 0

# takes about 1,5h without cache
# can be used to check that nextest works correctly
# test-linux-stable-polkadot:
8 changes: 6 additions & 2 deletions .gitlab/pipeline/zombienet/cumulus.yml
@@ -5,6 +5,10 @@
before_script:
# Exit if the job is not merge queue
# - if [[ $CI_COMMIT_REF_NAME != *"gh-readonly-queue"* ]]; then echo "I will run only in a merge queue"; exit 0; fi
# Docker images have different tag in merge queues
- if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; then export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
- export POLKADOT_IMAGE="docker.io/paritypr/polkadot-debug:${DOCKER_IMAGES_VERSION}"
- export COL_IMAGE="docker.io/paritypr/test-parachain:${DOCKER_IMAGES_VERSION}"
- echo "Zombie-net Tests Config"
- echo "${ZOMBIENET_IMAGE}"
- echo "${POLKADOT_IMAGE}"
@@ -30,10 +34,10 @@
- job: build-push-image-polkadot-debug
artifacts: true
variables:
POLKADOT_IMAGE: "docker.io/paritypr/polkadot-debug:${DOCKER_IMAGES_VERSION}"
# POLKADOT_IMAGE: "docker.io/paritypr/polkadot-debug:${DOCKER_IMAGES_VERSION}"
GH_DIR: "https://github.com/paritytech/cumulus/tree/${CI_COMMIT_SHORT_SHA}/zombienet/tests"
LOCAL_DIR: "/builds/parity/mirrors/polkadot-sdk/cumulus/zombienet/tests"
COL_IMAGE: "docker.io/paritypr/test-parachain:${DOCKER_IMAGES_VERSION}"
# COL_IMAGE: "docker.io/paritypr/test-parachain:${DOCKER_IMAGES_VERSION}"
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1
RUN_IN_CONTAINER: "1"
artifacts:
14 changes: 13 additions & 1 deletion .gitlab/pipeline/zombienet/polkadot.yml
@@ -6,6 +6,9 @@
before_script:
# Exit if the job is not merge queue
# - if [[ $CI_COMMIT_REF_NAME != *"gh-readonly-queue"* ]]; then echo "I will run only in a merge queue"; exit 0; fi
# Docker images have different tag in merge queues
- if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; then export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
- export PIPELINE_IMAGE_TAG=${DOCKER_IMAGES_VERSION}
- export BUILD_RELEASE_VERSION="$(cat ./artifacts/BUILD_RELEASE_VERSION)" # from build-linux-stable job
- export DEBUG=zombie,zombie::network-node
- export ZOMBIENET_INTEGRATION_TEST_IMAGE="${POLKADOT_IMAGE}":${PIPELINE_IMAGE_TAG}
@@ -46,7 +49,7 @@
- .kubernetes-env
- .zombienet-refs
variables:
PIPELINE_IMAGE_TAG: ${DOCKER_IMAGES_VERSION}
# PIPELINE_IMAGE_TAG: ${DOCKER_IMAGES_VERSION}
POLKADOT_IMAGE: "docker.io/paritypr/polkadot-debug"
COLANDER_IMAGE: "docker.io/paritypr/colander"
MALUS_IMAGE: "docker.io/paritypr/malus"
Expand Down Expand Up @@ -153,6 +156,9 @@ zombienet-polkadot-smoke-0001-parachains-smoke-test:
before_script:
# Exit if the job is not merge queue
# - if [[ $CI_COMMIT_REF_NAME != *"gh-readonly-queue"* ]]; then echo "I will run only in a merge queue"; exit 0; fi
# Docker images have different tag in merge queues
- if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; then export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
- export PIPELINE_IMAGE_TAG=${DOCKER_IMAGES_VERSION}
- export ZOMBIENET_INTEGRATION_TEST_IMAGE="${POLKADOT_IMAGE}":${PIPELINE_IMAGE_TAG}
- export COL_IMAGE="${COLANDER_IMAGE}":${PIPELINE_IMAGE_TAG}
- echo "Zombienet Tests Config"
Expand All @@ -172,6 +178,9 @@ zombienet-polkadot-smoke-0002-parachains-parachains-upgrade-smoke:
before_script:
# Exit if the job is not merge queue
# - if [[ $CI_COMMIT_REF_NAME != *"gh-readonly-queue"* ]]; then echo "I will run only in a merge queue"; exit 0; fi
# Docker images have different tag in merge queues
- if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; then export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
- export PIPELINE_IMAGE_TAG=${DOCKER_IMAGES_VERSION}
- export ZOMBIENET_INTEGRATION_TEST_IMAGE="${POLKADOT_IMAGE}":${PIPELINE_IMAGE_TAG}
- export CUMULUS_IMAGE="docker.io/paritypr/polkadot-parachain-debug:${DOCKER_IMAGES_VERSION}"
- echo "Zombienet Tests Config"
@@ -224,6 +233,9 @@ zombienet-polkadot-misc-0002-upgrade-node:
before_script:
# Exit if the job is not merge queue
# - if [[ $CI_COMMIT_REF_NAME != *"gh-readonly-queue"* ]]; then echo "I will run only in a merge queue"; exit 0; fi
# Docker images have different tag in merge queues
- if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; then export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
- export PIPELINE_IMAGE_TAG=${DOCKER_IMAGES_VERSION}
- export ZOMBIENET_INTEGRATION_TEST_IMAGE="docker.io/parity/polkadot:latest"
- echo "Overrided polkadot image ${ZOMBIENET_INTEGRATION_TEST_IMAGE}"
- export COL_IMAGE="${COLANDER_IMAGE}":${PIPELINE_IMAGE_TAG}
5 changes: 4 additions & 1 deletion .gitlab/pipeline/zombienet/substrate.yml
@@ -6,6 +6,9 @@
before_script:
# Exit if the job is not merge queue
# - if [[ $CI_COMMIT_REF_NAME != *"gh-readonly-queue"* ]]; then echo "I will run only in a merge queue"; exit 0; fi
# Docker images have different tag in merge queues
- if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; then export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
- export SUBSTRATE_IMAGE_TAG=${DOCKER_IMAGES_VERSION}
- echo "Zombienet Tests Config"
- echo "${ZOMBIENET_IMAGE}"
- echo "${GH_DIR}"
@@ -21,7 +24,7 @@
- .kubernetes-env
- .zombienet-refs
variables:
SUBSTRATE_IMAGE_TAG: ${DOCKER_IMAGES_VERSION}
# SUBSTRATE_IMAGE_TAG: ${DOCKER_IMAGES_VERSION}
SUBSTRATE_IMAGE: "docker.io/paritypr/substrate"
GH_DIR: "https://github.com/paritytech/substrate/tree/${CI_COMMIT_SHA}/zombienet"
LOCAL_DIR: "/builds/parity/mirrors/polkadot-sdk/substrate/zombienet"
4 changes: 2 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

36 changes: 23 additions & 13 deletions cumulus/client/cli/src/lib.rs
@@ -127,6 +127,27 @@ impl sc_cli::CliConfiguration for PurgeChainCmd {
}
}

/// Get the SCALE encoded genesis header of the parachain.
pub fn get_raw_genesis_header<B, C>(client: Arc<C>) -> sc_cli::Result<Vec<u8>>
where
B: BlockT,
C: HeaderBackend<B> + 'static,
{
let genesis_hash =
client
.hash(Zero::zero())?
.ok_or(sc_cli::Error::Client(sp_blockchain::Error::Backend(
"Failed to lookup genesis block hash when exporting genesis head data.".into(),
)))?;
let genesis_header = client.header(genesis_hash)?.ok_or(sc_cli::Error::Client(
sp_blockchain::Error::Backend(
"Failed to lookup genesis header by hash when exporting genesis head data.".into(),
),
))?;

Ok(genesis_header.encode())
}

/// Command for exporting the genesis head data of the parachain
#[derive(Debug, clap::Parser)]
pub struct ExportGenesisHeadCommand {
@@ -150,22 +171,11 @@ impl ExportGenesisHeadCommand {
B: BlockT,
C: HeaderBackend<B> + 'static,
{
let genesis_hash = client.hash(Zero::zero())?.ok_or(sc_cli::Error::Client(
sp_blockchain::Error::Backend(
"Failed to lookup genesis block hash when exporting genesis head data.".into(),
),
))?;
let genesis_header = client.header(genesis_hash)?.ok_or(sc_cli::Error::Client(
sp_blockchain::Error::Backend(
"Failed to lookup genesis header by hash when exporting genesis head data.".into(),
),
))?;

let raw_header = genesis_header.encode();
let raw_header = get_raw_genesis_header(client)?;
let output_buf = if self.raw {
raw_header
} else {
format!("0x{:?}", HexDisplay::from(&genesis_header.encode())).into_bytes()
format!("0x{:?}", HexDisplay::from(&raw_header)).into_bytes()
};

if let Some(output) = &self.output {
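
The lib.rs hunk above factors the genesis-header lookup out of ExportGenesisHeadCommand::run into a reusable, public get_raw_genesis_header helper. A minimal sketch of how a downstream caller might reuse it is shown below; the cumulus_client_cli crate path and the print_genesis_head wrapper are illustrative assumptions, not part of this commit.

use std::sync::Arc;

use sp_blockchain::HeaderBackend;
use sp_core::hexdisplay::HexDisplay;
use sp_runtime::traits::Block as BlockT;

// Sketch: reuse the new helper instead of duplicating the two genesis lookups.
fn print_genesis_head<B, C>(client: Arc<C>) -> sc_cli::Result<()>
where
	B: BlockT,
	C: HeaderBackend<B> + 'static,
{
	// SCALE-encoded genesis header, as returned by the helper added in this commit.
	let raw_header = cumulus_client_cli::get_raw_genesis_header::<B, C>(client)?;
	// Same hex formatting as the non-raw branch of ExportGenesisHeadCommand::run.
	println!("0x{:?}", HexDisplay::from(&raw_header));
	Ok(())
}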
32 changes: 16 additions & 16 deletions cumulus/polkadot-parachain/src/command.rs
@@ -60,29 +60,29 @@ enum Runtime {
}

trait RuntimeResolver {
fn runtime(&self) -> Runtime;
fn runtime(&self) -> Result<Runtime>;
}

impl RuntimeResolver for dyn ChainSpec {
fn runtime(&self) -> Runtime {
runtime(self.id())
fn runtime(&self) -> Result<Runtime> {
Ok(runtime(self.id()))
}
}

/// Implementation, that can resolve [`Runtime`] from any json configuration file
impl RuntimeResolver for PathBuf {
fn runtime(&self) -> Runtime {
fn runtime(&self) -> Result<Runtime> {
#[derive(Debug, serde::Deserialize)]
struct EmptyChainSpecWithId {
id: String,
}

let file = std::fs::File::open(self).expect("Failed to open file");
let file = std::fs::File::open(self)?;
let reader = std::io::BufReader::new(file);
let chain_spec: EmptyChainSpecWithId = serde_json::from_reader(reader)
.expect("Failed to read 'json' file with ChainSpec configuration");
let chain_spec: EmptyChainSpecWithId =
serde_json::from_reader(reader).map_err(|e| sc_cli::Error::Application(Box::new(e)))?;

runtime(&chain_spec.id)
Ok(runtime(&chain_spec.id))
}
}

@@ -394,7 +394,7 @@ impl SubstrateCli for RelayChainCli {
/// Creates partial components for the runtimes that are supported by the benchmarks.
macro_rules! construct_partials {
($config:expr, |$partials:ident| $code:expr) => {
match $config.chain_spec.runtime() {
match $config.chain_spec.runtime()? {
Runtime::AssetHubPolkadot => {
let $partials = new_partial::<AssetHubPolkadotRuntimeApi, _>(
&$config,
@@ -444,7 +444,7 @@ macro_rules! construct_partials {
macro_rules! construct_async_run {
(|$components:ident, $cli:ident, $cmd:ident, $config:ident| $( $code:tt )* ) => {{
let runner = $cli.create_runner($cmd)?;
match runner.config().chain_spec.runtime() {
match runner.config().chain_spec.runtime()? {
Runtime::AssetHubPolkadot => {
runner.async_run(|$config| {
let $components = new_partial::<AssetHubPolkadotRuntimeApi, _>(
@@ -686,7 +686,7 @@ pub fn run() -> Result<()> {
info!("Parachain Account: {}", parachain_account);
info!("Is collating: {}", if config.role.is_authority() { "yes" } else { "no" });

match config.chain_spec.runtime() {
match config.chain_spec.runtime()? {
AssetHubPolkadot => crate::service::start_asset_hub_node::<
AssetHubPolkadotRuntimeApi,
AssetHubPolkadotAuraId,
@@ -1032,30 +1032,30 @@ mod tests {
&temp_dir,
Box::new(create_default_with_extensions("shell-1", Extensions1::default())),
);
assert_eq!(Runtime::Shell, path.runtime());
assert_eq!(Runtime::Shell, path.runtime().unwrap());

let path = store_configuration(
&temp_dir,
Box::new(create_default_with_extensions("shell-2", Extensions2::default())),
);
assert_eq!(Runtime::Shell, path.runtime());
assert_eq!(Runtime::Shell, path.runtime().unwrap());

let path = store_configuration(
&temp_dir,
Box::new(create_default_with_extensions("seedling", Extensions2::default())),
);
assert_eq!(Runtime::Seedling, path.runtime());
assert_eq!(Runtime::Seedling, path.runtime().unwrap());

let path = store_configuration(
&temp_dir,
Box::new(crate::chain_spec::rococo_parachain::rococo_parachain_local_config()),
);
assert_eq!(Runtime::Default, path.runtime());
assert_eq!(Runtime::Default, path.runtime().unwrap());

let path = store_configuration(
&temp_dir,
Box::new(crate::chain_spec::contracts::contracts_rococo_local_config()),
);
assert_eq!(Runtime::ContractsRococo, path.runtime());
assert_eq!(Runtime::ContractsRococo, path.runtime().unwrap());
}
}
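
The command.rs changes above make RuntimeResolver::runtime fallible: opening the chain-spec file or parsing its JSON now surfaces as an sc_cli error instead of a panic via expect, and every call site switches to runtime()?. A self-contained sketch of the same pattern, using only std plus serde/serde_json and an illustrative function name (resolve_runtime_id), is shown below.

use std::path::Path;

#[derive(Debug, serde::Deserialize)]
struct EmptyChainSpecWithId {
	// Only the id field is needed to pick a runtime; all other fields are ignored.
	id: String,
}

// Fallible resolution: I/O and JSON errors are propagated with ? instead of panicking.
fn resolve_runtime_id(path: &Path) -> Result<String, Box<dyn std::error::Error>> {
	let file = std::fs::File::open(path)?;
	let reader = std::io::BufReader::new(file);
	let chain_spec: EmptyChainSpecWithId = serde_json::from_reader(reader)?;
	Ok(chain_spec.id)
}

A caller can then match on the returned id to select a runtime, mirroring how construct_partials! and construct_async_run! branch on chain_spec.runtime()? in this commit.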
