Skip to content
This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Refactor primitives #1383

Merged
merged 48 commits into from
Jul 10, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
d0aac4c
create a v1 primitives module
rphmeier Jul 9, 2020
db8a5de
Improve guide on availability types
rphmeier Jul 9, 2020
1312093
punctuate
rphmeier Jul 9, 2020
cbd848a
new parachains runtime uses new primitives
rphmeier Jul 9, 2020
14425bd
tests of new runtime now use new primitives
rphmeier Jul 9, 2020
8313f46
add ErasureChunk to guide
rphmeier Jul 9, 2020
7794af4
export erasure chunk from v1 primitives
rphmeier Jul 9, 2020
888fee0
subsystem crate uses v1 primitives
rphmeier Jul 9, 2020
25e7acf
node-primitives uses new v1 primitives
rphmeier Jul 9, 2020
3872d3e
port overseer to new primitives
rphmeier Jul 9, 2020
53cd37f
new-proposer uses v1 primitives (no ParachainHost anymore)
rphmeier Jul 9, 2020
072c5fe
fix no-std compilation for primitives
rphmeier Jul 9, 2020
5dc4aa7
service-new uses v1 primitives
rphmeier Jul 9, 2020
e0b0a31
network-bridge uses new primitives
rphmeier Jul 9, 2020
d066d3e
statement distribution uses v1 primitives
rphmeier Jul 9, 2020
09c81f8
PoV distribution uses v1 primitives; add PoV::hash fn
rphmeier Jul 9, 2020
438474f
move parachain to v0
rphmeier Jul 9, 2020
6189c4c
remove inclusion_inherent module and place into v1
rphmeier Jul 9, 2020
0aeee53
remove everything from primitives crate root
rphmeier Jul 9, 2020
9cb6238
remove some unused old types from v0 primitives
rphmeier Jul 9, 2020
55956a8
point everything else at primitives::v0
rphmeier Jul 9, 2020
25719ce
squanch some warns up
rphmeier Jul 9, 2020
e5cf500
add RuntimeDebug import to no-std as well
rphmeier Jul 9, 2020
023a1d4
Merge branch 'master' into rh-primitives-refactor
rphmeier Jul 9, 2020
1af103e
Merge branch 'master' into rh-primitives-refactor
rphmeier Jul 9, 2020
0d3013f
port over statement-table and validation
rphmeier Jul 9, 2020
8c2403f
fix final errors in validation and node-primitives
rphmeier Jul 9, 2020
3347e7f
add dummy Ord impl to committed candidate receipt
rphmeier Jul 9, 2020
b241140
guide: update CandidateValidationMessage
rphmeier Jul 9, 2020
c70badd
add primitive for ValidationOutputs
rphmeier Jul 9, 2020
43e8e30
expand CandidateValidationMessage further
rphmeier Jul 9, 2020
935d536
bikeshed
rphmeier Jul 9, 2020
3ec9437
add some impls to omitted-validation-data and available-data
rphmeier Jul 9, 2020
821c556
expand CandidateValidationMessage
rphmeier Jul 9, 2020
dc7cd08
make erasure-coding generic over v1/v0
rphmeier Jul 9, 2020
6306116
update usages of erasure-coding
rphmeier Jul 9, 2020
1bd4809
implement commitments.hash()
rphmeier Jul 9, 2020
afd20dd
use Arc<Pov> for CandidateValidation
rphmeier Jul 9, 2020
0fbbc42
improve new erasure-coding method names
rphmeier Jul 9, 2020
beb0dc4
fix up candidate backing
rphmeier Jul 9, 2020
d69770c
update docs a bit
rphmeier Jul 10, 2020
bbc2ea1
fix most tests and add short-circuiting to make_pov_available
rphmeier Jul 10, 2020
4015271
fix remainder of candidate backing tests
rphmeier Jul 10, 2020
c8b0af4
squanching warns
rphmeier Jul 10, 2020
8e5e35d
squanch it up
rphmeier Jul 10, 2020
5d352cc
some fallout
rphmeier Jul 10, 2020
9370455
overseer fallout
rphmeier Jul 10, 2020
60c3bb0
free from polkadot-test-service hell
rphmeier Jul 10, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 3 additions & 5 deletions availability-store/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,10 @@
use futures::prelude::*;
use futures::channel::{mpsc, oneshot};
use keystore::KeyStorePtr;
use polkadot_primitives::{
use polkadot_primitives::v0::{
Hash, Block,
parachain::{
PoVBlock, AbridgedCandidateReceipt, ErasureChunk,
ParachainHost, AvailableData, OmittedValidationData,
},
PoVBlock, AbridgedCandidateReceipt, ErasureChunk,
ParachainHost, AvailableData, OmittedValidationData,
};
use sp_runtime::traits::HashFor;
use sp_blockchain::Result as ClientResult;
Expand Down
13 changes: 5 additions & 8 deletions availability-store/src/store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,8 @@ use kvdb_rocksdb::{Database, DatabaseConfig};
use kvdb::{KeyValueDB, DBTransaction};
use codec::{Encode, Decode};
use polkadot_erasure_coding as erasure;
use polkadot_primitives::{
Hash,
parachain::{
ErasureChunk, AvailableData, AbridgedCandidateReceipt,
},
use polkadot_primitives::v0::{
Hash, ErasureChunk, AvailableData, AbridgedCandidateReceipt,
};
use parking_lot::Mutex;

Expand Down Expand Up @@ -273,7 +270,7 @@ impl Store {
// If there are no block data in the store at this point,
// check that they can be reconstructed now and add them to store if they can.
if self.execution_data(&candidate_hash).is_none() {
if let Ok(available_data) = erasure::reconstruct(
if let Ok(available_data) = erasure::reconstruct_v0(
n_validators as usize,
v.iter().map(|chunk| (chunk.chunk.as_ref(), chunk.index as usize)),
)
Expand Down Expand Up @@ -390,7 +387,7 @@ impl Store {
mod tests {
use super::*;
use polkadot_erasure_coding::{self as erasure};
use polkadot_primitives::parachain::{
use polkadot_primitives::v0::{
Id as ParaId, BlockData, AvailableData, PoVBlock, OmittedValidationData,
};

Expand Down Expand Up @@ -489,7 +486,7 @@ mod tests {
let available_data = available_data(&[42; 8]);
let n_validators = 5;

let erasure_chunks = erasure::obtain_chunks(
let erasure_chunks = erasure::obtain_chunks_v0(
n_validators,
&available_data,
).unwrap();
Expand Down
4 changes: 2 additions & 2 deletions availability-store/src/worker.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,8 @@ use consensus_common::{
import_queue::CacheKeyId,
};
use sp_core::traits::SpawnNamed;
use polkadot_primitives::{Block, BlockId, Hash};
use polkadot_primitives::parachain::{
use polkadot_primitives::v0::{
Block, BlockId, Hash,
ParachainHost, ValidatorId, AbridgedCandidateReceipt, AvailableData,
ValidatorPair, ErasureChunk,
};
Expand Down
21 changes: 10 additions & 11 deletions collator/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,11 @@ use log::warn;
use sc_client_api::{StateBackend, BlockchainEvents};
use sp_blockchain::HeaderBackend;
use sp_core::Pair;
use polkadot_primitives::{
use polkadot_primitives::v0::{
BlockId, Hash, Block, DownwardMessage,
parachain::{
self, BlockData, DutyRoster, HeadData, Id as ParaId,
PoVBlock, ValidatorId, CollatorPair, LocalValidationData, GlobalValidationSchedule,
}
BlockData, DutyRoster, HeadData, Id as ParaId,
PoVBlock, ValidatorId, CollatorPair, LocalValidationData, GlobalValidationSchedule,
Collation, CollationInfo, collator_signature_payload,
};
use polkadot_cli::{
ProvideRuntimeApi, ParachainHost, IdentifyVariant,
Expand All @@ -69,7 +68,7 @@ use polkadot_cli::{
pub use polkadot_cli::service::Configuration;
pub use polkadot_cli::Cli;
pub use polkadot_validation::SignedStatement;
pub use polkadot_primitives::parachain::CollatorId;
pub use polkadot_primitives::v0::CollatorId;
pub use sc_network::PeerId;
pub use service::RuntimeApiCollection;
pub use sc_cli::SubstrateCli;
Expand Down Expand Up @@ -164,7 +163,7 @@ pub async fn collate<P>(
downward_messages: Vec<DownwardMessage>,
mut para_context: P,
key: Arc<CollatorPair>,
) -> Option<parachain::Collation>
) -> Option<Collation>
where
P: ParachainContext,
P::ProduceCandidate: Send,
Expand All @@ -181,13 +180,13 @@ pub async fn collate<P>(
};

let pov_block_hash = pov_block.hash();
let signature = key.sign(&parachain::collator_signature_payload(
let signature = key.sign(&collator_signature_payload(
&relay_parent,
&local_id,
&pov_block_hash,
));

let info = parachain::CollationInfo {
let info = CollationInfo {
parachain_index: local_id,
relay_parent,
collator: key.public(),
Expand All @@ -196,7 +195,7 @@ pub async fn collate<P>(
pov_block_hash,
};

let collation = parachain::Collation {
let collation = Collation {
info,
pov: pov_block,
};
Expand Down Expand Up @@ -456,7 +455,7 @@ where

#[cfg(not(feature = "service-rewr"))]
fn compute_targets(para_id: ParaId, session_keys: &[ValidatorId], roster: DutyRoster) -> HashSet<ValidatorId> {
use polkadot_primitives::parachain::Chain;
use polkadot_primitives::v0::Chain;

roster.validator_duty.iter().enumerate()
.filter(|&(_, c)| c == &Chain::Parachain(para_id))
Expand Down
5 changes: 5 additions & 0 deletions core-primitives/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,3 +94,8 @@ pub enum DownwardMessage<AccountId = crate::AccountId> {
/// XCMP message for the Parachain.
XCMPMessage(sp_std::vec::Vec<u8>),
}

/// V1 primitives.
///
/// Currently an alias for the crate root: every core primitive is re-exported
/// unchanged, so downstream code can already path through `v1::` while the
/// versioned API surface is being introduced.
pub mod v1 {
	pub use super::*;
}
63 changes: 54 additions & 9 deletions erasure-coding/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@

use codec::{Encode, Decode};
use reed_solomon::galois_16::{self, ReedSolomon};
use primitives::{Hash as H256, BlakeTwo256, HashT};
use primitives::parachain::AvailableData;
use primitives::v0::{self, Hash as H256, BlakeTwo256, HashT};
use primitives::v1;
use sp_core::Blake2Hasher;
use trie::{EMPTY_PREFIX, MemoryDB, Trie, TrieMut, trie_types::{TrieDBMut, TrieDB}};

Expand Down Expand Up @@ -124,14 +124,32 @@ fn code_params(n_validators: usize) -> Result<CodeParams, Error> {
})
}

/// Obtain erasure-coded chunks for v0 `AvailableData`, one for each validator.
///
/// Works only up to 65536 validators, and `n_validators` must be non-zero.
pub fn obtain_chunks_v0(n_validators: usize, data: &v0::AvailableData)
-> Result<Vec<Vec<u8>>, Error>
{
obtain_chunks(n_validators, data)
}

/// Obtain erasure-coded chunks for v1 `AvailableData`, one for each validator.
///
/// Works only up to 65536 validators, and `n_validators` must be non-zero.
pub fn obtain_chunks_v1(n_validators: usize, data: &v1::AvailableData)
-> Result<Vec<Vec<u8>>, Error>
{
obtain_chunks(n_validators, data)
}

/// Obtain erasure-coded chunks, one for each validator.
///
/// Works only up to 65536 validators, and `n_validators` must be non-zero.
pub fn obtain_chunks(n_validators: usize, available_data: &AvailableData)
fn obtain_chunks<T: Encode>(n_validators: usize, data: &T)
-> Result<Vec<Vec<u8>>, Error>
{
let params = code_params(n_validators)?;
let encoded = available_data.encode();
let encoded = data.encode();

if encoded.is_empty() {
return Err(Error::BadPayload);
Expand All @@ -145,15 +163,42 @@ pub fn obtain_chunks(n_validators: usize, available_data: &AvailableData)
Ok(shards.into_iter().map(|w| w.into_inner()).collect())
}

/// Reconstruct the block data from a set of chunks.
/// Reconstruct the v0 available data from a set of chunks.
///
/// Provide an iterator containing chunk data and the corresponding index.
/// The indices of the present chunks must be indicated. If too few chunks
/// are provided, recovery is not possible.
///
/// Works only up to 65536 validators, and `n_validators` must be non-zero.
pub fn reconstruct_v0<'a, I: 'a>(n_validators: usize, chunks: I)
-> Result<v0::AvailableData, Error>
where I: IntoIterator<Item=(&'a [u8], usize)>
{
reconstruct(n_validators, chunks)
}

/// Reconstruct the v1 available data from a set of chunks.
///
/// Provide an iterator containing chunk data and the corresponding index.
/// The indices of the present chunks must be indicated. If too few chunks
/// are provided, recovery is not possible.
///
/// Works only up to 65536 validators, and `n_validators` must be non-zero.
pub fn reconstruct_v1<'a, I: 'a>(n_validators: usize, chunks: I)
-> Result<v1::AvailableData, Error>
where I: IntoIterator<Item=(&'a [u8], usize)>
{
reconstruct(n_validators, chunks)
}

/// Reconstruct decodable data from a set of chunks.
///
/// Provide an iterator containing chunk data and the corresponding index.
/// The indices of the present chunks must be indicated. If too few chunks
/// are provided, recovery is not possible.
///
/// Works only up to 65536 validators, and `n_validators` must be non-zero.
pub fn reconstruct<'a, I: 'a>(n_validators: usize, chunks: I)
-> Result<AvailableData, Error>
fn reconstruct<'a, I: 'a, T: Decode>(n_validators: usize, chunks: I) -> Result<T, Error>
where I: IntoIterator<Item=(&'a [u8], usize)>
{
let params = code_params(n_validators)?;
Expand Down Expand Up @@ -343,7 +388,7 @@ impl<'a, I: Iterator<Item=&'a [u8]>> codec::Input for ShardInput<'a, I> {
#[cfg(test)]
mod tests {
use super::*;
use primitives::parachain::{BlockData, PoVBlock};
use primitives::v0::{AvailableData, BlockData, PoVBlock};

#[test]
fn field_order_is_right_size() {
Expand Down Expand Up @@ -420,7 +465,7 @@ mod tests {
assert_eq!(chunks.len(), 10);

// any 4 chunks should work.
let reconstructed = reconstruct(
let reconstructed: AvailableData = reconstruct(
10,
[
(&*chunks[1], 1),
Expand Down
5 changes: 2 additions & 3 deletions network/src/legacy/collator_pool.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,7 @@
//! Bridge between the network and consensus service for getting collations to it.

use codec::{Encode, Decode};
use polkadot_primitives::Hash;
use polkadot_primitives::parachain::{CollatorId, Id as ParaId, Collation};
use polkadot_primitives::v0::{Hash, CollatorId, Id as ParaId, Collation};
use sc_network::PeerId;
use futures::channel::oneshot;

Expand Down Expand Up @@ -236,7 +235,7 @@ impl CollatorPool {
mod tests {
use super::*;
use sp_core::crypto::UncheckedInto;
use polkadot_primitives::parachain::{CollationInfo, BlockData, PoVBlock};
use polkadot_primitives::v0::{CollationInfo, BlockData, PoVBlock};
use futures::executor::block_on;

fn make_pov(block_data: Vec<u8>) -> PoVBlock {
Expand Down
2 changes: 1 addition & 1 deletion network/src/legacy/gossip/attestation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
use sc_network_gossip::{ValidationResult as GossipValidationResult};
use sc_network::ReputationChange;
use polkadot_validation::GenericStatement;
use polkadot_primitives::Hash;
use polkadot_primitives::v0::Hash;

use std::collections::HashMap;

Expand Down
6 changes: 3 additions & 3 deletions network/src/legacy/gossip/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,8 +58,8 @@ use sc_network_gossip::{
ValidatorContext, MessageIntent,
};
use polkadot_validation::{SignedStatement};
use polkadot_primitives::{Block, Hash};
use polkadot_primitives::parachain::{
use polkadot_primitives::v0::{
Block, Hash,
ParachainHost, ValidatorId, ErasureChunk as PrimitiveChunk, SigningContext, PoVBlock,
};
use polkadot_erasure_coding::{self as erasure};
Expand Down Expand Up @@ -755,7 +755,7 @@ mod tests {
use sc_network_gossip::Validator as ValidatorT;
use std::sync::mpsc;
use parking_lot::Mutex;
use polkadot_primitives::parachain::{AbridgedCandidateReceipt, BlockData};
use polkadot_primitives::v0::{AbridgedCandidateReceipt, BlockData};
use sp_core::sr25519::Signature as Sr25519Signature;
use polkadot_validation::GenericStatement;

Expand Down
4 changes: 2 additions & 2 deletions network/src/legacy/local_collations.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
//! Collations are attempted to be repropagated when a new validator connects,
//! a validator changes his session key, or when they are generated.

use polkadot_primitives::{Hash, parachain::{ValidatorId}};
use polkadot_primitives::v0::{Hash, ValidatorId};
use crate::legacy::collator_pool::Role;
use std::collections::{HashMap, HashSet};
use std::time::Duration;
Expand Down Expand Up @@ -144,7 +144,7 @@ impl<C: Clone> LocalCollations<C> {
mod tests {
use super::*;
use sp_core::crypto::UncheckedInto;
use polkadot_primitives::parachain::ValidatorId;
use polkadot_primitives::v0::ValidatorId;

#[test]
fn add_validator_with_ready_collation() {
Expand Down
2 changes: 1 addition & 1 deletion network/src/legacy/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ pub mod gossip;

use codec::Decode;
use futures::prelude::*;
use polkadot_primitives::Hash;
use polkadot_primitives::v0::Hash;
use sc_network::PeerId;
use sc_network_gossip::TopicNotification;
use log::debug;
Expand Down
2 changes: 1 addition & 1 deletion network/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@

#![recursion_limit="256"]

use polkadot_primitives::{Block, Hash, BlakeTwo256, HashT};
use polkadot_primitives::v0::{Block, Hash, BlakeTwo256, HashT};

pub mod legacy;
pub mod protocol;
Expand Down
8 changes: 3 additions & 5 deletions network/src/protocol/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,10 @@ use futures::task::{Context, Poll};
use futures::stream::{FuturesUnordered, StreamFuture};
use log::{debug, trace};

use polkadot_primitives::{
use polkadot_primitives::v0::{
Hash, Block,
parachain::{
PoVBlock, ValidatorId, ValidatorIndex, Collation, AbridgedCandidateReceipt,
ErasureChunk, ParachainHost, Id as ParaId, CollatorId,
},
PoVBlock, ValidatorId, ValidatorIndex, Collation, AbridgedCandidateReceipt,
ErasureChunk, ParachainHost, Id as ParaId, CollatorId,
};
use polkadot_validation::{
SharedTable, TableRouter, Network as ParachainNetwork, Validated, GenericStatement, Collators,
Expand Down
6 changes: 3 additions & 3 deletions network/src/protocol/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@ use super::*;
use crate::legacy::gossip::GossipPoVBlock;
use parking_lot::Mutex;

use polkadot_primitives::Block;
use polkadot_primitives::parachain::{
use polkadot_primitives::v0::{
Block,
Id as ParaId, Chain, DutyRoster, ParachainHost, ValidatorId,
Retriable, CollatorId, AbridgedCandidateReceipt,
GlobalValidationSchedule, LocalValidationData, ErasureChunk, SigningContext,
Expand Down Expand Up @@ -198,7 +198,7 @@ sp_api::mock_impl_runtime_apis! {
parent_hash: Default::default(),
}
}
fn downward_messages(_: ParaId) -> Vec<polkadot_primitives::DownwardMessage> {
fn downward_messages(_: ParaId) -> Vec<polkadot_primitives::v0::DownwardMessage> {
Vec::new()
}
}
Expand Down
Loading