Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fork off: child support #915

Merged
merged 19 commits into from
Feb 21, 2023
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion fork-off/src/account_setting.rs
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ pub fn apply_account_setting(mut state: Storage, setting: AccountSetting) -> Sto
let account_hash = account.clone().into();
let key = &account_map.join(&account_hash);

state.insert(key.clone(), info.clone().into());
state.top.insert(key.clone(), info.clone().into());
info!(target: "fork-off", "Account info of `{:?}` set to `{:?}`", account, info);
}
state
Expand Down
7 changes: 4 additions & 3 deletions fork-off/src/chainspec_combining.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ impl Index<&StoragePath> for PathCounter {
}
}

/// Combines states — omitting child state, as we assume that it is empty in the initial chainspec
pub fn combine_states(
mut state: Storage,
initial_state: Storage,
Expand All @@ -40,7 +41,7 @@ pub fn combine_states(
let mut removed_per_path_count = PathCounter::default();
let mut added_per_path_cnt = PathCounter::default();

state.retain(|k, _v| {
state.top.retain(|k, _v| {
match storage_prefixes
.iter()
.find(|(_, prefix)| prefix.is_prefix_of(k))
Expand All @@ -53,13 +54,13 @@ pub fn combine_states(
}
});

for (k, v) in initial_state.iter() {
for (k, v) in initial_state.top.iter() {
if let Some((path, _)) = storage_prefixes
.iter()
.find(|(_, prefix)| prefix.is_prefix_of(k))
{
added_per_path_cnt.bump(path);
state.insert(k.clone(), v.clone());
state.top.insert(k.clone(), v.clone());
}
}

Expand Down
28 changes: 23 additions & 5 deletions fork-off/src/fetching.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::{collections::HashMap, iter::repeat_with, sync::Arc};
use std::{iter::repeat_with, sync::Arc};

use async_channel::Receiver;
use futures::{future::join_all, join};
Expand Down Expand Up @@ -37,10 +37,28 @@ impl StateFetcher {
.await
.unwrap();

let child_storage_map =
if let Ok(child_keys) = self.client.get_all_keys_for_child(&key, &block).await {
Some(
self.client
.get_storage_map_for_child(key.clone(), child_keys, block.clone())
.await,
)
} else {
None
};

let mut output_guard = output.lock();
output_guard.insert(key, value);
if output_guard.len() % LOG_PROGRESS_FREQUENCY == 0 {
info!("Fetched {} values", output_guard.len());
output_guard.top.insert(key.clone(), value);
if child_storage_map.is_some() {
output_guard.child_storage.insert(
key.without_child_storage_prefix(),
child_storage_map.unwrap(),
);
}

if output_guard.top.len() % LOG_PROGRESS_FREQUENCY == 0 {
info!("Fetched {} values", output_guard.top.len());
}
}
}
Expand All @@ -49,7 +67,7 @@ impl StateFetcher {
info!("Fetching state at block {:?}", block_hash);

let (input, key_fetcher) = self.client.stream_all_keys(&block_hash);
let output = Arc::new(Mutex::new(HashMap::new()));
let output = Arc::new(Mutex::new(Storage::default()));

let workers = repeat_with(|| {
self.value_fetching_worker(block_hash.clone(), input.clone(), output.clone())
Expand Down
4 changes: 2 additions & 2 deletions fork-off/src/fsio.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ pub fn save_snapshot_to_file(snapshot: Storage, path: String) {
let data = serde_json::to_vec_pretty(&snapshot).unwrap();
info!(
"Writing snapshot of {} key-val pairs and {} total bytes",
snapshot.len(),
snapshot.top.len(),
data.len()
);
write_to_file(path, &data);
Expand All @@ -50,6 +50,6 @@ pub fn read_snapshot_from_file(path: String) -> Storage {
let snapshot: Storage =
serde_json::from_str(&fs::read_to_string(path).expect("Could not read snapshot file"))
.expect("could not parse from snapshot");
info!("Read snapshot of {} key-val pairs", snapshot.len());
info!("Read snapshot of {} key-val pairs", snapshot.top.len());
snapshot
}
44 changes: 43 additions & 1 deletion fork-off/src/jsonrpc_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use jsonrpc_core::Error;
use jsonrpc_core_client::{transports::ws, RpcError};
use jsonrpc_derive::rpc;

use crate::types::{BlockHash, StorageKey, StorageValue};
use crate::types::{BlockHash, ChildStorageMap, StorageKey, StorageValue};

#[rpc]
pub trait Rpc {
Expand All @@ -28,6 +28,22 @@ pub trait Rpc {
start_key: Option<StorageKey>,
at: Option<BlockHash>,
) -> Result<Vec<StorageKey>, Error>;

#[rpc(name = "childstate_getKeys")]
DamianStraszak marked this conversation as resolved.
Show resolved Hide resolved
fn get_child_keys(
&self,
child_storage: StorageKey,
prefix: StorageKey,
at: Option<BlockHash>,
) -> Result<Vec<StorageKey>, Error>;

#[rpc(name = "childstate_getStorageEntries")]
fn get_child_storage_entries(
&self,
child_storage: StorageKey,
keys: Vec<StorageKey>,
at: Option<BlockHash>,
) -> Result<Vec<StorageValue>, Error>;
}

type RpcResult<T> = Result<T, RpcError>;
Expand Down Expand Up @@ -71,6 +87,32 @@ impl Client {
(receiver, self.do_stream_all_keys(sender, at.clone()))
}

/// Fetch every key stored in the child trie identified by `child_key`,
/// as of block `at`.
///
/// Issues a `childstate_getKeys` RPC with an empty prefix (`"0x"`) so that
/// all keys of the child trie are returned.
///
/// # Errors
/// Propagates any transport/RPC error from the underlying client.
pub async fn get_all_keys_for_child(
    &self,
    child_key: &StorageKey,
    at: &BlockHash,
) -> RpcResult<Vec<StorageKey>> {
    // An empty prefix matches every key in the child trie.
    let empty_prefix = StorageKey::new("0x");

    // `empty_prefix` is moved directly — the original's extra `.clone()`
    // on its last use was redundant.
    self.client
        .get_child_keys(child_key.clone(), empty_prefix, Some(at.clone()))
        .await
}

/// Fetch the values for `keys` in the child trie identified by `child_key`
/// at block `at`, and pair them back up with their keys.
///
/// Zipping assumes the `childstate_getStorageEntries` RPC returns values in
/// the same order as the requested keys.
///
/// # Panics
/// Panics if the `childstate_getStorageEntries` RPC call fails.
pub async fn get_storage_map_for_child(
    &self,
    child_key: StorageKey,
    keys: Vec<StorageKey>,
    at: BlockHash,
) -> ChildStorageMap {
    let values = self
        .client
        .get_child_storage_entries(child_key, keys.clone(), Some(at))
        .await
        .expect("childstate_getStorageEntries call failed");
    // Consume `keys` here instead of cloning every entry a second time
    // (the original used `keys.iter().cloned()` after already cloning
    // the whole vector for the RPC call).
    keys.into_iter().zip(values).collect()
}

async fn do_stream_all_keys(&self, sender: Sender<StorageKey>, at: BlockHash) -> RpcResult<()> {
let empty_prefix = StorageKey::new("0x");
let mut start_key = None;
Expand Down
17 changes: 12 additions & 5 deletions fork-off/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ use crate::{
file_content, read_json_from_file, read_snapshot_from_file, save_snapshot_to_file,
write_to_file,
},
types::Storage,
types::{ChildStorage, Storage},
};

mod account_setting;
Expand Down Expand Up @@ -56,9 +56,12 @@ async fn main() -> anyhow::Result<()> {
}
let state = read_snapshot_from_file(snapshot_path);

let initial_state: Storage =
serde_json::from_value(initial_spec["genesis"]["raw"]["top"].take())
.expect("Deserialization of state from given chainspec file failed");
// Initialize with state from chainspec + empty child storage
let initial_state = Storage {
top: serde_json::from_value(initial_spec["genesis"]["raw"]["top"].take())
.expect("Deserialization of state from given chainspec file failed"),
child_storage: ChildStorage::new(),
};
deuszx marked this conversation as resolved.
Show resolved Hide resolved

let state = combine_states(state, initial_state, storage_keep_state);

Expand All @@ -77,8 +80,12 @@ async fn main() -> anyhow::Result<()> {
};
let state = apply_account_setting(state, account_setting);

let json_state = serde_json::to_value(state).expect("Failed to convert a storage map to json");
let json_state =
serde_json::to_value(state.top).expect("Failed to convert a storage map to json");
let json_child_state =
serde_json::to_value(state.child_storage).expect("Failed to convert a storage map to json");
initial_spec["genesis"]["raw"]["top"] = json_state;
initial_spec["genesis"]["raw"]["childrenDefault"] = json_child_state;
let new_spec = serde_json::to_vec_pretty(&initial_spec)?;

info!(target: "fork-off", "Writing new chainspec to {}", &combined_spec_path);
Expand Down
37 changes: 32 additions & 5 deletions fork-off/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,19 +5,23 @@
//! to a function, as `clippy` screams outrageously about changing it to `&str` and then the alias
//! is useless.

use std::{collections::HashMap, fmt::Debug, str::FromStr};
use std::{
collections::{BTreeMap, HashMap},
fmt::Debug,
str::FromStr,
};

use codec::Encode;
use frame_support::{sp_runtime::AccountId32, Blake2_128Concat, StorageHasher, Twox128};
use hex::ToHex;
use hex::{encode, ToHex};
use serde::{Deserialize, Serialize};

pub trait Get<T = String> {
fn get(self) -> T;
}

/// Remove leading `"0x"`.
fn strip_hex<T: ToString + ?Sized>(t: &T) -> String {
/// Remove a leading `"0x"` from the string form of `t`, if present;
/// otherwise return the string unchanged.
pub fn strip_hex<T: ToString + ?Sized>(t: &T) -> String {
    let text = t.to_string();
    match text.strip_prefix("0x") {
        Some(rest) => rest.to_string(),
        None => text,
    }
}
Expand Down Expand Up @@ -69,7 +73,7 @@ impl Get for StoragePath {
}

/// Hex-encoded key in raw chainspec.
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize, PartialOrd, Ord)]
pub struct StorageKey(String);

impl From<&StorageKey> for Vec<u8> {
Expand All @@ -78,6 +82,8 @@ impl From<&StorageKey> for Vec<u8> {
}
}

/// Raw (unencoded) byte prefix that marks keys of default child tries in the
/// top-level storage key-space.
pub const CHILD_STORAGE_PREFIX: &[u8] = b":child_storage:default:";

impl StorageKey {
pub fn new<T: ToString + ?Sized>(content: &T) -> Self {
Self(as_hex(content))
Expand All @@ -96,6 +102,16 @@ impl StorageKey {
let longer = as_hex(&other.0);
longer.starts_with(&shorter)
}

/// Strip the hex-encoded [`CHILD_STORAGE_PREFIX`] from the front of this key,
/// returning the remainder as a new `StorageKey`.
///
/// `split_off(n)` keeps the tail after the first `n` characters; `n` is the
/// length of the hex encoding of the prefix (including whatever leading
/// marker `as_hex` adds), so it lines up with the key's stored string form.
// NOTE(review): assumes `self` actually starts with the child-storage prefix;
// if it doesn't, the split point is meaningless — confirm callers guarantee
// this (e.g. keys obtained from child-state RPCs).
pub fn without_child_storage_prefix(self) -> Self {
    StorageKey::new(
        &(as_hex(
            &self
                .get()
                .split_off(as_hex(&encode(CHILD_STORAGE_PREFIX)).len()),
        )),
    )
}
}

/// Convert `AccountId` to `StorageKey` using `Blake2_128Concat` hashing algorithm.
Expand Down Expand Up @@ -169,6 +185,17 @@ impl FromStr for BlockHash {
}

/// Content of `chainspec["genesis"]["raw"]["top"]`.
pub type TopStorage = HashMap<StorageKey, StorageValue>;

/// Content of `chainspec["genesis"]["raw"]["childrenDefault"]`.
pub type ChildStorage = HashMap<StorageKey, ChildStorageMap>;

/// Key-value contents of a single child trie. A `BTreeMap` keeps entries in
/// sorted key order, giving a stable iteration/serialization order.
pub type ChildStorageMap = BTreeMap<StorageKey, StorageValue>;

/// Full raw chain state: the top-level key-value map plus the per-child-trie
/// maps, mirroring the layout of `chainspec["genesis"]["raw"]`.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Storage {
    // Written back to `chainspec["genesis"]["raw"]["top"]`.
    pub top: TopStorage,
    // Written back to `chainspec["genesis"]["raw"]["childrenDefault"]`,
    // keyed by the child trie's storage key.
    pub child_storage: ChildStorage,
}

pub type Balance = u128;