
General cleanup #32980

Merged 5 commits on Aug 24, 2023
Changes from 1 commit
Removes unnecessary .into_iter().
Lichtso committed Aug 24, 2023
commit 4b4101c83828e7bc32e3f4a2fef959dfcf0c0b92
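
The pattern behind every hunk below: `Extend::extend`, `Iterator::chain`, and `Iterator::zip` all take an `impl IntoIterator` argument, so a collection can be passed to them directly and the explicit `.into_iter()` adds nothing. A minimal standalone sketch of the idea (illustrative example, not code from this PR):

    fn main() {
        let mut base = vec![1, 2, 3];
        let extra = vec![4, 5];

        // `Extend::extend` accepts any `IntoIterator`, so the Vec can be passed
        // as-is; `base.extend(extra.into_iter())` compiles too but is redundant.
        base.extend(extra);

        // `Iterator::zip` and `Iterator::chain` likewise take `impl IntoIterator`.
        let keys = vec!["a", "b"];
        let values = vec![10, 20];
        let pairs: Vec<_> = keys.iter().zip(values).collect();
        let all: Vec<i32> = vec![1, 2].into_iter().chain(vec![3, 4]).collect();

        println!("{base:?} {pairs:?} {all:?}");
    }

Behavior is identical either way; dropping the call just removes noise.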
2 changes: 1 addition & 1 deletion accounts-db/src/account_storage.rs
@@ -103,7 +103,7 @@ impl AccountStorage {
pub fn initialize(&mut self, all_storages: AccountStorageMap) {
assert!(self.map.is_empty());
assert!(self.no_shrink_in_progress());
-self.map.extend(all_storages.into_iter())
+self.map.extend(all_storages)
}

/// remove the append vec at 'slot'
7 changes: 3 additions & 4 deletions accounts-db/src/accounts_db.rs
@@ -3487,8 +3487,7 @@ impl AccountsDb {

let (reclaims, pubkeys_removed_from_accounts_index2) =
self.purge_keys_exact(pubkey_to_slot_set.iter());
-pubkeys_removed_from_accounts_index
-.extend(pubkeys_removed_from_accounts_index2.into_iter());
+pubkeys_removed_from_accounts_index.extend(pubkeys_removed_from_accounts_index2);

// Don't reset from clean, since the pubkeys in those stores may need to be unref'ed
// and those stores may be used for background hashing.
@@ -7358,7 +7357,7 @@ impl AccountsDb {
let mut sort_time = Measure::start("sort_storages");
let min_root = self.accounts_index.min_alive_root();
let storages = SortedStorages::new_with_slots(
-combined_maps.iter().zip(slots.into_iter()),
+combined_maps.iter().zip(slots),
min_root,
Some(slot),
);
@@ -7824,7 +7823,7 @@ impl AccountsDb {
let (storages, slots) =
self.get_snapshot_storages(base_slot.checked_add(1).unwrap()..=slot);
let sorted_storages =
-SortedStorages::new_with_slots(storages.iter().zip(slots.into_iter()), None, None);
+SortedStorages::new_with_slots(storages.iter().zip(slots), None, None);
let calculated_incremental_accounts_hash = self.calculate_incremental_accounts_hash(
&calc_config,
&sorted_storages,
2 changes: 1 addition & 1 deletion accounts-db/src/sorted_storages.rs
@@ -67,7 +67,7 @@ impl<'a> SortedStorages<'a> {
let slots = source.iter().map(|storage| {
storage.slot() // this must be unique. Will be enforced in new_with_slots
});
-Self::new_with_slots(source.iter().zip(slots.into_iter()), None, None)
+Self::new_with_slots(source.iter().zip(slots), None, None)
}

/// create [`SortedStorages`] from `source` iterator.
2 changes: 1 addition & 1 deletion cli-output/src/cli_output.rs
@@ -2385,7 +2385,7 @@ pub fn return_signers_data(tx: &Transaction, config: &ReturnSignersConfig) -> Cl
tx.signatures
.iter()
.zip(tx.message.account_keys.iter())
-.zip(verify_results.into_iter())
+.zip(verify_results)
.for_each(|((sig, key), res)| {
if res {
signers.push(format!("{key}={sig}"))
2 changes: 1 addition & 1 deletion cli/src/cluster_query.rs
@@ -1980,7 +1980,7 @@ pub fn process_show_validators(

let validators: Vec<_> = current_validators
.into_iter()
-.chain(delinquent_validators.into_iter())
+.chain(delinquent_validators)
.collect();

let (average_skip_rate, average_stake_weighted_skip_rate) = {
2 changes: 1 addition & 1 deletion cli/src/vote.rs
@@ -1385,7 +1385,7 @@ pub fn process_close_vote_account(
if let Some(vote_account) = vote_account_status
.current
.into_iter()
-.chain(vote_account_status.delinquent.into_iter())
+.chain(vote_account_status.delinquent)
.next()
{
if vote_account.activated_stake != 0 {
2 changes: 1 addition & 1 deletion core/src/shred_fetch_stage.rs
@@ -208,7 +208,7 @@ impl ShredFetchStage {
turbine_disabled.clone(),
);

-tvu_threads.extend(repair_receiver.into_iter());
+tvu_threads.extend(repair_receiver);
tvu_threads.push(tvu_filter);
tvu_threads.push(repair_handler);

2 changes: 1 addition & 1 deletion gossip/src/cluster_info_metrics.rs
@@ -676,7 +676,7 @@ pub(crate) fn submit_gossip_stats(
.pull
.votes
.into_iter()
-.chain(crds_stats.push.votes.into_iter())
+.chain(crds_stats.push.votes)
.into_grouping_map()
.aggregate(|acc, _slot, num_votes| Some(acc.unwrap_or_default() + num_votes));
submit_vote_stats("cluster_info_crds_stats_votes", &votes);
2 changes: 1 addition & 1 deletion local-cluster/src/cluster_tests.rs
@@ -516,7 +516,7 @@ pub fn start_gossip_voter(
let (labels, votes) = cluster_info.get_votes_with_labels(&mut cursor);
let mut parsed_vote_iter: Vec<_> = labels
.into_iter()
-.zip(votes.into_iter())
+.zip(votes)
.filter_map(&vote_filter)
.collect();

5 changes: 1 addition & 4 deletions metrics/src/metrics.rs
@@ -527,10 +527,7 @@ pub mod test_mocks {
assert!(!points.is_empty());

let new_points = points.len();
-self.points_written
-.lock()
-.unwrap()
-.extend(points.into_iter());
+self.points_written.lock().unwrap().extend(points);

info!(
"Writing {} points ({} total)",
2 changes: 1 addition & 1 deletion programs/bpf_loader/src/lib.rs
@@ -332,7 +332,7 @@ fn create_memory_mapping<'a, 'b, C: ContextObject>(
MemoryRegion::new_writable(heap.as_slice_mut(), MM_HEAP_START),
]
.into_iter()
-.chain(additional_regions.into_iter())
+.chain(additional_regions)
.collect();

Ok(if let Some(cow_cb) = cow_cb {
2 changes: 1 addition & 1 deletion runtime/src/serde_snapshot.rs
@@ -176,7 +176,7 @@ impl<T> SnapshotAccountsDbFields<T> {
})?;

let mut combined_storages = full_snapshot_storages;
-combined_storages.extend(incremental_snapshot_storages.into_iter());
+combined_storages.extend(incremental_snapshot_storages);

Ok(AccountsDbFields(
combined_storages,
2 changes: 1 addition & 1 deletion runtime/src/snapshot_bank_utils.rs
@@ -300,7 +300,7 @@ pub fn bank_from_snapshot_archives(
if let Some(ref mut unarchive_preparation_result) = unarchived_incremental_snapshot {
let incremental_snapshot_storages =
std::mem::take(&mut unarchive_preparation_result.storage);
-storage.extend(incremental_snapshot_storages.into_iter());
+storage.extend(incremental_snapshot_storages);
}

let storage_and_next_append_vec_id = StorageAndNextAppendVecId {
2 changes: 1 addition & 1 deletion stake-accounts/src/stake_accounts.rs
@@ -138,7 +138,7 @@ fn move_stake_account(
new_withdraw_authority_pubkey,
);

-instructions.extend(authorize_instructions.into_iter());
+instructions.extend(authorize_instructions);
let message = Message::new(&instructions, Some(fee_payer_pubkey));
Some(message)
}
38 changes: 17 additions & 21 deletions storage-bigtable/src/compression.rs
@@ -48,28 +48,24 @@ pub fn decompress(data: &[u8]) -> Result<Vec<u8>, io::Error> {

pub fn compress(method: CompressionMethod, data: &[u8]) -> Result<Vec<u8>, io::Error> {
let mut compressed_data = bincode::serialize(&method).unwrap();
-compressed_data.extend(
-    match method {
-        CompressionMethod::Bzip2 => {
-            let mut e = bzip2::write::BzEncoder::new(Vec::new(), bzip2::Compression::best());
-            e.write_all(data)?;
-            e.finish()?
-        }
-        CompressionMethod::Gzip => {
-            let mut e =
-                flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
-            e.write_all(data)?;
-            e.finish()?
-        }
-        CompressionMethod::Zstd => {
-            let mut e = zstd::stream::write::Encoder::new(Vec::new(), 0).unwrap();
-            e.write_all(data)?;
-            e.finish()?
-        }
-        CompressionMethod::NoCompression => data.to_vec(),
-    }
-    .into_iter(),
-);
+compressed_data.extend(match method {
+    CompressionMethod::Bzip2 => {
+        let mut e = bzip2::write::BzEncoder::new(Vec::new(), bzip2::Compression::best());
+        e.write_all(data)?;
+        e.finish()?
+    }
+    CompressionMethod::Gzip => {
+        let mut e = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
+        e.write_all(data)?;
+        e.finish()?
+    }
+    CompressionMethod::Zstd => {
+        let mut e = zstd::stream::write::Encoder::new(Vec::new(), 0).unwrap();
+        e.write_all(data)?;
+        e.finish()?
+    }
+    CompressionMethod::NoCompression => data.to_vec(),
+});

Ok(compressed_data)
}
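
The compression.rs hunk above works for the same reason, with one extra wrinkle: the whole `match` is an expression that evaluates to a `Vec<u8>`, and `Vec<u8>` already implements `IntoIterator<Item = u8>`, so `extend` can consume it directly. A hypothetical standalone sketch of that shape (illustrative only, not this crate's API):

    // `out.extend(<match expr>)` works because the match evaluates to a
    // `Vec<u8>`, which is itself an `IntoIterator<Item = u8>`.
    fn frame(reverse: bool, data: &[u8]) -> Vec<u8> {
        let mut out = vec![reverse as u8]; // one-byte header tag, standing in for the serialized method
        out.extend(match reverse {
            true => data.iter().rev().copied().collect::<Vec<u8>>(),
            false => data.to_vec(),
        });
        out
    }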
2 changes: 1 addition & 1 deletion validator/src/admin_rpc_service.rs
@@ -485,7 +485,7 @@ impl AdminRpc for AdminRpcImpl {
.staked_map_id;
let mut write_staked_nodes = meta.staked_nodes_overrides.write().unwrap();
write_staked_nodes.clear();
-write_staked_nodes.extend(loaded_config.into_iter());
+write_staked_nodes.extend(loaded_config);
info!("Staked nodes overrides loaded from {}", path);
debug!("overrides map: {:?}", write_staked_nodes);
Ok(())
2 changes: 1 addition & 1 deletion validator/src/main.rs
@@ -1450,7 +1450,7 @@ pub fn main() {
if let Some(account_shrink_snapshot_paths) = account_shrink_snapshot_paths {
account_snapshot_paths
.into_iter()
-.chain(account_shrink_snapshot_paths.into_iter())
+.chain(account_shrink_snapshot_paths)
.collect()
} else {
account_snapshot_paths