Add more clippy checks to avoid potential runtime crashes #408

Merged: 6 commits, Apr 5, 2023
Changes from 4 commits

2 changes: 1 addition & 1 deletion .github/workflows/lints.yml
@@ -41,7 +41,7 @@ jobs:
toolchain: stable
components: clippy
- name: Run cargo clippy
run: cargo clippy --workspace --all-targets -- -D warnings -D deprecated -D clippy::perf -D clippy::complexity -D clippy::style -D clippy::correctness -D clippy::suspicious
run: ./ci/clippy.sh
continue-on-error: false

rustfmt:
15 changes: 15 additions & 0 deletions ci/clippy.sh
@@ -0,0 +1,15 @@
#!/bin/sh
set -e

# Checks which apply to all code including tests
cargo clippy --workspace --all-targets --all-features -- -D warnings -D deprecated -D clippy::perf \
-D clippy::complexity -D clippy::style -D clippy::correctness \
-D clippy::suspicious -D clippy::dbg_macro -D clippy::if_then_some_else_none \
-D clippy::items-after-statements -D clippy::implicit_clone \
-D clippy::cast_lossless -D clippy::manual_string_new \
-D clippy::redundant_closure_for_method_calls \
-D clippy::unused_self -D clippy::get_first

# Checks which apply to main code (not tests)
cargo clippy --workspace --all-features -- -D clippy::unwrap_used \
-D clippy::indexing_slicing
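
The split between the two invocations matters: clippy::unwrap_used and clippy::indexing_slicing, the lints most directly aimed at runtime crashes, are enforced only on non-test code, where a panic would take down the relayer. A minimal illustrative sketch (not code from this repository) of what those two lints reject and the panic-free form they push towards:

    // Hypothetical helper, for illustration only.
    fn bridge_at(bridges: &[String], index: usize) -> Option<&String> {
        // Denied by clippy::indexing_slicing: `&bridges[index]` panics when out of range.
        // Denied by clippy::unwrap_used: `bridges.get(index).unwrap()` panics on None.
        // Accepted: return the absence and let the caller decide how to handle it.
        bridges.get(index)
    }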
2 changes: 1 addition & 1 deletion crates/bridge-registry-backends/src/dkg.rs
@@ -51,7 +51,7 @@ impl super::BridgeRegistryBackend for DkgBridgeRegistryBackend {
index: u32,
) -> webb_relayer_utils::Result<Option<BridgeMetadata>> {
let storage = RuntimeApi::storage().bridge_registry();
let bridges = storage.bridges(dbg!(index));
let bridges = storage.bridges(index);
Ok(self
.client
.storage()
28 changes: 15 additions & 13 deletions crates/bridge-registry-backends/src/lib.rs
@@ -78,19 +78,21 @@ pub trait BridgeRegistryBackend {
.resource_to_bridge_index(linked_anchor)
.await
.ok_or(Error::BridgeNotRegistered(*linked_anchor))?;
let bridges = self.bridges(next_bridge_index).await?.unwrap();
Ok(bridges
.resource_ids
.0
.into_iter()
.filter(|r| r.0 != linked_anchor.0)
.map(|r| {
let rr = RawResourceId {
resource_id: H256(r.0),
};
LinkedAnchorConfig::Raw(rr)
})
.collect())
match self.bridges(next_bridge_index).await? {
None => Ok(vec![]),
Some(bridges) => Ok(bridges
.resource_ids
.0
.into_iter()
.filter(|r| r.0 != linked_anchor.0)
.map(|r| {
let rr = RawResourceId {
resource_id: H256(r.0),
};
LinkedAnchorConfig::Raw(rr)
})
.collect()),
}
}
}
}
6 changes: 5 additions & 1 deletion crates/bridge-registry-backends/src/mocked.rs
@@ -26,11 +26,15 @@ impl BridgeRegistryBackend for MockedBridgeRegistryBackend {
&self,
_index: u32,
) -> webb_relayer_utils::Result<Option<BridgeMetadata>> {
let display = SerdeData(
Data::decode_all(&mut "mock bridge".as_bytes())
.expect("decode mock bridge data"),
);
Ok(Some(BridgeMetadata {
resource_ids: BoundedVec(vec![
ResourceId(hex!("0000000000000000e69a847cd5bc0c9480ada0b339d7f0a8cac2b6670000138a")),
ResourceId(hex!("000000000000d30c8839c1145609e564b986f667b273ddcb8496010000001389"))]),
info: BridgeInfo { additional: BoundedVec(vec![]), display: SerdeData(Data::decode_all(&mut "mock bridge".as_bytes()).unwrap()) },
info: BridgeInfo { additional: BoundedVec(vec![]), display },
}))
}
}
@@ -42,7 +42,7 @@ async fn submit_anchor_update_proposal() {
PairSigner::new(Pair::from_string("//Alice", None).unwrap());
let account_nonce = api
.rpc()
.system_account_next_index(dbg!(sudo_account.account_id()))
.system_account_next_index(sudo_account.account_id())
.await
.unwrap();

2 changes: 1 addition & 1 deletion crates/event-watcher-traits/src/evm/event_watcher.rs
@@ -161,7 +161,7 @@ pub trait EventWatcher {
// this event will be marked as handled if at least one handler succeeded.
// this is because, for the failed events, we already tried to handle them
// many times (at this point), and there is no point in trying again.
let mark_as_handled = result.iter().any(|r| r.is_ok());
let mark_as_handled = result.iter().any(Result::is_ok);
// also, for all the failed event handlers, we should print what went
// wrong.
result.iter().for_each(|r| {
5 changes: 3 additions & 2 deletions crates/event-watcher-traits/src/substrate/event_watcher.rs
@@ -149,6 +149,8 @@ where
handlers: Vec<EventHandlerFor<Self, RuntimeConfig>>,
metrics: Arc<Mutex<metric::Metrics>>,
) -> webb_relayer_utils::Result<()> {
const MAX_RETRY_COUNT: usize = 5;

let backoff = backoff::backoff::Constant::new(Duration::from_secs(1));
let metrics_clone = metrics.clone();
let task = || async {
@@ -244,7 +246,6 @@ where
// wraps each handler future in a retry logic, that will retry the handler
// if it fails, up to `MAX_RETRY_COUNT`, after this it will ignore that event for
// that specific handler.
const MAX_RETRY_COUNT: usize = 5;
let tasks = handlers.iter().map(|handler| {
// a constant backoff with maximum retry count is used here.
let backoff = retry::ConstantWithMaxRetryCount::new(
@@ -264,7 +265,7 @@ where
// this event will be marked as handled if at least one handler succeeded.
// this is because, for the failed events, we already tried to handle them
// many times (at this point), and there is no point in trying again.
let mark_as_handled = result.iter().any(|r| r.is_ok());
let mark_as_handled = result.iter().any(Result::is_ok);
// also, for all the failed event handlers, we should print what went
// wrong.
result.iter().for_each(|r| {
2 changes: 1 addition & 1 deletion crates/event-watcher-traits/src/tests.rs
@@ -86,7 +86,7 @@ async fn substrate_event_watcher_should_work() -> webb_relayer_utils::Result<()>
let client = OnlineClient::<PolkadotConfig>::new().await?;
let watcher = TestEventsWatcher::default();
let config = webb_relayer_config::WebbRelayerConfig::default();
let ctx = RelayerContext::new(config, store.clone());
let ctx = RelayerContext::new(config, store.clone())?;
let metrics = ctx.metrics.clone();
let event_watcher_config = EventsWatcherConfig::default();
watcher
2 changes: 1 addition & 1 deletion crates/proposal-signing-backends/src/mocked.rs
@@ -91,7 +91,7 @@ where
self.store.enqueue_item(
SledQueueKey::from_bridge_key(bridge_key),
BridgeCommand::ExecuteProposalWithSignature {
data: proposal_bytes.to_vec(),
data: proposal_bytes.clone(),
signature: signature_bytes,
},
)?;
2 changes: 1 addition & 1 deletion crates/relayer-config/src/cli.rs
@@ -135,7 +135,7 @@ pub async fn create_store(
let dirs = ProjectDirs::from(PACKAGE_ID[0], PACKAGE_ID[1], PACKAGE_ID[2])
.context("failed to get config")?;
let p = match opts.config_dir.as_ref() {
Some(p) => p.to_path_buf(),
Some(p) => p.clone(),
None => dirs.data_local_dir().to_path_buf(),
};
let db_path = match opts.config_dir.as_ref().zip(p.parent()) {
2 changes: 1 addition & 1 deletion crates/relayer-config/src/lib.rs
@@ -228,7 +228,7 @@ mod tests {
let config_dirs =
glob::glob(config_dir.join("**").join("**").to_str().unwrap())
.expect("Failed to read config directory")
.filter_map(|p| p.ok())
.filter_map(Result::ok)
.filter(|p| p.is_dir())
.collect::<Vec<_>>();
assert!(
4 changes: 2 additions & 2 deletions crates/relayer-config/src/utils.rs
@@ -150,7 +150,7 @@ pub fn postloading_process(
let linked_anchors: Vec<LinkedAnchorConfig> =
linked_anchors
.into_iter()
.map(|anchor| anchor.into_raw_resource_id())
.map(LinkedAnchorConfig::into_raw_resource_id)
.collect();
cfg.linked_anchors = Some(linked_anchors);
}
@@ -169,7 +169,7 @@ pub fn postloading_process(
let linked_anchors: Vec<LinkedAnchorConfig> =
linked_anchors
.into_iter()
.map(|anchor| anchor.into_raw_resource_id())
.map(LinkedAnchorConfig::into_raw_resource_id)
.collect();
cfg.linked_anchors = Some(linked_anchors);
}
11 changes: 5 additions & 6 deletions crates/relayer-context/src/lib.rs
@@ -64,25 +64,24 @@ impl RelayerContext {
pub fn new(
config: webb_relayer_config::WebbRelayerConfig,
store: SledStore,
) -> Self {
) -> webb_relayer_utils::Result<Self> {
let (notify_shutdown, _) = broadcast::channel(2);
let metrics = Arc::new(Mutex::new(Metrics::new()));
let metrics = Arc::new(Mutex::new(Metrics::new()?));
let coin_gecko_client = Arc::new(CoinGeckoClient::default());
let mut etherscan_clients: HashMap<u32, Client> = HashMap::new();
for (chain, etherscan_config) in config.evm_etherscan.iter() {
let client =
Client::new(*chain, etherscan_config.api_key.to_string())
.unwrap();
Client::new(*chain, etherscan_config.api_key.to_string())?;
etherscan_clients.insert(etherscan_config.chain_id, client);
}
Self {
Ok(Self {
config,
notify_shutdown,
metrics,
store,
coin_gecko_client,
etherscan_clients,
}
})
}
/// Returns a broadcast receiver handle for the shutdown signal.
pub fn shutdown_signal(&self) -> Shutdown {
19 changes: 3 additions & 16 deletions crates/relayer-handlers/src/lib.rs
@@ -17,7 +17,6 @@
#![allow(clippy::large_enum_variant)]
#![warn(missing_docs)]
use axum::extract::{Path, State, WebSocketUpgrade};
use axum::http::StatusCode;
use ethereum_types::{Address, U256};
use std::error::Error;
use std::sync::Arc;
@@ -39,10 +38,10 @@ use webb_relayer_handler_utils::{
};
use webb_relayer_tx_relay::evm::fees::{get_fee_info, FeeInfo};

use crate::routes::HandlerError;
use webb_relayer_tx_relay::evm::vanchor::handle_vanchor_relay_tx;
use webb_relayer_tx_relay::substrate::mixer::handle_substrate_mixer_relay_tx;
use webb_relayer_tx_relay::substrate::vanchor::handle_substrate_vanchor_relay_tx;
use webb_relayer_utils::HandlerError;

/// Module handles relayer API
pub mod routes;
@@ -208,19 +207,7 @@ pub async fn handle_fee_info(
) -> Result<Json<FeeInfo>, HandlerError> {
let chain_id = TypedChainId::from(chain_id);
let gas_amount = U256::from(gas_amount);
get_fee_info(chain_id, vanchor, gas_amount, ctx.as_ref())
Ok(get_fee_info(chain_id, vanchor, gas_amount, ctx.as_ref())
.await
.map(Json)
.map_err(|e| {
let status = match e {
webb_relayer_utils::Error::FetchTokenPriceError { .. } => {
StatusCode::BAD_REQUEST
}
webb_relayer_utils::Error::EtherscanConfigNotFound {
..
} => StatusCode::BAD_REQUEST,
_ => StatusCode::INTERNAL_SERVER_ERROR,
};
HandlerError(status, e.to_string())
})
.map(Json)?)
}
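
With the explicit status-code mapping gone, the bare `?` above relies on a conversion from webb_relayer_utils::Error into HandlerError. A rough sketch of what such an impl might look like, assuming HandlerError is the (StatusCode, String) tuple struct used by the other handlers; the actual conversion is defined in webb_relayer_utils and may map errors differently:

    // Sketch only: mirrors the status-code mapping removed in this hunk.
    impl From<webb_relayer_utils::Error> for HandlerError {
        fn from(e: webb_relayer_utils::Error) -> Self {
            let status = match e {
                webb_relayer_utils::Error::FetchTokenPriceError { .. }
                | webb_relayer_utils::Error::EtherscanConfigNotFound { .. } => {
                    StatusCode::BAD_REQUEST
                }
                _ => StatusCode::INTERNAL_SERVER_ERROR,
            };
            HandlerError(status, e.to_string())
        }
    }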
15 changes: 8 additions & 7 deletions crates/relayer-handlers/src/routes/encrypted_outputs.rs
@@ -12,7 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.

use crate::routes::HandlerError;
use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::Json;
@@ -22,6 +21,7 @@ use std::{collections::HashMap, sync::Arc};
use webb_proposals::{ResourceId, TargetSystem, TypedChainId};
use webb_relayer_context::RelayerContext;
use webb_relayer_store::EncryptedOutputCacheStore;
use webb_relayer_utils::HandlerError;

use super::OptionalRangeQuery;

@@ -112,14 +112,15 @@ pub async fn handle_encrypted_outputs_cache_evm(
let src_typed_chain_id = TypedChainId::Evm(chain_id);
let history_store_key =
ResourceId::new(src_target_system, src_typed_chain_id);
let encrypted_output = ctx
.store()
.get_encrypted_output_with_range(history_store_key, query_range.into())
.unwrap();
let encrypted_output = ctx.store().get_encrypted_output_with_range(
history_store_key,
query_range.into(),
)?;
let last_queried_block = ctx
.store()
.get_last_deposit_block_number_for_encrypted_output(history_store_key)
.unwrap();
.get_last_deposit_block_number_for_encrypted_output(
history_store_key,
)?;

Ok(Json(EncryptedOutputsCacheResponse {
encrypted_outputs: encrypted_output,
14 changes: 5 additions & 9 deletions crates/relayer-handlers/src/routes/leaves.rs
@@ -17,14 +17,14 @@ use axum::http::StatusCode;
use axum::Json;
use std::{collections::HashMap, sync::Arc};

use crate::routes::HandlerError;
use ethereum_types::Address;
use serde::Serialize;
use webb_proposals::{
ResourceId, SubstrateTargetSystem, TargetSystem, TypedChainId,
};
use webb_relayer_context::RelayerContext;
use webb_relayer_store::LeafCacheStore;
use webb_relayer_utils::HandlerError;

use super::OptionalRangeQuery;

@@ -116,12 +116,10 @@ pub async fn handle_leaves_cache_evm(
ResourceId::new(src_target_system, src_typed_chain_id);
let leaves = ctx
.store()
.get_leaves_with_range(history_store_key, query_range.into())
.unwrap();
.get_leaves_with_range(history_store_key, query_range.into())?;
let last_queried_block = ctx
.store()
.get_last_deposit_block_number(history_store_key)
.unwrap();
.get_last_deposit_block_number(history_store_key)?;

Ok(Json(LeavesCacheResponse {
leaves,
@@ -167,12 +165,10 @@ pub async fn handle_leaves_cache_substrate(

let leaves = ctx
.store()
.get_leaves_with_range(history_store_key, query_range.into())
.unwrap();
.get_leaves_with_range(history_store_key, query_range.into())?;
let last_queried_block = ctx
.store()
.get_last_deposit_block_number(history_store_key)
.unwrap();
.get_last_deposit_block_number(history_store_key)?;

Ok(Json(LeavesCacheResponse {
leaves,
13 changes: 9 additions & 4 deletions crates/relayer-handlers/src/routes/metric.rs
@@ -1,4 +1,5 @@
use axum::extract::{Path, State};
use axum::http::StatusCode;
use axum::Json;
use ethereum_types::Address;
use serde::Serialize;
@@ -8,6 +9,7 @@ use webb_proposals::{
};
use webb_relayer_context::RelayerContext;
use webb_relayer_utils::metric::Metrics;
use webb_relayer_utils::HandlerError;

/// Response with metrics message
#[derive(Debug, Serialize)]
@@ -31,11 +33,14 @@ pub struct ResourceMetricResponse {
/// Handles relayer metric requests
///
/// Returns a Result with the `MetricResponse` on success
pub async fn handle_metric_info() -> Json<RelayerMetricResponse> {
let metric_gathered = Metrics::gather_metrics();
Json(RelayerMetricResponse {
pub async fn handle_metric_info(
) -> Result<Json<RelayerMetricResponse>, HandlerError> {
let metric_gathered = Metrics::gather_metrics().map_err(|e| {
HandlerError(StatusCode::INTERNAL_SERVER_ERROR, e.to_string())
})?;
Ok(Json(RelayerMetricResponse {
metrics: metric_gathered,
})
}))
}

/// Handles relayer metric requests for evm based resource