From e1026a513e57ccd99861492622134897a42f7cfc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marin=20Ver=C5=A1i=C4=87?= Date: Tue, 10 Sep 2024 11:05:38 +0900 Subject: [PATCH] chore(lints): fix lint errors MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Marin Veršić --- Cargo.lock | 10 -- Cargo.toml | 10 +- Dockerfile | 2 +- cli/Cargo.toml | 1 - cli/src/lib.rs | 5 +- cli/src/samples.rs | 4 +- client/Cargo.toml | 1 - client/benches/tps/utils.rs | 2 +- client/examples/tutorial.rs | 16 +-- client/src/client.rs | 6 +- client/src/config.rs | 6 +- client/src/http_default.rs | 12 +- client/tests/integration/asset.rs | 31 +++-- client/tests/integration/asset_propagation.rs | 6 +- .../tests/integration/events/notification.rs | 6 +- client/tests/integration/events/pipeline.rs | 2 +- .../extra_functional/restart_peer.rs | 4 +- client/tests/integration/multisig.rs | 7 +- client/tests/integration/non_mintable.rs | 14 ++- client/tests/integration/pagination.rs | 2 +- client/tests/integration/permissions.rs | 12 +- client/tests/integration/queries/account.rs | 6 +- client/tests/integration/queries/asset.rs | 5 +- client/tests/integration/queries/mod.rs | 6 +- .../tests/integration/queries/query_errors.rs | 6 +- client/tests/integration/queries/role.rs | 6 +- .../integration/queries/smart_contract.rs | 4 +- client/tests/integration/sorting.rs | 30 +++-- client/tests/integration/status_response.rs | 4 +- client/tests/integration/transfer_asset.rs | 5 +- client/tests/integration/transfer_domain.rs | 2 +- .../integration/triggers/by_call_trigger.rs | 50 ++++---- .../integration/triggers/trigger_rollback.rs | 6 +- client/tests/integration/tx_history.rs | 9 +- client/tests/integration/tx_rollback.rs | 6 +- client_cli/Cargo.toml | 1 - client_cli/src/main.rs | 4 +- config/Cargo.toml | 1 - config/base/src/env.rs | 2 +- config/base/src/read.rs | 7 +- config/base/src/toml.rs | 5 +- config/src/logger.rs | 2 +- config/src/parameters/user.rs | 
7 +- core/Cargo.toml | 1 - core/benches/blocks/common.rs | 8 +- core/benches/kura.rs | 4 +- core/benches/validation.rs | 10 +- core/src/block.rs | 32 ++--- core/src/kura.rs | 8 +- core/src/queue.rs | 6 +- core/src/smartcontracts/isi/mod.rs | 25 ++-- core/src/smartcontracts/isi/query.rs | 112 ++++++++---------- core/src/smartcontracts/isi/triggers/set.rs | 4 +- core/src/smartcontracts/mod.rs | 8 +- core/src/smartcontracts/wasm.rs | 8 +- core/src/state.rs | 4 +- crypto/src/lib.rs | 4 +- crypto/src/varint.rs | 2 +- data_model/Cargo.toml | 3 +- data_model/src/block.rs | 8 +- data_model/src/ipfs.rs | 25 ++-- data_model/src/query/mod.rs | 9 +- .../predicate/predicate_atoms/account.rs | 1 + .../query/predicate/predicate_atoms/asset.rs | 2 + .../query/predicate/predicate_atoms/domain.rs | 1 + .../query/predicate/predicate_atoms/mod.rs | 4 +- ffi/derive/src/attr_parse/getset.rs | 4 +- ffi/derive/src/attr_parse/repr.rs | 4 +- ffi/derive/src/convert.rs | 5 +- .../ui_fail/fallible_transmute_mut_ref.stderr | 12 +- ffi/src/ir.rs | 1 + ffi/src/lib.rs | 6 +- ffi/src/option.rs | 6 +- ffi/src/repr_c.rs | 22 ++-- ffi/src/slice.rs | 3 + genesis/Cargo.toml | 1 - genesis/src/lib.rs | 22 ++-- hooks/pre-commit.sample | 2 +- logger/Cargo.toml | 1 - logger/src/lib.rs | 3 +- p2p/src/peer.rs | 4 +- primitives/derive/src/numeric.rs | 5 +- primitives/numeric/src/lib.rs | 2 +- primitives/src/addr.rs | 42 +++---- primitives/src/conststr.rs | 5 +- smart_contract/executor/data_model/src/lib.rs | 1 + test_samples/Cargo.toml | 1 - test_samples/src/lib.rs | 6 +- tools/parity_scale_cli/src/main.rs | 7 +- wasm_builder/src/lib.rs | 7 +- wasm_codec/derive/Cargo.toml | 1 - wasm_codec/derive/src/lib.rs | 4 +- wasm_samples/mint_rose_trigger/src/lib.rs | 5 +- .../mint_rose_trigger_args/src/lib.rs | 5 +- 94 files changed, 409 insertions(+), 400 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 72c30e00cb8..8fd8c318947 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2907,7 +2907,6 @@ dependencies = [ 
"iroha_wasm_builder", "irohad", "nonzero_ext", - "once_cell", "parity-scale-codec", "rand", "serde", @@ -2940,7 +2939,6 @@ dependencies = [ "iroha_config_base", "iroha_primitives", "json5", - "once_cell", "serde", "serde_json", "supports-color 2.1.0", @@ -2966,7 +2964,6 @@ dependencies = [ "iroha_primitives", "json5", "nonzero_ext", - "once_cell", "serde", "serde_json", "serde_with", @@ -3043,7 +3040,6 @@ dependencies = [ "iroha_wasm_codec", "mv", "nonzero_ext", - "once_cell", "parity-scale-codec", "parking_lot", "rand", @@ -3063,7 +3059,6 @@ version = "2.0.0-rc.1.0" dependencies = [ "iroha_macro_utils", "manyhow", - "once_cell", "proc-macro2", "quote", "syn 2.0.75", @@ -3128,7 +3123,6 @@ dependencies = [ "iroha_schema", "iroha_version", "nonzero_ext", - "once_cell", "parity-scale-codec", "serde", "serde_json", @@ -3283,7 +3277,6 @@ dependencies = [ "iroha_crypto", "iroha_data_model", "iroha_schema", - "once_cell", "parity-scale-codec", "serde", "serde_json", @@ -3300,7 +3293,6 @@ dependencies = [ "derive_more", "iroha_config", "iroha_data_model", - "once_cell", "serde_json", "thiserror", "tokio", @@ -3693,7 +3685,6 @@ dependencies = [ "iroha_torii", "iroha_version", "json5", - "once_cell", "owo-colors", "path-absolutize", "serial_test", @@ -5855,7 +5846,6 @@ version = "2.0.0-rc.1.0" dependencies = [ "iroha_crypto", "iroha_data_model", - "once_cell", "serde", "toml", ] diff --git a/Cargo.toml b/Cargo.toml index ada3e6a396b..8690586bf51 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -66,7 +66,6 @@ tungstenite = "0.21.0" crossbeam-queue = "0.3.11" parking_lot = { version = "0.12.3" } -once_cell = "1.19.0" tempfile = "3.10.1" path-absolutize = "3.1.1" pathdiff = "0.2.1" @@ -129,16 +128,17 @@ mv = { version = "0.1.0" } [workspace.lints] rustdoc.private_doc_tests = "deny" +rust.future_incompatible = {level = "deny", priority = -1 } +rust.nonstandard_style = {level = "deny", priority = -1 } +rust.rust_2018_idioms = {level = "deny", priority = -1 } +rust.unused = { level = 
"deny", priority = -1 } + rust.anonymous_parameters = "deny" -rust.future_incompatible = "deny" rust.missing_copy_implementations = "deny" rust.missing_docs = "deny" -rust.nonstandard_style = "deny" -rust.rust_2018_idioms = "deny" rust.trivial_casts = "deny" rust.trivial_numeric_casts = "deny" rust.unsafe_code = "deny" -rust.unused = "deny" rust.unused_import_braces = "deny" rust.variant_size_differences = "deny" rust.explicit_outlives_requirements = "deny" diff --git a/Dockerfile b/Dockerfile index afb1255c54c..62291d32590 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM --platform=linux/amd64 archlinux:base-devel AS builder -ARG NIGHTLY_VERSION=2024-04-18 +ARG NIGHTLY_VERSION=2024-09-09 RUN < Result<(), ConfigError> { // These cause race condition in tests, due to them actually binding TCP listeners // Since these validations are primarily for the convenience of the end user, // it seems a fine compromise to run it only in release mode - #[cfg(release)] + #[cfg(not(test))] { validate_try_bind_address(&mut emitter, &config.network.address); validate_try_bind_address(&mut emitter, &config.torii.address); @@ -608,7 +607,7 @@ fn validate_directory_path(emitter: &mut Emitter, path: &WithOrigin } } -#[cfg(release)] +#[cfg(not(test))] fn validate_try_bind_address(emitter: &mut Emitter, value: &WithOrigin) { use std::net::TcpListener; diff --git a/cli/src/samples.rs b/cli/src/samples.rs index d842d191b44..79578355e2e 100644 --- a/cli/src/samples.rs +++ b/cli/src/samples.rs @@ -1,5 +1,5 @@ //! This module contains the sample configurations used for testing and benchmarking throughout Iroha. 
-use std::{collections::HashSet, str::FromStr}; +use std::collections::HashSet; use iroha_config::{base::toml::TomlSource, parameters::actual::Root as Config}; use iroha_crypto::{ExposedPrivateKey, KeyPair, PublicKey}; @@ -33,7 +33,7 @@ pub fn get_trusted_peers(public_key: Option<&PublicKey>) -> HashSet { ), ] .iter() - .map(|(a, k)| PeerId::new(a.parse().expect("Valid"), PublicKey::from_str(k).unwrap())) + .map(|(a, k)| PeerId::new(a.parse().expect("Valid"), k.parse().unwrap())) .collect(); if let Some(pubkey) = public_key { trusted_peers.insert(PeerId { diff --git a/client/Cargo.toml b/client/Cargo.toml index 7c9964518a8..b83a5306ddd 100644 --- a/client/Cargo.toml +++ b/client/Cargo.toml @@ -102,7 +102,6 @@ assertables = { workspace = true } tracing-subscriber = { workspace = true, features = ["fmt", "ansi"] } tracing-flame = "0.2.0" -once_cell = { workspace = true } trybuild = { workspace = true } diff --git a/client/benches/tps/utils.rs b/client/benches/tps/utils.rs index 2fc498c0067..1aed91b37ef 100644 --- a/client/benches/tps/utils.rs +++ b/client/benches/tps/utils.rs @@ -203,7 +203,7 @@ impl MeasurerUnit { iroha_logger::error!(?error, "Failed to submit transaction"); } - nonce = nonce.checked_add(1).unwrap_or(nonzero!(1_u32)); + nonce = nonce.checked_add(1).unwrap_or_else(|| nonzero!(1_u32)); thread::sleep(time::Duration::from_micros(interval_us_per_tx)); } Err(mpsc::TryRecvError::Disconnected) => { diff --git a/client/examples/tutorial.rs b/client/examples/tutorial.rs index 106cf0037b1..0ea88e49751 100644 --- a/client/examples/tutorial.rs +++ b/client/examples/tutorial.rs @@ -143,8 +143,6 @@ fn account_registration_test(config: Config) -> Result<(), Error> { fn asset_registration_test(config: Config) -> Result<(), Error> { // #region register_asset_crates - use std::str::FromStr as _; - use iroha::{ client::Client, crypto::KeyPair, @@ -159,7 +157,7 @@ fn asset_registration_test(config: Config) -> Result<(), Error> { // #region register_asset_create_asset // 
Create an asset - let asset_def_id = AssetDefinitionId::from_str("time#looking_glass") + let asset_def_id = "time#looking_glass".parse::() .expect("Valid, because the string contains no whitespace, has a single '#' character and is not empty after"); // #endregion register_asset_create_asset @@ -193,11 +191,9 @@ fn asset_registration_test(config: Config) -> Result<(), Error> { fn asset_minting_test(config: Config) -> Result<(), Error> { // #region mint_asset_crates - use std::str::FromStr; - use iroha::{ client::Client, - data_model::prelude::{AccountId, AssetDefinitionId, AssetId, Mint}, + data_model::prelude::{AccountId, AssetId, Mint}, }; // #endregion mint_asset_crates @@ -206,7 +202,7 @@ fn asset_minting_test(config: Config) -> Result<(), Error> { // Define the instances of an Asset and Account // #region mint_asset_define_asset_account - let roses = AssetDefinitionId::from_str("rose#wonderland") + let roses = "rose#wonderland".parse() .expect("Valid, because the string contains no whitespace, has a single '#' character and is not empty after"); let alice: AccountId = "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland".parse() .expect("Valid, because before @ is a valid public key and after @ is a valid name i.e. 
a string with no spaces or forbidden chars"); @@ -247,11 +243,9 @@ fn asset_minting_test(config: Config) -> Result<(), Error> { fn asset_burning_test(config: Config) -> Result<(), Error> { // #region burn_asset_crates - use std::str::FromStr; - use iroha::{ client::Client, - data_model::prelude::{AccountId, AssetDefinitionId, AssetId, Burn}, + data_model::prelude::{AccountId, AssetId, Burn}, }; // #endregion burn_asset_crates @@ -260,7 +254,7 @@ fn asset_burning_test(config: Config) -> Result<(), Error> { // #region burn_asset_define_asset_account // Define the instances of an Asset and Account - let roses = AssetDefinitionId::from_str("rose#wonderland") + let roses = "rose#wonderland".parse() .expect("Valid, because the string contains no whitespace, has a single '#' character and is not empty after"); let alice: AccountId = "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland".parse() .expect("Valid, because before @ is a valid public key and after @ is a valid name i.e. 
a string with no spaces or forbidden chars"); diff --git a/client/src/client.rs b/client/src/client.rs index 1f322cb61f0..d9a500b7684 100644 --- a/client/src/client.rs +++ b/client/src/client.rs @@ -1101,13 +1101,11 @@ pub mod executor { #[cfg(test)] mod tests { - use std::str::FromStr; - use iroha_primitives::small::SmallStr; use test_samples::gen_account_in; use super::*; - use crate::config::{BasicAuth, Config, WebLogin}; + use crate::config::{BasicAuth, Config}; const LOGIN: &str = "mad_hatter"; const PASSWORD: &str = "ilovetea"; @@ -1163,7 +1161,7 @@ mod tests { fn authorization_header() { let client = Client::new(Config { basic_auth: Some(BasicAuth { - web_login: WebLogin::from_str(LOGIN).expect("Failed to create valid `WebLogin`"), + web_login: LOGIN.parse().expect("Failed to create valid `WebLogin`"), password: SmallStr::from_str(PASSWORD), }), ..config_factory() diff --git a/client/src/config.rs b/client/src/config.rs index 25a066b50e3..2092f79ebd5 100644 --- a/client/src/config.rs +++ b/client/src/config.rs @@ -101,12 +101,14 @@ mod tests { #[test] fn web_login_ok() { - let _ok = WebLogin::from_str("alice").expect("input is valid"); + let _ok: WebLogin = "alice".parse().expect("input is valid"); } #[test] fn web_login_bad() { - let _err = WebLogin::from_str("alice:wonderland").expect_err("input has `:`"); + let _err = "alice:wonderland" + .parse::() + .expect_err("input has `:`"); } fn config_sample() -> toml::Table { diff --git a/client/src/http_default.rs b/client/src/http_default.rs index 0639ed2d74c..199c68341a1 100644 --- a/client/src/http_default.rs +++ b/client/src/http_default.rs @@ -1,5 +1,5 @@ //! Defaults for various items used in communication over http(s). 
-use std::{net::TcpStream, str::FromStr}; +use std::net::TcpStream; use attohttpc::{ body as atto_body, RequestBuilder as AttoHttpRequestBuilder, Response as AttoHttpResponse, @@ -16,7 +16,8 @@ type Bytes = Vec; type AttoHttpRequestBuilderWithBytes = AttoHttpRequestBuilder>; fn header_name_from_str(str: &str) -> Result { - HeaderName::from_str(str).wrap_err_with(|| format!("Failed to parse header name {str}")) + str.parse::() + .wrap_err_with(|| format!("Failed to parse header name {str}")) } /// Default request builder implemented on top of `attohttpc` crate. @@ -112,9 +113,12 @@ impl DefaultWebSocketRequestBuilder { let builder = self.0?; let mut request = builder .uri_ref() - .ok_or(eyre!("Missing URI"))? + .ok_or_else(|| eyre!("Missing URI"))? .into_client_request()?; - for (header, value) in builder.headers_ref().ok_or(eyre!("No headers found"))? { + for (header, value) in builder + .headers_ref() + .ok_or_else(|| eyre!("No headers found"))? + { request.headers_mut().entry(header).or_insert(value.clone()); } Ok(DefaultWebSocketStreamRequest(request)) diff --git a/client/tests/integration/asset.rs b/client/tests/integration/asset.rs index ff874b30cd3..764a1f73857 100644 --- a/client/tests/integration/asset.rs +++ b/client/tests/integration/asset.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr as _, thread}; +use std::thread; use eyre::Result; use iroha::{ @@ -26,7 +26,9 @@ fn client_register_asset_should_add_asset_once_but_not_twice() -> Result<()> { // Given let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("test_asset#wonderland").expect("Valid"); + let asset_definition_id = "test_asset#wonderland" + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); let register_asset = Register::asset(Asset::new( @@ -65,7 +67,9 @@ fn unregister_asset_should_remove_asset_from_account() -> Result<()> { // Given let account_id = ALICE_ID.clone(); - let 
asset_definition_id = AssetDefinitionId::from_str("test_asset#wonderland").expect("Valid"); + let asset_definition_id = "test_asset#wonderland" + .parse::() + .expect("Valid"); let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone()); let create_asset: InstructionBox = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())).into(); @@ -112,7 +116,9 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount() -> // Given let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); let metadata = iroha::data_model::metadata::Metadata::default(); @@ -146,7 +152,9 @@ fn client_add_big_asset_quantity_to_existing_asset_should_increase_asset_amount( // Given let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); let metadata = iroha::data_model::metadata::Metadata::default(); @@ -180,7 +188,9 @@ fn client_add_asset_with_decimal_should_increase_asset_amount() -> Result<()> { // Given let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let asset_definition = AssetDefinition::numeric(asset_definition_id.clone()); let create_asset = Register::asset_definition(asset_definition); let metadata = iroha::data_model::metadata::Metadata::default(); @@ -240,15 +250,18 @@ fn client_add_asset_with_name_length_more_than_limit_should_not_commit_transacti let pipeline_time = 
Config::pipeline_time(); // Given - let normal_asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let normal_asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(normal_asset_definition_id.clone())); test_client.submit(create_asset)?; iroha_logger::info!("Creating asset"); let too_long_asset_name = "0".repeat(2_usize.pow(14)); - let incorrect_asset_definition_id = - AssetDefinitionId::from_str(&(too_long_asset_name + "#wonderland")).expect("Valid"); + let incorrect_asset_definition_id = (too_long_asset_name + "#wonderland") + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric( incorrect_asset_definition_id.clone(), )); diff --git a/client/tests/integration/asset_propagation.rs b/client/tests/integration/asset_propagation.rs index 9b0dd112cd7..4f45922229b 100644 --- a/client/tests/integration/asset_propagation.rs +++ b/client/tests/integration/asset_propagation.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr as _, thread}; +use std::thread; use eyre::Result; use iroha::{ @@ -24,10 +24,10 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount_on_a BlockParameter::MaxTransactions(nonzero!(1_u64)), )))?; - let create_domain = Register::domain(Domain::new(DomainId::from_str("domain")?)); + let create_domain = Register::domain(Domain::new("domain".parse()?)); let (account_id, _account_keypair) = gen_account_in("domain"); let create_account = Register::account(Account::new(account_id.clone())); - let asset_definition_id = AssetDefinitionId::from_str("xor#domain")?; + let asset_definition_id = "xor#domain".parse::()?; let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); client.submit_all::([ diff --git a/client/tests/integration/events/notification.rs b/client/tests/integration/events/notification.rs index c1bb9440478..d5937aef20d 
100644 --- a/client/tests/integration/events/notification.rs +++ b/client/tests/integration/events/notification.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr as _, sync::mpsc, thread, time::Duration}; +use std::{sync::mpsc, thread, time::Duration}; use eyre::{eyre, Result, WrapErr}; use iroha::data_model::prelude::*; @@ -13,7 +13,7 @@ fn trigger_completion_success_should_produce_event() -> Result<()> { let asset_definition_id = "rose#wonderland".parse()?; let account_id = ALICE_ID.clone(); let asset_id = AssetId::new(asset_definition_id, account_id); - let trigger_id = TriggerId::from_str("mint_rose")?; + let trigger_id = "mint_rose".parse::()?; let instruction = Mint::asset_numeric(1u32, asset_id.clone()); let register_trigger = Register::trigger(Trigger::new( @@ -57,7 +57,7 @@ fn trigger_completion_failure_should_produce_event() -> Result<()> { wait_for_genesis_committed(&vec![test_client.clone()], 0); let account_id = ALICE_ID.clone(); - let trigger_id = TriggerId::from_str("fail_box")?; + let trigger_id = "fail_box".parse::()?; let fail_isi = Unregister::domain("dummy".parse().unwrap()); let register_trigger = Register::trigger(Trigger::new( diff --git a/client/tests/integration/events/pipeline.rs b/client/tests/integration/events/pipeline.rs index f57d4382563..475beb8418b 100644 --- a/client/tests/integration/events/pipeline.rs +++ b/client/tests/integration/events/pipeline.rs @@ -10,12 +10,12 @@ use iroha::{ isi::error::InstructionExecutionError, parameter::BlockParameter, prelude::*, + query::error::FindError, transaction::error::TransactionRejectionReason, ValidationFail, }, }; use iroha_config::parameters::actual::Root as Config; -use iroha_data_model::query::error::FindError; use nonzero_ext::nonzero; use test_network::*; diff --git a/client/tests/integration/extra_functional/restart_peer.rs b/client/tests/integration/extra_functional/restart_peer.rs index b260150b588..94988c78702 100644 --- a/client/tests/integration/extra_functional/restart_peer.rs +++ 
b/client/tests/integration/extra_functional/restart_peer.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr, thread}; +use std::thread; use eyre::Result; use iroha::{ @@ -14,7 +14,7 @@ use tokio::runtime::Runtime; #[test] fn restarted_peer_should_have_the_same_asset_amount() -> Result<()> { let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").unwrap(); + let asset_definition_id = "xor#wonderland".parse::().unwrap(); let quantity = numeric!(200); let mut removed_peer = { diff --git a/client/tests/integration/multisig.rs b/client/tests/integration/multisig.rs index 2178f42fa2a..f1975d5d151 100644 --- a/client/tests/integration/multisig.rs +++ b/client/tests/integration/multisig.rs @@ -1,4 +1,4 @@ -use std::{collections::BTreeMap, str::FromStr}; +use std::collections::BTreeMap; use executor_custom_data_model::multisig::{MultisigArgs, MultisigRegisterArgs}; use eyre::Result; @@ -6,11 +6,12 @@ use iroha::{ client, crypto::KeyPair, data_model::{ + parameter::SmartContractParameter, prelude::*, + query::builder::SingleQueryError, transaction::{TransactionBuilder, WasmSmartContract}, }, }; -use iroha_data_model::{parameter::SmartContractParameter, query::builder::SingleQueryError}; use nonzero_ext::nonzero; use test_network::*; use test_samples::{gen_account_in, ALICE_ID}; @@ -30,7 +31,7 @@ fn mutlisig() -> Result<()> { ])?; let account_id = ALICE_ID.clone(); - let multisig_register_trigger_id = TriggerId::from_str("multisig_register")?; + let multisig_register_trigger_id = "multisig_register".parse::()?; let wasm = iroha_wasm_builder::Builder::new("../wasm_samples/multisig_register") .show_output() diff --git a/client/tests/integration/non_mintable.rs b/client/tests/integration/non_mintable.rs index 0251996baca..e9c9d62dd9a 100644 --- a/client/tests/integration/non_mintable.rs +++ b/client/tests/integration/non_mintable.rs @@ -1,5 +1,3 @@ -use std::str::FromStr as _; - use eyre::Result; use iroha::{ client, @@ -15,7 +13,9 @@ 
fn non_mintable_asset_can_be_minted_once_but_not_twice() -> Result<()> { // Given let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition( AssetDefinition::numeric(asset_definition_id.clone()).mintable_once(), ); @@ -69,7 +69,9 @@ fn non_mintable_asset_cannot_be_minted_if_registered_with_non_zero_value() -> Re // Given let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition( AssetDefinition::numeric(asset_definition_id.clone()).mintable_once(), ); @@ -108,7 +110,9 @@ fn non_mintable_asset_can_be_minted_if_registered_with_zero_value() -> Result<() // Given let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition( AssetDefinition::numeric(asset_definition_id.clone()).mintable_once(), ); diff --git a/client/tests/integration/pagination.rs b/client/tests/integration/pagination.rs index 2431f5abb42..db47e944622 100644 --- a/client/tests/integration/pagination.rs +++ b/client/tests/integration/pagination.rs @@ -54,7 +54,7 @@ fn reported_length_should_be_accurate() -> Result<()> { #[test] fn fetch_size_should_work() -> Result<()> { // use the lower-level API to inspect the batch size - use iroha_data_model::query::{ + use iroha::data_model::query::{ builder::QueryExecutor as _, parameters::{FetchSize, QueryParams, Sorting}, QueryWithFilter, QueryWithParams, diff --git a/client/tests/integration/permissions.rs b/client/tests/integration/permissions.rs index 3a5301c67f6..f8fc9a5341d 100644 --- 
a/client/tests/integration/permissions.rs +++ b/client/tests/integration/permissions.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr as _, thread, time::Duration}; +use std::{thread, time::Duration}; use eyre::Result; use iroha::{ @@ -133,7 +133,9 @@ fn permissions_disallow_asset_burn() { let alice_id = ALICE_ID.clone(); let bob_id = BOB_ID.clone(); let (mouse_id, _mouse_keypair) = gen_account_in("wonderland"); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); let mouse_keypair = KeyPair::random(); @@ -258,7 +260,7 @@ fn permissions_differ_not_only_by_names() { client .submit_blocking(SetKeyValue::asset( mouse_hat_id, - Name::from_str("color").expect("Valid"), + "color".parse().expect("Valid"), "red".parse::().expect("Valid"), )) .expect("Failed to modify Mouse's hats"); @@ -267,7 +269,7 @@ fn permissions_differ_not_only_by_names() { let mouse_shoes_id = AssetId::new(shoes_definition_id, mouse_id.clone()); let set_shoes_color = SetKeyValue::asset( mouse_shoes_id.clone(), - Name::from_str("color").expect("Valid"), + "color".parse().expect("Valid"), "yellow".parse::().expect("Valid"), ); let _err = client @@ -338,7 +340,7 @@ fn stored_vs_granted_permission_payload() -> Result<()> { // Check that alice can indeed mint mouse asset let set_key_value = SetKeyValue::asset( mouse_asset, - Name::from_str("color")?, + "color".parse()?, "red".parse::().expect("Valid"), ); iroha diff --git a/client/tests/integration/queries/account.rs b/client/tests/integration/queries/account.rs index 8d026dc4520..f4a8447e403 100644 --- a/client/tests/integration/queries/account.rs +++ b/client/tests/integration/queries/account.rs @@ -1,4 +1,4 @@ -use std::{collections::HashSet, str::FromStr as _}; +use std::collections::HashSet; use eyre::Result; use iroha::{client, 
data_model::prelude::*}; @@ -11,7 +11,9 @@ fn find_accounts_with_asset() -> Result<()> { wait_for_genesis_committed(&[test_client.clone()], 0); // Registering new asset definition - let definition_id = AssetDefinitionId::from_str("test_coin#wonderland").expect("Valid"); + let definition_id = "test_coin#wonderland" + .parse::() + .expect("Valid"); let asset_definition = AssetDefinition::numeric(definition_id.clone()); test_client.submit_blocking(Register::asset_definition(asset_definition.clone()))?; diff --git a/client/tests/integration/queries/asset.rs b/client/tests/integration/queries/asset.rs index 78fc29b6550..12576fb19a0 100644 --- a/client/tests/integration/queries/asset.rs +++ b/client/tests/integration/queries/asset.rs @@ -1,9 +1,10 @@ use eyre::Result; use iroha::{ client::{Client, QueryError}, - data_model::{asset::AssetValue, isi::Instruction, prelude::*}, + data_model::{ + asset::AssetValue, isi::Instruction, prelude::*, query::builder::SingleQueryError, + }, }; -use iroha_data_model::query::builder::SingleQueryError; use test_network::*; use test_samples::{gen_account_in, ALICE_ID}; diff --git a/client/tests/integration/queries/mod.rs b/client/tests/integration/queries/mod.rs index 089e017df9f..277c1eadca0 100644 --- a/client/tests/integration/queries/mod.rs +++ b/client/tests/integration/queries/mod.rs @@ -1,8 +1,10 @@ use iroha::{ client::{self, QueryError}, - data_model::{prelude::*, query::error::QueryExecutionFail}, + data_model::{ + prelude::*, + query::{error::QueryExecutionFail, parameters::MAX_FETCH_SIZE}, + }, }; -use iroha_data_model::query::parameters::MAX_FETCH_SIZE; use test_network::*; mod account; diff --git a/client/tests/integration/queries/query_errors.rs b/client/tests/integration/queries/query_errors.rs index 741f0fccb06..cb72febabae 100644 --- a/client/tests/integration/queries/query_errors.rs +++ b/client/tests/integration/queries/query_errors.rs @@ -1,5 +1,7 @@ -use iroha::client; -use 
iroha_data_model::{prelude::QueryBuilderExt, query::builder::SingleQueryError}; +use iroha::{ + client, + data_model::{prelude::QueryBuilderExt, query::builder::SingleQueryError}, +}; use test_samples::gen_account_in; #[test] diff --git a/client/tests/integration/queries/role.rs b/client/tests/integration/queries/role.rs index a36623320cc..e9212044f87 100644 --- a/client/tests/integration/queries/role.rs +++ b/client/tests/integration/queries/role.rs @@ -1,8 +1,10 @@ use std::collections::HashSet; use eyre::Result; -use iroha::{client, data_model::prelude::*}; -use iroha_data_model::query::builder::SingleQueryError; +use iroha::{ + client, + data_model::{prelude::*, query::builder::SingleQueryError}, +}; use iroha_executor_data_model::permission::account::CanSetKeyValueInAccount; use test_network::*; use test_samples::ALICE_ID; diff --git a/client/tests/integration/queries/smart_contract.rs b/client/tests/integration/queries/smart_contract.rs index 9827599fc72..e40c556df45 100644 --- a/client/tests/integration/queries/smart_contract.rs +++ b/client/tests/integration/queries/smart_contract.rs @@ -1,5 +1,3 @@ -use std::str::FromStr as _; - use eyre::Result; use iroha::{ client::QueryError, @@ -24,7 +22,7 @@ fn live_query_is_dropped_after_smart_contract_end() -> Result<()> { let metadata_value: JsonString = client.query_single(FindAccountMetadata::new( client.account.clone(), - Name::from_str("cursor").unwrap(), + "cursor".parse().unwrap(), ))?; let asset_cursor = metadata_value.try_into_any()?; diff --git a/client/tests/integration/sorting.rs b/client/tests/integration/sorting.rs index 315898caaae..10d0dc325d7 100644 --- a/client/tests/integration/sorting.rs +++ b/client/tests/integration/sorting.rs @@ -1,12 +1,14 @@ -use std::{collections::HashSet, str::FromStr as _}; +use std::collections::HashSet; use eyre::{Result, WrapErr as _}; use iroha::{ client::{self, QueryResult}, crypto::KeyPair, - data_model::{account::Account, prelude::*}, + data_model::{ + 
account::Account, name::Name, prelude::*, + query::predicate::predicate_atoms::asset::AssetPredicateBox, + }, }; -use iroha_data_model::query::predicate::predicate_atoms::asset::AssetPredicateBox; use nonzero_ext::nonzero; use rand::{seq::SliceRandom, thread_rng}; use test_network::*; @@ -27,7 +29,7 @@ fn correct_pagination_assets_after_creating_new_one() { let xor_filter = AssetPredicateBox::build(|asset| asset.id.definition_id.name.starts_with("xor")); - let sort_by_metadata_key = Name::from_str("sort").expect("Valid"); + let sort_by_metadata_key = "sort".parse::().expect("Valid"); let sorting = Sorting::by_metadata_key(sort_by_metadata_key.clone()); let account_id = ALICE_ID.clone(); @@ -43,8 +45,9 @@ fn correct_pagination_assets_after_creating_new_one() { let mut missing_register_assets = vec![]; for i in 0..N_ASSETS { - let asset_definition_id = - AssetDefinitionId::from_str(&format!("xor{i}#wonderland")).expect("Valid"); + let asset_definition_id = format!("xor{i}#wonderland") + .parse::() + .expect("Valid"); let asset_definition = AssetDefinition::store(asset_definition_id.clone()); let mut asset_metadata = Metadata::default(); asset_metadata.insert(sort_by_metadata_key.clone(), i as u32); @@ -120,7 +123,7 @@ fn correct_sorting_of_entities() { let (_rt, _peer, test_client) = ::new().with_port(10_640).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); - let sort_by_metadata_key = Name::from_str("test_sort").expect("Valid"); + let sort_by_metadata_key = "test_sort".parse::().expect("Valid"); // Test sorting asset definitions @@ -129,8 +132,9 @@ fn correct_sorting_of_entities() { let mut instructions = vec![]; let n = 10_u32; for i in 0..n { - let asset_definition_id = - AssetDefinitionId::from_str(&format!("xor_{i}#wonderland")).expect("Valid"); + let asset_definition_id = format!("xor_{i}#wonderland") + .parse::() + .expect("Valid"); let mut asset_metadata = Metadata::default(); asset_metadata.insert(sort_by_metadata_key.clone(), n - 
i - 1); let asset_definition = AssetDefinition::numeric(asset_definition_id.clone()) @@ -217,7 +221,7 @@ fn correct_sorting_of_entities() { let mut instructions = vec![]; let n = 10u32; for i in 0..n { - let domain_id = DomainId::from_str(&format!("neverland{i}")).expect("Valid"); + let domain_id = format!("neverland{i}").parse::().expect("Valid"); let mut domain_metadata = Metadata::default(); domain_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let domain = Domain::new(domain_id.clone()).with_metadata(domain_metadata.clone()); @@ -252,7 +256,9 @@ fn correct_sorting_of_entities() { let mut metadata_of_domains = vec![]; let mut instructions = vec![]; for (idx, val) in input { - let domain_id = DomainId::from_str(&format!("neverland_{idx}")).expect("Valid"); + let domain_id = format!("neverland_{idx}") + .parse::() + .expect("Valid"); let mut domain_metadata = Metadata::default(); domain_metadata.insert(sort_by_metadata_key.clone(), val); let domain = Domain::new(domain_id.clone()).with_metadata(domain_metadata.clone()); @@ -296,7 +302,7 @@ fn sort_only_elements_which_have_sorting_key() -> Result<()> { .submit_blocking(Register::domain(Domain::new(domain_id.clone()))) .expect("should be committed"); - let sort_by_metadata_key = Name::from_str("test_sort").expect("Valid"); + let sort_by_metadata_key = "test_sort".parse::().expect("Valid"); let mut accounts_a = vec![]; let mut accounts_b = vec![]; diff --git a/client/tests/integration/status_response.rs b/client/tests/integration/status_response.rs index 9136ee61d4c..64af76057b0 100644 --- a/client/tests/integration/status_response.rs +++ b/client/tests/integration/status_response.rs @@ -1,5 +1,3 @@ -use std::str::FromStr as _; - use eyre::Result; use iroha::{data_model::prelude::*, samples::get_status_json}; use iroha_telemetry::metrics::Status; @@ -33,7 +31,7 @@ fn json_and_scale_statuses_equality() -> Result<()> { let (account_id, _account_keypair) = gen_account_in("domain"); for coin in coins { - let 
asset_definition_id = AssetDefinitionId::from_str(&format!("{coin}#wonderland"))?; + let asset_definition_id = format!("{coin}#wonderland").parse::()?; let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); let mint_asset = Mint::asset_numeric( diff --git a/client/tests/integration/transfer_asset.rs b/client/tests/integration/transfer_asset.rs index 3331365464c..dbeee6c545d 100644 --- a/client/tests/integration/transfer_asset.rs +++ b/client/tests/integration/transfer_asset.rs @@ -1,12 +1,9 @@ -use std::str::FromStr; - use iroha::{ client, data_model::{ account::{Account, AccountId}, asset::{Asset, AssetDefinition}, isi::{Instruction, InstructionBox}, - name::Name, prelude::*, Registered, }, @@ -39,7 +36,7 @@ fn simulate_transfer_store_asset() { Register::asset_definition(AssetDefinition::store(asset_definition_id.clone())); let set_key_value = SetKeyValue::asset( AssetId::new(asset_definition_id.clone(), alice_id.clone()), - Name::from_str("alicek").unwrap(), + "alicek".parse().unwrap(), true, ); diff --git a/client/tests/integration/transfer_domain.rs b/client/tests/integration/transfer_domain.rs index 0afb0ff0523..d7013c6e2cd 100644 --- a/client/tests/integration/transfer_domain.rs +++ b/client/tests/integration/transfer_domain.rs @@ -2,9 +2,9 @@ use eyre::Result; use iroha::{ client, client::Client, + crypto::KeyPair, data_model::{prelude::*, transaction::error::TransactionRejectionReason}, }; -use iroha_crypto::KeyPair; use iroha_executor_data_model::permission::{ account::CanUnregisterAccount, asset::CanUnregisterUserAsset, diff --git a/client/tests/integration/triggers/by_call_trigger.rs b/client/tests/integration/triggers/by_call_trigger.rs index da5b457f25b..dead31d08b3 100644 --- a/client/tests/integration/triggers/by_call_trigger.rs +++ b/client/tests/integration/triggers/by_call_trigger.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr as _, sync::mpsc, thread, time::Duration}; +use std::{sync::mpsc, thread, 
time::Duration}; use executor_custom_data_model::mint_rose_args::MintRoseArgs; use eyre::{eyre, Result, WrapErr}; @@ -34,7 +34,7 @@ fn call_execute_trigger() -> Result<()> { let register_trigger = build_register_trigger_isi(asset_id.account(), vec![instruction.into()]); test_client.submit_blocking(register_trigger)?; - let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; + let trigger_id = TRIGGER_NAME.parse()?; let call_trigger = ExecuteTrigger::new(trigger_id); test_client.submit_blocking(call_trigger)?; @@ -57,7 +57,7 @@ fn execute_trigger_should_produce_event() -> Result<()> { let register_trigger = build_register_trigger_isi(asset_id.account(), vec![instruction.into()]); test_client.submit_blocking(register_trigger)?; - let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; + let trigger_id = TRIGGER_NAME.parse::()?; let call_trigger = ExecuteTrigger::new(trigger_id.clone()); let thread_client = test_client.clone(); @@ -88,7 +88,7 @@ fn infinite_recursion_should_produce_one_call_per_block() -> Result<()> { let asset_definition_id = "rose#wonderland".parse()?; let account_id = ALICE_ID.clone(); let asset_id = AssetId::new(asset_definition_id, account_id); - let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; + let trigger_id = TRIGGER_NAME.parse()?; let call_trigger = ExecuteTrigger::new(trigger_id); let prev_value = get_asset_value(&mut test_client, asset_id.clone()); @@ -117,7 +117,7 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> { let asset_id = AssetId::new(asset_definition_id, account_id.clone()); // Registering trigger that should fail on execution - let bad_trigger_id = TriggerId::from_str("bad_trigger")?; + let bad_trigger_id = "bad_trigger".parse::()?; // Invalid instruction let fail_isi = Unregister::domain("dummy".parse()?); let bad_trigger_instructions = vec![fail_isi]; @@ -135,7 +135,7 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> { test_client.submit(register_bad_trigger)?; // 
Registering normal trigger - let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; + let trigger_id = TRIGGER_NAME.parse()?; let trigger_instructions = vec![Mint::asset_numeric(1u32, asset_id.clone())]; let register_trigger = Register::trigger(Trigger::new( trigger_id, @@ -172,14 +172,14 @@ fn trigger_should_not_be_executed_with_zero_repeats_count() -> Result<()> { let asset_definition_id = "rose#wonderland".parse()?; let account_id = ALICE_ID.clone(); let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let trigger_id = TriggerId::from_str("self_modifying_trigger")?; + let trigger_id = "self_modifying_trigger".parse::()?; let trigger_instructions = vec![Mint::asset_numeric(1u32, asset_id.clone())]; let register_trigger = Register::trigger(Trigger::new( trigger_id.clone(), Action::new( trigger_instructions, - Repeats::from(1_u32), + 1_u32, account_id.clone(), ExecuteTriggerEventFilter::new() .for_trigger(trigger_id.clone()) @@ -232,17 +232,17 @@ fn trigger_should_be_able_to_modify_its_own_repeats_count() -> Result<()> { let asset_definition_id = "rose#wonderland".parse()?; let account_id = ALICE_ID.clone(); let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let trigger_id = TriggerId::from_str("self_modifying_trigger")?; + let trigger_id = "self_modifying_trigger".parse::()?; - let trigger_instructions = vec![ - InstructionBox::from(Mint::trigger_repetitions(1_u32, trigger_id.clone())), - InstructionBox::from(Mint::asset_numeric(1u32, asset_id.clone())), + let trigger_instructions: Vec = vec![ + Mint::trigger_repetitions(1_u32, trigger_id.clone()).into(), + Mint::asset_numeric(1u32, asset_id.clone()).into(), ]; let register_trigger = Register::trigger(Trigger::new( trigger_id.clone(), Action::new( trigger_instructions, - Repeats::from(1_u32), + 1_u32, account_id.clone(), ExecuteTriggerEventFilter::new() .for_trigger(trigger_id.clone()) @@ -293,7 +293,7 @@ fn only_account_with_permission_can_register_trigger() -> Result<()> { 
}; // Trigger with 'alice' as authority - let trigger_id = TriggerId::from_str("alice_trigger")?; + let trigger_id = "alice_trigger".parse::()?; let trigger = Trigger::new( trigger_id.clone(), Action::new( @@ -352,7 +352,7 @@ fn unregister_trigger() -> Result<()> { let account_id = ALICE_ID.clone(); // Registering trigger - let trigger_id = TriggerId::from_str("empty_trigger")?; + let trigger_id = "empty_trigger".parse::()?; let trigger = Trigger::new( trigger_id.clone(), Action::new( @@ -425,7 +425,7 @@ fn trigger_in_genesis_using_base64() -> Result<()> { let engine = base64::engine::general_purpose::STANDARD; let wasm_base64 = serde_json::json!(base64::engine::Engine::encode(&engine, wasm)).to_string(); let account_id = ALICE_ID.clone(); - let trigger_id = TriggerId::from_str("genesis_trigger")?; + let trigger_id = "genesis_trigger".parse::()?; let trigger = Trigger::new( trigger_id.clone(), @@ -482,8 +482,8 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> { let asset_definition_id = "rose#wonderland".parse()?; let account_id = ALICE_ID.clone(); let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let trigger_id_unregister = TriggerId::from_str("unregister_other_trigger")?; - let trigger_id_to_be_unregistered = TriggerId::from_str("should_be_unregistered_trigger")?; + let trigger_id_unregister = "unregister_other_trigger".parse::()?; + let trigger_id_to_be_unregistered = "should_be_unregistered_trigger".parse::()?; let trigger_unregister_instructions = vec![Unregister::trigger(trigger_id_to_be_unregistered.clone())]; @@ -491,7 +491,7 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> { trigger_id_unregister.clone(), Action::new( trigger_unregister_instructions, - Repeats::from(1_u32), + 1_u32, account_id.clone(), ExecuteTriggerEventFilter::new() .for_trigger(trigger_id_unregister.clone()) @@ -506,7 +506,7 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> { 
trigger_id_to_be_unregistered.clone(), Action::new( trigger_should_be_unregistered_instructions, - Repeats::from(1_u32), + 1_u32, account_id.clone(), ExecuteTriggerEventFilter::new() .for_trigger(trigger_id_to_be_unregistered.clone()) @@ -542,14 +542,14 @@ fn trigger_burn_repetitions() -> Result<()> { let asset_definition_id = "rose#wonderland".parse()?; let account_id = ALICE_ID.clone(); let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let trigger_id = TriggerId::from_str("trigger")?; + let trigger_id = "trigger".parse::()?; let trigger_instructions = vec![Mint::asset_numeric(1u32, asset_id)]; let register_trigger = Register::trigger(Trigger::new( trigger_id.clone(), Action::new( trigger_instructions, - Repeats::from(1_u32), + 1_u32, account_id.clone(), ExecuteTriggerEventFilter::new() .for_trigger(trigger_id.clone()) @@ -576,8 +576,8 @@ fn unregistering_one_of_two_triggers_with_identical_wasm_should_not_cause_origin wait_for_genesis_committed(&vec![test_client.clone()], 0); let account_id = ALICE_ID.clone(); - let first_trigger_id = TriggerId::from_str("mint_rose_1")?; - let second_trigger_id = TriggerId::from_str("mint_rose_2")?; + let first_trigger_id = "mint_rose_1".parse::()?; + let second_trigger_id = "mint_rose_2".parse::()?; let wasm = iroha_wasm_builder::Builder::new("../wasm_samples/mint_rose_trigger") .show_output() @@ -663,7 +663,7 @@ fn call_execute_trigger_with_args() -> Result<()> { let asset_id = AssetId::new(asset_definition_id, account_id.clone()); let prev_value = get_asset_value(&mut test_client, asset_id.clone()); - let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; + let trigger_id = TRIGGER_NAME.parse::()?; let wasm = iroha_wasm_builder::Builder::new("../wasm_samples/mint_rose_trigger_args") .show_output() .build()? 
diff --git a/client/tests/integration/triggers/trigger_rollback.rs b/client/tests/integration/triggers/trigger_rollback.rs index 8ddb07acb9b..36756ac56a1 100644 --- a/client/tests/integration/triggers/trigger_rollback.rs +++ b/client/tests/integration/triggers/trigger_rollback.rs @@ -1,5 +1,3 @@ -use std::str::FromStr as _; - use eyre::Result; use iroha::{ client, @@ -14,9 +12,9 @@ fn failed_trigger_revert() -> Result<()> { wait_for_genesis_committed(&[client.clone()], 0); //When - let trigger_id = TriggerId::from_str("trigger")?; + let trigger_id = "trigger".parse::()?; let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland")?; + let asset_definition_id = "xor#wonderland".parse::()?; let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); let fail_isi = Unregister::domain("dummy".parse().unwrap()); diff --git a/client/tests/integration/tx_history.rs b/client/tests/integration/tx_history.rs index 281e62a969d..24811b4fb76 100644 --- a/client/tests/integration/tx_history.rs +++ b/client/tests/integration/tx_history.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr as _, thread}; +use std::thread; use eyre::Result; use iroha::{ @@ -20,7 +20,7 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> // Given let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland")?; + let asset_definition_id = "xor#wonderland".parse::()?; let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); client.submit_blocking(create_asset)?; @@ -31,10 +31,7 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> let mint_existed_asset = Mint::asset_numeric(quantity, asset_id); let mint_not_existed_asset = Mint::asset_numeric( quantity, - AssetId::new( - AssetDefinitionId::from_str("foo#wonderland")?, - account_id.clone(), - ), + 
AssetId::new("foo#wonderland".parse()?, account_id.clone()), ); let transactions_count = 100; diff --git a/client/tests/integration/tx_rollback.rs b/client/tests/integration/tx_rollback.rs index f0d705468be..0815bb73958 100644 --- a/client/tests/integration/tx_rollback.rs +++ b/client/tests/integration/tx_rollback.rs @@ -1,5 +1,3 @@ -use std::str::FromStr as _; - use eyre::Result; use iroha::{client, data_model::prelude::*}; use test_network::*; @@ -12,8 +10,8 @@ fn client_sends_transaction_with_invalid_instruction_should_not_see_any_changes( //When let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland")?; - let wrong_asset_definition_id = AssetDefinitionId::from_str("ksor#wonderland")?; + let asset_definition_id = "xor#wonderland".parse()?; + let wrong_asset_definition_id = "ksor#wonderland".parse::()?; let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id)); let mint_asset = Mint::asset_numeric( 200u32, diff --git a/client_cli/Cargo.toml b/client_cli/Cargo.toml index 7714ec6466d..1a80836e164 100644 --- a/client_cli/Cargo.toml +++ b/client_cli/Cargo.toml @@ -36,7 +36,6 @@ error-stack = { workspace = true, features = ["eyre"] } eyre = { workspace = true } clap = { workspace = true, features = ["derive"] } json5 = { workspace = true } -once_cell = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } erased-serde = "0.4.5" diff --git a/client_cli/src/main.rs b/client_cli/src/main.rs index d7c4edd1c87..5c06ab8d6b5 100644 --- a/client_cli/src/main.rs +++ b/client_cli/src/main.rs @@ -1199,8 +1199,6 @@ mod json { } #[cfg(test)] mod tests { - use std::str::FromStr; - use super::*; #[test] @@ -1208,7 +1206,7 @@ mod tests { macro_rules! 
case { ($input:expr, $expected:expr) => { let MetadataValueArg { value } = - MetadataValueArg::from_str($input).expect("should not fail with valid input"); + $input.parse().expect("should not fail with valid input"); assert_eq!(value, $expected); }; } diff --git a/config/Cargo.toml b/config/Cargo.toml index 0269103636c..0d3644e4839 100644 --- a/config/Cargo.toml +++ b/config/Cargo.toml @@ -31,7 +31,6 @@ thiserror = { workspace = true } displaydoc = { workspace = true } derive_more = { workspace = true } cfg-if = { workspace = true } -once_cell = { workspace = true } nonzero_ext = { workspace = true } hex = { workspace = true, features = ["std"] } diff --git a/config/base/src/env.rs b/config/base/src/env.rs index c8f218311f9..9eb0454c568 100644 --- a/config/base/src/env.rs +++ b/config/base/src/env.rs @@ -38,7 +38,7 @@ where where Self: Sized, { - Self::from_str(&value) + value.parse() } } diff --git a/config/base/src/read.rs b/config/base/src/read.rs index 2873911c635..0812a6dfaf3 100644 --- a/config/base/src/read.rs +++ b/config/base/src/read.rs @@ -173,11 +173,8 @@ impl ConfigReader { Ok(()) } - recursion(&mut self, path.as_ref(), 0).map_err(|err| { - // error doesn't mean we need to panic - self.bomb.defuse(); - err - })?; + // NOTE: error doesn't mean we need to panic + recursion(&mut self, path.as_ref(), 0).inspect_err(|_| self.bomb.defuse())?; Ok(self) } diff --git a/config/base/src/toml.rs b/config/base/src/toml.rs index 3ca9e4ce5ba..8338632e198 100644 --- a/config/base/src/toml.rs +++ b/config/base/src/toml.rs @@ -8,7 +8,6 @@ use std::{ fs::File, io::Read, path::{Path, PathBuf}, - str::FromStr, }; use error_stack::ResultExt; @@ -56,7 +55,9 @@ impl TomlSource { .read_to_string(&mut raw_string) .change_context(FromFileError::Read)?; - let table = Table::from_str(&raw_string).change_context(FromFileError::Parse)?; + let table = raw_string + .parse::() + .change_context(FromFileError::Parse)?; Ok(TomlSource::new(path, table)) } diff --git a/config/src/logger.rs 
b/config/src/logger.rs index 8b56b34048c..55fc2ae3ecd 100644 --- a/config/src/logger.rs +++ b/config/src/logger.rs @@ -49,7 +49,7 @@ impl FromStr for Directives { let directives = dirs .split(',') .filter(|s| !s.is_empty()) - .map(Directive::from_str) + .map(FromStr::from_str) .collect::, _>>()?; Ok(Self(directives)) } diff --git a/config/src/parameters/user.rs b/config/src/parameters/user.rs index db88c729bbc..37aaee2638f 100644 --- a/config/src/parameters/user.rs +++ b/config/src/parameters/user.rs @@ -1,6 +1,7 @@ -//! User configuration view. Contains structures in a format that is -//! convenient from the user perspective. It is less strict and not necessarily valid upon -//! successful parsing of the user-provided content. +//! User configuration view. +//! +//! Contains structures in a format that is convenient from the user perspective. It is less strict +//! and not necessarily valid upon successful parsing of the user-provided content. //! //! It begins with [`Root`], containing sub-modules. Every structure has its `-Partial` //! representation (e.g. [`RootPartial`]). 
diff --git a/core/Cargo.toml b/core/Cargo.toml index 3a81883bc1f..65094e73169 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -73,7 +73,6 @@ test_samples = { workspace = true } criterion = { workspace = true } hex = { workspace = true } -once_cell = { workspace = true } tempfile = { workspace = true } byte-unit = "5.1.4" diff --git a/core/benches/blocks/common.rs b/core/benches/blocks/common.rs index e88db6e9718..a6a79b2b7f0 100644 --- a/core/benches/blocks/common.rs +++ b/core/benches/blocks/common.rs @@ -1,4 +1,4 @@ -use std::{num::NonZeroU64, str::FromStr as _}; +use std::num::NonZeroU64; use iroha_core::{ block::{BlockBuilder, CommittedBlock}, @@ -224,7 +224,7 @@ pub fn build_state(rt: &tokio::runtime::Handle, account_id: &AccountId) -> State } fn construct_domain_id(i: usize) -> DomainId { - DomainId::from_str(&format!("non_inlinable_domain_name_{i}")).unwrap() + format!("non_inlinable_domain_name_{i}").parse().unwrap() } fn generate_account_id(domain_id: DomainId) -> AccountId { @@ -234,7 +234,9 @@ fn generate_account_id(domain_id: DomainId) -> AccountId { fn construct_asset_definition_id(i: usize, domain_id: DomainId) -> AssetDefinitionId { AssetDefinitionId::new( domain_id, - Name::from_str(&format!("non_inlinable_asset_definition_name_{i}")).unwrap(), + format!("non_inlinable_asset_definition_name_{i}") + .parse() + .unwrap(), ) } diff --git a/core/benches/kura.rs b/core/benches/kura.rs index f602695d735..85ccd6547dd 100644 --- a/core/benches/kura.rs +++ b/core/benches/kura.rs @@ -1,7 +1,5 @@ #![allow(missing_docs)] -use std::str::FromStr as _; - use byte_unit::{Byte, UnitType}; use criterion::{criterion_group, criterion_main, Criterion}; use iroha_config::{base::WithOrigin, parameters::actual::Kura as Config}; @@ -34,7 +32,7 @@ async fn measure_block_size_for_n_executors(n_executors: u32) { let (alice_id, alice_keypair) = gen_account_in("test"); let (bob_id, _bob_keypair) = gen_account_in("test"); - let xor_id = 
AssetDefinitionId::from_str("xor#test").expect("tested"); + let xor_id = "xor#test".parse().expect("tested"); let alice_xor_id = AssetId::new(xor_id, alice_id.clone()); let transfer = Transfer::asset_numeric(alice_xor_id, 10u32, bob_id); let tx = TransactionBuilder::new(chain_id.clone(), alice_id.clone()) diff --git a/core/benches/validation.rs b/core/benches/validation.rs index 90aaeb63527..61d2ddf5717 100644 --- a/core/benches/validation.rs +++ b/core/benches/validation.rs @@ -1,4 +1,5 @@ #![allow(missing_docs)] +use std::sync::LazyLock; use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; use iroha_core::{ @@ -11,13 +12,12 @@ use iroha_core::{ use iroha_data_model::{ account::AccountId, isi::InstructionBox, prelude::*, transaction::TransactionBuilder, }; -use once_cell::sync::Lazy; use test_samples::gen_account_in; -static STARTER_DOMAIN: Lazy = Lazy::new(|| "start".parse().unwrap()); -static STARTER_KEYPAIR: Lazy = Lazy::new(KeyPair::random); -static STARTER_ID: Lazy = - Lazy::new(|| AccountId::new(STARTER_DOMAIN.clone(), STARTER_KEYPAIR.public_key().clone())); +static STARTER_DOMAIN: LazyLock = LazyLock::new(|| "start".parse().unwrap()); +static STARTER_KEYPAIR: LazyLock = LazyLock::new(KeyPair::random); +static STARTER_ID: LazyLock = + LazyLock::new(|| AccountId::new(STARTER_DOMAIN.clone(), STARTER_KEYPAIR.public_key().clone())); fn build_test_transaction(chain_id: ChainId) -> TransactionBuilder { let domain_id: DomainId = "domain".parse().unwrap(); diff --git a/core/src/block.rs b/core/src/block.rs index 7d0c52be86d..808d8afeb44 100644 --- a/core/src/block.rs +++ b/core/src/block.rs @@ -2,8 +2,8 @@ //! 1. If a new block is constructed by the node: //! `BlockBuilder` -> `BlockBuilder` -> `ValidBlock` -> `CommittedBlock` //! 2. If a block is received, i.e. deserialized: -//! `SignedBlock` -> `ValidBlock` -> `CommittedBlock` -//! [`Block`]s are organised into a linear sequence over time (also known as the block chain). +//! 
`SignedBlock` -> `ValidBlock` -> `CommittedBlock` +//! [`Block`]s are organised into a linear sequence over time (also known as the block chain). use std::{error::Error as _, time::Duration}; use iroha_crypto::{HashOf, KeyPair, MerkleTree}; @@ -184,14 +184,14 @@ mod pending { .unwrap(); BlockHeader { - height: prev_block - .map(|block| block.header().height) - .map(|height| { + height: prev_block.map(|block| block.header().height).map_or_else( + || nonzero!(1_u64), + |height| { height .checked_add(1) .expect("INTERNAL BUG: Blockchain height exceeds usize::MAX") - }) - .unwrap_or(nonzero!(1_u64)), + }, + ), prev_block_hash: prev_block.map(SignedBlock::hash), transactions_hash: transactions .iter() @@ -1107,8 +1107,6 @@ mod event { #[cfg(test)] mod tests { - use std::str::FromStr as _; - use iroha_data_model::prelude::*; use iroha_genesis::GENESIS_DOMAIN_ID; use test_samples::gen_account_in; @@ -1144,7 +1142,7 @@ mod tests { // Predefined world state let (alice_id, alice_keypair) = gen_account_in("wonderland"); let account = Account::new(alice_id.clone()).build(&alice_id); - let domain_id = DomainId::from_str("wonderland").expect("Valid"); + let domain_id = "wonderland".parse().expect("Valid"); let domain = Domain::new(domain_id).build(&alice_id); let world = World::with([domain], [account], []); let kura = Kura::blank_kura_for_testing(); @@ -1158,7 +1156,7 @@ mod tests { let mut state_block = state.block(); // Creating an instruction - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland".parse().expect("Valid"); let create_asset_definition = Register::asset_definition(AssetDefinition::numeric(asset_definition_id)); @@ -1202,7 +1200,7 @@ mod tests { // Predefined world state let (alice_id, alice_keypair) = gen_account_in("wonderland"); let account = Account::new(alice_id.clone()).build(&alice_id); - let domain_id = DomainId::from_str("wonderland").expect("Valid"); + let domain_id = 
"wonderland".parse().expect("Valid"); let domain = Domain::new(domain_id).build(&alice_id); let world = World::with([domain], [account], []); let kura = Kura::blank_kura_for_testing(); @@ -1216,7 +1214,9 @@ mod tests { let mut state_block = state.block(); // Creating an instruction - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); + let asset_definition_id = "xor#wonderland" + .parse::() + .expect("Valid"); let create_asset_definition = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); @@ -1280,7 +1280,7 @@ mod tests { // Predefined world state let (alice_id, alice_keypair) = gen_account_in("wonderland"); let account = Account::new(alice_id.clone()).build(&alice_id); - let domain_id = DomainId::from_str("wonderland").expect("Valid"); + let domain_id = "wonderland".parse().expect("Valid"); let domain = Domain::new(domain_id).build(&alice_id); let world = World::with([domain], [account], []); let kura = Kura::blank_kura_for_testing(); @@ -1293,9 +1293,9 @@ mod tests { }; let mut state_block = state.block(); - let domain_id = DomainId::from_str("domain").expect("Valid"); + let domain_id = "domain".parse().expect("Valid"); let create_domain = Register::domain(Domain::new(domain_id)); - let asset_definition_id = AssetDefinitionId::from_str("coin#domain").expect("Valid"); + let asset_definition_id = "coin#domain".parse().expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id)); let fail_isi = Unregister::domain("dummy".parse().unwrap()); diff --git a/core/src/kura.rs b/core/src/kura.rs index 5e28ebae57e..db33b6aa126 100644 --- a/core/src/kura.rs +++ b/core/src/kura.rs @@ -970,8 +970,8 @@ mod tests { } } - #[tokio::test] - async fn strict_init_kura() { + #[test] + fn strict_init_kura() { let temp_dir = TempDir::new().unwrap(); Kura::new(&Config { init_mode: InitMode::Strict, @@ -983,8 +983,8 @@ mod tests { .unwrap(); } - #[test] - fn 
kura_not_miss_replace_block() { + #[tokio::test] + async fn kura_not_miss_replace_block() { let rt = tokio::runtime::Builder::new_multi_thread() .enable_time() .build() diff --git a/core/src/queue.rs b/core/src/queue.rs index a628e97c7c8..d1d8f273350 100644 --- a/core/src/queue.rs +++ b/core/src/queue.rs @@ -400,7 +400,7 @@ impl Queue { #[cfg(test)] // this is `pub` to re-use internal utils pub mod tests { - use std::{str::FromStr, sync::Arc, thread, time::Duration}; + use std::{sync::Arc, thread, time::Duration}; use iroha_data_model::{parameter::TransactionParameters, prelude::*}; use nonzero_ext::nonzero; @@ -459,7 +459,7 @@ pub mod tests { } pub fn world_with_test_domains() -> World { - let domain_id = DomainId::from_str("wonderland").expect("Valid"); + let domain_id = "wonderland".parse().expect("Valid"); let (account_id, _account_keypair) = gen_account_in("wonderland"); let domain = Domain::new(domain_id).build(&account_id); let account = Account::new(account_id.clone()).build(&account_id); @@ -792,7 +792,7 @@ pub mod tests { let (alice_id, alice_keypair) = gen_account_in("wonderland"); let (bob_id, bob_keypair) = gen_account_in("wonderland"); let world = { - let domain_id = DomainId::from_str("wonderland").expect("Valid"); + let domain_id = "wonderland".parse().expect("Valid"); let domain = Domain::new(domain_id).build(&alice_id); let alice_account = Account::new(alice_id.clone()).build(&alice_id); let bob_account = Account::new(bob_id.clone()).build(&bob_id); diff --git a/core/src/smartcontracts/isi/mod.rs b/core/src/smartcontracts/isi/mod.rs index 343401697c3..c886a7ce737 100644 --- a/core/src/smartcontracts/isi/mod.rs +++ b/core/src/smartcontracts/isi/mod.rs @@ -226,7 +226,6 @@ pub mod prelude { #[cfg(test)] mod tests { - use core::str::FromStr as _; use std::sync::Arc; use test_samples::{ @@ -246,10 +245,10 @@ mod tests { let world = World::with([], [], []); let query_handle = LiveQueryStore::start_test(); let state = State::new(world, kura.clone(), 
query_handle); - let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; + let asset_definition_id = "rose#wonderland".parse()?; let mut state_block = state.block(); let mut state_transaction = state_block.transaction(); - Register::domain(Domain::new(DomainId::from_str("wonderland")?)) + Register::domain(Domain::new("wonderland".parse()?)) .execute(&SAMPLE_GENESIS_ACCOUNT_ID, &mut state_transaction)?; Register::account(Account::new(ALICE_ID.clone())) .execute(&SAMPLE_GENESIS_ACCOUNT_ID, &mut state_transaction)?; @@ -267,9 +266,9 @@ mod tests { let mut state_block = state.block(); let mut state_transaction = state_block.transaction(); let account_id = ALICE_ID.clone(); - let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; + let asset_definition_id = "rose#wonderland".parse()?; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let key = Name::from_str("Bytes")?; + let key = "Bytes".parse::()?; SetKeyValue::asset(asset_id.clone(), key.clone(), vec![1_u32, 2_u32, 3_u32]) .execute(&account_id, &mut state_transaction)?; let asset = state_transaction.world.asset(&asset_id)?; @@ -288,7 +287,7 @@ mod tests { let mut state_block = state.block(); let mut state_transaction = state_block.transaction(); let account_id = ALICE_ID.clone(); - let key = Name::from_str("Bytes")?; + let key = "Bytes".parse::()?; SetKeyValue::account(account_id.clone(), key.clone(), vec![1_u32, 2_u32, 3_u32]) .execute(&account_id, &mut state_transaction)?; let bytes = state_transaction @@ -304,9 +303,9 @@ mod tests { let state = state_with_test_domains(&kura)?; let mut state_block = state.block(); let mut state_transaction = state_block.transaction(); - let definition_id = AssetDefinitionId::from_str("rose#wonderland")?; + let definition_id = "rose#wonderland".parse::()?; let account_id = ALICE_ID.clone(); - let key = Name::from_str("Bytes")?; + let key = "Bytes".parse::()?; SetKeyValue::asset_definition( definition_id.clone(), 
key.clone(), @@ -329,9 +328,9 @@ mod tests { let state = state_with_test_domains(&kura)?; let mut state_block = state.block(); let mut state_transaction = state_block.transaction(); - let domain_id = DomainId::from_str("wonderland")?; + let domain_id = "wonderland".parse::()?; let account_id = ALICE_ID.clone(); - let key = Name::from_str("Bytes")?; + let key = "Bytes".parse::()?; SetKeyValue::domain(domain_id.clone(), key.clone(), vec![1_u32, 2_u32, 3_u32]) .execute(&account_id, &mut state_transaction)?; let bytes = state_transaction @@ -351,7 +350,7 @@ mod tests { let mut state_block = state.block(); let mut state_transaction = state_block.transaction(); let account_id = ALICE_ID.clone(); - let trigger_id = TriggerId::from_str("test_trigger_id")?; + let trigger_id = "test_trigger_id".parse()?; assert!(matches!( ExecuteTrigger::new(trigger_id) @@ -371,7 +370,7 @@ mod tests { let mut state_transaction = state_block.transaction(); let account_id = ALICE_ID.clone(); let (fake_account_id, _fake_account_keypair) = gen_account_in("wonderland"); - let trigger_id = TriggerId::from_str("test_trigger_id")?; + let trigger_id = "test_trigger_id".parse::()?; // register fake account let register_account = Register::account(Account::new(fake_account_id.clone())); @@ -414,7 +413,7 @@ mod tests { let mut state_transaction = state_block.transaction(); let account_id = ALICE_ID.clone(); assert!(matches!( - Register::domain(Domain::new(DomainId::from_str("genesis")?)) + Register::domain(Domain::new("genesis".parse()?)) .execute(&account_id, &mut state_transaction) .expect_err("Error expected"), Error::InvariantViolation(_) diff --git a/core/src/smartcontracts/isi/query.rs b/core/src/smartcontracts/isi/query.rs index 26b3f4069ce..97070726823 100644 --- a/core/src/smartcontracts/isi/query.rs +++ b/core/src/smartcontracts/isi/query.rs @@ -136,44 +136,40 @@ where } // sort & paginate, erase the iterator with QueryBatchedErasedIterator - let output = match &sorting.sort_by_metadata_key { 
- Some(key) => { - // if sorting was requested, we need to retrieve all the results first - let mut pairs: Vec<(Option, I::Item)> = iter - .map(|value| { - let key = value.get_metadata_sorting_key(key); - (key, value) - }) - .collect(); - pairs.sort_by( - |(left_key, _), (right_key, _)| match (left_key, right_key) { - (Some(l), Some(r)) => l.cmp(r), - (Some(_), None) => Ordering::Less, - (None, Some(_)) => Ordering::Greater, - (None, None) => Ordering::Equal, - }, - ); - - QueryBatchedErasedIterator::new( - pairs.into_iter().map(|(_, val)| val).paginate(pagination), - fetch_size, - ) - } - // no sorting required, can just paginate the results without constructing the full output vec - None => { - // FP: this collect is very deliberate - #[allow(clippy::needless_collect)] - let output = iter - .paginate(pagination) - // it should theoretically be possible to not collect the results into a vec and build the response lazily - // but: - // - the iterator is bound to the 'state lifetime and this lifetime should somehow be erased - // - for small queries this might not be efficient - // TODO: investigate this - .collect::>(); + let output = if let Some(key) = &sorting.sort_by_metadata_key { + // if sorting was requested, we need to retrieve all the results first + let mut pairs: Vec<(Option, I::Item)> = iter + .map(|value| { + let key = value.get_metadata_sorting_key(key); + (key, value) + }) + .collect(); + pairs.sort_by( + |(left_key, _), (right_key, _)| match (left_key, right_key) { + (Some(l), Some(r)) => l.cmp(r), + (Some(_), None) => Ordering::Less, + (None, Some(_)) => Ordering::Greater, + (None, None) => Ordering::Equal, + }, + ); - QueryBatchedErasedIterator::new(output.into_iter(), fetch_size) - } + QueryBatchedErasedIterator::new( + pairs.into_iter().map(|(_, val)| val).paginate(pagination), + fetch_size, + ) + } else { + // FP: this collect is very deliberate + #[allow(clippy::needless_collect)] + let output = iter + .paginate(pagination) + // it should 
theoretically be possible to not collect the results into a vec and build the response lazily + // but: + // - the iterator is bound to the 'state lifetime and this lifetime should somehow be erased + // - for small queries this might not be efficient + // TODO: investigate this + .collect::>(); + + QueryBatchedErasedIterator::new(output.into_iter(), fetch_size) }; Ok(output) @@ -341,8 +337,6 @@ impl ValidQueryRequest { #[cfg(test)] mod tests { - use std::str::FromStr as _; - use iroha_crypto::{Hash, KeyPair}; use iroha_data_model::query::predicate::CompoundPredicate; use iroha_primitives::json::JsonString; @@ -362,26 +356,25 @@ mod tests { }; fn world_with_test_domains() -> World { - let domain_id = DomainId::from_str("wonderland").expect("Valid"); + let domain_id = "wonderland".parse().expect("Valid"); let domain = Domain::new(domain_id).build(&ALICE_ID); let account = Account::new(ALICE_ID.clone()).build(&ALICE_ID); - let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland").expect("Valid"); + let asset_definition_id = "rose#wonderland".parse().expect("Valid"); let asset_definition = AssetDefinition::numeric(asset_definition_id).build(&ALICE_ID); World::with([domain], [account], [asset_definition]) } fn world_with_test_asset_with_metadata() -> World { - let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland").expect("Valid"); - let domain = Domain::new(DomainId::from_str("wonderland").expect("Valid")).build(&ALICE_ID); + let asset_definition_id = "rose#wonderland" + .parse::() + .expect("Valid"); + let domain = Domain::new("wonderland".parse().expect("Valid")).build(&ALICE_ID); let account = Account::new(ALICE_ID.clone()).build(&ALICE_ID); let asset_definition = AssetDefinition::numeric(asset_definition_id.clone()).build(&ALICE_ID); let mut store = Metadata::default(); - store.insert( - Name::from_str("Bytes").expect("Valid"), - vec![1_u32, 2_u32, 3_u32], - ); + store.insert("Bytes".parse().expect("Valid"), vec![1_u32, 2_u32, 
3_u32]); let asset_id = AssetId::new(asset_definition_id, account.id().clone()); let asset = Asset::new(asset_id, AssetValue::Store(store)); @@ -390,13 +383,13 @@ mod tests { fn world_with_test_account_with_metadata() -> Result { let mut metadata = Metadata::default(); - metadata.insert(Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32]); + metadata.insert("Bytes".parse()?, vec![1_u32, 2_u32, 3_u32]); - let domain = Domain::new(DomainId::from_str("wonderland")?).build(&ALICE_ID); + let domain = Domain::new("wonderland".parse()?).build(&ALICE_ID); let account = Account::new(ALICE_ID.clone()) .with_metadata(metadata) .build(&ALICE_ID); - let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland").expect("Valid"); + let asset_definition_id = "rose#wonderland".parse().expect("Valid"); let asset_definition = AssetDefinition::numeric(asset_definition_id).build(&ALICE_ID); Ok(World::with([domain], [account], [asset_definition])) } @@ -474,10 +467,9 @@ mod tests { let query_handle = LiveQueryStore::start_test(); let state = State::new(world_with_test_asset_with_metadata(), kura, query_handle); - let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; + let asset_definition_id = "rose#wonderland".parse()?; let asset_id = AssetId::new(asset_definition_id, ALICE_ID.clone()); - let bytes = - FindAssetMetadata::new(asset_id, Name::from_str("Bytes")?).execute(&state.view())?; + let bytes = FindAssetMetadata::new(asset_id, "Bytes".parse()?).execute(&state.view())?; assert_eq!(JsonString::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); Ok(()) } @@ -488,8 +480,8 @@ mod tests { let query_handle = LiveQueryStore::start_test(); let state = State::new(world_with_test_account_with_metadata()?, kura, query_handle); - let bytes = FindAccountMetadata::new(ALICE_ID.clone(), Name::from_str("Bytes")?) 
- .execute(&state.view())?; + let bytes = + FindAccountMetadata::new(ALICE_ID.clone(), "Bytes".parse()?).execute(&state.view())?; assert_eq!(JsonString::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); Ok(()) } @@ -667,12 +659,12 @@ mod tests { let kura = Kura::blank_kura_for_testing(); let state = { let mut metadata = Metadata::default(); - metadata.insert(Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32]); - let domain = Domain::new(DomainId::from_str("wonderland")?) + metadata.insert("Bytes".parse()?, vec![1_u32, 2_u32, 3_u32]); + let domain = Domain::new("wonderland".parse()?) .with_metadata(metadata) .build(&ALICE_ID); let account = Account::new(ALICE_ID.clone()).build(&ALICE_ID); - let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; + let asset_definition_id = "rose#wonderland".parse()?; let asset_definition = AssetDefinition::numeric(asset_definition_id).build(&ALICE_ID); let query_handle = LiveQueryStore::start_test(); State::new( @@ -682,8 +674,8 @@ mod tests { ) }; - let domain_id = DomainId::from_str("wonderland")?; - let key = Name::from_str("Bytes")?; + let domain_id = "wonderland".parse()?; + let key = "Bytes".parse()?; let bytes = FindDomainMetadata::new(domain_id, key).execute(&state.view())?; assert_eq!(JsonString::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); Ok(()) diff --git a/core/src/smartcontracts/isi/triggers/set.rs b/core/src/smartcontracts/isi/triggers/set.rs index 4de0ed0c110..f270a9fbd34 100644 --- a/core/src/smartcontracts/isi/triggers/set.rs +++ b/core/src/smartcontracts/isi/triggers/set.rs @@ -411,7 +411,7 @@ pub trait SetReadOnly { /// Apply `f` to triggers whose action satisfies the predicate. /// /// Return an empty list if [`Set`] doesn't contain any such triggers. 
- fn inspect_by_action<'a, P, F, R>(&'a self, filter: P, f: F) -> impl Iterator + '_ + fn inspect_by_action<'a, P, F, R>(&'a self, filter: P, f: F) -> impl Iterator + 'a where P: Fn(&dyn LoadedActionTrait) -> bool + 'a, F: Fn(&TriggerId, &dyn LoadedActionTrait) -> R + 'a, @@ -847,7 +847,7 @@ impl<'block, 'set> SetTransaction<'block, 'set> { /// /// - If a trigger with the given id is not found. /// - If updating the current trigger `repeats` causes an overflow. Indefinitely - /// repeating triggers and triggers set for exact time always cause an overflow. + /// repeating triggers and triggers set for exact time always cause an overflow. pub fn mod_repeats( &mut self, id: &TriggerId, diff --git a/core/src/smartcontracts/mod.rs b/core/src/smartcontracts/mod.rs index cc34849a62c..009ba6b1157 100644 --- a/core/src/smartcontracts/mod.rs +++ b/core/src/smartcontracts/mod.rs @@ -1,7 +1,7 @@ -//! Iroha smart contract functionality. Most of the traits mentioned -//! [`isi`] or Iroha Special Instructions are the main way of -//! interacting with the [`State`], even [`wasm`] based -//! smart-contracts can only interact with the `world`, via +//! Iroha smart contract functionality. +//! +//! Most of the traits mentioned [`isi`] or Iroha Special Instructions are the main way of interacting +//! with the [`State`], even [`wasm`] based smart-contracts can only interact with the `world`, via //! instructions. 
pub mod isi; diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs index fb1e58a78f6..eaecf238c9f 100644 --- a/core/src/smartcontracts/wasm.rs +++ b/core/src/smartcontracts/wasm.rs @@ -915,7 +915,7 @@ impl<'wrld, 'block: 'wrld, 'state: 'block> Runtime = Self::get_typed_func(&smart_contract, &mut store, import::SMART_CONTRACT_MAIN)?; // NOTE: This function takes ownership of the pointer @@ -989,7 +989,8 @@ impl<'wrld, 'block: 'wrld, 'state: 'block> Runtime = + Self::get_typed_func(&instance, &mut store, import::TRIGGER_MAIN)?; // NOTE: This function takes ownership of the pointer main_fn @@ -1377,7 +1378,8 @@ impl<'wrld, 'block, 'state> Runtime = + Self::get_typed_func(&instance, &mut store, import::EXECUTOR_MIGRATE)?; migrate_fn .call(&mut store, ()) diff --git a/core/src/state.rs b/core/src/state.rs index 5f58b30bb59..bd83fd8bd86 100644 --- a/core/src/state.rs +++ b/core/src/state.rs @@ -1030,9 +1030,9 @@ impl WorldTransaction<'_, '_> { /// /// Trigger execution time: /// - If this method is called by ISI inside *transaction*, - /// then *trigger* will be executed on the **current** block + /// then *trigger* will be executed on the **current** block /// - If this method is called by ISI inside *trigger*, - /// then *trigger* will be executed on the **next** block + /// then *trigger* will be executed on the **next** block pub fn execute_trigger(&mut self, event: ExecuteTriggerEvent) { self.triggers.handle_execute_trigger_event(event.clone()); self.events_buffer.push(event.into()); diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs index e9c872a1a59..91061c395fc 100755 --- a/crypto/src/lib.rs +++ b/crypto/src/lib.rs @@ -548,7 +548,7 @@ impl FromStr for PublicKey { type Err = ParseError; fn from_str(key: &str) -> Result { - PublicKeyInner::from_str(key).map(Box::new).map(Self) + key.parse().map(Box::new).map(Self) } } @@ -755,7 +755,7 @@ impl FromStr for ExposedPrivateKey { type Err = ParseError; fn from_str(key: &str) -> Result { - let 
private_key = PrivateKey::from_str(key)?; + let private_key = key.parse()?; Ok(ExposedPrivateKey(private_key)) } } diff --git a/crypto/src/varint.rs b/crypto/src/varint.rs index 364fa5275ce..9dc990f6d14 100644 --- a/crypto/src/varint.rs +++ b/crypto/src/varint.rs @@ -43,7 +43,7 @@ macro_rules! try_from_var_uint( let bytes = payload.into_iter().map(|byte| byte & 0b0111_1111); let number = bytes .zip(offsets) - .map(|(byte, offset)| (byte as Self) << offset) + .map(|(byte, offset)| Self::from(byte) << offset) .fold(0, |number, part| number + part); Ok(number) } diff --git a/data_model/Cargo.toml b/data_model/Cargo.toml index bd19cd9804d..1dc267101db 100644 --- a/data_model/Cargo.toml +++ b/data_model/Cargo.toml @@ -15,7 +15,7 @@ default = ["std"] # Enable static linkage of the rust standard library. # Disabled for WASM interoperability, to reduce the binary size. # Please refer to https://docs.rust-embedded.org/book/intro/no-std.html -std = ["iroha_macro/std", "iroha_version/std", "iroha_crypto/std", "iroha_primitives/std", "thiserror", "displaydoc/std", "strum/std", "once_cell"] +std = ["iroha_macro/std", "iroha_version/std", "iroha_crypto/std", "iroha_primitives/std", "thiserror", "displaydoc/std", "strum/std"] # Enable API for HTTP requests. Should be activated for HTTP clients http = ["std"] # Replace structures and methods with FFI equivalents to facilitate dynamic linkage (mainly used in smartcontracts) @@ -46,7 +46,6 @@ displaydoc = { workspace = true } getset = { workspace = true } strum = { workspace = true, features = ["derive"] } base64 = { workspace = true, features = ["alloc"] } -once_cell = { workspace = true, optional = true } nonzero_ext = { workspace = true } [dev-dependencies] diff --git a/data_model/src/block.rs b/data_model/src/block.rs index e24faa5c7d0..41819ce3dda 100644 --- a/data_model/src/block.rs +++ b/data_model/src/block.rs @@ -1,8 +1,6 @@ -//! This module contains `Block` structures for each state, it's -//! 
transitions, implementations and related traits -//! implementations. `Block`s are organised into a linear sequence -//! over time (also known as the block chain). A Block's life-cycle -//! starts from `PendingBlock`. +//! This module contains `Block` and related implementations. +//! +//! `Block`s are organised into a linear sequence over time (also known as the block chain). #[cfg(not(feature = "std"))] use alloc::{boxed::Box, format, string::String, vec::Vec}; diff --git a/data_model/src/ipfs.rs b/data_model/src/ipfs.rs index e6dbaca5c76..9de1eb4fcc2 100644 --- a/data_model/src/ipfs.rs +++ b/data_model/src/ipfs.rs @@ -102,7 +102,7 @@ impl AsRef for IpfsPath { impl Decode for IpfsPath { fn decode(input: &mut I) -> Result { let name = ConstString::decode(input)?; - Self::from_str(&name).map_err(|error| error.reason.into()) + name.parse::().map_err(|error| error.reason.into()) } } @@ -125,19 +125,19 @@ mod tests { #[test] fn test_invalid_ipfs_path() { assert!(matches!( - IpfsPath::from_str(INVALID_IPFS[0]), + INVALID_IPFS[0].parse::(), Err(err) if err.to_string() == "Expected root type, but nothing found" )); assert!(matches!( - IpfsPath::from_str(INVALID_IPFS[1]), + INVALID_IPFS[1].parse::(), Err(err) if err.to_string() == "Expected at least one content id" )); assert!(matches!( - IpfsPath::from_str(INVALID_IPFS[2]), + INVALID_IPFS[2].parse::(), Err(err) if err.to_string() == "IPFS cid is too short" )); assert!(matches!( - IpfsPath::from_str(INVALID_IPFS[3]), + INVALID_IPFS[3].parse::(), Err(err) if err.to_string() == "Unexpected root type, expected `ipfs`, `ipld` or `ipns`" )); } @@ -145,15 +145,20 @@ mod tests { #[test] fn test_valid_ipfs_path() { // Valid paths - IpfsPath::from_str("QmQqzMTavQgT4f4T5v6PWBp7XNKtoPmC9jvn12WPT3gkSE") + "QmQqzMTavQgT4f4T5v6PWBp7XNKtoPmC9jvn12WPT3gkSE" + .parse::() .expect("Path without root should be valid"); - IpfsPath::from_str("/ipfs/QmQqzMTavQgT4f4T5v6PWBp7XNKtoPmC9jvn12WPT3gkSE") + 
"/ipfs/QmQqzMTavQgT4f4T5v6PWBp7XNKtoPmC9jvn12WPT3gkSE" + .parse::() .expect("Path with ipfs root should be valid"); - IpfsPath::from_str("/ipld/QmQqzMTavQgT4f4T5v6PWBp7XNKtoPmC9jvn12WPT3gkSE") + "/ipld/QmQqzMTavQgT4f4T5v6PWBp7XNKtoPmC9jvn12WPT3gkSE" + .parse::() .expect("Path with ipld root should be valid"); - IpfsPath::from_str("/ipns/QmSrPmbaUKA3ZodhzPWZnpFgcPMFWF4QsxXbkWfEptTBJd") + "/ipns/QmSrPmbaUKA3ZodhzPWZnpFgcPMFWF4QsxXbkWfEptTBJd" + .parse::() .expect("Path with ipns root should be valid"); - IpfsPath::from_str("/ipfs/SomeFolder/SomeImage") + "/ipfs/SomeFolder/SomeImage" + .parse::() .expect("Path with folders should be valid"); } diff --git a/data_model/src/query/mod.rs b/data_model/src/query/mod.rs index 0ca4df2fdbb..78dcb26d765 100644 --- a/data_model/src/query/mod.rs +++ b/data_model/src/query/mod.rs @@ -414,8 +414,9 @@ mod candidate { #[cfg(test)] mod tests { + use std::sync::LazyLock; + use iroha_crypto::KeyPair; - use once_cell::sync::Lazy; use parity_scale_codec::{DecodeAll, Encode}; use crate::{ @@ -426,12 +427,12 @@ mod candidate { }, }; - static ALICE_ID: Lazy = Lazy::new(|| { + static ALICE_ID: LazyLock = LazyLock::new(|| { format!("{}@{}", ALICE_KEYPAIR.public_key(), "wonderland") .parse() .unwrap() }); - static ALICE_KEYPAIR: Lazy = Lazy::new(|| { + static ALICE_KEYPAIR: LazyLock = LazyLock::new(|| { KeyPair::new( "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03" .parse() @@ -443,7 +444,7 @@ mod candidate { .unwrap() }); - static BOB_KEYPAIR: Lazy = Lazy::new(|| { + static BOB_KEYPAIR: LazyLock = LazyLock::new(|| { KeyPair::new( "ed012004FF5B81046DDCCF19E2E451C45DFB6F53759D4EB30FA2EFA807284D1CC33016" .parse() diff --git a/data_model/src/query/predicate/predicate_atoms/account.rs b/data_model/src/query/predicate/predicate_atoms/account.rs index 6a26ee39b3e..30de0984017 100644 --- a/data_model/src/query/predicate/predicate_atoms/account.rs +++ b/data_model/src/query/predicate/predicate_atoms/account.rs @@ -51,6 +51,7 @@ 
impl EvaluatePredicate for AccountIdPredicateBox { /// A predicate that can be applied to an [`Account`]. #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] +#[expect(unreachable_patterns)] pub enum AccountPredicateBox { // projections /// Checks if a predicate applies to the ID of the input. diff --git a/data_model/src/query/predicate/predicate_atoms/asset.rs b/data_model/src/query/predicate/predicate_atoms/asset.rs index e308b8bebf5..63a45f79be1 100644 --- a/data_model/src/query/predicate/predicate_atoms/asset.rs +++ b/data_model/src/query/predicate/predicate_atoms/asset.rs @@ -27,6 +27,7 @@ use crate::{ /// A predicate that can be applied to an [`AssetDefinitionId`]. #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] +#[expect(unreachable_patterns)] pub enum AssetDefinitionPredicateBox { // projections /// Checks if a predicate applies to the ID of the input. @@ -51,6 +52,7 @@ impl EvaluatePredicate for AssetDefinitionPredicateBox { /// A predicate that can be applied to an [`Asset`]. #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] +#[expect(unreachable_patterns)] pub enum AssetPredicateBox { // projections /// Checks if a predicate applies to the ID of the input. diff --git a/data_model/src/query/predicate/predicate_atoms/domain.rs b/data_model/src/query/predicate/predicate_atoms/domain.rs index 1b21589d008..9f5b8da25a4 100644 --- a/data_model/src/query/predicate/predicate_atoms/domain.rs +++ b/data_model/src/query/predicate/predicate_atoms/domain.rs @@ -21,6 +21,7 @@ use crate::{ /// A predicate that can be applied to a [`Domain`]. #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] +#[expect(unreachable_patterns)] pub enum DomainPredicateBox { // projections /// Checks if a predicate applies to the ID of the input. 
diff --git a/data_model/src/query/predicate/predicate_atoms/mod.rs b/data_model/src/query/predicate/predicate_atoms/mod.rs index 4539554bdcf..71fc9a344aa 100644 --- a/data_model/src/query/predicate/predicate_atoms/mod.rs +++ b/data_model/src/query/predicate/predicate_atoms/mod.rs @@ -145,7 +145,9 @@ where /// A predicate that can be applied to [`Metadata`]. #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] -pub enum MetadataPredicateBox {} +pub enum MetadataPredicateBox { + // TODO: populate +} impl_predicate_box!(Metadata: MetadataPredicateBox); diff --git a/ffi/derive/src/attr_parse/getset.rs b/ffi/derive/src/attr_parse/getset.rs index 39dffdb6879..eac1718a0fd 100644 --- a/ffi/derive/src/attr_parse/getset.rs +++ b/ffi/derive/src/attr_parse/getset.rs @@ -1,6 +1,6 @@ //! This module provides parsing of custom attributes from the [`getset`](https://docs.rs/getset/latest/getset/) crate -use std::{collections::hash_map::Entry, str::FromStr}; +use std::collections::hash_map::Entry; use proc_macro2::Span; use rustc_hash::{FxHashMap, FxHashSet}; @@ -244,7 +244,7 @@ impl GetSetRawFieldAttr { } else if attr .path() .get_ident() - .and_then(|ident| GetSetGenMode::from_str(&ident.to_string()).ok()) + .and_then(|ident| ident.to_string().parse::().ok()) .is_some() { accumulator.push( diff --git a/ffi/derive/src/attr_parse/repr.rs b/ffi/derive/src/attr_parse/repr.rs index 3c3d8b38b29..dfc051aa1fe 100644 --- a/ffi/derive/src/attr_parse/repr.rs +++ b/ffi/derive/src/attr_parse/repr.rs @@ -2,8 +2,6 @@ // TODO: it's probably a common functionality, move it to `iroha_derive_primitives` when it will use syn 2.0 -use std::str::FromStr; - use darling::{error::Accumulator, util::SpannedValue, FromAttributes}; use proc_macro2::{Delimiter, Span}; use strum::{Display, EnumString}; @@ -66,7 +64,7 @@ impl Parse for SpannedReprToken { let mut span = ident.span(); let str = ident.to_string(); - if let Ok(primitive) = ReprPrimitive::from_str(&str) { + if 
let Ok(primitive) = str.parse() { return Ok(((span,ReprToken::Kind(ReprKind::Primitive(primitive))), after_token)); } diff --git a/ffi/derive/src/convert.rs b/ffi/derive/src/convert.rs index 1b5118d9cb3..f5545b35328 100644 --- a/ffi/derive/src/convert.rs +++ b/ffi/derive/src/convert.rs @@ -1,4 +1,3 @@ -use core::str::FromStr as _; use std::fmt::{Display, Formatter}; use darling::{ @@ -510,7 +509,7 @@ fn derive_ffi_type_for_data_carrying_enum( .iter() .enumerate() .map(|(i, variant)| { - let idx = TokenStream::from_str(&format!("{i}")).expect("Valid"); + let idx = format!("{i}").parse::().expect("Valid"); let payload_name = gen_repr_c_enum_payload_name(enum_name); let variant_name = &variant.ident; @@ -540,7 +539,7 @@ fn derive_ffi_type_for_data_carrying_enum( .collect::>(); let variants_try_from_ffi = variants.iter().enumerate().map(|(i, variant)| { - let idx = TokenStream::from_str(&format!("{i}")).expect("Valid"); + let idx = format!("{i}").parse::().expect("Valid"); let variant_name = &variant.ident; variant_mapper( diff --git a/ffi/derive/tests/ui_fail/fallible_transmute_mut_ref.stderr b/ffi/derive/tests/ui_fail/fallible_transmute_mut_ref.stderr index af8e4a34eb3..58f83f0d5e2 100644 --- a/ffi/derive/tests/ui_fail/fallible_transmute_mut_ref.stderr +++ b/ffi/derive/tests/ui_fail/fallible_transmute_mut_ref.stderr @@ -24,7 +24,17 @@ error[E0277]: the trait bound `&mut Wrapper: FfiConvert<'_, *mut u32>` is not sa 21 | #[ffi_export] | ------------- required by a bound introduced by this call 22 | pub fn take_non_robust_ref_mut(_ffi_struct: &mut Wrapper) {} - | ^^^^^^^^^^^ the trait `InfallibleTransmute` is not implemented for `&mut Wrapper`, which is required by `&mut Wrapper: FfiConvert<'_, *mut u32>` + | ^^^^^^^^^^^ the trait `InfallibleTransmute` is not implemented for `Wrapper`, which is required by `&mut Wrapper: FfiConvert<'_, *mut u32>` | + = help: the following other types implement trait `InfallibleTransmute`: + ManuallyDrop + [R; N] + i16 + i32 + i64 + i8 + 
u16 + u32 + and $N others = note: required for `&mut Wrapper` to implement `Ir` = note: required for `&mut Wrapper` to implement `FfiConvert<'_, *mut u32>` diff --git a/ffi/src/ir.rs b/ffi/src/ir.rs index b8dfa64aa14..7b6ef5cbd16 100644 --- a/ffi/src/ir.rs +++ b/ffi/src/ir.rs @@ -1,4 +1,5 @@ //! Internal representation, a.k.a IR of `Rust` types during conversion into FFI types. +//! //! While you can implement [`FfiType`] on your `Rust` type directly, it is encouraged //! that you map your type into IR by providing the implementation of [`Ir`] and benefit //! from automatic, correct and performant conversions from IR to C type equivalent. diff --git a/ffi/src/lib.rs b/ffi/src/lib.rs index d04d87c7a48..ee3f9825c03 100644 --- a/ffi/src/lib.rs +++ b/ffi/src/lib.rs @@ -246,6 +246,7 @@ pub enum FfiReturn { } /// Macro for defining FFI types of a known category ([`Robust`] or [`Transmute`]). +/// /// The implementation for an FFI type of one of the categories incurs a lot of bloat that /// is reduced by the use of this macro /// @@ -388,8 +389,9 @@ pub struct Extern { } /// Define the correct [`FfiWrapperType::InputType`]/[`FfiWrapperType::ReturnType`] out of -/// the given [`CWrapperType::InputType`]/[`CWrapperType::ReturnType`]. The only situation -/// when this is evident is when [`Ir::Type`] is set to [`Transparent`] or [`Extern`] types +/// the given [`CWrapperType::InputType`]/[`CWrapperType::ReturnType`]. +/// +/// The only situation when this is evident is when [`Ir::Type`] is set to [`Transparent`] or [`Extern`] types /// /// Example: /// diff --git a/ffi/src/option.rs b/ffi/src/option.rs index 676c20e376f..afce6dd5b89 100644 --- a/ffi/src/option.rs +++ b/ffi/src/option.rs @@ -29,8 +29,10 @@ impl<'dummy, R: Niche<'dummy>> OptionIr for R { impl Cloned for Option {} /// Type that has at least one trap representation that can be used as a niche value. The -/// niche value is used in the serialization of [`Option`]. 
For example, [`Option`] -/// will be serilized into one byte and [`Option<*const T>`] will take the size of the pointer +/// niche value is used in the serialization of [`Option`]. +/// +/// For example, [`Option`] will be serilized into one byte +/// and [`Option<*const T>`] will take the size of the pointer // TODO: Lifetime is used as a hack to deal with https://github.com/rust-lang/rust/issues/48214 pub trait Niche<'dummy>: FfiType { /// The niche value of the type diff --git a/ffi/src/repr_c.rs b/ffi/src/repr_c.rs index cacded81b37..6b529374557 100644 --- a/ffi/src/repr_c.rs +++ b/ffi/src/repr_c.rs @@ -1,7 +1,9 @@ #![allow(trivial_casts)] -//! Logic related to the conversion of IR types to equivalent robust C types. Types that are mapped into -//! one of the predefined [`Ir`] types will be provided an automatic implementation of traits in this module. +//! Logic related to the conversion of IR types to equivalent robust C types. +//! +//! Types that are mapped into one of the predefined [`Ir`] types will be provided an automatic +//! implementation of traits in this module. //! //! Traits in this module mainly exist to bridge the gap between IR and C type equivalents. User should //! only implement these traits if none of the predefined IR types provide an adequate mapping. @@ -109,22 +111,24 @@ pub trait COutPtrRead: COutPtr + Sized { } /// Marker trait indicating that [`CTypeConvert::into_repr_c`] and [`CTypeConvert::try_from_repr_c`] don't -/// return a reference to the store. This is useful to determine which(and how) types can be +/// return a reference to the store. +/// +/// This is useful to determine which(and how) types can be /// returned from an FFI function considering that, after return, local context is destroyed /// /// # Example /// /// 1. `&[u8]` implements [`NonLocal`] -/// This type will be converted to [`RefSlice`] and during conversion will not make use -/// of the store (in any direction). 
The corresponding out-pointer will be `*mut RefSlice` +/// This type will be converted to [`RefSlice`] and during conversion will not make use +/// of the store (in any direction). The corresponding out-pointer will be `*mut RefSlice` /// /// 2. `&[Opaque]` doesn't implement [`NonLocal`] -/// This type will be converted to [`RefSlice<*const T>`] and during conversion will use the -/// local store `Vec<*const T>`. The corresponding out-pointer will be `*mut OutBoxedSlice<*const T>`. +/// This type will be converted to [`RefSlice<*const T>`] and during conversion will use the +/// local store `Vec<*const T>`. The corresponding out-pointer will be `*mut OutBoxedSlice<*const T>`. /// /// 3. `&(u32, u32)` -/// This type will be converted to `*const FfiTuple2` and during conversion will use the -/// local store `FfiTuple`. The corresponding out-pointer will be `*mut FfiTuple2` +/// This type will be converted to `*const FfiTuple2` and during conversion will use the +/// local store `FfiTuple`. The corresponding out-pointer will be `*mut FfiTuple2` /// /// # Safety /// diff --git a/ffi/src/slice.rs b/ffi/src/slice.rs index e47674cca94..ce2b01f55d3 100644 --- a/ffi/src/slice.rs +++ b/ffi/src/slice.rs @@ -8,18 +8,21 @@ use crate::ReprC; crate::decl_ffi_fns! { dealloc } /// Immutable slice `&[C]` with a defined C ABI layout. Consists of a data pointer and a length. +/// /// If the data pointer is set to `null`, the struct represents `Option<&[C]>`. #[repr(C)] #[derive(Debug)] pub struct RefSlice(*const C, usize); /// Mutable slice `&mut [C]` with a defined C ABI layout. Consists of a data pointer and a length. +/// /// If the data pointer is set to `null`, the struct represents `Option<&mut [C]>`. #[repr(C)] #[derive(Debug)] pub struct RefMutSlice(*mut C, usize); /// Owned slice `Box<[C]>` with a defined C ABI layout. Consists of a data pointer and a length. +/// /// Used in place of a function out-pointer to transfer ownership of the slice to the caller. 
/// If the data pointer is set to `null`, the struct represents `Option>`. #[repr(C)] diff --git a/genesis/Cargo.toml b/genesis/Cargo.toml index 838fbbc040a..8c44afb5501 100644 --- a/genesis/Cargo.toml +++ b/genesis/Cargo.toml @@ -18,7 +18,6 @@ iroha_data_model = { workspace = true, features = ["http"] } derive_more = { workspace = true, features = ["deref"] } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = ["std"] } -once_cell = { workspace = true } tracing = { workspace = true } eyre = { workspace = true } parity-scale-codec = { workspace = true } diff --git a/genesis/src/lib.rs b/genesis/src/lib.rs index 73bef89294a..99586d650c9 100644 --- a/genesis/src/lib.rs +++ b/genesis/src/lib.rs @@ -2,10 +2,10 @@ //! [`RawGenesisTransaction`] and the [`GenesisBuilder`] structures. use std::{ fmt::Debug, - fs, - fs::File, + fs::{self, File}, io::BufReader, path::{Path, PathBuf}, + sync::LazyLock, }; use eyre::{eyre, Result, WrapErr}; @@ -14,16 +14,16 @@ use iroha_data_model::{ block::SignedBlock, isi::Instruction, parameter::Parameter, peer::Peer, prelude::*, }; use iroha_schema::IntoSchema; -use once_cell::sync::Lazy; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; /// [`DomainId`](iroha_data_model::domain::DomainId) of the genesis account. -pub static GENESIS_DOMAIN_ID: Lazy = Lazy::new(|| "genesis".parse().unwrap()); +pub static GENESIS_DOMAIN_ID: LazyLock = LazyLock::new(|| "genesis".parse().unwrap()); /// Genesis block. -/// First transaction should contain single [`Upgrade`] instruction to set executor. -/// Second transaction should contain all other instructions. +/// +/// First transaction must contain single [`Upgrade`] instruction to set executor. +/// Second transaction must contain all other instructions. /// If there are no other instructions, second transaction will be omitted. 
#[derive(Debug, Clone)] #[repr(transparent)] @@ -203,7 +203,8 @@ fn get_executor(file: &Path) -> Result { Ok(Executor::new(WasmSmartContract::from_compiled(wasm))) } -/// Builder type for [`GenesisBlock`]/[`RawGenesisTransaction`] +/// Builder type for [`GenesisBlock`]/[`RawGenesisTransaction`]. +/// /// that does not perform any correctness checking on the block produced. /// Use with caution in tests and other things to register domains and accounts. #[must_use] @@ -332,7 +333,7 @@ fn convert_parameters(parameters: Vec) -> Option { return None; } let mut result = Parameters::default(); - for parameter in parameters.into_iter() { + for parameter in parameters { apply_parameter(&mut result, parameter); } Some(result) @@ -538,13 +539,12 @@ mod tests { fn test(parameters: &str) { let genesis_json = format!( r#"{{ - "parameters": {}, + "parameters": {parameters}, "chain": "0", "executor": "./executor.wasm", "topology": [], "instructions": [] - }}"#, - parameters + }}"# ); let _genesis: RawGenesisTransaction = serde_json::from_str(&genesis_json).expect("Failed to deserialize"); diff --git a/hooks/pre-commit.sample b/hooks/pre-commit.sample index 59586491626..8b658a5cd91 100755 --- a/hooks/pre-commit.sample +++ b/hooks/pre-commit.sample @@ -1,5 +1,5 @@ #!/bin/sh -# rustup default nightly-2024-04-18 +# rustup default nightly-2024-09-09 set -e # format checks cargo fmt --all -- --check diff --git a/logger/Cargo.toml b/logger/Cargo.toml index b5bfd82d3d1..078ac30b0ee 100644 --- a/logger/Cargo.toml +++ b/logger/Cargo.toml @@ -22,7 +22,6 @@ tracing-futures = { version = "0.2.5", default-features = false, features = ["st tracing-subscriber = { workspace = true, features = ["fmt", "ansi", "json", "env-filter"] } tokio = { workspace = true, features = ["sync", "rt", "macros"] } console-subscriber = { version = "0.3.0", optional = true } -once_cell = { workspace = true } derive_more = { workspace = true } tracing-error = "0.2.0" thiserror = { workspace = true } diff --git 
a/logger/src/lib.rs b/logger/src/lib.rs index 65a4de4300a..0f953e9127d 100644 --- a/logger/src/lib.rs +++ b/logger/src/lib.rs @@ -5,7 +5,6 @@ pub mod telemetry; use std::{ fmt::Debug, - str::FromStr, sync::{ atomic::{AtomicBool, Ordering}, OnceLock, @@ -104,7 +103,7 @@ pub fn test_logger() -> LoggerHandle { let config = Config { level: std::env::var("TEST_LOG_LEVEL") .ok() - .and_then(|raw| Level::from_str(&raw).ok()) + .and_then(|raw| raw.parse().ok()) .unwrap_or(Level::DEBUG) .into(), format: Format::Pretty, diff --git a/p2p/src/peer.rs b/p2p/src/peer.rs index 71e0c59094e..8175db62fd6 100644 --- a/p2p/src/peer.rs +++ b/p2p/src/peer.rs @@ -151,7 +151,7 @@ mod run { let disambiguator = cryptographer.disambiguator; - tracing::Span::current().record("peer", &peer_id.to_string()); + tracing::Span::current().record("peer", peer_id.to_string()); tracing::Span::current().record("disambiguator", disambiguator); let (post_sender, mut post_receiver) = unbounded_with_len::unbounded_channel(); @@ -262,7 +262,7 @@ mod run { ping_interval.reset(); } // `message_sender.send()` is safe to be cancelled, it won't advance the queue or write anything if another branch completes first. - // + // // We need to conditionally disable it in case there is no data is to be sent, otherwise `message_sender.send()` will complete immediately // // The only source of data to be sent is other branches of this loop, so we do not need any async waiting mechanism for waiting for readiness. 
diff --git a/primitives/derive/src/numeric.rs b/primitives/derive/src/numeric.rs index a0aaa2565e2..b652c8ad7b3 100644 --- a/primitives/derive/src/numeric.rs +++ b/primitives/derive/src/numeric.rs @@ -1,12 +1,11 @@ -use core::str::FromStr; - use manyhow::{error_message, Result}; use proc_macro2::TokenStream; use quote::quote; pub fn numeric_impl(input: TokenStream) -> Result { let input = input.to_string(); - let numeric = ::iroha_numeric::Numeric::from_str(&input) + let numeric = input + .parse::<::iroha_numeric::Numeric>() .map_err(|err| error_message!("failed to parse numeric: {err}"))?; let mantissa = numeric.mantissa(); let scale = numeric.scale(); diff --git a/primitives/numeric/src/lib.rs b/primitives/numeric/src/lib.rs index dc27d04fd62..c7166cf7ff0 100644 --- a/primitives/numeric/src/lib.rs +++ b/primitives/numeric/src/lib.rs @@ -336,7 +336,7 @@ impl core::str::FromStr for Numeric { type Err = NumericError; fn from_str(s: &str) -> Result { - let Ok(inner) = Decimal::from_str(s) else { + let Ok(inner) = s.parse::() else { return Err(NumericError::Malformed); }; diff --git a/primitives/src/addr.rs b/primitives/src/addr.rs index 9236f013d58..3c84b144d47 100644 --- a/primitives/src/addr.rs +++ b/primitives/src/addr.rs @@ -610,34 +610,32 @@ mod std_compat { #[cfg(test)] mod test { - use core::str::FromStr; - use super::*; #[test] fn ipv4() { assert_eq!( - Ipv4Addr::from_str("0.0.0.0").unwrap(), + "0.0.0.0".parse::().unwrap(), Ipv4Addr([0, 0, 0, 0]) ); assert_eq!( - Ipv4Addr::from_str("127.0.0.1").unwrap(), + "127.0.0.1".parse::().unwrap(), Ipv4Addr([127, 0, 0, 1]) ); assert_eq!( - Ipv4Addr::from_str("192.168.1.256").unwrap_err(), + "192.168.1.256".parse::().unwrap_err(), ParseError::InvalidSegment ); assert_eq!( - Ipv4Addr::from_str("192.168.1").unwrap_err(), + "192.168.1".parse::().unwrap_err(), ParseError::NotEnoughSegments ); assert_eq!( - Ipv4Addr::from_str("192.168.1.2.3").unwrap_err(), + "192.168.1.2.3".parse::().unwrap_err(), ParseError::TooManySegments 
); } @@ -645,37 +643,39 @@ mod test { #[test] fn ipv6() { assert_eq!( - Ipv6Addr::from_str("::1").unwrap(), + "::1".parse::().unwrap(), Ipv6Addr([0, 0, 0, 0, 0, 0, 0, 1]) ); assert_eq!( - Ipv6Addr::from_str("ff02::1").unwrap(), + "ff02::1".parse::().unwrap(), Ipv6Addr([0xff02, 0, 0, 0, 0, 0, 0, 1]) ); assert_eq!( - Ipv6Addr::from_str("2001:0db8::").unwrap(), + "2001:0db8::".parse::().unwrap(), Ipv6Addr([0x2001, 0xdb8, 0, 0, 0, 0, 0, 0]) ); assert_eq!( - Ipv6Addr::from_str("2001:0db8:0000:0000:0000:0000:0000:0001").unwrap(), + "2001:0db8:0000:0000:0000:0000:0000:0001" + .parse::() + .unwrap(), Ipv6Addr([0x2001, 0xdb8, 0, 0, 0, 0, 0, 1]) ); assert_eq!( - Ipv6Addr::from_str("2001:0db8::0001").unwrap(), + "2001:0db8::0001".parse::().unwrap(), Ipv6Addr([0x2001, 0xdb8, 0, 0, 0, 0, 0, 1]) ); assert_eq!( - Ipv6Addr::from_str("2001:db8:0:1:2:3:4").unwrap_err(), + "2001:db8:0:1:2:3:4".parse::().unwrap_err(), ParseError::NotEnoughSegments ); assert_eq!( - Ipv6Addr::from_str("2001:db8:0:1:2:3:4:5:6").unwrap_err(), + "2001:db8:0:1:2:3:4:5:6".parse::().unwrap_err(), ParseError::TooManySegments ); } @@ -683,7 +683,7 @@ mod test { #[test] fn socket_v4() { assert_eq!( - SocketAddrV4::from_str("192.168.1.0:9019").unwrap(), + "192.168.1.0:9019".parse::().unwrap(), SocketAddrV4 { ip: Ipv4Addr([192, 168, 1, 0]), port: 9019 @@ -691,12 +691,12 @@ mod test { ); assert_eq!( - SocketAddrV4::from_str("192.168.1.1").unwrap_err(), + "192.168.1.1".parse::().unwrap_err(), ParseError::NoPort ); assert_eq!( - SocketAddrV4::from_str("192.168.1.1:FOO").unwrap_err(), + "192.168.1.1:FOO".parse::().unwrap_err(), ParseError::InvalidPort ); } @@ -704,7 +704,7 @@ mod test { #[test] fn socket_v6() { assert_eq!( - SocketAddrV6::from_str("[2001:0db8::]:9019").unwrap(), + "[2001:0db8::]:9019".parse::().unwrap(), SocketAddrV6 { ip: Ipv6Addr([0x2001, 0xdb8, 0, 0, 0, 0, 0, 0]), port: 9019 @@ -712,12 +712,12 @@ mod test { ); assert_eq!( - SocketAddrV6::from_str("[2001:0db8::]").unwrap_err(), + 
"[2001:0db8::]".parse::().unwrap_err(), ParseError::NoPort ); assert_eq!( - SocketAddrV6::from_str("[2001:0db8::]:FOO").unwrap_err(), + "[2001:0db8::]:FOO".parse::().unwrap_err(), ParseError::InvalidPort ); } @@ -784,7 +784,7 @@ mod test { #[test] fn host() { assert_eq!( - SocketAddrHost::from_str("localhost:9019").unwrap(), + "localhost:9019".parse::().unwrap(), SocketAddrHost { host: "localhost".into(), port: 9019 diff --git a/primitives/src/conststr.rs b/primitives/src/conststr.rs index 1749d62475b..9cc03872fd2 100644 --- a/primitives/src/conststr.rs +++ b/primitives/src/conststr.rs @@ -29,13 +29,14 @@ use serde::{ const MAX_INLINED_STRING_LEN: usize = 2 * size_of::() - 1; /// Immutable inlinable string. +/// /// Strings shorter than 15/7/3 bytes (in 64/32/16-bit architecture) are inlined. /// Union represents const-string variants: inlined or boxed. /// Distinction between variants are achieved by tagging most significant bit of field `len`: /// - for inlined variant MSB of `len` is always equal to 1, it's enforced by `InlinedString` constructor; /// - for boxed variant MSB of `len` is always equal to 0, it's enforced by the fact -/// that `Box` and `Vec` never allocate more than`isize::MAX bytes`. -/// For little-endian 64bit architecture memory layout of [`Self`] is following: +/// that `Box` and `Vec` never allocate more than `isize::MAX` bytes. +/// For little-endian 64bit architecture memory layout of [`Self`] is following: /// /// ```text /// +---------+-------+---------+----------+----------------+ diff --git a/smart_contract/executor/data_model/src/lib.rs b/smart_contract/executor/data_model/src/lib.rs index 2a385a4ccd5..7695f7e384b 100644 --- a/smart_contract/executor/data_model/src/lib.rs +++ b/smart_contract/executor/data_model/src/lib.rs @@ -8,6 +8,7 @@ pub mod parameter; pub mod permission; /// An error that might occur while converting a data model object into a native executor type. 
+/// /// Such objects are [`iroha_data_model::permission::Permission`] and [`iroha_data_model::parameter::Parameter`]. #[derive(Debug)] pub enum TryFromDataModelObjectError { diff --git a/test_samples/Cargo.toml b/test_samples/Cargo.toml index 276af31906f..b24c95e4ebb 100644 --- a/test_samples/Cargo.toml +++ b/test_samples/Cargo.toml @@ -17,7 +17,6 @@ categories.workspace = true iroha_crypto = { workspace = true } iroha_data_model = { workspace = true } -once_cell = { workspace = true } serde = { workspace = true, features = ["derive"] } toml = { workspace = true } diff --git a/test_samples/src/lib.rs b/test_samples/src/lib.rs index d0c362b46be..7a0ea083576 100644 --- a/test_samples/src/lib.rs +++ b/test_samples/src/lib.rs @@ -1,8 +1,8 @@ //! Utility crate for standardized and random signatories. +use std::sync::LazyLock; use iroha_crypto::KeyPair; use iroha_data_model::prelude::AccountId; -use once_cell::sync::Lazy; /// Generate [`AccountId`](iroha_data_model::account::AccountId) in the given `domain`. /// @@ -21,7 +21,7 @@ pub fn gen_account_in(domain: impl core::fmt::Display) -> (AccountId, KeyPair) { macro_rules! declare_keypair { ( $key_pair:ident, $public_key:expr, $private_key:expr ) => { /// A standardized [`KeyPair`](iroha_crypto::KeyPair). - pub static $key_pair: Lazy = Lazy::new(|| { + pub static $key_pair: LazyLock = LazyLock::new(|| { KeyPair::new( $public_key .parse() @@ -38,7 +38,7 @@ macro_rules! declare_keypair { macro_rules! declare_account_with_keypair { ( $account_id:ident, $domain:literal, $key_pair:ident, $public_key:literal, $private_key:literal ) => { /// A standardized [`AccountId`](iroha_data_model::account::AccountId). 
- pub static $account_id: Lazy = Lazy::new(|| { + pub static $account_id: LazyLock = LazyLock::new(|| { format!("{}@{}", $key_pair.public_key(), $domain) .parse() .expect("domain and public_key should be valid as name and multihash, respectively") diff --git a/tools/parity_scale_cli/src/main.rs b/tools/parity_scale_cli/src/main.rs index c68cb4c8589..376e3b63280 100644 --- a/tools/parity_scale_cli/src/main.rs +++ b/tools/parity_scale_cli/src/main.rs @@ -303,9 +303,7 @@ fn list_types(map: &ConverterMap, writer: &mut W) -> Result<()> { #[cfg(test)] mod tests { - use std::str::FromStr as _; - - use iroha_data_model::{ipfs::IpfsPath, prelude::*}; + use iroha_data_model::prelude::*; use test_samples::ALICE_ID; use super::*; @@ -328,7 +326,8 @@ mod tests { metadata.insert("Is_Jabberwocky_alive".parse().expect("Valid"), true); let domain = Domain::new("wonderland".parse().expect("Valid")) .with_logo( - IpfsPath::from_str("/ipfs/Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu") + "/ipfs/Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu" + .parse() .expect("Valid"), ) .with_metadata(metadata); diff --git a/wasm_builder/src/lib.rs b/wasm_builder/src/lib.rs index cf0408817d7..a39bf65336e 100644 --- a/wasm_builder/src/lib.rs +++ b/wasm_builder/src/lib.rs @@ -254,8 +254,6 @@ mod internal { } fn retrieve_package_name(&self) -> Result { - use std::str::FromStr as _; - let manifest_output = cargo_command() .current_dir(&self.absolute_path) .arg("read-manifest") @@ -267,10 +265,11 @@ mod internal { let manifest = String::from_utf8(manifest_output.stdout) .wrap_err("Failed to convert `cargo read-manifest` output to string")?; - serde_json::Value::from_str(&manifest) + manifest + .parse::() .wrap_err("Failed to parse `cargo read-manifest` output")? 
.get("name") - .map(serde_json::Value::to_string) + .map(ToString::to_string) .map(|name| name.trim_matches('"').to_owned()) .ok_or_else(|| { eyre!("Failed to retrieve package name from `cargo read-manifest` output") diff --git a/wasm_codec/derive/Cargo.toml b/wasm_codec/derive/Cargo.toml index 13c66ba9e6c..700448e0124 100644 --- a/wasm_codec/derive/Cargo.toml +++ b/wasm_codec/derive/Cargo.toml @@ -16,6 +16,5 @@ proc-macro = true syn = { workspace = true } quote = { workspace = true } proc-macro2 = { workspace = true } -once_cell = { workspace = true } manyhow = { workspace = true } iroha_macro_utils = { workspace = true } diff --git a/wasm_codec/derive/src/lib.rs b/wasm_codec/derive/src/lib.rs index 55c7f226afe..5d0356e7b48 100644 --- a/wasm_codec/derive/src/lib.rs +++ b/wasm_codec/derive/src/lib.rs @@ -245,11 +245,11 @@ fn gen_output( /// [`TokenStream`] wrapper which will be lazily evaluated /// /// Implements [`quote::ToTokens`] trait -struct LazyTokenStream(once_cell::unsync::Lazy); +struct LazyTokenStream(core::cell::LazyCell); impl TokenStream> LazyTokenStream { pub fn new(f: F) -> Self { - Self(once_cell::unsync::Lazy::new(f)) + Self(core::cell::LazyCell::new(f)) } } diff --git a/wasm_samples/mint_rose_trigger/src/lib.rs b/wasm_samples/mint_rose_trigger/src/lib.rs index b88da469aef..c2d89e45734 100644 --- a/wasm_samples/mint_rose_trigger/src/lib.rs +++ b/wasm_samples/mint_rose_trigger/src/lib.rs @@ -5,8 +5,6 @@ #[cfg(not(test))] extern crate panic_halt; -use core::str::FromStr as _; - use dlmalloc::GlobalDlmalloc; use iroha_trigger::{prelude::*, smart_contract::query_single}; @@ -18,8 +16,7 @@ getrandom::register_custom_getrandom!(iroha_trigger::stub_getrandom); /// Mint 1 rose for owner #[iroha_trigger::main] fn main(id: TriggerId, owner: AccountId, _event: EventBox) { - let rose_definition_id = AssetDefinitionId::from_str("rose#wonderland") - .dbg_expect("Failed to parse `rose#wonderland` asset definition id"); + let rose_definition_id = 
"rose#wonderland".parse().unwrap(); let rose_id = AssetId::new(rose_definition_id, owner); let val: u32 = query_single(FindTriggerMetadata::new(id, "VAL".parse().unwrap())) diff --git a/wasm_samples/mint_rose_trigger_args/src/lib.rs b/wasm_samples/mint_rose_trigger_args/src/lib.rs index 38f6374fa88..c9999ea2069 100644 --- a/wasm_samples/mint_rose_trigger_args/src/lib.rs +++ b/wasm_samples/mint_rose_trigger_args/src/lib.rs @@ -5,8 +5,6 @@ #[cfg(not(test))] extern crate panic_halt; -use core::str::FromStr as _; - use dlmalloc::GlobalDlmalloc; use executor_custom_data_model::mint_rose_args::MintRoseArgs; use iroha_trigger::{debug::dbg_panic, prelude::*}; @@ -19,8 +17,7 @@ getrandom::register_custom_getrandom!(iroha_trigger::stub_getrandom); /// Mint 1 rose for owner #[iroha_trigger::main] fn main(_id: TriggerId, owner: AccountId, event: EventBox) { - let rose_definition_id = AssetDefinitionId::from_str("rose#wonderland") - .dbg_expect("Failed to parse `rose#wonderland` asset definition id"); + let rose_definition_id = "rose#wonderland".parse().unwrap(); let rose_id = AssetId::new(rose_definition_id, owner); let args: MintRoseArgs = match event {