diff --git a/.github/workflows/iroha2-dev-pr-label.yml b/.github/workflows/iroha2-dev-pr-label.yml index cbf581bd692..4eb18953a92 100644 --- a/.github/workflows/iroha2-dev-pr-label.yml +++ b/.github/workflows/iroha2-dev-pr-label.yml @@ -5,7 +5,6 @@ on: branches: [iroha-dev] paths: - 'docs/source/references/schema.json' - - 'docs/source/references/config.md' jobs: api-changes: @@ -30,7 +29,7 @@ jobs: continue-on-error: true id: config_label - uses: actions-ecosystem/action-add-labels@v1 - if: contains(steps.config_label.outputs.added_modified, 'docs/source/references/config.md') + if: contains(steps.config_label.outputs.added_modified, 'docs/source/references/schema.json') with: github_token: ${{ secrets.github_token }} labels: | diff --git a/.github/workflows/iroha2-dev-pr.yml b/.github/workflows/iroha2-dev-pr.yml index b78ede1a64a..9ba479df65d 100644 --- a/.github/workflows/iroha2-dev-pr.yml +++ b/.github/workflows/iroha2-dev-pr.yml @@ -25,9 +25,6 @@ jobs: steps: - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - - name: Check config.md - if: always() - run: ./scripts/tests/consistency.sh docs - name: Check genesis.json if: always() run: ./scripts/tests/consistency.sh genesis diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2fc9c77a9be..74bafed0f92 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,14 +18,13 @@ New to our project? [Make your first contribution](#your-first-code-contribution ### TL;DR -* Find [ZenHub](https://app.zenhub.com/workspaces/iroha-v2-60ddb820813b9100181fc060/board?repos=181739240). -* Fork [Iroha](https://github.com/hyperledger/iroha/tree/iroha2-dev). -* Fix your issue of choice. -* Ensure you follow our [style guides](#style-guides) for code and documentation. -* Write [tests](https://doc.rust-lang.org/cargo/commands/cargo-test.html). Ensure they all pass (`cargo test`). -* Fix [`clippy`](https://lib.rs/crates/cargo-lints) warnings: `cargo lints clippy --workspace --benches --tests --examples --all-features`. -* Format code `cargo +nightly fmt --all` and generate docs `cargo run --bin kagami -- docs >"docs/source/references/config.md" && git add "docs/source/references/config.md"`. -* With the `upstream` set to track [Hyperledger Iroha repository](https://github.com/hyperledger/iroha), `git pull -r upstream iroha2-dev`, `git commit -s`, `git push `, and [create a pull request](https://github.com/hyperledger/iroha/compare) to the `iroha2-dev` branch. Ensure the PR has the `[type] #: Description` [title](#pull-request-titles). +- Find [ZenHub](https://app.zenhub.com/workspaces/iroha-v2-60ddb820813b9100181fc060/board?repos=181739240). +- Fork [Iroha](https://github.com/hyperledger/iroha/tree/iroha2-dev). +- Fix your issue of choice. +- Ensure you follow our [style guides](#style-guides) for code and documentation. +- Write [tests](https://doc.rust-lang.org/cargo/commands/cargo-test.html). Ensure they all pass (`cargo test --workspace`). +- Perform the pre-commit routine, such as formatting and artifact regeneration (see [`pre-commit.sample`](./hooks/pre-commit.sample)). +- With the `upstream` remote set to track the [Hyperledger Iroha repository](https://github.com/hyperledger/iroha), `git pull -r upstream iroha2-dev`, `git commit -s`, `git push`, and [create a pull request](https://github.com/hyperledger/iroha/compare) to the `iroha2-dev` branch. Ensure the PR has the `[type] #: Description` [title](#pull-request-titles). ### Reporting Bugs @@ -231,7 +230,7 @@ Follow these commit guidelines:
Expand to learn how to change the log level or write logs to a JSON. -If one of your tests is failing, you may want to decrease the maximum logging level. By default, Iroha only logs `INFO` level messages, but retains the ability to produce both `DEBUG` and `TRACE` level logs. This setting can be changed either using the `MAX_LOG_LEVEL` environment variable for code-based tests, or using the `/configuration` endpoint on one of the peers in a deployed network. +If one of your tests is failing, you may want to decrease the maximum logging level. By default, Iroha only logs `INFO` level messages, but retains the ability to produce both `DEBUG` and `TRACE` level logs. This setting can be changed either using the `LOG_LEVEL` environment variable for code-based tests, or using the `/configuration` endpoint on one of the peers in a deployed network. While logs printed in the `stdout` are sufficient, you may find it more convenient to produce `json`-formatted logs into a separate file and parse them using either [node-bunyan](https://www.npmjs.com/package/bunyan) or [rust-bunyan](https://crates.io/crates/bunyan). @@ -251,8 +250,8 @@ In this case you should compile iroha with support of tokio console like that: RUSTFLAGS="--cfg tokio_unstable" cargo build --features tokio-console ``` -Port for tokio console can by configured through `TOKIO_CONSOLE_ADDR` configuration parameter (or environment variable). -Using tokio console require log level to be `TRACE`, can be enabled through configuration parameter or environment variable `MAX_LOG_LEVEL`. +The port for tokio console can be configured through the `LOG_TOKIO_CONSOLE_ADDR` configuration parameter (or environment variable). +Using tokio console requires the log level to be `TRACE`, which can be enabled through the configuration parameter or the `LOG_LEVEL` environment variable. Example of running iroha with tokio console support using `scripts/test_env.sh`: @@ -260,7 +259,7 @@ Example of running iroha with tokio console support using `scripts/test_env.sh`: # 1. Compile iroha RUSTFLAGS="--cfg tokio_unstable" cargo build --features tokio-console # 2. Run iroha with TRACE log level -MAX_LOG_LEVEL=TRACE ./scripts/test_env.sh setup +LOG_LEVEL=TRACE ./scripts/test_env.sh setup # 3. Access iroha. Peers will be available on ports 5555, 5556, ... 
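#    Sketch: per the note above, the console address comes from LOG_TOKIO_CONSOLE_ADDR,
#    so it could be overridden the same way as LOG_LEVEL, e.g.
#    LOG_TOKIO_CONSOLE_ADDR=127.0.0.1:6669 LOG_LEVEL=TRACE ./scripts/test_env.sh setup
#    (assumptions: test_env.sh forwards the variable to each peer; 6669 is an arbitrary port).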
tokio-console http://127.0.0.1:5555 ``` diff --git a/Cargo.lock b/Cargo.lock index 9dadaa730bd..fb0b976dace 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -35,14 +35,14 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", - "getrandom 0.2.10", "once_cell", "version_check", + "zerocopy", ] [[package]] @@ -102,9 +102,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44" +checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" dependencies = [ "anstyle", "anstyle-parse", @@ -122,30 +122,30 @@ checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317b9a89c1868f5ea6ff1d9539a69f45dffc21ce321ac1fd1160dfa48c8e2140" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" dependencies = [ "anstyle", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -197,18 +197,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "async-trait" -version = "0.1.73" +version = "0.1.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" +checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -223,7 +223,7 @@ dependencies = [ "rustls", "rustls-native-certs", "url", - "webpki-roots 0.25.2", + "webpki-roots 0.25.3", ] [[package]] @@ -311,15 +311,9 @@ checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" [[package]] name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "base64" -version = "0.21.4" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" +checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" [[package]] name = "base64ct" @@ -329,9 +323,9 @@ checksum = 
"8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "basic-toml" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bfc506e7a2370ec239e1d072507b2a80c833083699d3c6fa176fbb4de8448c6" +checksum = "2f2139706359229bfa8f19142ac1155b4b80beafb7a60471ac5dd109d4a19778" dependencies = [ "serde", ] @@ -368,9 +362,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" [[package]] name = "bitvec" @@ -425,12 +419,12 @@ dependencies = [ [[package]] name = "bstr" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c79ad7fb2dd38f3dabd76b09c6a5a20c038fc0213ef1e9afd30eb777f120f019" +checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" dependencies = [ "memchr", - "regex-automata 0.4.1", + "regex-automata 0.4.3", "serde", ] @@ -483,15 +477,6 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" -[[package]] -name = "c2-chacha" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "217192c943108d8b13bac38a1d51df9ce8a407a3f5a71ab633980665e68fbd9a" -dependencies = [ - "ppv-lite86", -] - [[package]] name = "cast" version = "0.3.0" @@ -609,23 +594,23 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.6" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d04704f56c2cde07f43e8e2c154b43f216dc5c92fc98ada720177362f953b956" +checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" dependencies = [ "clap_builder", - "clap_derive 4.4.2", + "clap_derive 4.4.7", ] [[package]] name = "clap_builder" -version = "4.4.6" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e231faeaca65ebd1ea3c737966bf858971cd38c3849107aa3ea7de90a804e45" +checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" dependencies = [ "anstream", "anstyle", - "clap_lex 0.5.1", + "clap_lex 0.6.0", "strsim", ] @@ -644,14 +629,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.4.2" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -665,9 +650,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "clru" @@ -702,9 +687,9 @@ dependencies = [ [[package]] name = "color-spantrace" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" +checksum = 
"cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" dependencies = [ "once_cell", "owo-colors", @@ -720,11 +705,10 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "colored" -version = "2.0.4" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6" +checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" dependencies = [ - "is-terminal", "lazy_static", "windows-sys 0.48.0", ] @@ -787,9 +771,9 @@ checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -797,9 +781,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "core2" @@ -821,27 +805,27 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" dependencies = [ "libc", ] [[package]] name = "cranelift-bforest" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76eb38f2af690b5a4411d9a8782b6d77dabff3ca939e0518453ab9f9a4392d41" +checksum = "8e7e56668d2263f92b691cb9e4a2fcb186ca0384941fe420484322fa559c3329" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-codegen" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39526c036b92912417e8931f52c1e235796688068d3efdbbd8b164f299d19156" +checksum = "2a9ff61938bf11615f55b80361288c68865318025632ea73c65c0b44fa16283c" dependencies = [ "bumpalo", "cranelift-bforest", @@ -851,7 +835,7 @@ dependencies = [ "cranelift-entity", "cranelift-isle", "gimli", - "hashbrown 0.14.1", + "hashbrown 0.14.3", "log", "regalloc2", "smallvec", @@ -860,33 +844,33 @@ dependencies = [ [[package]] name = "cranelift-codegen-meta" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb0deedc9fccf2db53a5a3c9c9d0163e44143b0d004dca9bf6ab6a0024cd79a" +checksum = "50656bf19e3d4a153b404ff835b8b59e924cfa3682ebe0d3df408994f37983f6" dependencies = [ "cranelift-codegen-shared", ] [[package]] name = "cranelift-codegen-shared" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea2d1b274e45aa8e61e9103efa1ba82d4b5a19d12bd1fd10744c3b7380ba3ff" +checksum = "388041deeb26109f1ea73c1812ea26bfd406c94cbce0bb5230aa44277e43b209" [[package]] name = "cranelift-control" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea5977559a71e63db79a263f0e81a89b996e8a38212c4281e37dd1dbaa8b65c" +checksum = 
"b39b7c512ffac527e5b5df9beae3d67ab85d07dca6d88942c16195439fedd1d3" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f871ada808b58158d84dfc43a6a2e2d2756baaf4ed1c51fd969ca8330e6ca5c" +checksum = "fdb25f573701284fe2bcf88209d405342125df00764b396c923e11eafc94d892" dependencies = [ "serde", "serde_derive", @@ -894,9 +878,9 @@ dependencies = [ [[package]] name = "cranelift-frontend" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8e6890f587ef59824b3debe577e68fdf9b307b3808c54b8d93a18fd0b70941b" +checksum = "e57374fd11d72cf9ffb85ff64506ed831440818318f58d09f45b4185e5e9c376" dependencies = [ "cranelift-codegen", "log", @@ -906,15 +890,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d5fc6d5d3b52d1917002b17a8ecce448c2621b5bf394bb4e77e2f676893537" +checksum = "ae769b235f6ea2f86623a3ff157cc04a4ff131dc9fe782c2ebd35f272043581e" [[package]] name = "cranelift-native" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e10c2e7faa65d4ae7de9a83b44f2c31aca7dc638e17d0a79572fdf8103d720b" +checksum = "3dc7bfb8f13a0526fe20db338711d9354729b861c336978380bb10f7f17dd207" dependencies = [ "cranelift-codegen", "libc", @@ -923,9 +907,9 @@ dependencies = [ [[package]] name = "cranelift-wasm" -version = "0.102.0" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2755807efc7ec80d1cc0b6815e70f10cedf968889f0469091dbff9c5c0741c48" +checksum = "2c5f41a4af931b756be05af0dd374ce200aae2d52cea16b0beb07e8b52732c35" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -955,7 +939,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.4.6", + "clap 4.4.11", "criterion-plot", "is-terminal", "itertools 0.10.5", @@ -998,9 +982,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "14c3242926edf34aec4ac3a77108ad4854bffaa2e4ddc1824124ce59231302d5" dependencies = [ "cfg-if", "crossbeam-utils", @@ -1008,9 +992,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +checksum = "fca89a0e215bab21874660c67903c5f143333cab1da83d041c7ded6053774751" dependencies = [ "cfg-if", "crossbeam-epoch", @@ -1019,22 +1003,21 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.15" +version = "0.9.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +checksum = "2d2fe95351b870527a5d09bf563ed3c97c0cffb87cf1c78a591bf48bb218d9aa" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", "memoffset", - "scopeguard", ] [[package]] name = "crossbeam-queue" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +checksum = 
"b9bcf5bdbfdd6030fb4a1c497b5d5fc5921aa2f60d359a17e249c0e6df3de153" dependencies = [ "cfg-if", "crossbeam-utils", @@ -1042,9 +1025,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +checksum = "c06d96137f14f244c37f989d9fff8f95e6c18b918e71f36638f8c49112e4c78f" dependencies = [ "cfg-if", ] @@ -1076,9 +1059,9 @@ dependencies = [ [[package]] name = "crypto-bigint" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "740fe28e594155f10cfc383984cbefd529d7396050557148f79cb0f621204124" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array 0.14.7", "rand_core 0.6.4", @@ -1122,14 +1105,14 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "cxx" -version = "1.0.108" +version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "292b4841d939b20ba44fff686a35808b0ab31a3256e3629917d9aedd43eb7b3a" +checksum = "7129e341034ecb940c9072817cd9007974ea696844fc4dd582dc1653a7fbe2e8" dependencies = [ "cc", "cxxbridge-flags", @@ -1139,9 +1122,9 @@ dependencies = [ [[package]] name = "cxx-build" -version = "1.0.108" +version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e7e35cf85fd4e90dcaba251f3ee95e08fb6f9d66e5c0588816f16a6ab939b40" +checksum = "a2a24f3f5f8eed71936f21e570436f024f5c2e25628f7496aa7ccd03b90109d5" dependencies = [ "cc", "codespan-reporting", @@ -1149,24 +1132,24 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "cxxbridge-flags" -version = "1.0.108" +version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7030aff1908ba2b7eb639466df50792b2a3fdf02bea9557c4ee1a531975554b" +checksum = "06fdd177fc61050d63f67f5bd6351fac6ab5526694ea8e359cd9cd3b75857f44" [[package]] name = "cxxbridge-macro" -version = "1.0.108" +version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79418ecb0c2322a7926a5fa5a9660535432b5b3588b947e1eb484cc509edbe3c" +checksum = "587663dd5fb3d10932c8aecfe7c844db1bcf0aee93eeab08fac13dc1212c2e7f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -1190,7 +1173,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -1201,7 +1184,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -1217,7 +1200,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown 0.14.1", + "hashbrown 0.14.3", "lock_api", "once_cell", "parking_lot_core", @@ -1225,9 +1208,9 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" [[package]] name = "debugid" @@ -1250,9 +1233,12 @@ dependencies 
= [ [[package]] name = "deranged" -version = "0.3.8" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc" +dependencies = [ + "powerfmt", +] [[package]] name = "derive_more" @@ -1326,7 +1312,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -1349,15 +1335,15 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "dyn-clone" -version = "1.0.14" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d2f3407d9a573d666de4b5bdf10569d73ca9478087346697dcbae6244bfbcd" +checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" [[package]] name = "ecdsa" -version = "0.16.8" +version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der", "digest 0.10.7", @@ -1379,15 +1365,16 @@ dependencies = [ [[package]] name = "ed25519-dalek" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" +checksum = "1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0" dependencies = [ "curve25519-dalek", "ed25519", "rand_core 0.6.4", "serde", "sha2", + "subtle", "zeroize", ] @@ -1399,9 +1386,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "elliptic-curve" -version = "0.13.6" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97ca172ae9dc9f9b779a6e3a65d308f2af74e5b8c921299075bdb4a0370e914" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", @@ -1448,12 +1435,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.5" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1468,9 +1455,9 @@ dependencies = [ [[package]] name = "eyre" -version = "0.6.8" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +checksum = "b6267a1fa6f59179ea4afc8e50fd8612a3cc60bc858f786ff877a4a8cb042799" dependencies = [ "indenter", "once_cell", @@ -1484,9 +1471,9 @@ checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] name = "faster-hex" -version = "0.8.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "239f7bfb930f820ab16a9cd95afc26f88264cf6905c960b340a615384aa3338a" +checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" dependencies = [ "serde", ] @@ -1509,20 +1496,20 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "a481586acf778f1b1455424c343f71124b048ffa5f4fc3f8f6ae9dc432dcb3c7" +checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" [[package]] name = "filetime" -version = "0.2.22" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "redox_syscall", + "windows-sys 0.52.0", ] [[package]] @@ -1547,9 +1534,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.27" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "miniz_oxide", @@ -1578,9 +1565,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -1593,9 +1580,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" dependencies = [ "futures-channel", "futures-core", @@ -1608,9 +1595,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb" dependencies = [ "futures-core", "futures-sink", @@ -1618,15 +1605,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" dependencies = [ "futures-core", "futures-task", @@ -1635,38 +1622,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" +checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa" [[package]] name = "futures-macro" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +checksum = 
"53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "futures-sink" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" +checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" [[package]] name = "futures-task" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2" [[package]] name = "futures-util" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104" dependencies = [ "futures-channel", "futures-core", @@ -1695,7 +1682,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27d12c0aed7f1e24276a241aadc4cb8ea9f83000f34bc062b7cc2d51e3b0fabd" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "debugid", "fxhash", "serde", @@ -1722,16 +1709,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "gethostname" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "getrandom" version = "0.1.16" @@ -1745,9 +1722,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", "libc", @@ -1768,20 +1745,20 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" dependencies = [ "fallible-iterator", - "indexmap 2.0.2", + "indexmap 2.1.0", "stable_deref_trait", ] [[package]] name = "gix" -version = "0.53.1" +version = "0.55.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06a8c9f9452078f474fecd2880de84819b8c77224ab62273275b646bf785f906" +checksum = "002667cd1ebb789313d0d0afe3d23b2821cf3b0e91605095f0e6d8751f0ceeea" dependencies = [ "gix-actor", "gix-commitgraph", @@ -1822,9 +1799,9 @@ dependencies = [ [[package]] name = "gix-actor" -version = "0.26.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8c6778cc03bca978b2575a03e04e5ba6f430a9dd9b0f1259f0a8a9a5e5cc66" +checksum = "2eadca029ef716b4378f7afb19f7ee101fde9e58ba1f1445971315ac866db417" dependencies = [ "bstr", "btoi", @@ -1836,41 +1813,41 @@ dependencies = [ [[package]] name = "gix-bitmap" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ccab4bc576844ddb51b78d81b4a42d73e6229660fa614dfc3d3999c874d1959" +checksum = "d49e1a13a30d3f88be4bceae184dd13a2d3fb9ffa7515f7ed7ae771b857f4916" dependencies = 
[ "thiserror", ] [[package]] name = "gix-chunk" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b42ea64420f7994000130328f3c7a2038f639120518870436d31b8bde704493" +checksum = "d411ecd9b558b0c20b3252b7e409eec48eabc41d18324954fe526bac6e2db55f" dependencies = [ "thiserror", ] [[package]] name = "gix-commitgraph" -version = "0.20.0" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4676ede3a7d37e7028e2889830349a6aca22efc1d2f2dd9fa3351c1a8ddb0c6a" +checksum = "85a7007ba021f059803afaf6f8a48872422abc20550ac12ede6ddea2936cec36" dependencies = [ "bstr", "gix-chunk", "gix-features", "gix-hash", - "memmap2", + "memmap2 0.9.0", "thiserror", ] [[package]] name = "gix-config" -version = "0.29.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1108c4ac88248dd25cc8ab0d0dae796e619fb72d92f88e30e00b29d61bb93cc4" +checksum = "5cae98c6b4c66c09379bc35274b172587d6b0ac369a416c39128ad8c6454f9bb" dependencies = [ "bstr", "gix-config-value", @@ -1889,11 +1866,11 @@ dependencies = [ [[package]] name = "gix-config-value" -version = "0.14.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea7505b97f4d8e7933e29735a568ba2f86d8de466669d9f0e8321384f9972f47" +checksum = "6419db582ea84dfb58c7e7b0af7fd62c808aa14954af2936a33f89b0f4ed018e" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "bstr", "gix-path", "libc", @@ -1902,9 +1879,9 @@ dependencies = [ [[package]] name = "gix-date" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7df669639582dc7c02737642f76890b03b5544e141caba68a7d6b4eb551e0d" +checksum = "468dfbe411f335f01525a1352271727f8e7772075a93fa747260f502086b30be" dependencies = [ "bstr", "itoa", @@ -1914,9 +1891,9 @@ dependencies = [ [[package]] name = "gix-diff" -version = "0.35.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45e342d148373bd9070d557e6fb1280aeae29a3e05e32506682d027278501eb" +checksum = "931394f69fb8c9ed6afc0aae3487bd869e936339bcc13ed8884472af072e0554" dependencies = [ "gix-hash", "gix-object", @@ -1925,9 +1902,9 @@ dependencies = [ [[package]] name = "gix-discover" -version = "0.24.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da4cacda5ee9dd1b38b0e2506834e40e66c08cf050ef55c344334c76745f277b" +checksum = "a45d5cf0321178883e38705ab2b098f625d609a7d4c391b33ac952eff2c490f2" dependencies = [ "bstr", "dunce", @@ -1940,9 +1917,9 @@ dependencies = [ [[package]] name = "gix-features" -version = "0.34.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f414c99e1a7abc69b21f3225a6539d203b0513f1d1d448607c4ea81cdcf9ee59" +checksum = "4d46a4a5c6bb5bebec9c0d18b65ada20e6517dbd7cf855b87dd4bbdce3a771b2" dependencies = [ "crc32fast", "flate2", @@ -1958,20 +1935,20 @@ dependencies = [ [[package]] name = "gix-fs" -version = "0.6.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "404795da3d4c660c9ab6c3b2ad76d459636d1e1e4b37b0c7ff68eee898c298d4" +checksum = "20e86eb040f5776a5ade092282e51cdcad398adb77d948b88d17583c2ae4e107" dependencies = [ "gix-features", ] [[package]] name = "gix-glob" -version = "0.12.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e3ac79c444193b0660fe0c0925d338bd338bd643e32138784dccfb12c628b892" +checksum = "5db19298c5eeea2961e5b3bf190767a2d1f09b8802aeb5f258e42276350aff19" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "bstr", "gix-features", "gix-path", @@ -1979,9 +1956,9 @@ dependencies = [ [[package]] name = "gix-hash" -version = "0.13.0" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ccf425543779cddaa4a7c62aba3fa9d90ea135b160be0a72dd93c063121ad4a" +checksum = "1f8cf8c2266f63e582b7eb206799b63aa5fa68ee510ad349f637dfe2d0653de0" dependencies = [ "faster-hex", "thiserror", @@ -1989,22 +1966,22 @@ dependencies = [ [[package]] name = "gix-hashtable" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "409268480841ad008e81c17ca5a293393fbf9f2b6c2f85b8ab9de1f0c5176a16" +checksum = "feb61880816d7ec4f0b20606b498147d480860ddd9133ba542628df2f548d3ca" dependencies = [ "gix-hash", - "hashbrown 0.14.1", + "hashbrown 0.14.3", "parking_lot", ] [[package]] name = "gix-index" -version = "0.24.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e9599fc30b3d6aad231687a403f85dfa36ae37ccf1b68ee1f621ad5b7fc7a0d" +checksum = "c83a4fcc121b2f2e109088f677f89f85e7a8ebf39e8e6659c0ae54d4283b1650" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "bstr", "btoi", "filetime", @@ -2016,16 +1993,16 @@ dependencies = [ "gix-object", "gix-traverse", "itoa", - "memmap2", + "memmap2 0.7.1", "smallvec", "thiserror", ] [[package]] name = "gix-lock" -version = "9.0.0" +version = "11.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1568c3d90594c60d52670f325f5db88c2d572e85c8dd45fabc23d91cadb0fd52" +checksum = "7e5c65e6a29830a435664891ced3f3c1af010f14900226019590ee0971a22f37" dependencies = [ "gix-tempfile", "gix-utils", @@ -2034,20 +2011,20 @@ dependencies = [ [[package]] name = "gix-macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d8acb5ee668d55f0f2d19a320a3f9ef67a6999ad483e11135abcc2464ed18b6" +checksum = "02a5bcaf6704d9354a3071cede7e77d366a5980c7352e102e2c2f9b645b1d3ae" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "gix-object" -version = "0.36.0" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e5528d5b2c984044d547e696e44a8c45fa122e83cd8c2ac1da69bd474336be8" +checksum = "740f2a44267f58770a1cb3a3d01d14e67b089c7136c48d4bddbb3cfd2bf86a51" dependencies = [ "bstr", "btoi", @@ -2064,9 +2041,9 @@ dependencies = [ [[package]] name = "gix-odb" -version = "0.52.0" +version = "0.54.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0446eca295459deb3d6dd6ed7d44a631479f1b7381d8087166605c7a9f717c6" +checksum = "8630b56cb80d8fa684d383dad006a66401ee8314e12fbf0e566ddad8c115143b" dependencies = [ "arc-swap", "gix-date", @@ -2083,9 +2060,9 @@ dependencies = [ [[package]] name = "gix-pack" -version = "0.42.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be19ee650300d7cbac5829b637685ec44a8d921a7c2eaff8a245d8f2f008870c" +checksum = "1431ba2e30deff1405920693d54ab231c88d7c240dd6ccc936ee223d8f8697c3" dependencies = [ "clru", "gix-chunk", @@ -2095,7 +2072,7 @@ dependencies = [ "gix-object", "gix-path", "gix-tempfile", - "memmap2", + "memmap2 0.7.1", "parking_lot", "smallvec", "thiserror", @@ -2103,9 +2080,9 @@ 
dependencies = [ [[package]] name = "gix-path" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a1d370115171e3ae03c5c6d4f7d096f2981a40ddccb98dfd704c773530ba73b" +checksum = "d86d6fac2fabe07b67b7835f46d07571f68b11aa1aaecae94fe722ea4ef305e1" dependencies = [ "bstr", "gix-trace", @@ -2116,9 +2093,9 @@ dependencies = [ [[package]] name = "gix-quote" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "475c86a97dd0127ba4465fbb239abac9ea10e68301470c9791a6dd5351cdc905" +checksum = "4f84845efa535468bc79c5a87b9d29219f1da0313c8ecf0365a5daa7e72786f2" dependencies = [ "bstr", "btoi", @@ -2127,9 +2104,9 @@ dependencies = [ [[package]] name = "gix-ref" -version = "0.36.0" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cccbfa8d5cd9b86465f27a521e0c017de54b92d9fd37c143e49c658a2f04f3a" +checksum = "0ec2f6d07ac88d2fb8007ee3fa3e801856fb9d82e7366ec0ca332eb2c9d74a52" dependencies = [ "gix-actor", "gix-date", @@ -2141,16 +2118,16 @@ dependencies = [ "gix-path", "gix-tempfile", "gix-validate", - "memmap2", + "memmap2 0.7.1", "thiserror", "winnow", ] [[package]] name = "gix-refspec" -version = "0.17.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678ba30d95baa5462df9875628ed40655d5f5b8aba7028de86ed57f36e762c6c" +checksum = "ccb0974cc41dbdb43a180c7f67aa481e1c1e160fcfa8f4a55291fd1126c1a6e7" dependencies = [ "bstr", "gix-hash", @@ -2162,9 +2139,9 @@ dependencies = [ [[package]] name = "gix-revision" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3e80a5992ae446fe1745dd26523b86084e3f1b6b3e35377fe09b4f35ac8f151" +checksum = "2ca97ac73459a7f3766aa4a5638a6e37d56d4c7962bc1986fbaf4883d0772588" dependencies = [ "bstr", "gix-date", @@ -2178,9 +2155,9 @@ dependencies = [ [[package]] name = "gix-revwalk" -version = "0.7.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b806349bc1f668e09035800e07ac8045da4e39a8925a245d93142c4802224ec1" +checksum = "a16d8c892e4cd676d86f0265bf9d40cefd73d8d94f86b213b8b77d50e77efae0" dependencies = [ "gix-commitgraph", "gix-date", @@ -2193,11 +2170,11 @@ dependencies = [ [[package]] name = "gix-sec" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92b9542ac025a8c02ed5d17b3fc031a111a384e859d0be3532ec4d58c40a0f28" +checksum = "a36ea2c5907d64a9b4b5d3cc9f430e6c30f0509646b5e38eb275ca57c5bf29e2" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "gix-path", "libc", "windows", @@ -2205,9 +2182,9 @@ dependencies = [ [[package]] name = "gix-tempfile" -version = "9.0.0" +version = "11.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2762b91ff95e27ff3ea95758c0d4efacd7435a1be3629622928b8276de0f72a8" +checksum = "388dd29114a86ec69b28d1e26d6d63a662300ecf61ab3f4cc578f7d7dc9e7e23" dependencies = [ "gix-fs", "libc", @@ -2220,15 +2197,15 @@ dependencies = [ [[package]] name = "gix-trace" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96b6d623a1152c3facb79067d6e2ecdae48130030cf27d6eb21109f13bd7b836" +checksum = "b686a35799b53a9825575ca3f06481d0a053a409c4d97ffcf5ddd67a8760b497" [[package]] name = "gix-traverse" -version = "0.32.0" +version = "0.34.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ec6358f8373fb018af8fc96c9d2ec6a5b66999e2377dc40b7801351fec409ed" +checksum = "14d050ec7d4e1bb76abf0636cf4104fb915b70e54e3ced9a4427c999100ff38a" dependencies = [ "gix-commitgraph", "gix-date", @@ -2242,9 +2219,9 @@ dependencies = [ [[package]] name = "gix-url" -version = "0.23.0" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c79d595b99a6c7ab274f3c991735a0c0f5a816a3da460f513c48edf1c7bf2cc" +checksum = "0c427a1a11ccfa53a4a2da47d9442c2241deee63a154bc15cc14b8312fbc4005" dependencies = [ "bstr", "gix-features", @@ -2256,18 +2233,18 @@ dependencies = [ [[package]] name = "gix-utils" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b85d89dc728613e26e0ed952a19583744e7f5240fcd4aa30d6c824ffd8b52f0f" +checksum = "9f82c41937f00e15a1f6cb0b55307f0ca1f77f4407ff2bf440be35aa688c6a3e" dependencies = [ "fastrand", ] [[package]] name = "gix-validate" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e05cab2b03a45b866156e052aa38619f4ece4adcb2f79978bfc249bc3b21b8c5" +checksum = "75b7d8e4274be69f284bbc7e6bb2ccf7065dbcdeba22d8c549f2451ae426883f" dependencies = [ "bstr", "thiserror", @@ -2292,9 +2269,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.21" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" dependencies = [ "bytes", "fnv", @@ -2302,7 +2279,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 1.9.3", + "indexmap 2.1.0", "slab", "tokio", "tokio-util", @@ -2332,20 +2309,20 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.1" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ "ahash", ] [[package]] name = "hdrhistogram" -version = "7.5.2" +version = "7.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f19b9f54f7c7f55e31401bb647626ce0cf0f67b0004982ce815b3ee72a02aa8" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" dependencies = [ - "base64 0.13.1", + "base64", "byteorder", "flate2", "nom", @@ -2358,7 +2335,7 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ - "base64 0.21.4", + "base64", "bytes", "headers-core", "http", @@ -2414,9 +2391,9 @@ checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" [[package]] name = "hkdf" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac", ] @@ -2441,9 +2418,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = 
"8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" dependencies = [ "bytes", "fnv", @@ -2452,9 +2429,9 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", "http", @@ -2496,7 +2473,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.9", + "socket2 0.4.10", "tokio", "tower-service", "tracing", @@ -2517,16 +2494,16 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.57" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows", + "windows-core", ] [[package]] @@ -2552,9 +2529,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -2595,12 +2572,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.1", + "hashbrown 0.14.3", "serde", ] @@ -2686,7 +2663,7 @@ name = "iroha_client" version = "2.0.0-pre-rc.20" dependencies = [ "attohttpc", - "base64 0.21.4", + "base64", "color-eyre", "criterion", "derive_more", @@ -2730,8 +2707,6 @@ dependencies = [ "dialoguer", "erased-serde", "iroha_client", - "iroha_config", - "iroha_crypto", "iroha_primitives", "json5", "once_cell", @@ -2746,6 +2721,7 @@ dependencies = [ "cfg-if", "derive_more", "displaydoc", + "expect-test", "eyre", "iroha_config_base", "iroha_crypto", @@ -2806,6 +2782,7 @@ dependencies = [ "eyre", "futures", "hex", + "indexmap 2.1.0", "iroha_config", "iroha_crypto", "iroha_data_model", @@ -2887,7 +2864,7 @@ dependencies = [ name = "iroha_data_model" version = "2.0.0-pre-rc.20" dependencies = [ - "base64 0.21.4", + "base64", "criterion", "derive_more", "displaydoc", @@ -2925,7 +2902,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.38", + "syn 2.0.41", "trybuild", ] @@ -2939,24 +2916,10 @@ dependencies = [ "manyhow", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", "trybuild", ] -[[package]] -name = "iroha_dsl" -version = "2.0.0-pre-rc.20" -dependencies = [ - "iroha_client", - "iroha_config", - "iroha_crypto", - "iroha_data_model", - "litrs", - "proc-macro2", - "quote", - "serde_json", -] - [[package]] name = "iroha_executor" version = "2.0.0-pre-rc.20" @@ -2978,8 +2941,7 @@ dependencies = [ "manyhow", "proc-macro2", "quote", - "syn 1.0.109", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -3005,7 +2967,7 @@ dependencies = [ "proc-macro2", "quote", "rustc-hash", - "syn 2.0.38", + "syn 2.0.41", 
"trybuild", ] @@ -3031,7 +2993,7 @@ dependencies = [ "manyhow", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -3063,9 +3025,9 @@ dependencies = [ "iroha_data_model", "once_cell", "serde_json", + "thiserror", "tokio", "tracing", - "tracing-bunyan-formatter", "tracing-core", "tracing-error", "tracing-futures", @@ -3089,7 +3051,7 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -3143,7 +3105,7 @@ dependencies = [ "manyhow", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -3167,7 +3129,7 @@ dependencies = [ "manyhow", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", "trybuild", ] @@ -3199,9 +3161,11 @@ dependencies = [ name = "iroha_smart_contract_derive" version = "2.0.0-pre-rc.20" dependencies = [ + "iroha_macro_utils", + "manyhow", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.41", ] [[package]] @@ -3221,7 +3185,7 @@ version = "2.0.0-pre-rc.20" name = "iroha_swarm" version = "2.0.0-pre-rc.20" dependencies = [ - "clap 4.4.6", + "clap 4.4.11", "color-eyre", "derive_more", "expect-test", @@ -3289,9 +3253,12 @@ dependencies = [ name = "iroha_trigger_derive" version = "2.0.0-pre-rc.20" dependencies = [ + "darling", + "iroha_macro_utils", + "manyhow", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.41", ] [[package]] @@ -3322,7 +3289,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.38", + "syn 2.0.41", "trybuild", ] @@ -3341,7 +3308,7 @@ dependencies = [ name = "iroha_wasm_builder_cli" version = "2.0.0-pre-rc.20" dependencies = [ - "clap 4.4.6", + "clap 4.4.11", "color-eyre", "iroha_wasm_builder", "owo-colors", @@ -3403,9 +3370,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "ittapi" @@ -3429,18 +3396,18 @@ dependencies = [ [[package]] name = "jobserver" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" dependencies = [ "wasm-bindgen", ] @@ -3458,9 +3425,9 @@ dependencies = [ [[package]] name = "k256" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc" +checksum = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b" dependencies = [ "cfg-if", "ecdsa", @@ -3474,7 +3441,7 @@ dependencies = [ name = "kagami" version = "2.0.0-pre-rc.20" dependencies = [ - "clap 4.4.6", + "clap 4.4.11", "color-eyre", "derive_more", "iroha_config", @@ -3501,7 +3468,7 @@ dependencies = [ name = "kura_inspector" version = "2.0.0-pre-rc.20" dependencies = [ - "clap 4.4.6", + "clap 4.4.11", "iroha_core", "iroha_data_model", "iroha_version", @@ -3521,9 +3488,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" 
[[package]] name = "libc" -version = "0.2.150" +version = "0.2.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" [[package]] name = "libflate" @@ -3555,11 +3522,22 @@ version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +[[package]] +name = "libredox" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +dependencies = [ + "bitflags 2.4.1", + "libc", + "redox_syscall", +] + [[package]] name = "libsodium-sys-stable" -version = "1.20.3" +version = "1.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfc31f983531631496f4e621110cd81468ab78b65dee0046cfddea83caa2c327" +checksum = "d1d164bc6f9139c5f95efb4f0be931b2bd5a9edf7e4e3c945d26b95ab8fa669b" dependencies = [ "cc", "libc", @@ -3583,24 +3561,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" - -[[package]] -name = "litrs" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f17c3668f3cc1132437cdadc93dab05e52d592f06948d3f64828430c36e4a70" -dependencies = [ - "proc-macro2", -] +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" [[package]] name = "lock_api" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -3632,7 +3601,7 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -3685,6 +3654,15 @@ dependencies = [ "libc", ] +[[package]] +name = "memmap2" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deaba38d7abf1d4cca21cc89e932e542ba2b9258664d2a9ef0e61512039c9375" +dependencies = [ + "libc", +] + [[package]] name = "memoffset" version = "0.9.0" @@ -3733,9 +3711,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", "log", @@ -3763,7 +3741,7 @@ dependencies = [ "log", "memchr", "mime", - "spin 0.9.8", + "spin", "version_check", ] @@ -3850,16 +3828,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" dependencies = [ "crc32fast", - "hashbrown 0.14.1", - "indexmap 2.0.2", + "hashbrown 0.14.3", + "indexmap 2.1.0", "memchr", ] [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "oorandom" @@ 
@@ -3881,11 +3859,11 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"

 [[package]]
 name = "openssl"
-version = "0.10.59"
+version = "0.10.61"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a257ad03cd8fb16ad4172fedf8094451e1af1c4b70097636ef2eac9a5f0cc33"
+checksum = "6b8419dc8cc6d866deb801274bba2e6f8f6108c1bb7fcc10ee5ab864931dbb45"
 dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
  "cfg-if",
  "foreign-types",
  "libc",
@@ -3902,7 +3880,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.38",
+ "syn 2.0.41",
 ]

 [[package]]
@@ -3913,18 +3891,18 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"

 [[package]]
 name = "openssl-src"
-version = "300.1.5+3.1.3"
+version = "300.2.1+3.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "559068e4c12950d7dcaa1857a61725c0d38d4fc03ff8e070ab31a75d6e316491"
+checksum = "3fe476c29791a5ca0d1273c697e96085bbabbbea2ef7afd5617e78a4b40332d3"
 dependencies = [
  "cc",
 ]

 [[package]]
 name = "openssl-sys"
-version = "0.9.95"
+version = "0.9.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9"
+checksum = "c3eaad34cdd97d81de97964fc7f29e2d104f483840d906ef56daa1912338460b"
 dependencies = [
  "cc",
  "libc",
@@ -3935,9 +3913,9 @@ dependencies = [

 [[package]]
 name = "os_str_bytes"
-version = "6.5.1"
+version = "6.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac"
+checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1"

 [[package]]
 name = "overload"
@@ -3956,9 +3934,9 @@ dependencies = [

 [[package]]
 name = "parity-scale-codec"
-version = "3.6.5"
+version = "3.6.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0dec8a8073036902368c2cdc0387e85ff9a37054d7e7c98e592145e0c92cd4fb"
+checksum = "881331e34fa842a2fb61cc2db9643a8fedc615e47cfcc52597d1af0db9a7e8fe"
 dependencies = [
  "arrayvec",
  "bitvec",
@@ -3970,9 +3948,9 @@ dependencies = [

 [[package]]
 name = "parity-scale-codec-derive"
-version = "3.6.5"
+version = "3.6.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "312270ee71e1cd70289dacf597cab7b207aa107d2f28191c2ae45b2ece18a260"
+checksum = "be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b"
 dependencies = [
  "proc-macro-crate",
  "proc-macro2",
@@ -3984,7 +3962,7 @@ dependencies = [
 name = "parity_scale_decoder"
 version = "2.0.0-pre-rc.20"
 dependencies = [
- "clap 4.4.6",
+ "clap 4.4.11",
  "colored",
  "eyre",
  "iroha_crypto",
@@ -4011,15 +3989,15 @@ dependencies = [

 [[package]]
 name = "parking_lot_core"
-version = "0.9.8"
+version = "0.9.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447"
+checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
 dependencies = [
  "backtrace",
  "cfg-if",
  "libc",
  "petgraph",
- "redox_syscall 0.3.5",
+ "redox_syscall",
  "smallvec",
  "thread-id",
  "windows-targets 0.48.5",
@@ -4048,7 +4026,7 @@ dependencies = [
  "regex",
  "regex-syntax 0.7.5",
  "structmeta",
- "syn 2.0.38",
+ "syn 2.0.41",
 ]

 [[package]]
@@ -4083,15 +4061,15 @@ checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"

 [[package]]
 name = "percent-encoding"
-version = "2.3.0"
+version = "2.3.1"
"2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4" +checksum = "ae9cee2a55a544be8b89dc6848072af97a20f2422603c10865be2a42b580fff5" dependencies = [ "memchr", "thiserror", @@ -4100,9 +4078,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35513f630d46400a977c4cb58f78e1bfbe01434316e60c37d27b9ad6139c66d8" +checksum = "81d78524685f5ef2a3b3bd1cafbc9fcabb036253d9b1463e726a91cd16e2dfc2" dependencies = [ "pest", "pest_generator", @@ -4110,22 +4088,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc9fc1b9e7057baba189b5c626e2d6f40681ae5b6eb064dc7c7834101ec8123a" +checksum = "68bd1206e71118b5356dae5ddc61c8b11e28b09ef6a31acbd15ea48a28e0c227" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "pest_meta" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df74e9e7ec4053ceb980e7c0c8bd3594e977fde1af91daba9c928e8e8c6708d" +checksum = "7c747191d4ad9e4a4ab9c8798f1e82a39affe7ef9648390b7e5548d18e099de6" dependencies = [ "once_cell", "pest", @@ -4139,7 +4117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.0.2", + "indexmap 2.1.0", ] [[package]] @@ -4159,7 +4137,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -4192,9 +4170,9 @@ checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" [[package]] name = "platforms" -version = "3.1.2" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4503fa043bf02cee09a9582e9554b4c6403b2ef55e4612e96561d294419429f8" +checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" [[package]] name = "plotters" @@ -4235,6 +4213,12 @@ dependencies = [ "universal-hash", ] +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -4243,11 +4227,11 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro-crate" -version = "1.3.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +checksum = "97dc5fea232fc28d2f597b37c4876b348a40e33f3b02cc975c8d006d78d94b1a" dependencies = [ - "once_cell", + "toml_datetime", "toml_edit", ] @@ -4288,9 +4272,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" dependencies = [ "unicode-ident", ] @@ -4317,19 +4301,19 @@ dependencies = [ [[package]] name = "proptest" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e" +checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.4.0", + "bitflags 2.4.1", "lazy_static", "num-traits", "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.7.5", + "regex-syntax 0.8.2", "rusty-fork", "tempfile", "unarray", @@ -4337,9 +4321,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fdd22f3b9c31b53c060df4a0613a1c7f062d4115a2b984dd15b1858f7e340d" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ "bytes", "prost-derive", @@ -4347,22 +4331,22 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "265baba7fabd416cf5078179f7d2cbeca4ce7a9041111900675ea7c4cb8a4c32" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" dependencies = [ "anyhow", "itertools 0.11.0", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "prost-types" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e081b29f63d83a4bc75cfc9f3fe424f9156cf92d8a4f0c9407cce9a1b67327cf" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" dependencies = [ "prost", ] @@ -4405,7 +4389,7 @@ checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" dependencies = [ "getrandom 0.1.16", "libc", - "rand_chacha 0.2.1", + "rand_chacha 0.2.2", "rand_core 0.5.1", "rand_hc", ] @@ -4423,11 +4407,11 @@ dependencies = [ [[package]] name = "rand_chacha" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" dependencies = [ - "c2-chacha", + "ppv-lite86", "rand_core 0.5.1", ] @@ -4456,7 +4440,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", ] [[package]] @@ -4499,30 +4483,21 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "redox_syscall" -version = "0.3.5" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ - "getrandom 0.2.10", - "redox_syscall 0.2.16", + "getrandom 0.2.11", + "libredox", "thiserror", ] @@ -4541,14 +4516,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.0" +version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.1", - "regex-syntax 0.8.0", + "regex-automata 0.4.3", + "regex-syntax 0.8.2", ] [[package]] @@ -4562,13 +4537,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.1" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.0", + "regex-syntax 0.8.2", ] [[package]] @@ -4585,9 +4560,9 @@ checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" [[package]] name = "regex-syntax" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3cbb081b9784b07cceb8824c8583f86db4814d172ab043f3c23f7dc600bf83d" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "rfc6979" @@ -4601,17 +4576,16 @@ dependencies = [ [[package]] name = "ring" -version = "0.16.20" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" dependencies = [ "cc", + "getrandom 0.2.11", "libc", - "once_cell", - "spin 0.5.2", + "spin", "untrusted", - "web-sys", - "winapi", + "windows-sys 0.48.0", ] [[package]] @@ -4643,22 +4617,22 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.25" +version = "0.38.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc99bc2d4f1fed22595588a013687477aedf3cdcfb26558c559edb67b4d9b22e" +checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "errno", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.21.7" +version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd8d6c9f025a446bc4d18ad9632e69aec8f287aa84499ee335599fabd20c3fd8" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ "log", "ring", @@ -4680,18 +4654,18 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64 0.21.4", + "base64", ] [[package]] name = "rustls-webpki" -version = "0.101.6" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe" +checksum = 
"8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ "ring", "untrusted", @@ -4717,9 +4691,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] name = "same-file" @@ -4759,9 +4733,9 @@ checksum = "a3cf7c11c38cb994f3d40e8a8cde3bbd1f72a435e4c49e85d6553d8312306152" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ "ring", "untrusted", @@ -4794,9 +4768,9 @@ dependencies = [ [[package]] name = "secp256k1-sys" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09e67c467c38fd24bd5499dc9a18183b31575c12ee549197e3e20d57aa4fe3b7" +checksum = "4dd97a086ec737e30053fd5c46f097465d25bb81dd3608825f65298c4c98be83" dependencies = [ "cc", ] @@ -4832,9 +4806,9 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "serde" -version = "1.0.188" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" +checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" dependencies = [ "serde_derive", ] @@ -4850,20 +4824,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.188" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" +checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "serde_json" -version = "1.0.107" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" dependencies = [ "itoa", "ryu", @@ -4884,9 +4858,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237" +checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" dependencies = [ "serde", "serde_with_macros", @@ -4894,23 +4868,23 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c" +checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "serde_yaml" -version = "0.9.25" +version = "0.9.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" +checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" dependencies = [ - "indexmap 
2.0.2", + "indexmap 2.1.0", "itoa", "ryu", "serde", @@ -4939,7 +4913,7 @@ checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -5043,9 +5017,9 @@ dependencies = [ [[package]] name = "signature" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -5078,18 +5052,18 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.1" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" +checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" dependencies = [ "serde", ] [[package]] name = "socket2" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" dependencies = [ "libc", "winapi", @@ -5097,20 +5071,14 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", "windows-sys 0.48.0", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - [[package]] name = "spin" version = "0.9.8" @@ -5130,9 +5098,9 @@ dependencies = [ [[package]] name = "spki" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", @@ -5193,7 +5161,7 @@ dependencies = [ "proc-macro2", "quote", "structmeta-derive", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -5204,7 +5172,7 @@ checksum = "a60bcaff7397072dca0017d1db428e30d5002e00b6847703e2e42005c95fbe00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -5219,7 +5187,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" dependencies = [ - "strum_macros 0.25.2", + "strum_macros 0.25.3", ] [[package]] @@ -5237,15 +5205,15 @@ dependencies = [ [[package]] name = "strum_macros" -version = "0.25.2" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059" +checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" dependencies = [ "heck", "proc-macro2", "quote", "rustversion", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -5296,9 +5264,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.38" +version = "2.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" +checksum = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269" dependencies = [ "proc-macro2", "quote", @@ -5336,22 +5304,22 @@ checksum = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a" [[package]] name = "tempfile" -version = "3.8.0" +version = "3.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" dependencies = [ "cfg-if", "fastrand", - "redox_syscall 0.3.5", + "redox_syscall", "rustix", "windows-sys 0.48.0", ] [[package]] name = "termcolor" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" +checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449" dependencies = [ "winapi-util", ] @@ -5402,17 +5370,16 @@ checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "thread-id" -version = "4.2.0" +version = "4.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79474f573561cdc4871a0de34a51c92f7f5a56039113fbb5b9c9f96bdb756669" +checksum = "f0ec81c46e9eb50deaa257be2f148adf052d1fb7701cfd55ccfab2525280b70b" dependencies = [ "libc", - "redox_syscall 0.2.16", "winapi", ] @@ -5434,14 +5401,15 @@ dependencies = [ [[package]] name = "time" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe" +checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" dependencies = [ "deranged", "itoa", "libc", "num_threads", + "powerfmt", "serde", "time-core", "time-macros", @@ -5489,9 +5457,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.33.0" +version = "1.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" +checksum = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c" dependencies = [ "backtrace", "bytes", @@ -5500,7 +5468,7 @@ dependencies = [ "num_cpus", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.4", + "socket2 0.5.5", "tokio-macros", "tracing", "windows-sys 0.48.0", @@ -5518,13 +5486,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] @@ -5556,6 +5524,7 @@ dependencies = [ "futures-core", "pin-project-lite", "tokio", + "tokio-util", ] [[package]] @@ -5573,14 +5542,14 @@ dependencies = [ "tokio-native-tls", "tokio-rustls", "tungstenite", - "webpki-roots 0.25.2", + "webpki-roots 0.25.3", ] [[package]] name = "tokio-util" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" +checksum = 
"5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ "bytes", "futures-core", @@ -5607,11 +5576,11 @@ checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.15" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.1.0", "toml_datetime", "winnow", ] @@ -5625,7 +5594,7 @@ dependencies = [ "async-stream", "async-trait", "axum", - "base64 0.21.4", + "base64", "bytes", "h2", "http", @@ -5677,11 +5646,10 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -5690,38 +5658,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", -] - -[[package]] -name = "tracing-bunyan-formatter" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5c266b9ac83dedf0e0385ad78514949e6d89491269e7065bee51d2bb8ec7373" -dependencies = [ - "ahash", - "gethostname", - "log", - "serde", - "serde_json", - "time", - "tracing", - "tracing-core", - "tracing-log", - "tracing-subscriber", + "syn 2.0.41", ] [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", @@ -5759,38 +5709,40 @@ dependencies = [ ] [[package]] -name = "tracing-log" +name = "tracing-serde" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" dependencies = [ - "lazy_static", - "log", + "serde", "tracing-core", ] [[package]] name = "tracing-subscriber" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", "nu-ansi-term", "once_cell", "regex", + "serde", + "serde_json", "sharded-slab", "smallvec", "thread_local", "tracing", "tracing-core", + "tracing-serde", ] [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = 
"trybuild" @@ -5859,15 +5811,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" [[package]] name = "unicode-bom" -version = "2.0.2" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98e90c70c9f0d4d1ee6d0a7d04aa06cb9bbd53d8cfbdd62a0269a7c2eb640552" +checksum = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217" [[package]] name = "unicode-ident" @@ -5929,30 +5881,30 @@ checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" [[package]] name = "untrusted" -version = "0.7.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "2.8.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5ccd538d4a604753ebc2f17cd9946e89b77bf87f6a8e2309667c6f2e87855e3" +checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97" dependencies = [ - "base64 0.21.4", + "base64", "log", "once_cell", "rustls", "rustls-webpki", "url", - "webpki-roots 0.25.2", + "webpki-roots 0.25.3", ] [[package]] name = "url" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", "idna", @@ -5968,9 +5920,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8-width" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" +checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" [[package]] name = "utf8parse" @@ -5980,11 +5932,11 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.4.1" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", ] [[package]] @@ -6001,9 +5953,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vergen" -version = "8.2.5" +version = "8.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85e7dc29b3c54a2ea67ef4f953d5ec0c4085035c0ae2d325be1c0d2144bd9f16" +checksum = "1290fd64cc4e7d3c9b07d7f333ce0ce0007253e32870e632624835cc80b83939" dependencies = [ "anyhow", "gix", @@ -6090,9 +6042,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" 
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -6100,24 +6052,24 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.87"
+version = "0.2.89"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
+checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826"
 dependencies = [
  "bumpalo",
  "log",
  "once_cell",
  "proc-macro2",
  "quote",
- "syn 2.0.38",
+ "syn 2.0.41",
  "wasm-bindgen-shared",
 ]

 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.87"
+version = "0.2.89"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
+checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -6125,22 +6077,22 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.87"
+version = "0.2.89"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
+checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.38",
+ "syn 2.0.41",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]

 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.87"
+version = "0.2.89"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
+checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f"

 [[package]]
 name = "wasm-encoder"
@@ -6153,9 +6105,9 @@ dependencies = [

 [[package]]
 name = "wasm-encoder"
-version = "0.38.0"
+version = "0.38.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b09bc5df933a3dabbdb72ae4b6b71be8ae07f58774d5aa41bd20adcd41a235a"
+checksum = "0ad2b51884de9c7f4fe2fd1043fccb8dcad4b1e29558146ee57a144d15779f3f"
 dependencies = [
  "leb128",
 ]

@@ -6206,15 +6158,15 @@ version = "0.116.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a58e28b80dd8340cb07b8242ae654756161f6fc8d0038123d679b7b99964fa50"
 dependencies = [
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "semver",
 ]

 [[package]]
 name = "wasmtime"
-version = "15.0.0"
+version = "15.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae4b1702ef55144d6f594085f4989dc71fb71a791be1c8354ecc8e489b81199b"
+checksum = "642e12d108e800215263e3b95972977f473957923103029d7d617db701d67ba4"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -6222,7 +6174,7 @@ dependencies = [
  "bumpalo",
  "cfg-if",
  "fxprof-processed-profile",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "libc",
  "log",
  "object",
@@ -6249,21 +6201,21 @@ dependencies = [

 [[package]]
 name = "wasmtime-asm-macros"
-version = "15.0.0"
+version = "15.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c981d0e87bb3e98e08e76644e7ae5dfdef7f1d4105145853f3d677bb4535d65f"
+checksum = "beada8bb15df52503de0a4c58de4357bfd2f96d9a44a6e547bad11efdd988b47"
 dependencies = [
  "cfg-if",
 ]

 [[package]]
 name = "wasmtime-cache"
-version = "15.0.0"
+version = "15.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d7ba8adaa84fdb9dd659275edcf7fc5282c44b9c9f829986c71d44fd52ea80a"
+checksum = "aba5bf44d044d25892c03fb3534373936ee204141ff92bac8297787ac7f22318"
 dependencies = [
  "anyhow",
- "base64 0.21.4",
+ "base64",
  "bincode",
"directories-next", "log", @@ -6278,14 +6230,14 @@ dependencies = [ [[package]] name = "wasmtime-component-macro" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c91dcbbd0e1f094351d1ae0e53463c63ba53ec8f8e0e21d17567c1979a8c3758" +checksum = "56ccba556991465cca68d5a54769684bcf489fb532059da55105f851642d52c1" dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", "wasmtime-component-util", "wasmtime-wit-bindgen", "wit-parser", @@ -6293,15 +6245,15 @@ dependencies = [ [[package]] name = "wasmtime-component-util" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e85f1319a7ed36aa59446ab7e967d0c2fb0cd179bf56913633190b44572023e" +checksum = "05492a177a6006cb73f034d6e9a6fad6da55b23c4398835cb0012b5fa51ecf67" [[package]] name = "wasmtime-cranelift" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1453665878e16245b9a25405e550c4a36c6731c6e34ea804edc002a38c3e6741" +checksum = "fe2e7532f1d6adbcc57e69bb6a7c503f0859076d07a9b4b6aabe8021ff8a05fd" dependencies = [ "anyhow", "cfg-if", @@ -6324,9 +6276,9 @@ dependencies = [ [[package]] name = "wasmtime-cranelift-shared" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dface3d9b72b4670781ff72675eabb291e2836b5dded6bb312b577d2bb561f" +checksum = "8c98d5378a856cbf058d36278627dfabf0ed68a888142958c7ae8e6af507dafa" dependencies = [ "anyhow", "cranelift-codegen", @@ -6340,14 +6292,14 @@ dependencies = [ [[package]] name = "wasmtime-environ" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0116108e7d231cce15fe7dd642c66c3abb14dbcf169b0130e11f223ce8d1ad7" +checksum = "a6d33a9f421da810a070cd56add9bc51f852bd66afbb8b920489d6242f15b70e" dependencies = [ "anyhow", "cranelift-entity", "gimli", - "indexmap 2.0.2", + "indexmap 2.1.0", "log", "object", "serde", @@ -6360,9 +6312,9 @@ dependencies = [ [[package]] name = "wasmtime-fiber" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a5896355c37bf0f9feb4f1299142ef4bed8c92576aa3a41d150fed0cafa056" +checksum = "404741f4c6d7f4e043be2e8b466406a2aee289ccdba22bf9eba6399921121b97" dependencies = [ "anyhow", "cc", @@ -6375,9 +6327,9 @@ dependencies = [ [[package]] name = "wasmtime-jit" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e32b210767452f6b20157bb7c7d98295b92cc47aaad2a8aa31652f4469813a5d" +checksum = "8d0994a86d6dca5f7d9740d7f2bd0568be06d2014a550361dc1c397d289d81ef" dependencies = [ "addr2line", "anyhow", @@ -6402,9 +6354,9 @@ dependencies = [ [[package]] name = "wasmtime-jit-debug" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bffd2785a16c55ac77565613ebda625f5850d4014af0499df750e8de97c04547" +checksum = "4e0c4b74e606d1462d648631d5bc328e3d5b14e7f9d3ff93bc6db062fb8c5cd8" dependencies = [ "object", "once_cell", @@ -6414,9 +6366,9 @@ dependencies = [ [[package]] name = "wasmtime-jit-icache-coherence" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73ad1395eda136baec5ece7e079e0536a82ef73488e345456cc9b89858ad0ec" +checksum = 
"3090a69ba1476979e090aa7ed4bc759178bafdb65b22f98b9ba24fc6e7e578d5" dependencies = [ "cfg-if", "libc", @@ -6425,14 +6377,14 @@ dependencies = [ [[package]] name = "wasmtime-runtime" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77b50f7f3c1a8dabb2607f32a81242917bd77cee75f3dec66e04b02ccbb8ba07" +checksum = "b993ac8380385ed67bf71b51b9553edcf1ab0801b78a805a067de581b9a3e88a" dependencies = [ "anyhow", "cc", "cfg-if", - "indexmap 2.0.2", + "indexmap 2.1.0", "libc", "log", "mach", @@ -6454,9 +6406,9 @@ dependencies = [ [[package]] name = "wasmtime-types" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "447973db3dc5c24db14130ab0922795c58790aec296d198ad9d253b82ec67471" +checksum = "8b5778112fcab2dc3d4371f4203ab8facf0c453dd94312b0a88dd662955e64e0" dependencies = [ "cranelift-entity", "serde", @@ -6467,59 +6419,59 @@ dependencies = [ [[package]] name = "wasmtime-versioned-export-macros" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a347bb8ecf12275fb180afb1b1c85c9e186553c43109737bffed4f54c2aa365" +checksum = "f50f51f8d79bfd2aa8e9d9a0ae7c2d02b45fe412e62ff1b87c0c81b07c738231" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.41", ] [[package]] name = "wasmtime-wit-bindgen" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41786c7bbbf250c0e685b291323b50c6bb65f0505a2c0b4f0b598c740f13f185" +checksum = "4b804dfd3d0c0d6d37aa21026fe7772ba1a769c89ee4f5c4f13b82d91d75216f" dependencies = [ "anyhow", "heck", - "indexmap 2.0.2", + "indexmap 2.1.0", "wit-parser", ] [[package]] name = "wasmtime-wmemcheck" -version = "15.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47907bdd67500c66fa308acbce7387c7bfb63b5505ef81be7fc897709afcca60" +checksum = "9b6060bc082cc32d9a45587c7640e29e3c7b89ada82677ac25d87850aaccb368" [[package]] name = "wast" -version = "69.0.0" +version = "69.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efa51b5ad1391943d1bfad537e50f28fe938199ee76b115be6bae83802cd5185" +checksum = "c1ee37317321afde358e4d7593745942c48d6d17e0e6e943704de9bbee121e7a" dependencies = [ "leb128", "memchr", "unicode-width", - "wasm-encoder 0.38.0", + "wasm-encoder 0.38.1", ] [[package]] name = "wat" -version = "1.0.81" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74a4c2488d058326466e086a43f5d4ea448241a8d0975e3eb0642c0828be1eb3" +checksum = "aeb338ee8dee4d4cd05e6426683f21c5087dc7cfc8903e839ccf48d43332da3c" dependencies = [ "wast", ] [[package]] name = "web-sys" -version = "0.3.64" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" dependencies = [ "js-sys", "wasm-bindgen", @@ -6546,9 +6498,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.2" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" +checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" [[package]] name = "winapi" @@ -6590,6 +6542,15 @@ dependencies = [ "windows-targets 0.48.5", ] 
+[[package]]
+name = "windows-core"
+version = "0.51.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64"
+dependencies = [
+ "windows-targets 0.48.5",
+]
+
 [[package]]
 name = "windows-sys"
 version = "0.45.0"
@@ -6608,6 +6569,15 @@ dependencies = [
  "windows-targets 0.48.5",
 ]

+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.0",
+]
+
 [[package]]
 name = "windows-targets"
 version = "0.42.2"
@@ -6638,6 +6608,21 @@ dependencies = [
  "windows_x86_64_msvc 0.48.5",
 ]

+[[package]]
+name = "windows-targets"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
+dependencies = [
+ "windows_aarch64_gnullvm 0.52.0",
+ "windows_aarch64_msvc 0.52.0",
+ "windows_i686_gnu 0.52.0",
+ "windows_i686_msvc 0.52.0",
+ "windows_x86_64_gnu 0.52.0",
+ "windows_x86_64_gnullvm 0.52.0",
+ "windows_x86_64_msvc 0.52.0",
+]
+
 [[package]]
 name = "windows_aarch64_gnullvm"
 version = "0.42.2"
@@ -6650,6 +6635,12 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"

+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
+
 [[package]]
 name = "windows_aarch64_msvc"
 version = "0.42.2"
@@ -6662,6 +6653,12 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"

+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
+
 [[package]]
 name = "windows_i686_gnu"
 version = "0.42.2"
@@ -6674,6 +6671,12 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"

+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
+
 [[package]]
 name = "windows_i686_msvc"
 version = "0.42.2"
@@ -6686,6 +6689,12 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"

+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
+
 [[package]]
 name = "windows_x86_64_gnu"
 version = "0.42.2"
@@ -6698,6 +6707,12 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"

+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
+
 [[package]]
 name = "windows_x86_64_gnullvm"
 version = "0.42.2"
@@ -6710,6 +6725,12 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"

+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
+
 [[package]]
 name = "windows_x86_64_msvc"
 version = "0.42.2"
@@ -6722,11 +6743,17 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
+
 [[package]]
 name = "winnow"
-version = "0.5.16"
+version = "0.5.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "037711d82167854aff2018dfd193aa0fef5370f456732f0d5a0c59b0f1b4b907"
+checksum = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2"
 dependencies = [
  "memchr",
 ]

@@ -6739,7 +6766,7 @@ checksum = "15df6b7b28ce94b8be39d8df5cb21a08a4f3b9f33b631aedb4aa5776f785ead3"
 dependencies = [
  "anyhow",
  "id-arena",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "log",
  "semver",
  "serde",
@@ -6771,18 +6798,40 @@ dependencies = [

 [[package]]
 name = "xattr"
-version = "1.0.1"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985"
+checksum = "a7dae5072fe1f8db8f8d29059189ac175196e410e40ba42d5d4684ae2f750995"
 dependencies = [
  "libc",
+ "linux-raw-sys",
+ "rustix",
+]
+
+[[package]]
+name = "zerocopy"
+version = "0.7.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c4061bedbb353041c12f413700357bec76df2c7e2ca8e4df8bac24c6bf68e3d"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.7.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.41",
 ]

 [[package]]
 name = "zeroize"
-version = "1.6.0"
+version = "1.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
+checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
 dependencies = [
  "zeroize_derive",
 ]
@@ -6795,7 +6844,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.38",
+ "syn 2.0.41",
 ]

 [[package]]
@@ -6831,11 +6880,10 @@ dependencies = [

 [[package]]
 name = "zstd-sys"
-version = "2.0.8+zstd.1.5.5"
+version = "2.0.9+zstd.1.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c"
+checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656"
 dependencies = [
  "cc",
- "libc",
  "pkg-config",
 ]
diff --git a/Cargo.toml b/Cargo.toml
index 73b53e5a8bc..3da58f35d73 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -218,7 +218,6 @@ members = [
     "genesis",
     "primitives",
     "primitives/derive",
-    "dsl",
     "ffi",
     "ffi/derive",
     "futures",
diff --git a/README.md b/README.md
index 237481c5a48..d93882849bc 100644
--- a/README.md
+++ b/README.md
@@ -157,9 +157,7 @@ A brief overview on how to configure and maintain an Iroha instance:

 ### Configuration

-You can provide configuration parameters either as a `config.json` or using environment variables. Refer to the [detailed list](./docs/source/references/config.md) of all available configuration parameters.
-
-Configuration example you may use as a reference point: [cli/src/samples.rs](./cli/src/samples.rs)
+**Note:** this section is under development. You can track its progress in the [issue](https://github.com/hyperledger/iroha-2-docs/issues/392).

 ### Endpoints

@@ -169,7 +167,7 @@ For a list of all endpoints, available operations, and ways to customize them wi

 By default, Iroha provides logs in a human-readable format and prints them out to `stdout`.

-The logging level can be changed either via a [configuration option](./docs/source/references/config.md#loggermax_log_level) or at run-time using the `configuration` endpoint.
+The logging level can be changed either via the `logger.level` configuration parameter or at run-time using the `configuration` endpoint.

 <details>
 <summary>Example: changing log level</summary>

@@ -178,17 +176,13 @@ For example, if your Iroha instance is running at `127.0.0.1:8080` and you want
 ```bash
 curl -X POST \
     -H 'content-type: application/json' \
     http://127.0.0.1:8080/configuration \
-    -d '{"LogLevel": "DEBUG"}' -i
+    -d '{"logger": {"level": "DEBUG"}}' -i
 ```

 </details>

-#### JSON Logging Mode
-
-Additionally, Iroha supports a JSON logging mode.
-
-To enable it, provide the [logging file](./docs/source/references/config.md#loggerlog_file_path) to store the logs in. On UNIX, you can also specify `/dev/stdout` or `/dev/stderr` if you prefer to pipe the output to [`bunyan`](https://www.npmjs.com/package/bunyan).
+The log format can be configured via the `logger.format` configuration parameter. Possible values are: `full` (default), `compact`, `pretty`, and `json`.

-[Log rotation](https://www.commandlinux.com/man-page/man5/logrotate.conf.5.html) is the responsibility of the peer administrator.
+Output goes to `/dev/stdout`. Piping the output to files, as well as [log rotation](https://www.commandlinux.com/man-page/man5/logrotate.conf.5.html), is the responsibility of the peer administrator.

 ### Monitoring

@@ -222,7 +216,7 @@ We encourage you to check out our [Iroha 2 Tutorial](https://hyperledger.github.
 * [Glossary](https://hyperledger.github.io/iroha-2-docs/guide/glossary)
 * [Iroha Special Instructions](https://hyperledger.github.io/iroha-2-docs/guide/blockchain/instructions)
 * [API Reference](https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints)
-* [Configuration Reference](./docs/source/references/config.md)
+
 * [Iroha 2 Whitepaper](./docs/source/iroha_2_whitepaper.md)

 Iroha SDKs:
diff --git a/cli/README.md b/cli/README.md
index 9da57cc943e..c5912212253 100644
--- a/cli/README.md
+++ b/cli/README.md
@@ -74,27 +74,7 @@ Refer to [generating key pairs with `kagami`](../tools/kagami#crypto) for more d

 ### Configuration file

-You must provide a configuration file to run the Iroha peer binary. Iroha will not run with defaults if the configuration file is not available.
-
-The Iroha binary looks for either a `config.json` file in the current directory or a JSON file in `IROHA2_CONFIG_PATH`. If the configuration file is not valid, the Iroha peer binary exits and does nothing. If neither of these files is provided, all the fields from the default `config.json` should be specified as environment variables. Note that environment variables override the variables in their respective fields provided via `config.json`.
-
-The environment variables replacing `config.json` should be passed as JSON strings, meaning that any inner quotes should be properly escaped in the command line as shown in the example below.
-
-<details>
-<summary>Expand to see the example</summary>
-
-``` bash
-IROHA_TORII="{\"P2P_ADDR\": \"127.0.0.1:1339\", \"API_URL\": \"127.0.0.1:8080\"}" IROHA_SUMERAGI="{\"TRUSTED_PEERS\": [{\"address\": \"127.0.0.1:1337\",\"public_key\": \"ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B\"},{\"address\": \"127.0.0.1:1338\",\"public_key\": \"ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1\"},{\"address\": \"127.0.0.1:1339\",\"public_key\": \"ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020\"},{\"address\": \"127.0.0.1:1340\",\"public_key\": \"ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F\"}]}" IROHA_KURA="{\"INIT_MODE\": \"strict\",\"BLOCK_STORE_PATH\": \"./storage\"}" IROHA_BLOCK_SYNC="{\"GOSSIP_PERIOD_MS\": 10000,\"BATCH_SIZE\": 2}" IROHA_PUBLIC_KEY="ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B" IROHA_PRIVATE_KEY="{\"digest_function\": \"ed25519\",\"payload\": \"282ED9F3CF92811C3818DBC4AE594ED59DC1A2F78E4241E31924E101D6B1FB831C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B\"}" IROHA_GENESIS="{\"ACCOUNT_PUBLIC_KEY\": \"ed01204CFFD0EE429B1BDD36B3910EC570852B8BB63F18750341772FB46BC856C5CAAF\",\"ACCOUNT_PRIVATE_KEY\": {\"digest_function\": \"ed25519\",\"payload\": \"D748E18CE60CB30DEA3E73C9019B7AF45A8D465E3D71BCC9A5EF99A008205E534CFFD0EE429B1BDD36B3910EC570852B8BB63F18750341772FB46BC856C5CAAF\"}}" ./iroha
-```
-
-</details>
-
-:grey_exclamation: We do not recommend using environment variables for configuration outside docker-compose and Kubernetes deployments. Please change the values in the configuration file instead. That would also help us debug the problems that you might be having.
-
-The [configuration options reference](../docs/source/references/config.md) provides detailed explanations of each configuration variable. You may use the [sample configuration file](../configs/peer/config.json) for quick testing.
-
-One of the peers on your network must be provided with the genesis block, which is either `IROHA2_GENESIS_PATH` or `genesis.json` in the working directory.
-Check [configuration options](https://github.com/hyperledger/iroha/blob/iroha2-dev/docs/source/references/config.md#genesis) for details.
-Learn more about the genesis block in [our tutorial](https://hyperledger.github.io/iroha-2-docs/guide/configure/genesis.html).
+**Note:** this section is under development. You can track its progress in the [issue](https://github.com/hyperledger/iroha-2-docs/issues/392).

 ## Deployment
diff --git a/cli/src/lib.rs b/cli/src/lib.rs
index bb72b92c049..2d8a9a9c078 100644
--- a/cli/src/lib.rs
+++ b/cli/src/lib.rs
@@ -13,11 +13,13 @@ use iroha_config::{
     base::proxy::{LoadFromDisk, LoadFromEnv, Override},
     iroha::{Configuration, ConfigurationProxy},
     path::Path as ConfigPath,
+    telemetry::Configuration as TelemetryConfiguration,
 };
 use iroha_core::{
     block_sync::{BlockSynchronizer, BlockSynchronizerHandle},
     gossiper::{TransactionGossiper, TransactionGossiperHandle},
     handler::ThreadHandler,
+    kiso::KisoHandle,
     kura::Kura,
     prelude::{World, WorldStateView},
     query::store::LiveQueryStore,
@@ -30,6 +32,7 @@ use iroha_core::{
 };
 use iroha_data_model::prelude::*;
 use iroha_genesis::GenesisNetwork;
+use iroha_logger::actor::LoggerHandle;
 use tokio::{
     signal,
     sync::{broadcast, mpsc, Notify},
@@ -74,6 +77,25 @@ impl Default for Arguments {
     }
 }

+/// Reflects the user's decision (or its absence) about ANSI-colored output
+#[derive(Copy, Clone, Debug)]
+pub enum TerminalColorsArg {
+    /// Coloring should be decided automatically
+    Default,
+    /// User explicitly specified the value
+    UserSet(bool),
+}
+
+impl TerminalColorsArg {
+    /// Transforms the enumeration into a flag
+    pub fn evaluate(self) -> bool {
+        match self {
+            Self::Default => supports_color::on(supports_color::Stream::Stdout).is_some(),
+            Self::UserSet(x) => x,
+        }
+    }
+}
+
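As a quick orientation for readers, this is how the new flag is expected to resolve; a hypothetical usage sketch mirroring `evaluate()` above, not code from the patch:

```rust
// Hypothetical call sites for TerminalColorsArg::evaluate().
// An explicit user choice always wins; `Default` probes the terminal.
let explicit = TerminalColorsArg::UserSet(false).evaluate(); // always false
let auto = TerminalColorsArg::Default.evaluate(); // true only when stdout supports color
assert!(!explicit);
```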
#[must_use = "run `.start().await?` to not immediately stop Iroha"] pub struct Iroha { + /// Actor responsible for the configuration + pub kiso: KisoHandle, /// Queue of transactions pub queue: Arc, /// Sumeragi consensus @@ -225,7 +249,7 @@ impl Iroha { pub async fn with_genesis( genesis: Option, config: Configuration, - telemetry: Option, + logger: LoggerHandle, ) -> Result { let listen_addr = config.torii.p2p_addr.clone(); let network = IrohaNetwork::start(listen_addr, config.sumeragi.key_pair.clone()) @@ -234,15 +258,11 @@ impl Iroha { let (events_sender, _) = broadcast::channel(10000); let world = World::with( - [genesis_domain(&config)], + [genesis_domain(config.genesis.account_public_key.clone())], config.sumeragi.trusted_peers.peers.clone(), ); - let kura = Kura::new( - config.kura.init_mode, - std::path::Path::new(&config.kura.block_store_path), - config.kura.debug_output_new_blocks, - )?; + let kura = Kura::new(&config.kura)?; let live_query_store_handle = LiveQueryStore::from_configuration(config.live_query_store).start(); @@ -273,11 +293,10 @@ impl Iroha { ); let queue = Arc::new(Queue::from_configuration(&config.queue)); - if Self::start_telemetry(telemetry, &config).await? { - iroha_logger::info!("Telemetry started") - } else { - iroha_logger::warn!("Telemetry not started") - } + match Self::start_telemetry(&logger, &config.telemetry).await? { + TelemetryStartStatus::Started => iroha_logger::info!("Telemetry started"), + TelemetryStartStatus::NotStarted => iroha_logger::warn!("Telemetry not started"), + }; let kura_thread_handler = Kura::start(Arc::clone(&kura)); @@ -328,8 +347,11 @@ impl Iroha { let snapshot_maker = SnapshotMaker::from_configuration(&config.snapshot, sumeragi.clone()).start(); - let torii = Torii::from_configuration( - config.clone(), + let kiso = KisoHandle::new(config.clone()); + + let torii = Torii::new( + kiso.clone(), + &config.torii, Arc::clone(&queue), events_sender, Arc::clone(¬ify_shutdown), @@ -338,12 +360,15 @@ impl Iroha { Arc::clone(&kura), ); + Self::spawn_configuration_updates_broadcasting(kiso.clone(), logger.clone()); + Self::start_listening_signal(Arc::clone(¬ify_shutdown))?; Self::prepare_panic_hook(notify_shutdown); let torii = Some(torii); Ok(Self { + kiso, queue, sumeragi, kura, @@ -389,37 +414,46 @@ impl Iroha { #[cfg(feature = "telemetry")] async fn start_telemetry( - telemetry: Option<( - iroha_logger::SubstrateTelemetry, - iroha_logger::FutureTelemetry, - )>, - config: &Configuration, - ) -> Result { + logger: &LoggerHandle, + config: &TelemetryConfiguration, + ) -> Result { #[allow(unused)] - if let Some((substrate_telemetry, telemetry_future)) = telemetry { - #[cfg(feature = "dev-telemetry")] - { - iroha_telemetry::dev::start(&config.telemetry, telemetry_future) + let (config_for_regular, config_for_dev) = config.parse(); + + #[cfg(feature = "dev-telemetry")] + { + if let Some(config) = config_for_dev { + let receiver = logger + .subscribe_on_telemetry(iroha_logger::telemetry::Channel::Future) + .await + .wrap_err("Failed to subscribe on telemetry")?; + let _handle = iroha_telemetry::dev::start(config, receiver) .await .wrap_err("Failed to setup telemetry for futures")?; } - iroha_telemetry::ws::start(&config.telemetry, substrate_telemetry) + } + + if let Some(config) = config_for_regular { + let receiver = logger + .subscribe_on_telemetry(iroha_logger::telemetry::Channel::Regular) + .await + .wrap_err("Failed to subscribe on telemetry")?; + let _handle = iroha_telemetry::ws::start(config, receiver) .await - .wrap_err("Failed to 
setup telemetry for websocket communication")
+            .wrap_err("Failed to setup telemetry for websocket communication")?;
+
+            Ok(TelemetryStartStatus::Started)
         } else {
-            Ok(false)
+            Ok(TelemetryStartStatus::NotStarted)
         }
     }

     #[cfg(not(feature = "telemetry"))]
     async fn start_telemetry(
-        _telemetry: Option<(
-            iroha_logger::SubstrateTelemetry,
-            iroha_logger::FutureTelemetry,
-        )>,
-        _config: &Configuration,
-    ) -> Result<bool> {
-        Ok(false)
+        _logger: &LoggerHandle,
+        _config: &TelemetryConfiguration,
+    ) -> Result<TelemetryStartStatus> {
+        Ok(TelemetryStartStatus::NotStarted)
     }

     #[allow(clippy::redundant_pub_crate)]
@@ -448,22 +482,52 @@ impl Iroha {
         Ok(handle)
     }
+
+    /// Spawns a task that subscribes to updates from the configuration actor
+    /// and broadcasts them to the other interested actors. This way, the configuration
+    /// actor and its consumers need not know about each other, keeping the code loosely coupled.
+    fn spawn_configuration_updates_broadcasting(
+        kiso: KisoHandle,
+        logger: LoggerHandle,
+    ) -> task::JoinHandle<()> {
+        tokio::spawn(async move {
+            let mut log_level_update = kiso
+                .subscribe_on_log_level()
+                .await
+                // FIXME: neither this message nor the inability to propagate the `Result` outwards is ideal
+                .expect("Cannot proceed without working subscriptions");
+
+            loop {
+                tokio::select! {
+                    Ok(()) = log_level_update.changed() => {
+                        let value = *log_level_update.borrow_and_update();
+                        if let Err(error) = logger.reload_level(value).await {
+                            iroha_logger::error!("Failed to reload log level: {error}");
+                        };
+                    }
+                };
+            }
+        })
+    }
 }

-fn genesis_account(public_key: iroha_crypto::PublicKey) -> Account {
+enum TelemetryStartStatus {
+    Started,
+    NotStarted,
+}
+
+fn genesis_account(public_key: PublicKey) -> Account {
     Account::new(iroha_genesis::GENESIS_ACCOUNT_ID.clone(), [public_key])
         .build(&iroha_genesis::GENESIS_ACCOUNT_ID)
 }

-fn genesis_domain(configuration: &Configuration) -> Domain {
-    let account_public_key = &configuration.genesis.account_public_key;
-
+fn genesis_domain(public_key: PublicKey) -> Domain {
     let mut domain = Domain::new(iroha_genesis::GENESIS_DOMAIN_ID.clone())
         .build(&iroha_genesis::GENESIS_ACCOUNT_ID);

     domain.accounts.insert(
         iroha_genesis::GENESIS_ACCOUNT_ID.clone(),
-        genesis_account(account_public_key.clone()),
+        genesis_account(public_key),
     );

     domain
diff --git a/cli/src/main.rs b/cli/src/main.rs
index e2a07e6ae74..16629ea7ea7 100644
--- a/cli/src/main.rs
+++ b/cli/src/main.rs
@@ -2,7 +2,7 @@
 use std::env;

 use color_eyre::eyre::WrapErr as _;
-use iroha::style::Styling;
+use iroha::{style::Styling, TerminalColorsArg};
 use iroha_config::path::Path as ConfigPath;
 use iroha_genesis::{GenesisNetwork, RawGenesisBlock};
 use owo_colors::OwoColorize as _;
@@ -10,6 +10,8 @@ use owo_colors::OwoColorize as _;
 const HELP_ARG: [&str; 2] = ["--help", "-h"];
 const SUBMIT_ARG: [&str; 2] = ["--submit-genesis", "-s"];
 const VERSION_ARG: [&str; 2] = ["--version", "-V"];
+const TERMINAL_COLORS_ARG: &str = "--terminal-colors";
+const NO_TERMINAL_COLORS_ARG: &str = "--no-terminal-colors";

 const REQUIRED_ENV_VARS: [(&str, &str); 7] = [
     ("IROHA_TORII", "Torii (gateway) endpoint configuration"),
@@ -42,11 +44,29 @@ const REQUIRED_ENV_VARS: [(&str, &str); 7] = [
 /// - Telemetry setup
 /// - [`Sumeragi`] init
 async fn main() -> Result<(), color_eyre::Report> {
-    let styling = Styling::new();
-    if !iroha::style::should_disable_color() {
+    let mut args = iroha::Arguments::default();
+
+    let terminal_colors = env::var("TERMINAL_COLORS")
+        .ok()
+        .map(|s| s.as_str().parse().unwrap_or(true))
+        .or_else(|| {
+            if
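+            // Resolution order implied by this chain: an explicit TERMINAL_COLORS
+            // environment variable wins; failing that, the --terminal-colors /
+            // --no-terminal-colors CLI flags (checked just below) are consulted;
+            // if neither is given, `TerminalColorsArg::Default` falls back to
+            // `supports_color` auto-detection.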
env::args().any(|a| a == TERMINAL_COLORS_ARG) { + Some(true) + } else if env::args().any(|a| a == NO_TERMINAL_COLORS_ARG) { + Some(false) + } else { + None + } + }) + .map_or(TerminalColorsArg::Default, TerminalColorsArg::UserSet) + .evaluate(); + + if terminal_colors { color_eyre::install()?; } - let mut args = iroha::Arguments::default(); + + let styling = Styling::new(terminal_colors); + if env::args().any(|a| HELP_ARG.contains(&a.as_str())) { print_help(&styling)?; return Ok(()); @@ -109,7 +129,7 @@ async fn main() -> Result<(), color_eyre::Report> { } let config = iroha::combine_configs(&args)?; - let telemetry = iroha_logger::init(&config.logger)?; + let logger = iroha_logger::init_global(&config.logger, terminal_colors)?; if !config.disable_panic_terminal_colors { // FIXME: it shouldn't be logged here; it is a part of configuration domain // this message can be very simply broken by the changes in the configuration @@ -140,7 +160,7 @@ async fn main() -> Result<(), color_eyre::Report> { }) .transpose()?; - iroha::Iroha::with_genesis(genesis, config, telemetry) + iroha::Iroha::with_genesis(genesis, config, logger) .await? .start() .await?; diff --git a/cli/src/style.rs b/cli/src/style.rs index dbf64b975e7..393ae591140 100644 --- a/cli/src/style.rs +++ b/cli/src/style.rs @@ -25,22 +25,14 @@ impl Default for Styling { } } -/// Determine if message colourisation is to be enabled -pub fn should_disable_color() -> bool { - supports_color::on(supports_color::Stream::Stdout).is_none() - || std::env::var("TERMINAL_COLORS") - .map(|s| !s.as_str().parse().unwrap_or(true)) - .unwrap_or(false) -} - impl Styling { #[must_use] /// Constructor - pub fn new() -> Self { - if should_disable_color() { - Self::no_color() - } else { + pub fn new(terminal_colors: bool) -> Self { + if terminal_colors { Self::default() + } else { + Self::no_color() } } diff --git a/cli/src/torii/mod.rs b/cli/src/torii/mod.rs index 9594362ab5d..7780d4e5b74 100644 --- a/cli/src/torii/mod.rs +++ b/cli/src/torii/mod.rs @@ -10,7 +10,9 @@ use std::{ }; use futures::{stream::FuturesUnordered, StreamExt}; +use iroha_config::torii::Configuration as ToriiConfiguration; use iroha_core::{ + kiso::{Error as KisoError, KisoHandle}, kura::Kura, prelude::*, query::store::LiveQueryStoreHandle, @@ -18,6 +20,7 @@ use iroha_core::{ sumeragi::SumeragiHandle, EventsSender, }; +use iroha_primitives::addr::SocketAddr; use tokio::sync::Notify; use utils::*; use warp::{ @@ -33,13 +36,15 @@ mod routing; /// Main network handler and the only entrypoint of the Iroha. pub struct Torii { - iroha_cfg: super::Configuration, + kiso: KisoHandle, queue: Arc, events: EventsSender, notify_shutdown: Arc, sumeragi: SumeragiHandle, query_service: LiveQueryStoreHandle, kura: Arc, + transaction_max_content_length: u64, + address: SocketAddr, } /// Torii errors. 
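
Since `Torii` now keeps a `KisoHandle` instead of the whole configuration, the
`/configuration` endpoints (reworked in `routing.rs` below) exchange a
`ConfigurationDTO` rather than the full config tree. A hedged client-side sketch
of the new round-trip follows; `get_config`/`set_config` come from this
changeset, but the exact DTO field layout is an assumption:

```rust
use iroha_client::{client::Client, config::api::ConfigurationDTO, data_model::Level};

// Sketch only: the `logger.level` field path on the DTO is assumed.
fn raise_log_level(client: &Client) -> eyre::Result<()> {
    // GET /configuration now returns the runtime-updatable subset as a DTO.
    let mut dto: ConfigurationDTO = client.get_config()?;
    dto.logger.level = Level::DEBUG; // assumed field names
    // POST /configuration replies with `202 ACCEPTED` once the update is accepted.
    client.set_config(dto)?;
    Ok(())
}
```
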
@@ -53,13 +58,13 @@ pub enum Error { Config(#[source] eyre::Report), /// Failed to push into queue PushIntoQueue(#[from] Box), - /// Attempt to change configuration failed - ConfigurationReload(#[from] iroha_config::base::runtime_upgrades::ReloadError), #[cfg(feature = "telemetry")] /// Error while getting Prometheus metrics Prometheus(#[source] eyre::Report), /// Internal error while getting status StatusFailure(#[source] eyre::Report), + /// Failure caused by configuration subsystem + ConfigurationFailure(#[from] KisoError), /// Cannot find status segment by provided path StatusSegmentNotFound(#[source] eyre::Report), } @@ -82,7 +87,7 @@ impl Error { match self { Query(e) => Self::query_status_code(e), - AcceptTransaction(_) | ConfigurationReload(_) => StatusCode::BAD_REQUEST, + AcceptTransaction(_) => StatusCode::BAD_REQUEST, Config(_) | StatusSegmentNotFound(_) => StatusCode::NOT_FOUND, PushIntoQueue(err) => match **err { queue::Error::Full => StatusCode::INTERNAL_SERVER_ERROR, @@ -90,7 +95,9 @@ impl Error { _ => StatusCode::BAD_REQUEST, }, #[cfg(feature = "telemetry")] - Prometheus(_) | StatusFailure(_) => StatusCode::INTERNAL_SERVER_ERROR, + Prometheus(_) | StatusFailure(_) | ConfigurationFailure(_) => { + StatusCode::INTERNAL_SERVER_ERROR + } } } @@ -105,9 +112,7 @@ impl Error { QueryFailed(query_error) | InstructionFailed(InstructionExecutionError::Query(query_error)) => match query_error { - Evaluate(_) | Conversion(_) | UnknownCursor | FetchSizeTooBig => { - StatusCode::BAD_REQUEST - } + Conversion(_) | UnknownCursor | FetchSizeTooBig => StatusCode::BAD_REQUEST, Signature(_) => StatusCode::UNAUTHORIZED, Find(_) => StatusCode::NOT_FOUND, }, diff --git a/cli/src/torii/routing.rs b/cli/src/torii/routing.rs index 8326453d7af..6a9298974ce 100644 --- a/cli/src/torii/routing.rs +++ b/cli/src/torii/routing.rs @@ -7,12 +7,7 @@ use eyre::{eyre, WrapErr}; use futures::TryStreamExt; -use iroha_config::{ - base::proxy::Documented, - iroha::{Configuration, ConfigurationView}, - torii::uri, - GetConfiguration, PostConfiguration, -}; +use iroha_config::{client_api::ConfigurationDTO, torii::uri}; use iroha_core::{ query::{pagination::Paginate, store::LiveQueryStoreHandle}, smartcontracts::query::ValidQueryRequest, @@ -79,7 +74,7 @@ fn fetch_size() -> impl warp::Filter, sumeragi: SumeragiHandle, transaction: SignedTransaction, @@ -169,42 +164,18 @@ async fn handle_pending_transactions( } #[iroha_futures::telemetry_future] -async fn handle_get_configuration( - iroha_cfg: Configuration, - get_cfg: GetConfiguration, -) -> Result { - use GetConfiguration::*; - - match get_cfg { - Docs(field) => ::get_doc_recursive( - field.iter().map(AsRef::as_ref).collect::>(), - ) - .wrap_err("Failed to get docs {:?field}") - .and_then(|doc| serde_json::to_value(doc).wrap_err("Failed to serialize docs")), - // Cast to configuration view to hide private keys. 
- Value => serde_json::to_value(ConfigurationView::from(iroha_cfg)) - .wrap_err("Failed to serialize value"), - } - .map(|v| reply::json(&v)) - .map_err(Error::Config) +async fn handle_get_configuration(kiso: KisoHandle) -> Result { + let dto = kiso.get_dto().await?; + Ok(reply::json(&dto)) } #[iroha_futures::telemetry_future] async fn handle_post_configuration( - iroha_cfg: Configuration, - cfg: PostConfiguration, -) -> Result { - use iroha_config::base::runtime_upgrades::Reload; - use PostConfiguration::*; - - iroha_logger::debug!(?cfg); - match cfg { - LogLevel(level) => { - iroha_cfg.logger.max_log_level.reload(level)?; - } - }; - - Ok(reply::json(&true)) + kiso: KisoHandle, + value: ConfigurationDTO, +) -> Result { + kiso.update_with_dto(value).await?; + Ok(reply::with_status(reply::reply(), StatusCode::ACCEPTED)) } #[iroha_futures::telemetry_future] @@ -403,8 +374,9 @@ fn handle_status( impl Torii { /// Construct `Torii`. #[allow(clippy::too_many_arguments)] - pub fn from_configuration( - iroha_cfg: Configuration, + pub fn new( + kiso: KisoHandle, + config: &ToriiConfiguration, queue: Arc, events: EventsSender, notify_shutdown: Arc, @@ -413,13 +385,15 @@ impl Torii { kura: Arc, ) -> Self { Self { - iroha_cfg, + kiso, queue, events, notify_shutdown, sumeragi, query_service, kura, + address: config.api_url.clone(), + transaction_max_content_length: config.max_content_len.into(), } } @@ -437,12 +411,11 @@ impl Torii { .and(add_state!(self.queue, self.sumeragi,)) .and(paginate()), ) - .or(endpoint2( - handle_get_configuration, - warp::path(uri::CONFIGURATION) - .and(add_state!(self.iroha_cfg)) - .and(warp::body::json()), - )), + .or(warp::path(uri::CONFIGURATION) + .and(add_state!(self.kiso)) + .and_then(|kiso| async move { + Ok::<_, Infallible>(WarpResult(handle_get_configuration(kiso).await)) + })), ); let get_router_status = warp::path(uri::STATUS) @@ -474,11 +447,11 @@ impl Torii { let post_router = warp::post() .and( endpoint3( - handle_instructions, + handle_transaction, warp::path(uri::TRANSACTION) .and(add_state!(self.queue, self.sumeragi)) .and(warp::body::content_length_limit( - self.iroha_cfg.torii.max_content_len.into(), + self.transaction_max_content_length, )) .and(body::versioned()), ) @@ -491,7 +464,7 @@ impl Torii { .or(endpoint2( handle_post_configuration, warp::path(uri::CONFIGURATION) - .and(add_state!(self.iroha_cfg)) + .and(add_state!(self.kiso)) .and(warp::body::json()), )), ) @@ -549,10 +522,10 @@ impl Torii { /// # Errors /// Can fail due to listening to network or if http server fails fn start_api(self: Arc) -> eyre::Result>> { - let api_url = &self.iroha_cfg.torii.api_url; + let torii_address = &self.address; let mut handles = vec![]; - match api_url.to_socket_addrs() { + match torii_address.to_socket_addrs() { Ok(addrs) => { for addr in addrs { let torii = Arc::clone(&self); @@ -568,7 +541,7 @@ impl Torii { Ok(handles) } Err(error) => { - iroha_logger::error!(%api_url, %error, "API address configuration parse error"); + iroha_logger::error!(%torii_address, %error, "API address configuration parse error"); Err(eyre::Error::new(error)) } } diff --git a/client/benches/torii.rs b/client/benches/torii.rs index 5dc72359570..4ca452b930f 100644 --- a/client/benches/torii.rs +++ b/client/benches/torii.rs @@ -6,10 +6,9 @@ use criterion::{criterion_group, criterion_main, Criterion, Throughput}; use iroha::samples::{construct_executor, get_config}; use iroha_client::{ client::{asset, Client}, + crypto::KeyPair, data_model::prelude::*, }; -use 
iroha_config::base::runtime_upgrades::Reload; -use iroha_crypto::KeyPair; use iroha_genesis::{GenesisNetwork, RawGenesisBlockBuilder}; use iroha_primitives::unique_vec; use iroha_version::Encode; @@ -40,29 +39,31 @@ fn query_requests(criterion: &mut Criterion) { .expect("genesis creation failed"); let builder = PeerBuilder::new() - .with_configuration(configuration.clone()) + .with_configuration(configuration) .with_into_genesis(genesis); rt.block_on(builder.start_with_peer(&mut peer)); - configuration - .logger - .max_log_level - .reload(iroha_client::data_model::Level::ERROR) - .expect("Should not fail"); + rt.block_on(async { + iroha_logger::test_logger() + .reload_level(iroha_client::data_model::Level::ERROR) + .await + .unwrap() + }); let mut group = criterion.benchmark_group("query-requests"); let domain_id: DomainId = "domain".parse().expect("Valid"); - let create_domain = RegisterExpr::new(Domain::new(domain_id.clone())); + let create_domain = Register::domain(Domain::new(domain_id.clone())); let account_id = AccountId::new("account".parse().expect("Valid"), domain_id.clone()); let (public_key, _) = KeyPair::generate() .expect("Failed to generate KeyPair") .into(); - let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key])); + let create_account = Register::account(Account::new(account_id.clone(), [public_key])); let asset_definition_id = AssetDefinitionId::new("xor".parse().expect("Valid"), domain_id); - let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = + Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())); let quantity: u32 = 200; - let mint_asset = MintExpr::new( - quantity.to_value(), - IdBox::AssetId(AssetId::new(asset_definition_id, account_id.clone())), + let mint_asset = Mint::asset_quantity( + quantity, + AssetId::new(asset_definition_id, account_id.clone()), ); let mut client_config = iroha_client::samples::get_client_config(&get_key_pair()); @@ -71,7 +72,7 @@ fn query_requests(criterion: &mut Criterion) { let iroha_client = Client::new(&client_config).expect("Invalid client configuration"); thread::sleep(std::time::Duration::from_millis(5000)); - let instructions: [InstructionExpr; 4] = [ + let instructions: [InstructionBox; 4] = [ create_domain.into(), create_account.into(), create_asset.into(), @@ -140,12 +141,12 @@ fn instruction_submits(criterion: &mut Criterion) { rt.block_on(builder.start_with_peer(&mut peer)); let mut group = criterion.benchmark_group("instruction-requests"); let domain_id: DomainId = "domain".parse().expect("Valid"); - let create_domain = RegisterExpr::new(Domain::new(domain_id.clone())); + let create_domain: InstructionBox = Register::domain(Domain::new(domain_id.clone())).into(); let account_id = AccountId::new("account".parse().expect("Valid"), domain_id.clone()); let (public_key, _) = KeyPair::generate() .expect("Failed to generate Key-pair.") .into(); - let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key])); + let create_account = Register::account(Account::new(account_id.clone(), [public_key])).into(); let asset_definition_id = AssetDefinitionId::new("xor".parse().expect("Valid"), domain_id); let mut client_config = iroha_client::samples::get_client_config(&get_key_pair()); client_config.torii_api_url = format!("http://{}", peer.api_address).parse().unwrap(); @@ -160,12 +161,9 @@ fn instruction_submits(criterion: &mut Criterion) { let _dropable = group.bench_function("instructions", |b| 
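 // The bench body below mints 200 units per iteration through the new
 // `Mint::asset_quantity` constructor (replacing the former `MintExpr` +
 // `IdBox` wrappers) and tallies the outcome of every `submit` call.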
{ b.iter(|| { let quantity: u32 = 200; - let mint_asset = MintExpr::new( - quantity.to_value(), - IdBox::AssetId(AssetId::new( - asset_definition_id.clone(), - account_id.clone(), - )), + let mint_asset = Mint::asset_quantity( + quantity, + AssetId::new(asset_definition_id.clone(), account_id.clone()), ); match iroha_client.submit(mint_asset) { Ok(_) => success_count += 1, diff --git a/client/benches/tps/oneshot.rs b/client/benches/tps/oneshot.rs index 6fd57cf00ba..99efceac8b2 100644 --- a/client/benches/tps/oneshot.rs +++ b/client/benches/tps/oneshot.rs @@ -20,7 +20,7 @@ fn main() { flush_guard = Some(flame_layer.flush_on_drop()); tracing_subscriber::registry().with(flame_layer).init(); - iroha_logger::disable_logger(); + iroha_logger::disable_global().expect("Logger should not be set yet"); } let config = utils::Config::from_path("benches/tps/config.json").expect("Failed to configure"); diff --git a/client/benches/tps/utils.rs b/client/benches/tps/utils.rs index d21611f7d53..c1a3494260f 100644 --- a/client/benches/tps/utils.rs +++ b/client/benches/tps/utils.rs @@ -19,7 +19,6 @@ use iroha_client::{ }, }; use serde::Deserialize; -use serde_json::json; use test_network::*; pub type Tps = f64; @@ -57,11 +56,11 @@ impl Config { pub fn measure(self) -> Result { // READY - let (_rt, network, client) = ::start_test_with_runtime(self.peers, None); + let (_rt, network, client) = Network::start_test_with_runtime(self.peers, None); let clients = network.clients(); wait_for_genesis_committed(&clients, 0); - client.submit_blocking( + client.submit_all_blocking( ParametersBuilder::new() .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, self.max_txs_per_block)? .into_set_parameters(), @@ -70,13 +69,12 @@ impl Config { let unit_names = (UnitName::MIN..).take(self.peers as usize); let units = clients .into_iter() - .zip(unit_names.clone().zip(unit_names.cycle().skip(1))) - .map(|(client, pair)| { + .zip(unit_names) + .map(|(client, name)| { let unit = MeasurerUnit { config: self, client, - name: pair.0, - next_name: pair.1, + name, }; unit.ready() }) @@ -155,7 +153,6 @@ struct MeasurerUnit { pub config: Config, pub client: Client, pub name: UnitName, - pub next_name: UnitName, } type UnitName = u32; @@ -166,37 +163,17 @@ impl MeasurerUnit { /// Submit initial transactions for measurement fn ready(self) -> Result { - let keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair."); + let keypair = + iroha_client::crypto::KeyPair::generate().expect("Failed to generate KeyPair."); let account_id = account_id(self.name); - let alice_id = AccountId::from_str("alice@wonderland")?; let asset_id = asset_id(self.name); - let register_me = RegisterExpr::new(Account::new( - account_id.clone(), - [keypair.public_key().clone()], - )); + let register_me = + Register::account(Account::new(account_id, [keypair.public_key().clone()])); self.client.submit_blocking(register_me)?; - let can_burn_my_asset = PermissionToken::new( - "CanBurnUserAsset".parse().unwrap(), - &json!({ "asset_id": asset_id }), - ); - let allow_alice_to_burn_my_asset = GrantExpr::new(can_burn_my_asset, alice_id.clone()); - let can_transfer_my_asset = PermissionToken::new( - "CanTransferUserAsset".parse().unwrap(), - &json!({ "asset_id": asset_id }), - ); - let allow_alice_to_transfer_my_asset = GrantExpr::new(can_transfer_my_asset, alice_id); - let grant_tx = TransactionBuilder::new(account_id) - .with_instructions([ - allow_alice_to_burn_my_asset, - allow_alice_to_transfer_my_asset, - ]) - .sign(keypair)?; - 
self.client.submit_transaction_blocking(&grant_tx)?; - - let mint_a_rose = MintExpr::new(1_u32, asset_id); + let mint_a_rose = Mint::asset_quantity(1_u32, asset_id); self.client.submit_blocking(mint_a_rose)?; Ok(self) @@ -267,42 +244,12 @@ impl MeasurerUnit { }) } - fn instructions(&self) -> impl Iterator { - [self.mint_or_burn(), self.relay_a_rose()] - .into_iter() - .cycle() - } - - fn mint_or_burn(&self) -> InstructionExpr { - let is_running_out = Less::new( - EvaluatesTo::new_unchecked(Expression::Query( - FindAssetQuantityById::new(asset_id(self.name)).into(), - )), - 100_u32, - ); - let supply_roses = MintExpr::new(100_u32.to_value(), asset_id(self.name)); - let burn_a_rose = BurnExpr::new(1_u32.to_value(), asset_id(self.name)); - - ConditionalExpr::with_otherwise(is_running_out, supply_roses, burn_a_rose).into() + fn instructions(&self) -> impl Iterator { + std::iter::once(self.mint()).cycle() } - fn relay_a_rose(&self) -> InstructionExpr { - // Save at least one rose - // because if asset value hits 0 it's automatically deleted from account - // and query `FindAssetQuantityById` return error - let enough_to_transfer = Greater::new( - EvaluatesTo::new_unchecked(Expression::Query( - FindAssetQuantityById::new(asset_id(self.name)).into(), - )), - 1_u32, - ); - let transfer_rose = TransferExpr::new( - asset_id(self.name), - 1_u32.to_value(), - account_id(self.next_name), - ); - - ConditionalExpr::new(enough_to_transfer, transfer_rose).into() + fn mint(&self) -> InstructionBox { + Mint::asset_quantity(1_u32, asset_id(self.name)).into() } } diff --git a/client/examples/million_accounts_genesis.rs b/client/examples/million_accounts_genesis.rs index 079c0ecc7e5..57993c1a972 100644 --- a/client/examples/million_accounts_genesis.rs +++ b/client/examples/million_accounts_genesis.rs @@ -3,6 +3,7 @@ use std::{thread, time::Duration}; use iroha::samples::{construct_executor, get_config}; use iroha_client::data_model::prelude::*; +use iroha_data_model::isi::InstructionBox; use iroha_genesis::{GenesisNetwork, RawGenesisBlock, RawGenesisBlockBuilder}; use iroha_primitives::unique_vec; use test_network::{ @@ -64,8 +65,8 @@ fn create_million_accounts_directly() { format!("bob-{i}").parse().expect("Valid"), domain_id.clone(), ); - let create_domain = RegisterExpr::new(Domain::new(domain_id)); - let create_account = RegisterExpr::new(Account::new(normal_account_id.clone(), [])); + let create_domain: InstructionBox = Register::domain(Domain::new(domain_id)).into(); + let create_account = Register::account(Account::new(normal_account_id.clone(), [])).into(); if test_client .submit_all([create_domain, create_account]) .is_err() diff --git a/client/examples/tutorial.rs b/client/examples/tutorial.rs index a961a1932e7..cead2516b4a 100644 --- a/client/examples/tutorial.rs +++ b/client/examples/tutorial.rs @@ -3,9 +3,8 @@ use std::fs::File; use eyre::{Error, WrapErr}; -use iroha_client::data_model::TryToValue; +use iroha_client::config::Configuration; // #region rust_config_crates -use iroha_config::client::Configuration; // #endregion rust_config_crates fn main() { @@ -51,7 +50,7 @@ fn domain_registration_test(config: &Configuration) -> Result<(), Error> { client::Client, data_model::{ metadata::UnlimitedMetadata, - prelude::{Domain, DomainId, InstructionExpr, RegisterExpr}, + prelude::{Domain, DomainId, InstructionBox, Register}, }, }; // #endregion domain_register_example_crates @@ -63,7 +62,7 @@ fn domain_registration_test(config: &Configuration) -> Result<(), Error> { // #region 
domain_register_example_create_isi // Create an ISI - let create_looking_glass = RegisterExpr::new(Domain::new(looking_glass)); + let create_looking_glass = Register::domain(Domain::new(looking_glass)); // #endregion domain_register_example_create_isi // #region rust_client_create @@ -74,7 +73,7 @@ fn domain_registration_test(config: &Configuration) -> Result<(), Error> { // #region domain_register_example_prepare_tx // Prepare a transaction let metadata = UnlimitedMetadata::default(); - let instructions: Vec = vec![create_looking_glass.into()]; + let instructions: Vec = vec![create_looking_glass.into()]; let tx = iroha_client .build_transaction(instructions, metadata) .wrap_err("Error building a domain registration transaction")?; @@ -115,12 +114,12 @@ fn account_registration_test(config: &Configuration) -> Result<(), Error> { // #region register_account_crates use iroha_client::{ client::Client, + crypto::KeyPair, data_model::{ metadata::UnlimitedMetadata, - prelude::{Account, AccountId, InstructionExpr, RegisterExpr}, + prelude::{Account, AccountId, InstructionBox, Register}, }, }; - use iroha_crypto::KeyPair; // #endregion register_account_crates // Create an Iroha client @@ -141,14 +140,14 @@ fn account_registration_test(config: &Configuration) -> Result<(), Error> { // #region register_account_generate // Generate a new account - let create_account = RegisterExpr::new(Account::new(account_id, [public_key])); + let create_account = Register::account(Account::new(account_id, [public_key])); // #endregion register_account_generate // #region register_account_prepare_tx // Prepare a transaction using the - // Account's RegisterExpr + // Account's RegisterBox let metadata = UnlimitedMetadata::new(); - let instructions: Vec = vec![create_account.into()]; + let instructions: Vec = vec![create_account.into()]; let tx = iroha_client.build_transaction(instructions, metadata)?; // #endregion register_account_prepare_tx @@ -168,7 +167,7 @@ fn asset_registration_test(config: &Configuration) -> Result<(), Error> { use iroha_client::{ client::Client, data_model::prelude::{ - AccountId, AssetDefinition, AssetDefinitionId, AssetId, IdBox, MintExpr, RegisterExpr, + AccountId, AssetDefinition, AssetDefinitionId, AssetId, Mint, Register, }, }; // #endregion register_asset_crates @@ -185,7 +184,7 @@ fn asset_registration_test(config: &Configuration) -> Result<(), Error> { // #region register_asset_init_submit // Initialise the registration time let register_time = - RegisterExpr::new(AssetDefinition::fixed(asset_def_id.clone()).mintable_once()); + Register::asset_definition(AssetDefinition::fixed(asset_def_id.clone()).mintable_once()); // Submit a registration time iroha_client.submit(register_time)?; @@ -197,10 +196,10 @@ fn asset_registration_test(config: &Configuration) -> Result<(), Error> { .expect("Valid, because the string contains no whitespace, has a single '@' character and is not empty after"); // #region register_asset_mint_submit - // Create a MintExpr using a previous asset and account - let mint = MintExpr::new( - 12.34_f64.try_to_value()?, - IdBox::AssetId(AssetId::new(asset_def_id, account_id)), + // Create a MintBox using a previous asset and account + let mint = Mint::asset_fixed( + 12.34_f64.try_into()?, + AssetId::new(asset_def_id, account_id), ); // Submit a minting transaction @@ -217,10 +216,7 @@ fn asset_minting_test(config: &Configuration) -> Result<(), Error> { use iroha_client::{ client::Client, - data_model::{ - prelude::{AccountId, AssetDefinitionId, AssetId, MintExpr, 
ToValue}, - IdBox, - }, + data_model::prelude::{AccountId, AssetDefinitionId, AssetId, Mint}, }; // #endregion mint_asset_crates @@ -237,10 +233,7 @@ fn asset_minting_test(config: &Configuration) -> Result<(), Error> { // Mint the Asset instance // #region mint_asset_mint - let mint_roses = MintExpr::new( - 42_u32.to_value(), - IdBox::AssetId(AssetId::new(roses, alice)), - ); + let mint_roses = Mint::asset_quantity(42_u32, AssetId::new(roses, alice)); // #endregion mint_asset_mint // #region mint_asset_submit_tx @@ -255,10 +248,7 @@ fn asset_minting_test(config: &Configuration) -> Result<(), Error> { // or `roses.to_string() + "#" + alice.to_string()`. // The `##` is a short-hand for the rose `which belongs to the same domain as the account // to which it belongs to. - let mint_roses_alt = MintExpr::new( - 10_u32.to_value(), - IdBox::AssetId("rose##alice@wonderland".parse()?), - ); + let mint_roses_alt = Mint::asset_quantity(10_u32, "rose##alice@wonderland".parse()?); // #endregion mint_asset_mint_alt // #region mint_asset_submit_tx_alt @@ -277,10 +267,7 @@ fn asset_burning_test(config: &Configuration) -> Result<(), Error> { use iroha_client::{ client::Client, - data_model::{ - prelude::{AccountId, AssetDefinitionId, AssetId, BurnExpr, ToValue}, - IdBox, - }, + data_model::prelude::{AccountId, AssetDefinitionId, AssetId, Burn}, }; // #endregion burn_asset_crates @@ -297,10 +284,7 @@ fn asset_burning_test(config: &Configuration) -> Result<(), Error> { // #region burn_asset_burn // Burn the Asset instance - let burn_roses = BurnExpr::new( - 10_u32.to_value(), - IdBox::AssetId(AssetId::new(roses, alice)), - ); + let burn_roses = Burn::asset_quantity(10_u32, AssetId::new(roses, alice)); // #endregion burn_asset_burn // #region burn_asset_submit_tx @@ -315,10 +299,7 @@ fn asset_burning_test(config: &Configuration) -> Result<(), Error> { // or `roses.to_string() + "#" + alice.to_string()`. // The `##` is a short-hand for the rose `which belongs to the same domain as the account // to which it belongs to. 
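    // In other words, `rose##alice@wonderland` parses to the same `AssetId` as the
    // fully qualified `rose#wonderland#alice@wonderland`: the definition's domain
    // is taken from the account's domain.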
- let burn_roses_alt = BurnExpr::new( - 10_u32.to_value(), - IdBox::AssetId("rose##alice@wonderland".parse()?), - ); + let burn_roses_alt = Burn::asset_quantity(10_u32, "rose##alice@wonderland".parse()?); // #endregion burn_asset_burn_alt // #region burn_asset_submit_tx_alt diff --git a/client/src/client.rs b/client/src/client.rs index 942e5444a8d..3a4c7615397 100644 --- a/client/src/client.rs +++ b/client/src/client.rs @@ -13,18 +13,17 @@ use derive_more::{DebugCustom, Display}; use eyre::{eyre, Result, WrapErr}; use futures_util::StreamExt; use http_default::{AsyncWebSocketStream, WebSocketStream}; -use iroha_config::{client::Configuration, torii::uri, GetConfiguration, PostConfiguration}; -use iroha_crypto::{HashOf, KeyPair}; use iroha_logger::prelude::*; use iroha_telemetry::metrics::Status; use iroha_version::prelude::*; use parity_scale_codec::DecodeAll; use rand::Rng; -use serde::de::DeserializeOwned; use url::Url; use self::{blocks_api::AsyncBlockStream, events_api::AsyncEventStream}; use crate::{ + config::{api::ConfigurationDTO, Configuration}, + crypto::{HashOf, KeyPair}, data_model::{ block::SignedBlock, isi::Instruction, @@ -70,15 +69,15 @@ pub trait Sign { /// Fails if signature creation fails fn sign( self, - key_pair: iroha_crypto::KeyPair, - ) -> Result; + key_pair: crate::crypto::KeyPair, + ) -> Result; } impl Sign for TransactionBuilder { fn sign( self, - key_pair: iroha_crypto::KeyPair, - ) -> Result { + key_pair: crate::crypto::KeyPair, + ) -> Result { self.sign(key_pair) } } @@ -86,8 +85,8 @@ impl Sign for TransactionBuilder { impl Sign for SignedTransaction { fn sign( self, - key_pair: iroha_crypto::KeyPair, - ) -> Result { + key_pair: crate::crypto::KeyPair, + ) -> Result { self.sign(key_pair) } } @@ -373,7 +372,7 @@ pub struct QueryRequest { impl QueryRequest { #[cfg(test)] fn dummy() -> Self { - let torii_url = iroha_config::torii::uri::DEFAULT_API_ADDR; + let torii_url = crate::config::torii::DEFAULT_API_ADDR; Self { torii_url: format!("http://{torii_url}").parse().unwrap(), @@ -392,7 +391,9 @@ impl QueryRequest { fn assemble(self) -> DefaultRequestBuilder { let builder = DefaultRequestBuilder::new( HttpMethod::POST, - self.torii_url.join(uri::QUERY).expect("Valid URI"), + self.torii_url + .join(crate::config::torii::QUERY) + .expect("Valid URI"), ) .headers(self.headers); @@ -683,7 +684,9 @@ impl Client { ( B::new( HttpMethod::POST, - self.torii_url.join(uri::TRANSACTION).expect("Valid URI"), + self.torii_url + .join(crate::config::torii::TRANSACTION) + .expect("Valid URI"), ) .headers(self.headers.clone()) .body(transaction_bytes), @@ -753,7 +756,7 @@ impl Client { /// /// ```ignore /// use eyre::Result; - /// use iroha_client::{ + /// use crate::{ /// data_model::{predicate::PredicateBox, prelude::{Account, FindAllAccounts, Pagination}}, /// client::Client, /// http::{RequestBuilder, Response, Method}, @@ -952,7 +955,9 @@ impl Client { events_api::flow::Init::new( event_filter, self.headers.clone(), - self.torii_url.join(uri::SUBSCRIPTION).expect("Valid URI"), + self.torii_url + .join(crate::config::torii::SUBSCRIPTION) + .expect("Valid URI"), ) } @@ -986,7 +991,9 @@ impl Client { blocks_api::flow::Init::new( height, self.headers.clone(), - self.torii_url.join(uri::BLOCKS_STREAM).expect("Valid URI"), + self.torii_url + .join(crate::config::torii::BLOCKS_STREAM) + .expect("Valid URI"), ) } @@ -1019,7 +1026,7 @@ impl Client { let response = DefaultRequestBuilder::new( HttpMethod::GET, self.torii_url - .join(uri::PENDING_TRANSACTIONS) + 
.join(crate::config::torii::PENDING_TRANSACTIONS) .expect("Valid URI"), ) .params(pagination.clone()) @@ -1073,13 +1080,18 @@ impl Client { ) } - fn get_config(&self, get_config: &GetConfiguration) -> Result { + /// Get value of config on peer + /// + /// # Errors + /// Fails if sending request or decoding fails + pub fn get_config(&self) -> Result { let resp = DefaultRequestBuilder::new( HttpMethod::GET, - self.torii_url.join(uri::CONFIGURATION).expect("Valid URI"), + self.torii_url + .join(crate::config::torii::CONFIGURATION) + .expect("Valid URI"), ) .header(http::header::CONTENT_TYPE, APPLICATION_JSON) - .body(serde_json::to_vec(get_config).wrap_err("Failed to serialize")?) .build()? .send()?; @@ -1097,44 +1109,27 @@ impl Client { /// /// # Errors /// If sending request or decoding fails - pub fn set_config(&self, post_config: PostConfiguration) -> Result { - let body = serde_json::to_vec(&post_config) - .wrap_err(format!("Failed to serialize {post_config:?}"))?; - let url = self.torii_url.join(uri::CONFIGURATION).expect("Valid URI"); + pub fn set_config(&self, dto: ConfigurationDTO) -> Result<()> { + let body = serde_json::to_vec(&dto).wrap_err(format!("Failed to serialize {dto:?}"))?; + let url = self + .torii_url + .join(crate::config::torii::CONFIGURATION) + .expect("Valid URI"); let resp = DefaultRequestBuilder::new(HttpMethod::POST, url) .header(http::header::CONTENT_TYPE, APPLICATION_JSON) .body(body) .build()? .send()?; - if resp.status() != StatusCode::OK { + if resp.status() != StatusCode::ACCEPTED { return Err(eyre!( "Failed to post configuration with HTTP status: {}. {}", resp.status(), std::str::from_utf8(resp.body()).unwrap_or(""), )); - } - serde_json::from_slice(resp.body()) - .wrap_err(format!("Failed to decode body {:?}", resp.body())) - } - - /// Get documentation of some field on config - /// - /// # Errors - /// Fails if sending request or decoding fails - pub fn get_config_docs(&self, field: &[&str]) -> Result> { - let field = field.iter().copied().map(ToOwned::to_owned).collect(); - self.get_config(&GetConfiguration::Docs(field)) - .wrap_err("Failed to get docs for field") - } + }; - /// Get value of config on peer - /// - /// # Errors - /// Fails if sending request or decoding fails - pub fn get_config_value(&self) -> Result { - self.get_config(&GetConfiguration::Value) - .wrap_err("Failed to get configuration value") + Ok(()) } /// Gets network status seen from the peer @@ -1156,7 +1151,9 @@ impl Client { pub fn prepare_status_request(&self) -> B { B::new( HttpMethod::GET, - self.torii_url.join(uri::STATUS).expect("Valid URI"), + self.torii_url + .join(crate::config::torii::STATUS) + .expect("Valid URI"), ) .headers(self.headers.clone()) } @@ -1490,14 +1487,12 @@ pub mod account { } /// Construct a query to get account by id - pub fn by_id(account_id: impl Into>) -> FindAccountById { + pub fn by_id(account_id: AccountId) -> FindAccountById { FindAccountById::new(account_id) } /// Construct a query to get all accounts containing specified asset - pub fn all_with_asset( - asset_definition_id: impl Into>, - ) -> FindAccountsWithAsset { + pub fn all_with_asset(asset_definition_id: AssetDefinitionId) -> FindAccountsWithAsset { FindAccountsWithAsset::new(asset_definition_id) } } @@ -1517,19 +1512,17 @@ pub mod asset { } /// Construct a query to get asset definition by its id - pub fn definition_by_id( - asset_definition_id: impl Into>, - ) -> FindAssetDefinitionById { + pub fn definition_by_id(asset_definition_id: AssetDefinitionId) -> FindAssetDefinitionById { 
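+        // These constructors now take concrete id types (here `AssetDefinitionId`)
+        // rather than `impl Into<EvaluatesTo<_>>`, since the runtime expression
+        // machinery is removed elsewhere in this changeset.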
FindAssetDefinitionById::new(asset_definition_id) } /// Construct a query to get all assets by account id - pub fn by_account_id(account_id: impl Into>) -> FindAssetsByAccountId { + pub fn by_account_id(account_id: AccountId) -> FindAssetsByAccountId { FindAssetsByAccountId::new(account_id) } /// Construct a query to get an asset by its id - pub fn by_id(asset_id: impl Into>) -> FindAssetById { + pub fn by_id(asset_id: AssetId) -> FindAssetById { FindAssetById::new(asset_id) } } @@ -1550,9 +1543,7 @@ pub mod block { } /// Construct a query to find block header by hash - pub fn header_by_hash( - hash: impl Into>>, - ) -> FindBlockHeaderByHash { + pub fn header_by_hash(hash: HashOf) -> FindBlockHeaderByHash { FindBlockHeaderByHash::new(hash) } } @@ -1567,7 +1558,7 @@ pub mod domain { } /// Construct a query to get all domain by id - pub fn by_id(domain_id: impl Into>) -> FindDomainById { + pub fn by_id(domain_id: DomainId) -> FindDomainById { FindDomainById::new(domain_id) } } @@ -1583,16 +1574,12 @@ pub mod transaction { } /// Construct a query to retrieve transactions for account - pub fn by_account_id( - account_id: impl Into>, - ) -> FindTransactionsByAccountId { + pub fn by_account_id(account_id: AccountId) -> FindTransactionsByAccountId { FindTransactionsByAccountId::new(account_id) } /// Construct a query to retrieve transaction by hash - pub fn by_hash( - hash: impl Into>>, - ) -> FindTransactionByHash { + pub fn by_hash(hash: HashOf) -> FindTransactionByHash { FindTransactionByHash::new(hash) } } @@ -1602,7 +1589,7 @@ pub mod trigger { use super::*; /// Construct a query to get triggers by domain id - pub fn by_domain_id(domain_id: impl Into>) -> FindTriggersByDomainId { + pub fn by_domain_id(domain_id: DomainId) -> FindTriggersByDomainId { FindTriggersByDomainId::new(domain_id) } } @@ -1618,10 +1605,8 @@ pub mod permission { /// Construct a query to get all [`PermissionToken`] granted /// to account with given [`Id`][AccountId] - pub fn by_account_id( - account_id: impl Into>, - ) -> FindPermissionTokensByAccountId { - FindPermissionTokensByAccountId::new(account_id.into()) + pub fn by_account_id(account_id: AccountId) -> FindPermissionTokensByAccountId { + FindPermissionTokensByAccountId::new(account_id) } } @@ -1640,12 +1625,12 @@ pub mod role { } /// Construct a query to retrieve a role by its id - pub fn by_id(role_id: impl Into>) -> FindRoleByRoleId { + pub fn by_id(role_id: RoleId) -> FindRoleByRoleId { FindRoleByRoleId::new(role_id) } /// Construct a query to retrieve all roles for an account - pub fn by_account_id(account_id: impl Into>) -> FindRolesByAccountId { + pub fn by_account_id(account_id: AccountId) -> FindRolesByAccountId { FindRolesByAccountId::new(account_id) } } @@ -1664,13 +1649,10 @@ pub mod parameter { mod tests { use std::str::FromStr; - use iroha_config::{ - client::{BasicAuth, ConfigurationProxy, WebLogin}, - torii::uri::DEFAULT_API_ADDR, - }; use iroha_primitives::small::SmallStr; use super::*; + use crate::config::{torii::DEFAULT_API_ADDR, BasicAuth, ConfigurationProxy, WebLogin}; const LOGIN: &str = "mad_hatter"; const PASSWORD: &str = "ilovetea"; @@ -1699,7 +1681,7 @@ mod tests { let build_transaction = || { client - .build_transaction(Vec::::new(), UnlimitedMetadata::new()) + .build_transaction(Vec::::new(), UnlimitedMetadata::new()) .unwrap() }; let tx1 = build_transaction(); @@ -1729,8 +1711,8 @@ mod tests { .parse() .expect("Public key not in mulithash format"), ), - private_key: Some(iroha_crypto::PrivateKey::from_hex( - 
iroha_crypto::Algorithm::Ed25519, + private_key: Some(crate::crypto::PrivateKey::from_hex( + crate::crypto::Algorithm::Ed25519, "9AC47ABF59B356E0BD7DCBBBB4DEC080E302156A48CA907E47CB6AEA1D32719E7233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" ).expect("Private key not hex encoded")), account_id: Some( @@ -1772,13 +1754,6 @@ mod tests { )), ), (StatusCode::UNPROCESSABLE_ENTITY, ValidationFail::TooComplex), - ( - StatusCode::NOT_FOUND, - // Here should be `Find`, but actually handler doesn't care - ValidationFail::QueryFailed(QueryExecutionFail::Evaluate( - "whatever".to_owned(), - )), - ), ]; for (status_code, err) in responses { let resp = Response::builder().status(status_code).body(err.encode())?; diff --git a/client/src/lib.rs b/client/src/lib.rs index 3d12fef5365..78a3cbeac13 100644 --- a/client/src/lib.rs +++ b/client/src/lib.rs @@ -9,11 +9,10 @@ mod query_builder; /// Module containing sample configurations for tests and benchmarks. pub mod samples { - use iroha_config::{ - client::{Configuration, ConfigurationProxy}, - torii::uri::DEFAULT_API_ADDR, + use crate::{ + config::{torii::DEFAULT_API_ADDR, Configuration, ConfigurationProxy}, + crypto::KeyPair, }; - use iroha_crypto::KeyPair; /// Get sample client configuration. pub fn get_client_config(key_pair: &KeyPair) -> Configuration { @@ -38,4 +37,11 @@ pub mod samples { } } +pub mod config { + //! Module for client-related configuration and structs + + pub use iroha_config::{client::*, client_api as api, path, torii::uri as torii}; +} + +pub use iroha_crypto as crypto; pub use iroha_data_model as data_model; diff --git a/client/tests/integration/add_account.rs b/client/tests/integration/add_account.rs index f463266399e..d46b3bb65af 100644 --- a/client/tests/integration/add_account.rs +++ b/client/tests/integration/add_account.rs @@ -2,6 +2,7 @@ use std::thread; use eyre::Result; use iroha_client::{client, data_model::prelude::*}; +use iroha_config::iroha::Configuration; use test_network::*; #[test] @@ -10,17 +11,17 @@ fn client_add_account_with_name_length_more_than_limit_should_not_commit_transac let (_rt, _peer, test_client) = ::new().with_port(10_505).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); - let pipeline_time = super::Configuration::pipeline_time(); + let pipeline_time = Configuration::pipeline_time(); let normal_account_id: AccountId = "bob@wonderland".parse().expect("Valid"); - let create_account = RegisterExpr::new(Account::new(normal_account_id.clone(), [])); + let create_account = Register::account(Account::new(normal_account_id.clone(), [])); test_client.submit(create_account)?; let too_long_account_name = "0".repeat(2_usize.pow(14)); let incorrect_account_id: AccountId = (too_long_account_name + "@wonderland") .parse() .expect("Valid"); - let create_account = RegisterExpr::new(Account::new(incorrect_account_id.clone(), [])); + let create_account = Register::account(Account::new(incorrect_account_id.clone(), [])); test_client.submit(create_account)?; thread::sleep(pipeline_time * 2); diff --git a/client/tests/integration/add_domain.rs b/client/tests/integration/add_domain.rs index f963fe31d10..bb889c25c15 100644 --- a/client/tests/integration/add_domain.rs +++ b/client/tests/integration/add_domain.rs @@ -2,10 +2,9 @@ use std::thread; use eyre::Result; use iroha_client::{client, data_model::prelude::*}; +use iroha_config::iroha::Configuration; use test_network::*; -use super::Configuration; - #[test] fn 
client_add_domain_with_name_length_more_than_limit_should_not_commit_transaction() -> Result<()> { @@ -16,11 +15,11 @@ fn client_add_domain_with_name_length_more_than_limit_should_not_commit_transact // Given let normal_domain_id: DomainId = "sora".parse()?; - let create_domain = RegisterExpr::new(Domain::new(normal_domain_id.clone())); + let create_domain = Register::domain(Domain::new(normal_domain_id.clone())); test_client.submit(create_domain)?; let too_long_domain_name: DomainId = "0".repeat(2_usize.pow(14)).parse()?; - let create_domain = RegisterExpr::new(Domain::new(too_long_domain_name.clone())); + let create_domain = Register::domain(Domain::new(too_long_domain_name.clone())); test_client.submit(create_domain)?; thread::sleep(pipeline_time * 2); diff --git a/client/tests/integration/asset.rs b/client/tests/integration/asset.rs index 26e672fc857..3b151b99ec8 100644 --- a/client/tests/integration/asset.rs +++ b/client/tests/integration/asset.rs @@ -3,15 +3,14 @@ use std::{str::FromStr as _, thread}; use eyre::Result; use iroha_client::{ client::{self, QueryResult}, + crypto::{KeyPair, PublicKey}, data_model::prelude::*, }; -use iroha_crypto::{KeyPair, PublicKey}; +use iroha_config::iroha::Configuration; use iroha_primitives::fixed::Fixed; use serde_json::json; use test_network::*; -use super::Configuration; - #[test] fn client_register_asset_should_add_asset_once_but_not_twice() -> Result<()> { let (_rt, _peer, test_client) = ::new().with_port(10_620).start_with_runtime(); @@ -21,11 +20,13 @@ fn client_register_asset_should_add_asset_once_but_not_twice() -> Result<()> { let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("test_asset#wonderland").expect("Valid"); - let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); - let register_asset = RegisterExpr::new(Asset::new( + let create_asset: InstructionBox = + Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())).into(); + let register_asset: InstructionBox = Register::asset(Asset::new( AssetId::new(asset_definition_id.clone(), account_id.clone()), AssetValue::Quantity(0), - )); + )) + .into(); test_client.submit_all([create_asset, register_asset.clone()])?; @@ -56,9 +57,11 @@ fn unregister_asset_should_remove_asset_from_account() -> Result<()> { let asset_definition_id = AssetDefinitionId::from_str("test_asset#wonderland").expect("Valid"); let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone()); - let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); - let register_asset = RegisterExpr::new(Asset::new(asset_id.clone(), AssetValue::Quantity(0))); - let unregister_asset = UnregisterExpr::new(asset_id); + let create_asset: InstructionBox = + Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())).into(); + let register_asset = + Register::asset(Asset::new(asset_id.clone(), AssetValue::Quantity(0))).into(); + let unregister_asset = Unregister::asset(asset_id); test_client.submit_all([create_asset, register_asset])?; @@ -93,18 +96,16 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount() -> // Given let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); - let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = + 
        Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone()));
     let metadata = iroha_client::data_model::metadata::UnlimitedMetadata::default();
     //When
     let quantity: u32 = 200;
-    let mint = MintExpr::new(
-        quantity.to_value(),
-        IdBox::AssetId(AssetId::new(
-            asset_definition_id.clone(),
-            account_id.clone(),
-        )),
+    let mint = Mint::asset_quantity(
+        quantity,
+        AssetId::new(asset_definition_id.clone(), account_id.clone()),
     );
-    let instructions: [InstructionExpr; 2] = [create_asset.into(), mint.into()];
+    let instructions: [InstructionBox; 2] = [create_asset.into(), mint.into()];
     let tx = test_client.build_transaction(instructions, metadata)?;
     test_client.submit_transaction(&tx)?;
     test_client.poll_request(client::asset::by_account_id(account_id), |result| {
@@ -127,18 +128,15 @@ fn client_add_big_asset_quantity_to_existing_asset_should_increase_asset_amount(
     let account_id = AccountId::from_str("alice@wonderland").expect("Valid");
     let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid");
     let create_asset =
-        RegisterExpr::new(AssetDefinition::big_quantity(asset_definition_id.clone()));
+        Register::asset_definition(AssetDefinition::big_quantity(asset_definition_id.clone()));
     let metadata = iroha_client::data_model::metadata::UnlimitedMetadata::default();
     //When
     let quantity: u128 = 2_u128.pow(65);
-    let mint = MintExpr::new(
-        quantity.to_value(),
-        IdBox::AssetId(AssetId::new(
-            asset_definition_id.clone(),
-            account_id.clone(),
-        )),
+    let mint = Mint::asset_big_quantity(
+        quantity,
+        AssetId::new(asset_definition_id.clone(), account_id.clone()),
     );
-    let instructions: [InstructionExpr; 2] = [create_asset.into(), mint.into()];
+    let instructions: [InstructionBox; 2] = [create_asset.into(), mint.into()];
     let tx = test_client.build_transaction(instructions, metadata)?;
     test_client.submit_transaction(&tx)?;
     test_client.poll_request(client::asset::by_account_id(account_id), |result| {
@@ -160,20 +158,17 @@ fn client_add_asset_with_decimal_should_increase_asset_amount() -> Result<()> {
     // Given
     let account_id = AccountId::from_str("alice@wonderland").expect("Valid");
     let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid");
-    let identifiable_box = AssetDefinition::fixed(asset_definition_id.clone());
-    let create_asset = RegisterExpr::new(identifiable_box);
+    let asset_definition = AssetDefinition::fixed(asset_definition_id.clone());
+    let create_asset = Register::asset_definition(asset_definition);
     let metadata = iroha_client::data_model::metadata::UnlimitedMetadata::default();
     //When
     let quantity: Fixed = Fixed::try_from(123.456_f64).unwrap();
-    let mint = MintExpr::new(
-        quantity.to_value(),
-        IdBox::AssetId(AssetId::new(
-            asset_definition_id.clone(),
-            account_id.clone(),
-        )),
+    let mint = Mint::asset_fixed(
+        quantity,
+        AssetId::new(asset_definition_id.clone(), account_id.clone()),
     );
-    let instructions: [InstructionExpr; 2] = [create_asset.into(), mint.into()];
+    let instructions: [InstructionBox; 2] = [create_asset.into(), mint.into()];
     let tx = test_client.build_transaction(instructions, metadata)?;
     test_client.submit_transaction(&tx)?;
     test_client.poll_request(client::asset::by_account_id(account_id.clone()), |result| {
@@ -187,12 +182,9 @@ fn client_add_asset_with_decimal_should_increase_asset_amount() -> Result<()> {
 
     // Add some fractional part
     let quantity2: Fixed = Fixed::try_from(0.55_f64).unwrap();
-    let mint = MintExpr::new(
-        quantity2.to_value(),
-        IdBox::AssetId(AssetId::new(
-            asset_definition_id.clone(),
-            account_id.clone(),
-        )),
+    let mint = Mint::asset_fixed(
+        quantity2,
+        AssetId::new(asset_definition_id.clone(), account_id.clone()),
     );
     // and check that it is added without errors
     let sum = quantity
@@ -217,7 +209,7 @@ fn client_add_asset_with_name_length_more_than_limit_should_not_commit_transacti
     // Given
     let normal_asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid");
-    let create_asset = RegisterExpr::new(AssetDefinition::quantity(
+    let create_asset = Register::asset_definition(AssetDefinition::quantity(
         normal_asset_definition_id.clone(),
     ));
     test_client.submit(create_asset)?;
@@ -226,7 +218,7 @@ fn client_add_asset_with_name_length_more_than_limit_should_not_commit_transacti
     let too_long_asset_name = "0".repeat(2_usize.pow(14));
     let incorrect_asset_definition_id =
         AssetDefinitionId::from_str(&(too_long_asset_name + "#wonderland")).expect("Valid");
-    let create_asset = RegisterExpr::new(AssetDefinition::quantity(
+    let create_asset = Register::asset_definition(AssetDefinition::quantity(
         incorrect_asset_definition_id.clone(),
     ));
 
@@ -273,11 +265,11 @@ fn find_rate_and_make_exchange_isi_should_succeed() {
     let buyer_keypair = KeyPair::generate().expect("Failed to generate buyer KeyPair.");
 
     let register_account = |account_id: AccountId, signature: PublicKey| {
-        RegisterExpr::new(Account::new(account_id, [signature]))
+        Register::account(Account::new(account_id, [signature]))
     };
 
     let grant_alice_asset_transfer_permission = |asset_id: AssetId, owner_keypair: KeyPair| {
-        let allow_alice_to_transfer_asset = GrantExpr::new(
+        let allow_alice_to_transfer_asset = Grant::permission_token(
             PermissionToken::new(
                 "CanTransferUserAsset".parse().unwrap(),
                 &json!({ "asset_id": asset_id }),
@@ -304,7 +296,7 @@ fn find_rate_and_make_exchange_isi_should_succeed() {
         "exchange",
         account_id_new("dex", "exchange"),
     );
-    let instructions: [InstructionExpr; 12] = [
+    let instructions: [InstructionBox; 12] = [
         register::domain("exchange").into(),
         register::domain("company").into(),
         register::domain("crypto").into(),
@@ -314,17 +306,17 @@ fn find_rate_and_make_exchange_isi_should_succeed() {
         register::asset_definition("btc", "crypto").into(),
         register::asset_definition("eth", "crypto").into(),
         register::asset_definition("btc2eth_rate", "exchange").into(),
-        MintExpr::new(
-            200_u32.to_value(),
-            IdBox::AssetId(asset_id_new("eth", "crypto", buyer_account_id.clone())),
+        Mint::asset_quantity(
+            200_u32,
+            asset_id_new("eth", "crypto", buyer_account_id.clone()),
         )
         .into(),
-        MintExpr::new(
-            20_u32.to_value(),
-            IdBox::AssetId(asset_id_new("btc", "crypto", seller_account_id.clone())),
+        Mint::asset_quantity(
+            20_u32,
+            asset_id_new("btc", "crypto", seller_account_id.clone()),
         )
         .into(),
-        MintExpr::new(20_u32.to_value(), IdBox::AssetId(asset_id.clone())).into(),
+        Mint::asset_quantity(20_u32, asset_id.clone()).into(),
     ];
     test_client
         .submit_all_blocking(instructions)
@@ -333,23 +325,26 @@ fn find_rate_and_make_exchange_isi_should_succeed() {
     grant_alice_asset_transfer_permission(seller_btc, seller_keypair);
     grant_alice_asset_transfer_permission(buyer_eth, buyer_keypair);
 
+    let to_transfer = test_client
+        .request(FindAssetQuantityById::new(asset_id))
+        .expect("Failed to execute query to find asset quantity by id.");
+    let to_transfer = match to_transfer {
+        NumericValue::U32(value) => value,
+        _ => panic!("Wrong asset quantity type."),
+    };
     test_client
-        .submit_all_blocking([PairExpr::new(
-            TransferExpr::new(
-                IdBox::AssetId(asset_id_new("btc", "crypto", seller_account_id.clone())),
-                EvaluatesTo::new_evaluates_to_value(Expression::Query(
-                    FindAssetQuantityById::new(asset_id.clone()).into(),
-                )),
-                IdBox::AccountId(buyer_account_id.clone()),
+        .submit_all_blocking([
+            Transfer::asset_quantity(
+                asset_id_new("btc", "crypto", seller_account_id.clone()),
+                to_transfer,
+                buyer_account_id.clone(),
             ),
-            TransferExpr::new(
-                IdBox::AssetId(asset_id_new("eth", "crypto", buyer_account_id)),
-                EvaluatesTo::new_evaluates_to_value(Expression::Query(
-                    FindAssetQuantityById::new(asset_id).into(),
-                )),
-                IdBox::AccountId(seller_account_id),
+            Transfer::asset_quantity(
+                asset_id_new("eth", "crypto", buyer_account_id),
+                to_transfer,
+                seller_account_id,
             ),
-        )])
+        ])
         .expect("Failed to exchange eth for btc.");
 
     let expected_seller_eth = NumericValue::U32(20);
@@ -403,7 +398,7 @@ fn transfer_asset_definition() {
     let asset_definition_id: AssetDefinitionId = "asset#wonderland".parse().expect("Valid");
 
     test_client
-        .submit_blocking(RegisterExpr::new(AssetDefinition::quantity(
+        .submit_blocking(Register::asset_definition(AssetDefinition::quantity(
             asset_definition_id.clone(),
         )))
         .expect("Failed to submit transaction");
@@ -414,7 +409,7 @@ fn transfer_asset_definition() {
     assert_eq!(asset_definition.owned_by(), &alice_id);
 
     test_client
-        .submit_blocking(TransferExpr::new(
+        .submit_blocking(Transfer::asset_definition(
             alice_id,
             asset_definition_id.clone(),
             bob_id.clone(),
@@ -447,12 +442,12 @@ fn asset_id_new(definition_name: &str, definition_domain: &str, account_id: Acco
 mod register {
     use super::*;
 
-    pub fn domain(name: &str) -> RegisterExpr {
-        RegisterExpr::new(Domain::new(DomainId::from_str(name).expect("Valid")))
+    pub fn domain(name: &str) -> Register<Domain> {
+        Register::domain(Domain::new(DomainId::from_str(name).expect("Valid")))
     }
 
-    pub fn account(account_name: &str, domain_name: &str) -> RegisterExpr {
-        RegisterExpr::new(Account::new(
+    pub fn account(account_name: &str, domain_name: &str) -> Register<Account> {
+        Register::account(Account::new(
             AccountId::new(
                 account_name.parse().expect("Valid"),
                 domain_name.parse().expect("Valid"),
            ),
@@ -461,8 +456,8 @@ mod register {
         ))
     }
 
-    pub fn asset_definition(asset_name: &str, domain_name: &str) -> RegisterExpr {
-        RegisterExpr::new(AssetDefinition::quantity(AssetDefinitionId::new(
+    pub fn asset_definition(asset_name: &str, domain_name: &str) -> Register<AssetDefinition> {
+        Register::asset_definition(AssetDefinition::quantity(AssetDefinitionId::new(
             asset_name.parse().expect("Valid"),
             domain_name.parse().expect("Valid"),
         )))
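The changes above show the pattern applied throughout this patch: the expression-based builders (`RegisterExpr`, `MintExpr`, `TransferExpr`) that wrapped every argument in `IdBox`/`EvaluatesTo` give way to typed constructors, and heterogeneous instruction lists are unified through `InstructionBox`. A minimal sketch of the new style, assuming the prelude imports these tests already use (`asset_id` and `account` are placeholders):

    // Typed constructor: plain values, no IdBox/to_value() wrapping.
    // Before: MintExpr::new(200_u32.to_value(), IdBox::AssetId(asset_id))
    let mint = Mint::asset_quantity(200_u32, asset_id);
    // Mixed instruction kinds still fit in one array via InstructionBox.
    let instructions: [InstructionBox; 2] = [Register::account(account).into(), mint.into()];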
diff --git a/client/tests/integration/asset_propagation.rs b/client/tests/integration/asset_propagation.rs
index fb50c5b3f2d..d248d160f86 100644
--- a/client/tests/integration/asset_propagation.rs
+++ b/client/tests/integration/asset_propagation.rs
@@ -3,46 +3,44 @@ use std::{str::FromStr as _, thread};
 
 use eyre::Result;
 use iroha_client::{
     client::{self, QueryResult},
+    crypto::KeyPair,
     data_model::{
         parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder},
         prelude::*,
     },
 };
-use iroha_crypto::KeyPair;
+use iroha_config::iroha::Configuration;
 use test_network::*;
 
-use super::Configuration;
-
 #[test]
 fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount_on_another_peer(
 ) -> Result<()> {
     // Given
-    let (_rt, network, client) = <Network>::start_test_with_runtime(4, Some(10_450));
+    let (_rt, network, client) = Network::start_test_with_runtime(4, Some(10_450));
     wait_for_genesis_committed(&network.clients(), 0);
     let pipeline_time = Configuration::pipeline_time();
 
-    client.submit_blocking(
+    client.submit_all_blocking(
         ParametersBuilder::new()
             .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)?
             .into_set_parameters(),
     )?;
 
-    let create_domain = RegisterExpr::new(Domain::new(DomainId::from_str("domain")?));
+    let create_domain: InstructionBox =
+        Register::domain(Domain::new(DomainId::from_str("domain")?)).into();
     let account_id = AccountId::from_str("account@domain")?;
     let (public_key, _) = KeyPair::generate()?.into();
-    let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key]));
+    let create_account = Register::account(Account::new(account_id.clone(), [public_key])).into();
     let asset_definition_id = AssetDefinitionId::from_str("xor#domain")?;
-    let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()));
+    let create_asset =
+        Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())).into();
 
     client.submit_all([create_domain, create_account, create_asset])?;
     thread::sleep(pipeline_time * 3);
     //When
     let quantity: u32 = 200;
-    client.submit(MintExpr::new(
-        quantity.to_value(),
-        IdBox::AssetId(AssetId::new(
-            asset_definition_id.clone(),
-            account_id.clone(),
-        )),
+    client.submit(Mint::asset_quantity(
+        quantity,
+        AssetId::new(asset_definition_id.clone(), account_id.clone()),
     ))?;
     thread::sleep(pipeline_time);
 
diff --git a/client/tests/integration/burn_public_keys.rs b/client/tests/integration/burn_public_keys.rs
index 4c1431c0639..f207894995d 100644
--- a/client/tests/integration/burn_public_keys.rs
+++ b/client/tests/integration/burn_public_keys.rs
@@ -1,8 +1,8 @@
 use iroha_client::{
     client::{account, transaction, Client},
+    crypto::{HashOf, KeyPair, PublicKey},
     data_model::{isi::Instruction, prelude::*, transaction::TransactionPayload},
 };
-use iroha_crypto::{HashOf, KeyPair, PublicKey};
 use test_network::*;
 
 fn submit(
@@ -29,7 +29,7 @@ fn submit(
 }
 
 fn get(client: &Client, hash: HashOf<TransactionPayload>) -> TransactionValue {
-    client
+    *client
         .request(transaction::by_hash(hash))
         .unwrap()
         .transaction
@@ -51,7 +51,7 @@ fn public_keys_cannot_be_burned_to_nothing() {
     wait_for_genesis_committed(&vec![client.clone()], 0);
 
     let charlie_initial_keypair = KeyPair::generate().unwrap();
-    let register_charlie = RegisterExpr::new(Account::new(
+    let register_charlie = Register::account(Account::new(
         charlie_id.clone(),
         [charlie_initial_keypair.public_key().clone()],
     ));
@@ -64,7 +64,7 @@ fn public_keys_cannot_be_burned_to_nothing() {
 
     let mint_keys = (0..KEYS_COUNT - 1).map(|_| {
         let (public_key, _) = KeyPair::generate().unwrap().into();
-        MintExpr::new(public_key, charlie_id.clone())
+        Mint::account_public_key(public_key, charlie_id.clone())
     });
 
     let (tx_hash, res) = submit(
@@ -79,7 +79,8 @@ fn public_keys_cannot_be_burned_to_nothing() {
     let charlie = client.request(account::by_id(charlie_id.clone())).unwrap();
     let mut keys = charlie.signatories();
 
-    let burn = |key: PublicKey| InstructionExpr::from(BurnExpr::new(key, charlie_id.clone()));
+    let burn =
+        |key: PublicKey| InstructionBox::from(Burn::account_public_key(key, charlie_id.clone()));
     let burn_keys_leaving_one = keys
         .by_ref()
         .filter(|pub_key| pub_key != &charlie_initial_keypair.public_key())
diff --git a/client/tests/integration/config.rs b/client/tests/integration/config.rs
index 7a6470a9087..1c71aba683d 100644
--- a/client/tests/integration/config.rs
+++ b/client/tests/integration/config.rs
@@ -1,27 +1,41 @@
+use iroha_client::data_model::Level;
 use test_network::*;
 
-use super::{Builder, Configuration, ConfigurationProxy};
-
 #[test]
-fn get_config() {
-    // The underscored variables must not be dropped until end of closure.
-    let (_dont_drop, _dont_drop_either, test_client) =
-        <PeerBuilder>::new().with_port(10_685).start_with_runtime();
+fn config_endpoints() {
+    const NEW_LOG_LEVEL: Level = Level::ERROR;
+
+    let (rt, peer, test_client) = <PeerBuilder>::new().with_port(10_685).start_with_runtime();
     wait_for_genesis_committed(&vec![test_client.clone()], 0);
 
-    let field = test_client.get_config_docs(&["torii"]).unwrap().unwrap();
-    assert!(field.contains("IROHA_TORII"));
-
-    let test = Configuration::test();
-    let cfg_proxy: ConfigurationProxy =
-        serde_json::from_value(test_client.get_config_value().unwrap()).unwrap();
-    assert_eq!(
-        cfg_proxy.block_sync.unwrap().build().unwrap(),
-        test.block_sync
-    );
-    assert_eq!(cfg_proxy.network.unwrap().build().unwrap(), test.network);
-    assert_eq!(
-        cfg_proxy.telemetry.unwrap().build().unwrap(),
-        *test.telemetry
-    );
+    let init_log_level = rt.block_on(async move {
+        peer.iroha
+            .as_ref()
+            .unwrap()
+            .kiso
+            .get_dto()
+            .await
+            .unwrap()
+            .logger
+            .level
+    });
+
+    // Just to be sure this test suite is not useless
+    assert_ne!(init_log_level, NEW_LOG_LEVEL);
+
+    // Retrieving through API
+    let mut dto = test_client.get_config().expect("Client can always get it");
+    assert_eq!(dto.logger.level, init_log_level);
+
+    // Updating the log level
+    dto.logger.level = NEW_LOG_LEVEL;
+    test_client.set_config(dto).expect("New config is valid");
+
+    // Checking the updated value
+    dto = test_client.get_config().unwrap();
+    assert_eq!(dto.logger.level, NEW_LOG_LEVEL);
+
+    // Restoring value
+    dto.logger.level = init_log_level;
+    test_client.set_config(dto).expect("Also valid DTO");
 }
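The rewritten config test also documents the new client surface: one typed DTO is fetched, mutated, and written back, replacing the old `get_config_docs`/`get_config_value` pair. Reduced to its essence (a sketch assuming the `get_config`/`set_config` methods and `Level` type used in the test above):

    let mut dto = test_client.get_config().expect("config DTO should be retrievable");
    dto.logger.level = Level::ERROR;                   // mutate a single field
    test_client.set_config(dto).expect("a valid DTO should be accepted");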
diff --git a/client/tests/integration/connected_peers.rs b/client/tests/integration/connected_peers.rs
index 0ad808b20c2..9d9634316d1 100644
--- a/client/tests/integration/connected_peers.rs
+++ b/client/tests/integration/connected_peers.rs
@@ -4,14 +4,15 @@ use eyre::{Context, Result};
 use iroha_client::{
     client::Client,
     data_model::{
-        parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder},
+        isi::{Register, Unregister},
         peer::Peer as DataModelPeer,
-        prelude::*,
     },
 };
+use iroha_config::iroha::Configuration;
+use iroha_primitives::unique_vec;
+use rand::{seq::SliceRandom, thread_rng, Rng};
 use test_network::*;
-
-use super::Configuration;
+use tokio::runtime::Runtime;
 
 #[ignore = "ignore, more in #2851"]
 #[test]
@@ -24,11 +25,50 @@ fn connected_peers_with_f_1_0_1() -> Result<()> {
     connected_peers_with_f(1, Some(11_000))
 }
 
+#[test]
+fn register_new_peer() -> Result<()> {
+    let (_rt, network, _) = Network::start_test_with_runtime(4, Some(11_180));
+    wait_for_genesis_committed(&network.clients(), 0);
+    let pipeline_time = Configuration::pipeline_time();
+
+    let mut peer_clients: Vec<_> = Network::peers(&network)
+        .zip(Network::clients(&network))
+        .collect();
+
+    check_status(&peer_clients, 1);
+
+    // Start new peer
+    let mut configuration = Configuration::test();
+    configuration.sumeragi.trusted_peers.peers =
+        unique_vec![peer_clients.choose(&mut thread_rng()).unwrap().0.id.clone()];
+    let rt = Runtime::test();
+    let new_peer = rt.block_on(
+        PeerBuilder::new()
+            .with_configuration(configuration)
+            .with_into_genesis(WithGenesis::None)
+            .with_port(11_200)
+            .start(),
+    );
+
+    let register_peer = Register::peer(DataModelPeer::new(new_peer.id.clone()));
+    peer_clients
+        .choose(&mut thread_rng())
+        .unwrap()
+        .1
+        .submit_blocking(register_peer)?;
+    peer_clients.push((&new_peer, Client::test(&new_peer.api_address)));
+    thread::sleep(pipeline_time * 2); // Wait for some time to allow peers to connect
+
+    check_status(&peer_clients, 2);
+
+    Ok(())
+}
+
 /// Test the number of connected peers, changing the number of faults tolerated down and up
 fn connected_peers_with_f(faults: u64, start_port: Option<u16>) -> Result<()> {
     let n_peers = 3 * faults + 1;
 
-    let (_rt, network, client) = <Network>::start_test_with_runtime(
+    let (_rt, network, _) = Network::start_test_with_runtime(
         (n_peers)
             .try_into()
             .wrap_err("`faults` argument `u64` value too high, cannot convert to `u32`")?,
@@ -37,40 +77,52 @@ fn connected_peers_with_f(faults: u64, start_port: Option<u16>) -> Result<()> {
     wait_for_genesis_committed(&network.clients(), 0);
     let pipeline_time = Configuration::pipeline_time();
 
-    client.submit_blocking(
-        ParametersBuilder::new()
-            .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)?
-            .into_set_parameters(),
-    )?;
-
-    // Confirm all peers connected
-    let mut status = client.get_status()?;
-    assert_eq!(status.peers, n_peers - 1);
-    assert_eq!(status.blocks, 2);
-
-    // Unregister a peer: committed with f = `faults`
-    // then `status.peers` decrements
-    let peer = network.peers.values().last().unwrap();
-    let peer_client = Client::test(&peer.api_address);
-    let unregister_peer = UnregisterExpr::new(IdBox::PeerId(peer.id.clone()));
-    client.submit_blocking(unregister_peer)?;
+    let mut peer_clients: Vec<_> = Network::peers(&network)
+        .zip(Network::clients(&network))
+        .collect();
+
+    check_status(&peer_clients, 1);
+
+    // Unregister a peer: committed with f = `faults` then `status.peers` decrements
+    let removed_peer_idx = rand::thread_rng().gen_range(0..peer_clients.len());
+    let (removed_peer, _) = &peer_clients[removed_peer_idx];
+    let unregister_peer = Unregister::peer(removed_peer.id.clone());
+    peer_clients
+        .choose(&mut thread_rng())
+        .unwrap()
+        .1
+        .submit_blocking(unregister_peer)?;
     thread::sleep(pipeline_time * 2); // Wait for some time to allow peers to connect
-    status = client.get_status()?;
-    assert_eq!(status.peers, n_peers - 2);
-    assert_eq!(status.blocks, 3);
-    status = peer_client.get_status()?;
+    let (removed_peer, removed_peer_client) = peer_clients.remove(removed_peer_idx);
+
+    check_status(&peer_clients, 2);
+    let status = removed_peer_client.get_status()?;
+    // Peer might have been disconnected before getting the block
+    assert!(status.blocks == 1 || status.blocks == 2);
     assert_eq!(status.peers, 0);
 
-    // Re-register the peer: committed with f = `faults` - 1 then
-    // `status.peers` increments
-    let register_peer = RegisterExpr::new(DataModelPeer::new(peer.id.clone()));
-    client.submit_blocking(register_peer)?;
-    thread::sleep(pipeline_time * 4); // Wait for some time to allow peers to connect
-    status = client.get_status()?;
-    assert_eq!(status.peers, n_peers - 1);
-    assert_eq!(status.blocks, 4);
-    status = peer_client.get_status()?;
-    assert_eq!(status.peers, n_peers - 1);
-    assert_eq!(status.blocks, 4);
+    // Re-register the peer: committed with f = `faults` - 1 then `status.peers` increments
+    let register_peer = Register::peer(DataModelPeer::new(removed_peer.id.clone()));
+    peer_clients
+        .choose(&mut thread_rng())
+        .unwrap()
+        .1
+        .submit_blocking(register_peer)?;
+    peer_clients.insert(removed_peer_idx, (removed_peer, removed_peer_client));
+    thread::sleep(pipeline_time * 2); // Wait for some time to allow peers to connect
+
+    check_status(&peer_clients, 3);
 
     Ok(())
 }
+
+fn check_status(peer_clients: &[(&Peer, Client)], expected_blocks: u64) {
+    let n_peers = peer_clients.len() as u64;
+
+    for (_, peer_client) in peer_clients {
+        let status = peer_client.get_status().unwrap();
+
+        assert_eq!(status.peers, n_peers - 1);
+        assert_eq!(status.blocks, expected_blocks);
+    }
+}
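Both topology tests now drive membership changes with the same two instructions; stripped of the harness, the round trip is (a sketch; `peer_id` stands for any `PeerId`, and `client` for any connected peer's client):

    // Grow the topology by one peer...
    client.submit_blocking(Register::peer(DataModelPeer::new(peer_id.clone())))?;
    // ...and shrink it back again.
    client.submit_blocking(Unregister::peer(peer_id))?;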
diff --git a/client/tests/integration/domain_owner.rs b/client/tests/integration/domain_owner.rs
index 608eb38bdba..eeeb881b324 100644
--- a/client/tests/integration/domain_owner.rs
+++ b/client/tests/integration/domain_owner.rs
@@ -1,6 +1,8 @@
 use eyre::Result;
-use iroha_client::data_model::{account::SignatureCheckCondition, prelude::*};
-use iroha_crypto::KeyPair;
+use iroha_client::{
+    crypto::KeyPair,
+    data_model::{account::SignatureCheckCondition, prelude::*},
+};
 use serde_json::json;
 use test_network::*;
 
@@ -13,13 +15,13 @@ fn domain_owner_domain_permissions() -> Result<()> {
 
     // "alice@wonderland" is owner of "kingdom" domain
     let kingdom = Domain::new(kingdom_id.clone());
-    test_client.submit_blocking(RegisterExpr::new(kingdom))?;
+    test_client.submit_blocking(Register::domain(kingdom))?;
 
     // check that "alice@wonderland" as owner of domain can edit metadata in her domain
     let key: Name = "key".parse()?;
     let value: Name = "value".parse()?;
-    test_client.submit_blocking(SetKeyValueExpr::new(kingdom_id.clone(), key.clone(), value))?;
-    test_client.submit_blocking(RemoveKeyValueExpr::new(kingdom_id.clone(), key))?;
+    test_client.submit_blocking(SetKeyValue::domain(kingdom_id.clone(), key.clone(), value))?;
+    test_client.submit_blocking(RemoveKeyValue::domain(kingdom_id.clone(), key))?;
 
     // check that "alice@wonderland" as owner of domain can grant and revoke domain related permission tokens
     let bob_id: AccountId = "bob@wonderland".parse()?;
@@ -27,11 +29,11 @@ fn domain_owner_domain_permissions() -> Result<()> {
         "CanUnregisterDomain".parse().unwrap(),
         &json!({ "domain_id": kingdom_id }),
     );
-    test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?;
-    test_client.submit_blocking(RevokeExpr::new(token, bob_id))?;
+    test_client.submit_blocking(Grant::permission_token(token.clone(), bob_id.clone()))?;
+    test_client.submit_blocking(Revoke::permission_token(token, bob_id))?;
 
     // check that "alice@wonderland" as owner of domain can unregister her domain
-    test_client.submit_blocking(UnregisterExpr::new(kingdom_id))?;
+    test_client.submit_blocking(Unregister::domain(kingdom_id))?;
 
     Ok(())
 }
@@ -46,28 +48,28 @@ fn domain_owner_account_permissions() -> Result<()> {
 
     // "alice@wonderland" is owner of "kingdom" domain
     let kingdom = Domain::new(kingdom_id);
-    test_client.submit_blocking(RegisterExpr::new(kingdom))?;
+    test_client.submit_blocking(Register::domain(kingdom))?;
 
     let mad_hatter_keypair = KeyPair::generate()?;
     let mad_hatter = Account::new(
         mad_hatter_id.clone(),
         [mad_hatter_keypair.public_key().clone()],
     );
-    test_client.submit_blocking(RegisterExpr::new(mad_hatter))?;
+    test_client.submit_blocking(Register::account(mad_hatter))?;
 
     // check that "alice@wonderland" as owner of domain can burn and mint public keys for accounts in her domain
     let mad_hatter_new_keypair = KeyPair::generate()?;
-    test_client.submit_blocking(MintExpr::new(
+    test_client.submit_blocking(Mint::account_public_key(
         mad_hatter_new_keypair.public_key().clone(),
         mad_hatter_id.clone(),
     ))?;
-    test_client.submit_blocking(BurnExpr::new(
+    test_client.submit_blocking(Burn::account_public_key(
         mad_hatter_new_keypair.public_key().clone(),
         mad_hatter_id.clone(),
     ))?;
 
     // check that "alice@wonderland" as owner of domain can change signature check condition for accounts in her domain
-    test_client.submit_blocking(MintExpr::new(
+    test_client.submit_blocking(Mint::account_signature_check_condition(
         SignatureCheckCondition::AnyAccountSignatureOr(Vec::new().into()),
         mad_hatter_id.clone(),
     ))?;
@@ -75,12 +77,12 @@ fn domain_owner_account_permissions() -> Result<()> {
     // check that "alice@wonderland" as owner of domain can edit metadata of account in her domain
     let key: Name = "key".parse()?;
     let value: Name = "value".parse()?;
-    test_client.submit_blocking(SetKeyValueExpr::new(
+    test_client.submit_blocking(SetKeyValue::account(
         mad_hatter_id.clone(),
         key.clone(),
         value,
     ))?;
-    test_client.submit_blocking(RemoveKeyValueExpr::new(mad_hatter_id.clone(), key))?;
+    test_client.submit_blocking(RemoveKeyValue::account(mad_hatter_id.clone(), key))?;
 
     // check that "alice@wonderland" as owner of domain can grant and revoke account related permission tokens in her domain
     let bob_id: AccountId = "bob@wonderland".parse()?;
@@ -88,11 +90,11 @@ fn domain_owner_account_permissions() -> Result<()> {
         "CanUnregisterAccount".parse().unwrap(),
         &json!({ "account_id": mad_hatter_id }),
     );
-    test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?;
-    test_client.submit_blocking(RevokeExpr::new(token, bob_id))?;
+    test_client.submit_blocking(Grant::permission_token(token.clone(), bob_id.clone()))?;
+    test_client.submit_blocking(Revoke::permission_token(token, bob_id))?;
 
     // check that "alice@wonderland" as owner of domain can unregister accounts in her domain
-    test_client.submit_blocking(UnregisterExpr::new(mad_hatter_id))?;
+    test_client.submit_blocking(Unregister::account(mad_hatter_id))?;
 
     Ok(())
 }
@@ -109,30 +111,38 @@ fn domain_owner_asset_definition_permissions() -> Result<()> {
 
     // "alice@wonderland" is owner of "kingdom" domain
     let kingdom = Domain::new(kingdom_id);
-    test_client.submit_blocking(RegisterExpr::new(kingdom))?;
+    test_client.submit_blocking(Register::domain(kingdom))?;
 
     let bob_keypair = KeyPair::generate()?;
     let bob = Account::new(bob_id.clone(), [bob_keypair.public_key().clone()]);
-    test_client.submit_blocking(RegisterExpr::new(bob))?;
+    test_client.submit_blocking(Register::account(bob))?;
 
     let rabbit = Account::new(rabbit_id.clone(), []);
-    test_client.submit_blocking(RegisterExpr::new(rabbit))?;
+    test_client.submit_blocking(Register::account(rabbit))?;
 
     // register asset definitions by "bob@kingdom" so he is owner of it
     let coin = AssetDefinition::quantity(coin_id.clone());
     let transaction = TransactionBuilder::new(bob_id.clone())
-        .with_instructions([RegisterExpr::new(coin)])
+        .with_instructions([Register::asset_definition(coin)])
         .sign(bob_keypair)?;
     test_client.submit_transaction_blocking(&transaction)?;
 
     // check that "alice@wonderland" as owner of domain can transfer asset definitions in her domain
-    test_client.submit_blocking(TransferExpr::new(bob_id, coin_id.clone(), rabbit_id))?;
+    test_client.submit_blocking(Transfer::asset_definition(
+        bob_id,
+        coin_id.clone(),
+        rabbit_id,
+    ))?;
 
     // check that "alice@wonderland" as owner of domain can edit metadata of asset definition in her domain
     let key: Name = "key".parse()?;
     let value: Name = "value".parse()?;
-    test_client.submit_blocking(SetKeyValueExpr::new(coin_id.clone(), key.clone(), value))?;
-    test_client.submit_blocking(RemoveKeyValueExpr::new(coin_id.clone(), key))?;
+    test_client.submit_blocking(SetKeyValue::asset_definition(
+        coin_id.clone(),
+        key.clone(),
+        value,
+    ))?;
+    test_client.submit_blocking(RemoveKeyValue::asset_definition(coin_id.clone(), key))?;
 
     // check that "alice@wonderland" as owner of domain can grant and revoke asset definition related permission tokens in her domain
     let bob_id: AccountId = "bob@wonderland".parse()?;
@@ -140,11 +150,11 @@ fn domain_owner_asset_definition_permissions() -> Result<()> {
         "CanUnregisterAssetDefinition".parse().unwrap(),
         &json!({ "asset_definition_id": coin_id }),
     );
-    test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?;
-    test_client.submit_blocking(RevokeExpr::new(token, bob_id))?;
+    test_client.submit_blocking(Grant::permission_token(token.clone(), bob_id.clone()))?;
+    test_client.submit_blocking(Revoke::permission_token(token, bob_id))?;
 
     // check that "alice@wonderland" as owner of domain can unregister asset definitions in her domain
-    test_client.submit_blocking(UnregisterExpr::new(coin_id))?;
+    test_client.submit_blocking(Unregister::asset_definition(coin_id))?;
 
     Ok(())
 }
@@ -162,41 +172,40 @@ fn domain_owner_asset_permissions() -> Result<()> {
 
     // "alice@wonderland" is owner of "kingdom" domain
     let kingdom = Domain::new(kingdom_id);
-    test_client.submit_blocking(RegisterExpr::new(kingdom))?;
+    test_client.submit_blocking(Register::domain(kingdom))?;
 
     let bob_keypair = KeyPair::generate()?;
     let bob = Account::new(bob_id.clone(), [bob_keypair.public_key().clone()]);
-    test_client.submit_blocking(RegisterExpr::new(bob))?;
+    test_client.submit_blocking(Register::account(bob))?;
 
     // register asset definitions by "bob@kingdom" so he is owner of it
     let coin = AssetDefinition::quantity(coin_id.clone());
     let store = AssetDefinition::store(store_id.clone());
     let transaction = TransactionBuilder::new(bob_id.clone())
-        .with_instructions([RegisterExpr::new(coin), RegisterExpr::new(store)])
+        .with_instructions([
+            Register::asset_definition(coin),
+            Register::asset_definition(store),
+        ])
         .sign(bob_keypair)?;
     test_client.submit_transaction_blocking(&transaction)?;
 
     // check that "alice@wonderland" as owner of domain can register and unregister assets in her domain
     let bob_coin_id = AssetId::new(coin_id, bob_id.clone());
     let bob_coin = Asset::new(bob_coin_id.clone(), 30u32);
-    test_client.submit_blocking(RegisterExpr::new(bob_coin))?;
-    test_client.submit_blocking(UnregisterExpr::new(bob_coin_id.clone()))?;
+    test_client.submit_blocking(Register::asset(bob_coin))?;
+    test_client.submit_blocking(Unregister::asset(bob_coin_id.clone()))?;
 
     // check that "alice@wonderland" as owner of domain can burn, mint and transfer assets in her domain
-    test_client.submit_blocking(MintExpr::new(10u32.to_value(), bob_coin_id.clone()))?;
-    test_client.submit_blocking(BurnExpr::new(5u32.to_value(), bob_coin_id.clone()))?;
-    test_client.submit_blocking(TransferExpr::new(bob_coin_id, 5u32.to_value(), alice_id))?;
+    test_client.submit_blocking(Mint::asset_quantity(10u32, bob_coin_id.clone()))?;
+    test_client.submit_blocking(Burn::asset_quantity(5u32, bob_coin_id.clone()))?;
+    test_client.submit_blocking(Transfer::asset_quantity(bob_coin_id, 5u32, alice_id))?;
 
     // check that "alice@wonderland" as owner of domain can edit metadata of store asset in her domain
     let key: Name = "key".parse()?;
     let value: Name = "value".parse()?;
     let bob_store_id = AssetId::new(store_id, bob_id);
-    test_client.submit_blocking(SetKeyValueExpr::new(
-        bob_store_id.clone(),
-        key.clone(),
-        value,
-    ))?;
-    test_client.submit_blocking(RemoveKeyValueExpr::new(bob_store_id.clone(), key))?;
+    test_client.submit_blocking(SetKeyValue::asset(bob_store_id.clone(), key.clone(), value))?;
+    test_client.submit_blocking(RemoveKeyValue::asset(bob_store_id.clone(), key))?;
 
     // check that "alice@wonderland" as owner of domain can grant and revoke asset related permission tokens in her domain
     let bob_id: AccountId = "bob@wonderland".parse()?;
@@ -204,8 +213,8 @@ fn domain_owner_asset_permissions() -> Result<()> {
         "CanUnregisterUserAsset".parse().unwrap(),
         &json!({ "asset_id": bob_store_id }),
     );
-    test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?;
-    test_client.submit_blocking(RevokeExpr::new(token, bob_id))?;
+    test_client.submit_blocking(Grant::permission_token(token.clone(), bob_id.clone()))?;
+    test_client.submit_blocking(Revoke::permission_token(token, bob_id))?;
 
     Ok(())
 }
@@ -221,18 +230,18 @@ fn domain_owner_trigger_permissions() -> Result<()> {
 
     // "alice@wonderland" is owner of "kingdom" domain
     let kingdom = Domain::new(kingdom_id);
-    test_client.submit_blocking(RegisterExpr::new(kingdom))?;
+    test_client.submit_blocking(Register::domain(kingdom))?;
 
     let bob_keypair = KeyPair::generate()?;
     let bob = Account::new(bob_id.clone(), [bob_keypair.public_key().clone()]);
-    test_client.submit_blocking(RegisterExpr::new(bob))?;
+    test_client.submit_blocking(Register::account(bob))?;
 
     let asset_definition_id = "rose#wonderland".parse()?;
     let asset_id = AssetId::new(asset_definition_id, alice_id.clone());
     let trigger_id: TriggerId = "trigger$kingdom".parse()?;
 
-    let trigger_instructions = vec![MintExpr::new(1_u32, asset_id)];
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let trigger_instructions = vec![Mint::asset_quantity(1_u32, asset_id)];
+    let register_trigger = Register::trigger(Trigger::new(
         trigger_id.clone(),
         Action::new(
             trigger_instructions,
@@ -248,11 +257,11 @@ fn domain_owner_trigger_permissions() -> Result<()> {
     test_client.submit_blocking(register_trigger)?;
 
     // check that "alice@wonderland" as owner of domain can edit repetitions of triggers in her domain
-    test_client.submit_blocking(MintExpr::new(1_u32, trigger_id.clone()))?;
-    test_client.submit_blocking(BurnExpr::new(1_u32, trigger_id.clone()))?;
+    test_client.submit_blocking(Mint::trigger_repetitions(1_u32, trigger_id.clone()))?;
+    test_client.submit_blocking(Burn::trigger_repetitions(1_u32, trigger_id.clone()))?;
 
     // check that "alice@wonderland" as owner of domain can call triggers in her domain
-    let execute_trigger = ExecuteTriggerExpr::new(trigger_id.clone());
+    let execute_trigger = ExecuteTrigger::new(trigger_id.clone());
     let _result = test_client.submit_blocking(execute_trigger)?;
 
     // check that "alice@wonderland" as owner of domain can grant and revoke trigger related permission tokens in her domain
@@ -261,19 +270,15 @@ fn domain_owner_trigger_permissions() -> Result<()> {
         "CanUnregisterUserTrigger".parse().unwrap(),
         &json!({ "trigger_id": trigger_id }),
     );
-    test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?;
-    test_client.submit_blocking(RevokeExpr::new(token, bob_id))?;
+    test_client.submit_blocking(Grant::permission_token(token.clone(), bob_id.clone()))?;
+    test_client.submit_blocking(Revoke::permission_token(token, bob_id))?;
 
     // check that "alice@wonderland" as owner of domain can unregister triggers in her domain
-    test_client.submit_blocking(UnregisterExpr::new(trigger_id))?;
+    test_client.submit_blocking(Unregister::trigger(trigger_id))?;
 
     Ok(())
 }
 
-#[deprecated(
-    since = "2.0.0-pre-rc.20",
-    note = "This test suite is deprecated, use test_transfer_domains.py instead"
-)]
 #[ignore = "migrated to client cli python tests"]
 #[test]
 fn domain_owner_transfer() -> Result<()> {
@@ -286,17 +291,17 @@ fn domain_owner_transfer() -> Result<()> {
 
     // "alice@wonderland" is owner of "kingdom" domain
     let kingdom = Domain::new(kingdom_id.clone());
-    test_client.submit_blocking(RegisterExpr::new(kingdom))?;
+    test_client.submit_blocking(Register::domain(kingdom))?;
 
     let bob_keypair = KeyPair::generate()?;
     let bob = Account::new(bob_id.clone(), [bob_keypair.public_key().clone()]);
-    test_client.submit_blocking(RegisterExpr::new(bob))?;
+    test_client.submit_blocking(Register::account(bob))?;
 
     let domain = test_client.request(FindDomainById::new(kingdom_id.clone()))?;
     assert_eq!(domain.owned_by(), &alice_id);
 
     test_client
-        .submit_blocking(TransferExpr::new(
+        .submit_blocking(Transfer::domain(
             alice_id,
             kingdom_id.clone(),
             bob_id.clone(),
diff --git a/client/tests/integration/events/data.rs b/client/tests/integration/events/data.rs
index 161ec489ac7..d3aa2a96834 100644
--- a/client/tests/integration/events/data.rs
+++ b/client/tests/integration/events/data.rs
@@ -8,37 +8,15 @@ use test_network::*;
 
 use crate::wasm::utils::wasm_template;
 
-fn produce_instructions() -> Vec<InstructionExpr> {
+fn produce_instructions() -> Vec<InstructionBox> {
     let domains = (0..4)
         .map(|domain_index: usize| Domain::new(domain_index.to_string().parse().expect("Valid")));
 
-    let registers: [InstructionExpr; 4] = domains
+    domains
         .into_iter()
-        .map(RegisterExpr::new)
-        .map(InstructionExpr::from)
+        .map(Register::domain)
+        .map(InstructionBox::from)
         .collect::<Vec<_>>()
-        .try_into()
-        .unwrap();
-
-    // TODO: should we re-introduce the DSL?
-    vec![
-        // domain "0"
-        // pair
-        // domain "1"
-        // if false fail else sequence
-        // domain "2"
-        // domain "3"
-        registers[0].clone(),
-        PairExpr::new(
-            registers[1].clone(),
-            ConditionalExpr::with_otherwise(
-                false,
-                Fail::new("unreachable"),
-                SequenceExpr::new([registers[2].clone(), registers[3].clone()]),
-            ),
-        )
-        .into(),
-    ]
 }
 
@@ -69,7 +47,7 @@ fn wasm_execution_should_produce_events() -> Result<()> {
             ptr_len = ptr_len / 2,
         )?;
 
-        ptr_offset = ptr_len;
+        ptr_offset += ptr_len;
     }
 
     let wat = format!(
@@ -121,8 +99,10 @@ fn transaction_execution_should_produce_events(
     client.submit_transaction_blocking(&transaction)?;
 
     // assertion
+    iroha_logger::info!("Listening for events");
     for i in 0..4_usize {
         let event: DataEvent = event_receiver.recv()??.try_into()?;
+        iroha_logger::info!("Event: {:?}", event);
         assert!(matches!(event, DataEvent::Domain(_)));
         if let DataEvent::Domain(domain_event) = event {
             assert!(matches!(domain_event, DomainEvent::Created(_)));
@@ -174,16 +154,16 @@ fn produce_multiple_events() -> Result<()> {
     let role = iroha_client::data_model::role::Role::new(role_id.clone())
         .add_permission(token_1.clone())
         .add_permission(token_2.clone());
-    let instructions = [RegisterExpr::new(role.clone())];
+    let instructions = [Register::role(role.clone())];
     client.submit_all_blocking(instructions)?;
 
     // Grants role to Bob
     let bob_id = AccountId::from_str("bob@wonderland")?;
-    let grant_role = GrantExpr::new(role_id.clone(), bob_id.clone());
+    let grant_role = Grant::role(role_id.clone(), bob_id.clone());
     client.submit_blocking(grant_role)?;
 
     // Unregister role
-    let unregister_role = UnregisterExpr::new(role_id.clone());
+    let unregister_role = Unregister::role(role_id.clone());
     client.submit_blocking(unregister_role)?;
 
     // Inspect produced events
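With the expression DSL gone, `produce_instructions` no longer nests `PairExpr`/`ConditionalExpr`/`SequenceExpr`; the same four domain registrations become a flat, ordered list, which is why the assertion loop above still observes four `DomainEvent::Created` events in order. The shape of the new helper, restated outside the diff:

    let instructions: Vec<InstructionBox> = (0..4)
        .map(|i: usize| Domain::new(i.to_string().parse().expect("Valid")))
        .map(Register::domain)
        .map(InstructionBox::from)
        .collect();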
diff --git a/client/tests/integration/events/mod.rs b/client/tests/integration/events/mod.rs
index cf35d126a5e..2f478366933 100644
--- a/client/tests/integration/events/mod.rs
+++ b/client/tests/integration/events/mod.rs
@@ -1,4 +1,3 @@
-pub use super::Configuration;
 mod data;
 mod notification;
 mod pipeline;
diff --git a/client/tests/integration/events/notification.rs b/client/tests/integration/events/notification.rs
index 2c5cf522fd2..2cd033e2b7c 100644
--- a/client/tests/integration/events/notification.rs
+++ b/client/tests/integration/events/notification.rs
@@ -14,11 +14,11 @@ fn trigger_completion_success_should_produce_event() -> Result<()> {
     let asset_id = AssetId::new(asset_definition_id, account_id);
     let trigger_id = TriggerId::from_str("mint_rose")?;
 
-    let instruction = MintExpr::new(1_u32, asset_id.clone());
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let instruction = Mint::asset_quantity(1_u32, asset_id.clone());
+    let register_trigger = Register::trigger(Trigger::new(
         trigger_id.clone(),
         Action::new(
-            vec![InstructionExpr::from(instruction)],
+            vec![InstructionBox::from(instruction)],
             Repeats::Indefinitely,
             asset_id.account_id.clone(),
             TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new(
@@ -29,7 +29,7 @@ fn trigger_completion_success_should_produce_event() -> Result<()> {
     ));
     test_client.submit_blocking(register_trigger)?;
 
-    let call_trigger = ExecuteTriggerExpr::new(trigger_id.clone());
+    let call_trigger = ExecuteTrigger::new(trigger_id.clone());
     let thread_client = test_client.clone();
     let (sender, receiver) = mpsc::channel();
@@ -63,11 +63,11 @@ fn trigger_completion_failure_should_produce_event() -> Result<()> {
     let account_id: AccountId = "alice@wonderland".parse()?;
     let trigger_id = TriggerId::from_str("fail_box")?;
 
-    let instruction = Fail::new("Fail box");
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let instruction = Fail::new("Fail box".to_owned());
+    let register_trigger = Register::trigger(Trigger::new(
         trigger_id.clone(),
         Action::new(
-            vec![InstructionExpr::from(instruction)],
+            vec![InstructionBox::from(instruction)],
             Repeats::Indefinitely,
             account_id.clone(),
             TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new(
@@ -78,7 +78,7 @@ fn trigger_completion_failure_should_produce_event() -> Result<()> {
     ));
     test_client.submit_blocking(register_trigger)?;
 
-    let call_trigger = ExecuteTriggerExpr::new(trigger_id.clone());
+    let call_trigger = ExecuteTrigger::new(trigger_id.clone());
     let thread_client = test_client.clone();
     let (sender, receiver) = mpsc::channel();
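The two notification tests share one skeleton: register a trigger whose action wraps a single instruction, then fire it by ID and wait for the completion event. The registration half, condensed (a sketch using the same types the tests import; `trigger_id` and `asset_id` are placeholders):

    let register_trigger = Register::trigger(Trigger::new(
        trigger_id.clone(),
        Action::new(
            vec![InstructionBox::from(Mint::asset_quantity(1_u32, asset_id.clone()))],
            Repeats::Indefinitely,
            asset_id.account_id.clone(),
            TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new(
                trigger_id.clone(),
                asset_id.account_id.clone(),
            )),
        ),
    ));
    test_client.submit_blocking(register_trigger)?;
    test_client.submit_blocking(ExecuteTrigger::new(trigger_id))?;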
diff --git a/client/tests/integration/events/pipeline.rs b/client/tests/integration/events/pipeline.rs
index 8a9ce281d06..77d99dd1b57 100644
--- a/client/tests/integration/events/pipeline.rs
+++ b/client/tests/integration/events/pipeline.rs
@@ -1,15 +1,16 @@
 use std::thread::{self, JoinHandle};
 
 use eyre::Result;
-use iroha_client::data_model::{
-    parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder},
-    prelude::*,
+use iroha_client::{
+    crypto::HashOf,
+    data_model::{
+        parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder},
+        prelude::*,
+    },
 };
-use iroha_crypto::HashOf;
+use iroha_config::iroha::Configuration;
 use test_network::*;
 
-use super::Configuration;
-
 // Needed to re-enable ignored tests.
 #[allow(dead_code)]
 const PEER_COUNT: usize = 7;
@@ -24,7 +25,7 @@ fn transaction_with_no_instructions_should_be_committed() -> Result<()> {
 // #[ignore = "Experiment"]
 #[test]
 fn transaction_with_fail_instruction_should_be_rejected() -> Result<()> {
-    let fail = Fail::new("Should be rejected");
+    let fail = Fail::new("Should be rejected".to_owned());
     test_with_instruction_and_status_and_port(
         Some(fail.into()),
         PipelineStatusKind::Rejected,
@@ -34,17 +35,17 @@ fn transaction_with_fail_instruction_should_be_rejected() -> Result<()> {
 
 #[allow(dead_code, clippy::needless_range_loop, clippy::needless_pass_by_value)]
 fn test_with_instruction_and_status_and_port(
-    instruction: Option<InstructionExpr>,
+    instruction: Option<InstructionBox>,
     should_be: PipelineStatusKind,
     port: u16,
 ) -> Result<()> {
     let (_rt, network, client) =
-        <Network>::start_test_with_runtime(PEER_COUNT.try_into().unwrap(), Some(port));
+        Network::start_test_with_runtime(PEER_COUNT.try_into().unwrap(), Some(port));
     let clients = network.clients();
     wait_for_genesis_committed(&clients, 0);
     let pipeline_time = Configuration::pipeline_time();
 
-    client.submit_blocking(
+    client.submit_all_blocking(
         ParametersBuilder::new()
             .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)?
             .into_set_parameters(),
@@ -75,7 +76,7 @@ fn test_with_instruction_and_status_and_port(
 #[derive(Clone)]
 struct Checker {
     listener: iroha_client::client::Client,
-    hash: iroha_crypto::HashOf<TransactionPayload>,
+    hash: HashOf<TransactionPayload>,
 }
 
 impl Checker {
@@ -110,7 +111,7 @@ fn committed_block_must_be_available_in_kura() {
         .expect("Failed to subscribe for events");
 
     client
-        .submit(Fail::new("Dummy instruction"))
+        .submit(Fail::new("Dummy instruction".to_owned()))
         .expect("Failed to submit transaction");
 
     let event = event_iter.next().expect("Block must be committed");
diff --git a/client/tests/integration/mod.rs b/client/tests/integration/mod.rs
index 5bb44460bb0..8c8008ce649 100644
--- a/client/tests/integration/mod.rs
+++ b/client/tests/integration/mod.rs
@@ -1,8 +1,3 @@
-pub use iroha_config::{
-    base::proxy::Builder,
-    iroha::{Configuration, ConfigurationProxy},
-};
-
 mod add_account;
 mod add_domain;
 mod asset;
diff --git a/client/tests/integration/multiple_blocks_created.rs b/client/tests/integration/multiple_blocks_created.rs
index f2a13a8089b..bac51f52cd7 100644
--- a/client/tests/integration/multiple_blocks_created.rs
+++ b/client/tests/integration/multiple_blocks_created.rs
@@ -3,38 +3,38 @@ use std::thread;
 
 use eyre::Result;
 use iroha_client::{
     client::{self, Client, QueryResult},
+    crypto::KeyPair,
     data_model::{
         parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder},
         prelude::*,
     },
 };
-use iroha_crypto::KeyPair;
+use iroha_config::iroha::Configuration;
 use test_network::*;
 
-use super::Configuration;
-
 const N_BLOCKS: usize = 510;
 
 #[ignore = "Takes a lot of time."]
 #[test]
 fn long_multiple_blocks_created() -> Result<()> {
     // Given
-    let (_rt, network, client) = <Network>::start_test_with_runtime(4, Some(10_965));
+    let (_rt, network, client) = Network::start_test_with_runtime(4, Some(10_965));
     wait_for_genesis_committed(&network.clients(), 0);
     let pipeline_time = Configuration::pipeline_time();
 
-    client.submit_blocking(
+    client.submit_all_blocking(
         ParametersBuilder::new()
             .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)?
             .into_set_parameters(),
     )?;
 
-    let create_domain = RegisterExpr::new(Domain::new("domain".parse()?));
+    let create_domain: InstructionBox = Register::domain(Domain::new("domain".parse()?)).into();
     let account_id: AccountId = "account@domain".parse()?;
     let (public_key, _) = KeyPair::generate()?.into();
-    let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key]));
+    let create_account = Register::account(Account::new(account_id.clone(), [public_key])).into();
     let asset_definition_id: AssetDefinitionId = "xor#domain".parse()?;
-    let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()));
+    let create_asset =
+        Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())).into();
 
     client.submit_all([create_domain, create_account, create_asset])?;
 
@@ -44,12 +44,9 @@ fn long_multiple_blocks_created() -> Result<()> {
     //When
     for _ in 0..N_BLOCKS {
         let quantity: u32 = 1;
-        let mint_asset = MintExpr::new(
-            quantity.to_value(),
-            IdBox::AssetId(AssetId::new(
-                asset_definition_id.clone(),
-                account_id.clone(),
-            )),
+        let mint_asset = Mint::asset_quantity(
+            quantity,
+            AssetId::new(asset_definition_id.clone(), account_id.clone()),
         );
         client.submit(mint_asset)?;
         account_has_quantity += quantity;
diff --git a/client/tests/integration/multisignature_account.rs b/client/tests/integration/multisignature_account.rs
index 44f289dcfb1..6a7650a7a34 100644
--- a/client/tests/integration/multisignature_account.rs
+++ b/client/tests/integration/multisignature_account.rs
@@ -3,13 +3,12 @@ use std::thread;
 
 use eyre::Result;
 use iroha_client::{
     client::{self, Client, QueryResult},
+    crypto::KeyPair,
     data_model::prelude::*,
 };
-use iroha_crypto::KeyPair;
+use iroha_config::iroha::Configuration;
 use test_network::*;
 
-use super::Configuration;
-
 #[test]
 fn transaction_signed_by_new_signatory_of_account_should_pass() -> Result<()> {
     let (_rt, peer, client) = <PeerBuilder>::new().with_port(10_605).start_with_runtime();
@@ -19,24 +18,19 @@ fn transaction_signed_by_new_signatory_of_account_should_pass() -> Result<()> {
     // Given
     let account_id: AccountId = "alice@wonderland".parse().expect("Valid");
     let asset_definition_id: AssetDefinitionId = "xor#wonderland".parse().expect("Valid");
-    let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()));
+    let create_asset =
+        Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone()));
     let key_pair = KeyPair::generate()?;
-    let add_signatory = MintExpr::new(
-        key_pair.public_key().clone(),
-        IdBox::AccountId(account_id.clone()),
-    );
+    let add_signatory = Mint::account_public_key(key_pair.public_key().clone(), account_id.clone());
 
-    let instructions: [InstructionExpr; 2] = [create_asset.into(), add_signatory.into()];
+    let instructions: [InstructionBox; 2] = [create_asset.into(), add_signatory.into()];
     client.submit_all(instructions)?;
     thread::sleep(pipeline_time * 2);
     //When
     let quantity: u32 = 200;
-    let mint_asset = MintExpr::new(
-        quantity.to_value(),
-        IdBox::AssetId(AssetId::new(
-            asset_definition_id.clone(),
-            account_id.clone(),
-        )),
+    let mint_asset = Mint::asset_quantity(
+        quantity,
+        AssetId::new(asset_definition_id.clone(), account_id.clone()),
     );
     Client::test_with_key(&peer.api_address, key_pair).submit_till(
         mint_asset,
diff --git a/client/tests/integration/multisignature_transaction.rs b/client/tests/integration/multisignature_transaction.rs
index 4d999d5c920..4cf5739788b 100644
--- a/client/tests/integration/multisignature_transaction.rs
+++ b/client/tests/integration/multisignature_transaction.rs
@@ -3,25 +3,24 @@ use std::{str::FromStr as _, thread, time::Duration};
 
 use eyre::Result;
 use iroha_client::{
     client::{self, Client, QueryResult},
+    config::Configuration as ClientConfiguration,
+    crypto::KeyPair,
     data_model::{
         parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder},
         prelude::*,
     },
 };
-use iroha_config::client::Configuration as ClientConfiguration;
-use iroha_crypto::KeyPair;
+use iroha_config::iroha::Configuration;
 use test_network::*;
 
-use super::Configuration;
-
 #[allow(clippy::too_many_lines)]
 #[test]
 fn multisignature_transactions_should_wait_for_all_signatures() -> Result<()> {
-    let (_rt, network, client) = <Network>::start_test_with_runtime(4, Some(10_945));
+    let (_rt, network, client) = Network::start_test_with_runtime(4, Some(10_945));
     wait_for_genesis_committed(&network.clients(), 0);
     let pipeline_time = Configuration::pipeline_time();
 
-    client.submit_blocking(
+    client.submit_all_blocking(
         ParametersBuilder::new()
             .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)?
             .into_set_parameters(),
@@ -31,23 +30,24 @@ fn multisignature_transactions_should_wait_for_all_signatures() -> Result<()> {
     let alice_key_pair = get_key_pair();
     let key_pair_2 = KeyPair::generate()?;
     let asset_definition_id = AssetDefinitionId::from_str("camomile#wonderland")?;
-    let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()));
-    let set_signature_condition = MintExpr::new(
+    let create_asset =
+        Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone()));
+    let set_signature_condition = Mint::account_signature_check_condition(
         SignatureCheckCondition::AllAccountSignaturesAnd(
             vec![key_pair_2.public_key().clone()].into(),
         ),
-        IdBox::AccountId(alice_id.clone()),
+        alice_id.clone(),
     );
 
     let mut client_configuration = ClientConfiguration::test(&network.genesis.api_address);
     let client = Client::new(&client_configuration)?;
-    let instructions: [InstructionExpr; 2] = [create_asset.into(), set_signature_condition.into()];
+    let instructions: [InstructionBox; 2] = [create_asset.into(), set_signature_condition.into()];
     client.submit_all_blocking(instructions)?;
 
     //When
     let quantity: u32 = 200;
     let asset_id = AssetId::new(asset_definition_id, alice_id.clone());
-    let mint_asset = MintExpr::new(quantity.to_value(), IdBox::AssetId(asset_id.clone()));
+    let mint_asset = Mint::asset_quantity(quantity, asset_id.clone());
 
     let (public_key1, private_key1) = alice_key_pair.into();
     client_configuration.account_id = alice_id.clone();
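The key step of the multisignature test is attaching an `AllAccountSignaturesAnd` condition to Alice's account, after which a transaction signed by only one of the two keys must wait in the queue until the second signature arrives. Isolated from the harness (a sketch with the same identifiers as the test above):

    let set_signature_condition = Mint::account_signature_check_condition(
        SignatureCheckCondition::AllAccountSignaturesAnd(
            vec![key_pair_2.public_key().clone()].into(),
        ),
        alice_id.clone(),
    );
    client.submit_all_blocking([InstructionBox::from(set_signature_condition)])?;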
diff --git a/client/tests/integration/non_mintable.rs b/client/tests/integration/non_mintable.rs
index 1dca25910a2..c80be2ca4d9 100644
--- a/client/tests/integration/non_mintable.rs
+++ b/client/tests/integration/non_mintable.rs
@@ -5,6 +5,7 @@ use iroha_client::{
     client::{self, QueryResult},
     data_model::{metadata::UnlimitedMetadata, prelude::*},
 };
+use iroha_data_model::isi::InstructionBox;
 use test_network::*;
 
 #[test]
@@ -15,20 +16,18 @@ fn non_mintable_asset_can_be_minted_once_but_not_twice() -> Result<()> {
     // Given
     let account_id = AccountId::from_str("alice@wonderland").expect("Valid");
     let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid");
-    let create_asset =
-        RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once());
+    let create_asset = Register::asset_definition(
+        AssetDefinition::quantity(asset_definition_id.clone()).mintable_once(),
+    );
 
     let metadata = UnlimitedMetadata::default();
 
-    let mint = MintExpr::new(
-        200_u32.to_value(),
-        IdBox::AssetId(AssetId::new(
-            asset_definition_id.clone(),
-            account_id.clone(),
-        )),
+    let mint = Mint::asset_quantity(
+        200_u32,
+        AssetId::new(asset_definition_id.clone(), account_id.clone()),
     );
 
-    let instructions: [InstructionExpr; 2] = [create_asset.into(), mint.clone().into()];
+    let instructions: [InstructionBox; 2] = [create_asset.into(), mint.clone().into()];
     let tx = test_client.build_transaction(instructions, metadata)?;
 
     // We can register and mint the non-mintable token
@@ -65,11 +64,14 @@ fn non_mintable_asset_cannot_be_minted_if_registered_with_non_zero_value() -> Re
     // Given
     let account_id = AccountId::from_str("alice@wonderland").expect("Valid");
     let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid");
-    let create_asset =
-        RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once());
+    let create_asset: InstructionBox = Register::asset_definition(
+        AssetDefinition::quantity(asset_definition_id.clone()).mintable_once(),
+    )
+    .into();
     let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone());
 
-    let register_asset = RegisterExpr::new(Asset::new(asset_id.clone(), 1_u32));
+    let register_asset: InstructionBox =
+        Register::asset(Asset::new(asset_id.clone(), 1_u32)).into();
 
     // We can register the non-mintable token
     test_client.submit_all([create_asset, register_asset.clone()])?;
@@ -85,7 +87,7 @@ fn non_mintable_asset_cannot_be_minted_if_registered_with_non_zero_value() -> Re
     assert!(test_client.submit_blocking(register_asset).is_err());
 
     // And can't be minted
-    let mint = MintExpr::new(1_u32.to_value(), IdBox::AssetId(asset_id));
+    let mint = Mint::asset_quantity(1_u32, asset_id);
     assert!(test_client.submit_blocking(mint).is_err());
 
     Ok(())
@@ -99,15 +101,16 @@ fn non_mintable_asset_can_be_minted_if_registered_with_zero_value() -> Result<()
     // Given
     let account_id = AccountId::from_str("alice@wonderland").expect("Valid");
     let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid");
-    let create_asset =
-        RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once());
+    let create_asset = Register::asset_definition(
+        AssetDefinition::quantity(asset_definition_id.clone()).mintable_once(),
+    );
     let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone());
-    let register_asset = RegisterExpr::new(Asset::new(asset_id.clone(), 0_u32));
-    let mint = MintExpr::new(1_u32.to_value(), IdBox::AssetId(asset_id));
+    let register_asset = Register::asset(Asset::new(asset_id.clone(), 0_u32));
+    let mint = Mint::asset_quantity(1_u32, asset_id);
 
     // We can register the non-mintable token with zero value and then mint it
-    let instructions: [InstructionExpr; 3] =
+    let instructions: [InstructionBox; 3] =
         [create_asset.into(), register_asset.into(), mint.into()];
     test_client.submit_all(instructions)?;
     test_client.poll_request(client::asset::by_account_id(account_id), |result| {
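All three tests hinge on `mintable_once()`: the definition admits exactly one mint (or a single non-zero registration), and any further mint must be rejected. The invariant in miniature (a sketch reusing the identifiers from the tests above):

    let definition = AssetDefinition::quantity(asset_definition_id.clone()).mintable_once();
    test_client.submit_blocking(Register::asset_definition(definition))?;
    let mint = Mint::asset_quantity(200_u32, asset_id);
    test_client.submit_blocking(mint.clone())?;          // the one permitted mint
    assert!(test_client.submit_blocking(mint).is_err()); // any further mint fails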
diff --git a/client/tests/integration/offline_peers.rs b/client/tests/integration/offline_peers.rs
index 86838146000..fc14502caa3 100644
--- a/client/tests/integration/offline_peers.rs
+++ b/client/tests/integration/offline_peers.rs
@@ -1,11 +1,13 @@
 use eyre::Result;
 use iroha_client::{
-    client::{self, QueryResult},
+    client::{self, Client, QueryResult},
     data_model::{
-        parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder},
+        peer::{Peer as DataModelPeer, PeerId},
         prelude::*,
     },
 };
+use iroha_config::iroha::Configuration;
+use iroha_crypto::KeyPair;
 use test_network::*;
 use tokio::runtime::Runtime;
 
@@ -14,19 +16,13 @@ fn genesis_block_is_committed_with_some_offline_peers() -> Result<()> {
     // Given
     let rt = Runtime::test();
 
-    let (network, client) = rt.block_on(<Network>::start_test_with_offline_and_set_n_shifts(
+    let (network, client) = rt.block_on(Network::start_test_with_offline_and_set_n_shifts(
         4,
         1,
         Some(10_560),
     ));
     wait_for_genesis_committed(&network.clients(), 1);
 
-    client.submit_blocking(
-        ParametersBuilder::new()
-            .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)?
-            .into_set_parameters(),
-    )?;
-
     //When
     let alice_id: AccountId = "alice@wonderland".parse()?;
     let roses = "rose#wonderland".parse()?;
@@ -43,3 +39,41 @@ fn genesis_block_is_committed_with_some_offline_peers() -> Result<()> {
     assert_eq!(AssetValue::Quantity(alice_has_roses), *asset.value());
     Ok(())
 }
+
+#[test]
+fn register_offline_peer() -> Result<()> {
+    let n_peers = 4;
+
+    let (_rt, network, client) = Network::start_test_with_runtime(n_peers, Some(11_160));
+    wait_for_genesis_committed(&network.clients(), 0);
+    let pipeline_time = Configuration::pipeline_time();
+    let peer_clients = Network::clients(&network);
+
+    check_status(&peer_clients, 1);
+
+    let address = "128.0.0.2:8085".parse()?;
+    let key_pair = KeyPair::generate().unwrap();
+    let public_key = key_pair.public_key().clone();
+    let peer_id = PeerId::new(&address, &public_key);
+    let register_peer = Register::peer(DataModelPeer::new(peer_id));
+
+    // Wait for some time to allow peers to connect
+    client.submit_blocking(register_peer)?;
+    std::thread::sleep(pipeline_time * 2);
+
+    // Make sure status hasn't changed
+    check_status(&peer_clients, 2);
+
+    Ok(())
+}
+
+fn check_status(peer_clients: &[Client], expected_blocks: u64) {
+    let n_peers = peer_clients.len() as u64;
+
+    for peer_client in peer_clients {
+        let status = peer_client.get_status().unwrap();
+
+        assert_eq!(status.peers, n_peers - 1);
+        assert_eq!(status.blocks, expected_blocks);
+    }
+}
diff --git a/client/tests/integration/pagination.rs b/client/tests/integration/pagination.rs
index 1ab7ab4c14d..6e0ed462ec2 100644
--- a/client/tests/integration/pagination.rs
+++ b/client/tests/integration/pagination.rs
@@ -46,11 +46,11 @@ fn fetch_size_should_work() -> Result<()> {
 }
 
 fn register_assets(client: &Client) -> Result<()> {
-    let register: Vec<InstructionExpr> = ('a'..='z')
+    let register: Vec<InstructionBox> = ('a'..='z')
         .map(|c| c.to_string())
         .map(|name| (name + "#wonderland").parse().expect("Valid"))
         .map(|asset_definition_id| {
-            RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)).into()
+            Register::asset_definition(AssetDefinition::quantity(asset_definition_id)).into()
         })
         .collect();
     let _ = client.submit_all_blocking(register)?;
= Grant::permission_token(
        PermissionToken::new("InvalidToken".parse().unwrap(), &json!(null)),
        AccountId::from_str("alice@wonderland").unwrap(),
    );
@@ -78,8 +79,9 @@ fn permissions_disallow_asset_transfer() {
     let bob_id: AccountId = "bob@wonderland".parse().expect("Valid");
     let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid");
     let asset_definition_id: AssetDefinitionId = "xor#wonderland".parse().expect("Valid");
-    let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()));
-    let mouse_keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair.");
+    let create_asset =
+        Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone()));
+    let mouse_keypair = KeyPair::generate().expect("Failed to generate KeyPair.");
     let alice_start_assets = get_assets(&iroha_client, &alice_id);
     iroha_client
@@ -87,19 +89,19 @@ fn permissions_disallow_asset_transfer() {
         .expect("Failed to prepare state.");
 
     let quantity: u32 = 200;
-    let mint_asset = MintExpr::new(
-        quantity.to_value(),
-        IdBox::AssetId(AssetId::new(asset_definition_id.clone(), bob_id.clone())),
+    let mint_asset = Mint::asset_quantity(
+        quantity,
+        AssetId::new(asset_definition_id.clone(), bob_id.clone()),
     );
     iroha_client
         .submit_blocking(mint_asset)
         .expect("Failed to create asset.");
 
     //When
-    let transfer_asset = TransferExpr::new(
-        IdBox::AssetId(AssetId::new(asset_definition_id, bob_id)),
-        quantity.to_value(),
-        IdBox::AccountId(alice_id.clone()),
+    let transfer_asset = Transfer::asset_quantity(
+        AssetId::new(asset_definition_id, bob_id),
+        quantity,
+        alice_id.clone(),
     );
     let transfer_tx = TransactionBuilder::new(mouse_id)
         .with_instructions([transfer_asset])
@@ -131,8 +133,9 @@ fn permissions_disallow_asset_burn() {
     let bob_id: AccountId = "bob@wonderland".parse().expect("Valid");
     let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid");
     let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid");
-    let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()));
-    let mouse_keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair.");
+    let create_asset =
+        Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone()));
+    let mouse_keypair = KeyPair::generate().expect("Failed to generate KeyPair.");
 
     let alice_start_assets = get_assets(&iroha_client, &alice_id);
 
@@ -141,16 +144,14 @@ fn permissions_disallow_asset_burn() {
         .expect("Failed to prepare state.");
 
     let quantity: u32 = 200;
-    let mint_asset = MintExpr::new(
-        quantity.to_value(),
-        IdBox::AssetId(AssetId::new(asset_definition_id.clone(), bob_id)),
-    );
+    let mint_asset =
+        Mint::asset_quantity(quantity, AssetId::new(asset_definition_id.clone(), bob_id));
     iroha_client
         .submit_blocking(mint_asset)
         .expect("Failed to create asset.");
-    let burn_asset = BurnExpr::new(
-        quantity.to_value(),
-        IdBox::AssetId(AssetId::new(asset_definition_id, mouse_id.clone())),
+    let burn_asset = Burn::asset_quantity(
+        quantity,
+        AssetId::new(asset_definition_id, mouse_id.clone()),
     );
     let burn_tx = TransactionBuilder::new(mouse_id)
         .with_instructions([burn_asset])
@@ -184,7 +185,7 @@ fn account_can_query_only_its_own_domain() -> Result<()> {
     // Given
     let domain_id: DomainId = "wonderland".parse()?;
     let new_domain_id: DomainId = "wonderland2".parse()?;
-    let register_domain = RegisterExpr::new(Domain::new(new_domain_id.clone()));
+    let register_domain = Register::domain(Domain::new(new_domain_id.clone()));
 
     client.submit_blocking(register_domain)?;
@@ -204,7 +205,7 @@ fn permissions_differ_not_only_by_names() {
     let alice_id: AccountId = "alice@wonderland".parse().expect("Valid");
     let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid");
-    let mouse_keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair.");
+    let mouse_keypair = KeyPair::generate().expect("Failed to generate KeyPair.");
 
     // Registering `Store` asset definitions
     let hat_definition_id: AssetDefinitionId = "hat#wonderland".parse().expect("Valid");
@@ -213,20 +214,20 @@ fn permissions_differ_not_only_by_names() {
     let new_shoes_definition = AssetDefinition::store(shoes_definition_id.clone());
     client
         .submit_all_blocking([
-            RegisterExpr::new(new_hat_definition),
-            RegisterExpr::new(new_shoes_definition),
+            Register::asset_definition(new_hat_definition),
+            Register::asset_definition(new_shoes_definition),
         ])
         .expect("Failed to register new asset definitions");
 
     // Registering mouse
     let new_mouse_account = Account::new(mouse_id.clone(), [mouse_keypair.public_key().clone()]);
     client
-        .submit_blocking(RegisterExpr::new(new_mouse_account))
+        .submit_blocking(Register::account(new_mouse_account))
         .expect("Failed to register mouse");
 
     // Granting permission to Alice to modify metadata in Mouse's hats
     let mouse_hat_id = AssetId::new(hat_definition_id, mouse_id.clone());
-    let allow_alice_to_set_key_value_in_hats = GrantExpr::new(
+    let allow_alice_to_set_key_value_in_hats = Grant::permission_token(
         PermissionToken::new(
             "CanSetKeyValueInUserAsset".parse().unwrap(),
             &json!({ "asset_id": mouse_hat_id }),
@@ -244,7 +245,7 @@ fn permissions_differ_not_only_by_names() {
     // Checking that Alice can modify Mouse's hats ...
     client
-        .submit_blocking(SetKeyValueExpr::new(
+        .submit_blocking(SetKeyValue::asset(
             mouse_hat_id,
             Name::from_str("color").expect("Valid"),
             "red".to_owned(),
@@ -253,7 +254,7 @@ fn permissions_differ_not_only_by_names() {
     // ... but not shoes
     let mouse_shoes_id = AssetId::new(shoes_definition_id, mouse_id.clone());
-    let set_shoes_color = SetKeyValueExpr::new(
+    let set_shoes_color = SetKeyValue::asset(
         mouse_shoes_id.clone(),
         Name::from_str("color").expect("Valid"),
         "yellow".to_owned(),
@@ -263,7 +264,7 @@ fn permissions_differ_not_only_by_names() {
         .expect_err("Expected Alice to fail to modify Mouse's shoes");
 
     // Granting permission to Alice to modify metadata in Mouse's shoes
-    let allow_alice_to_set_key_value_in_shoes = GrantExpr::new(
+    let allow_alice_to_set_key_value_in_shoes = Grant::permission_token(
         PermissionToken::new(
             "CanSetKeyValueInUserAsset".parse().unwrap(),
             &json!({ "asset_id": mouse_shoes_id }),
@@ -287,6 +288,7 @@ fn permissions_differ_not_only_by_names() {
 }
 
 #[test]
+#[allow(deprecated)]
 fn stored_vs_granted_token_payload() -> Result<()> {
     let (_rt, _peer, iroha_client) = <PeerBuilder>::new().with_port(10_730).start_with_runtime();
     wait_for_genesis_committed(&[iroha_client.clone()], 0);
@@ -296,12 +298,13 @@ fn stored_vs_granted_token_payload() -> Result<()> {
     // Registering mouse and asset definition
     let asset_definition_id: AssetDefinitionId = "xor#wonderland".parse().expect("Valid");
-    let create_asset = RegisterExpr::new(AssetDefinition::store(asset_definition_id.clone()));
+    let create_asset =
+        Register::asset_definition(AssetDefinition::store(asset_definition_id.clone()));
     let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid");
-    let mouse_keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair.");
+    let mouse_keypair = KeyPair::generate().expect("Failed to generate KeyPair.");
     let new_mouse_account = Account::new(mouse_id.clone(), [mouse_keypair.public_key().clone()]);
-    let instructions: [InstructionExpr; 2] = [
-        RegisterExpr::new(new_mouse_account).into(),
+    let instructions: [InstructionBox; 2] = [
+        Register::account(new_mouse_account).into(),
         create_asset.into(),
     ];
     iroha_client
@@ -310,7 +313,7 @@ fn stored_vs_granted_token_payload() -> Result<()> {
     // Allow alice to mint mouse asset and mint initial value
     let mouse_asset = AssetId::new(asset_definition_id, mouse_id.clone());
-    let allow_alice_to_set_key_value_in_mouse_asset = GrantExpr::new(
+    let allow_alice_to_set_key_value_in_mouse_asset = Grant::permission_token(
         PermissionToken::from_str_unchecked(
             "CanSetKeyValueInUserAsset".parse().unwrap(),
             // NOTE: Introduced additional whitespaces in the serialized form
@@ -328,8 +331,7 @@ fn stored_vs_granted_token_payload() -> Result<()> {
         .expect("Failed to grant permission to alice.");
 
     // Check that alice can indeed mint mouse asset
-    let set_key_value =
-        SetKeyValueExpr::new(mouse_asset, Name::from_str("color")?, "red".to_owned());
+    let set_key_value = SetKeyValue::asset(mouse_asset, Name::from_str("color")?, "red".to_owned());
     iroha_client
         .submit_blocking(set_key_value)
         .expect("Failed to mint asset for mouse.");
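Taken together, the changes in this test module follow one mechanical rule: the untyped expression wrappers (`RegisterExpr`, `MintExpr`, `GrantExpr`, and friends) are replaced by typed constructors on the instruction kinds themselves, and the `Value`/`IdBox` boxing disappears. A minimal before/after sketch of that shape, assuming the `iroha_client` prelude of this branch (identifiers are taken from the diff above; the surrounding test harness is elided):

```rust
use iroha_client::data_model::prelude::*;

fn build_mint(asset_definition_id: AssetDefinitionId, account_id: AccountId) -> InstructionBox {
    // Old style, removed by this PR:
    //     MintExpr::new(
    //         200_u32.to_value(),
    //         IdBox::AssetId(AssetId::new(asset_definition_id, account_id)),
    //     )
    // New style: object and destination are statically typed, no boxing needed.
    Mint::asset_quantity(200_u32, AssetId::new(asset_definition_id, account_id)).into()
}
```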
diff --git a/client/tests/integration/queries/account.rs b/client/tests/integration/queries/account.rs
index d19a4498361..69d28c66e6f 100644
--- a/client/tests/integration/queries/account.rs
+++ b/client/tests/integration/queries/account.rs
@@ -15,7 +15,7 @@ fn find_accounts_with_asset() -> Result<()> {
     // Registering new asset definition
     let definition_id = AssetDefinitionId::from_str("test_coin#wonderland").expect("Valid");
     let asset_definition = AssetDefinition::quantity(definition_id.clone());
-    test_client.submit_blocking(RegisterExpr::new(asset_definition.clone()))?;
+    test_client.submit_blocking(Register::asset_definition(asset_definition.clone()))?;
 
     // Checking results before all
     let received_asset_definition =
@@ -40,7 +40,7 @@ fn find_accounts_with_asset() -> Result<()> {
         .iter()
         .skip(1) // Alice has already been registered in genesis
         .cloned()
-        .map(|account_id| RegisterExpr::new(Account::new(account_id, [])))
+        .map(|account_id| Register::account(Account::new(account_id, [])))
         .collect::<Vec<_>>();
     test_client.submit_all_blocking(register_accounts)?;
 
@@ -48,7 +48,7 @@ fn find_accounts_with_asset() -> Result<()> {
         .iter()
         .cloned()
         .map(|account_id| AssetId::new(definition_id.clone(), account_id))
-        .map(|asset_id| MintExpr::new(1_u32, asset_id))
+        .map(|asset_id| Mint::asset_quantity(1_u32, asset_id))
         .collect::<Vec<_>>();
     test_client.submit_all_blocking(mint_asset)?;
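The pattern above, building an iterator of typed instructions, collecting into a `Vec`, and submitting them in one blocking call, is the idiomatic way these tests batch setup work. A hedged sketch of the same idea in isolation (`seed_accounts` is a hypothetical helper, not part of the codebase):

```rust
use iroha_client::{client::Client, data_model::prelude::*};

// Hypothetical helper: register several accounts, then give each one unit of an asset.
fn seed_accounts(
    client: &Client,
    definition_id: &AssetDefinitionId,
    ids: &[AccountId],
) -> eyre::Result<()> {
    let register: Vec<InstructionBox> = ids
        .iter()
        .cloned()
        .map(|id| Register::account(Account::new(id, [])).into())
        .collect();
    client.submit_all_blocking(register)?;

    let mint: Vec<InstructionBox> = ids
        .iter()
        .cloned()
        .map(|id| Mint::asset_quantity(1_u32, AssetId::new(definition_id.clone(), id)).into())
        .collect();
    client.submit_all_blocking(mint)?;
    Ok(())
}
```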
diff --git a/client/tests/integration/queries/asset.rs b/client/tests/integration/queries/asset.rs
index 4748cd6d7c2..6046c8cbf88 100644
--- a/client/tests/integration/queries/asset.rs
+++ b/client/tests/integration/queries/asset.rs
@@ -1,12 +1,14 @@
 use eyre::Result;
 use iroha_client::{
-    client::ClientQueryError,
+    client::{Client, ClientQueryError},
+    crypto::KeyPair,
     data_model::{
+        asset::AssetValue,
+        isi::Instruction,
         prelude::*,
         query::{asset::FindTotalAssetQuantityByAssetDefinitionId, error::QueryExecutionFail},
     },
 };
-use iroha_crypto::KeyPair;
 use iroha_primitives::fixed::Fixed;
 use test_network::*;
 
@@ -19,7 +21,7 @@ fn find_asset_total_quantity() -> Result<()> {
     // Register new domain
     let domain_id: DomainId = "looking_glass".parse()?;
     let domain = Domain::new(domain_id);
-    test_client.submit_blocking(RegisterExpr::new(domain))?;
+    test_client.submit_blocking(Register::domain(domain))?;
 
     let accounts: [AccountId; 5] = [
         "alice@wonderland".parse()?,
@@ -40,121 +42,52 @@ fn find_asset_total_quantity() -> Result<()> {
         .skip(1) // Alice has already been registered in genesis
         .cloned()
         .zip(keys.iter().map(KeyPair::public_key).cloned())
-        .map(|(account_id, public_key)| RegisterExpr::new(Account::new(account_id, [public_key])))
+        .map(|(account_id, public_key)| Register::account(Account::new(account_id, [public_key])))
        .collect::<Vec<_>>();
     test_client.submit_all_blocking(register_accounts)?;
 
     // Test for numeric assets value types
-    for (
-        definition,
-        asset_value_type,
-        initial_value,
-        to_mint,
-        to_burn,
-        expected_total_asset_quantity,
-    ) in [
-        (
-            "quantity#wonderland",
-            AssetValueType::Quantity,
-            AssetValue::Quantity(1_u32),
-            10_u32.to_value(),
-            5_u32.to_value(),
-            NumericValue::U32(30_u32),
-        ),
-        (
-            "big-quantity#wonderland",
-            AssetValueType::BigQuantity,
-            AssetValue::BigQuantity(1_u128),
-            10_u128.to_value(),
-            5_u128.to_value(),
-            NumericValue::U128(30_u128),
-        ),
-        (
-            "fixed#wonderland",
-            AssetValueType::Fixed,
-            AssetValue::Fixed(Fixed::try_from(1.0)?),
-            10.0_f64.try_to_value()?,
-            5.0_f64.try_to_value()?,
-            NumericValue::Fixed(Fixed::try_from(30.0)?),
-        ),
-    ] {
-        // Registering new asset definition
-        let definition_id: AssetDefinitionId =
-            definition.parse().expect("Failed to parse `definition_id`");
-        let asset_definition = AssetDefinition::new(definition_id.clone(), asset_value_type);
-        test_client.submit_blocking(RegisterExpr::new(asset_definition.clone()))?;
-
-        let asset_ids = accounts
-            .iter()
-            .cloned()
-            .map(|account_id| AssetId::new(definition_id.clone(), account_id))
-            .collect::<Vec<_>>();
-
-        // Assert that initial total quantity before any burns and mints is zero
-        let initial_total_asset_quantity = test_client.request(
-            FindTotalAssetQuantityByAssetDefinitionId::new(definition_id.clone()),
-        )?;
-        assert!(initial_total_asset_quantity.is_zero_value());
-
-        let register_asset = asset_ids
-            .iter()
-            .cloned()
-            .map(|asset_id| Asset::new(asset_id, initial_value.clone()))
-            .map(RegisterExpr::new)
-            .collect::<Vec<_>>();
-        test_client.submit_all_blocking(register_asset)?;
-
-        let mint_asset = asset_ids
-            .iter()
-            .cloned()
-            .map(|asset_id| MintExpr::new(to_mint.clone(), asset_id));
-        test_client.submit_all_blocking(mint_asset)?;
-
-        let burn_asset = asset_ids
-            .iter()
-            .cloned()
-            .map(|asset_id| BurnExpr::new(to_burn.clone(), asset_id))
-            .collect::<Vec<_>>();
-        test_client.submit_all_blocking(burn_asset)?;
-
-        // Assert that total asset quantity is equal to: `n_accounts * (initial_value + to_mint - to_burn)`
-        let total_asset_quantity = test_client.request(
-            FindTotalAssetQuantityByAssetDefinitionId::new(definition_id.clone()),
-        )?;
-        assert_eq!(expected_total_asset_quantity, total_asset_quantity);
-
-        let unregister_asset = asset_ids
-            .iter()
-            .cloned()
-            .map(UnregisterExpr::new)
-            .collect::<Vec<_>>();
-        test_client.submit_all_blocking(unregister_asset)?;
-
-        // Assert that total asset quantity is zero after unregistering asset from all accounts
-        let total_asset_quantity = test_client.request(
-            FindTotalAssetQuantityByAssetDefinitionId::new(definition_id.clone()),
-        )?;
-        assert!(total_asset_quantity.is_zero_value());
-
-        // Unregister asset definition
-        test_client.submit_blocking(UnregisterExpr::new(definition_id.clone()))?;
-
-        // Assert that total asset quantity cleared with unregistering of asset definition
-        let result = test_client.request(FindTotalAssetQuantityByAssetDefinitionId::new(
-            definition_id.clone(),
-        ));
-        assert!(matches!(
-            result,
-            Err(ClientQueryError::Validation(ValidationFail::QueryFailed(
-                QueryExecutionFail::Find(_)
-            )))
-        ));
-    }
+    test_total_quantity(
+        &test_client,
+        &accounts,
+        "quantity#wonderland",
+        AssetValueType::Quantity,
+        1_u32,
+        10_u32,
+        5_u32,
+        NumericValue::U32(30_u32),
+        Mint::asset_quantity,
+        Burn::asset_quantity,
+    )?;
+    test_total_quantity(
+        &test_client,
+        &accounts,
+        "big-quantity#wonderland",
+        AssetValueType::BigQuantity,
+        1_u128,
+        10_u128,
+        5_u128,
+        NumericValue::U128(30_u128),
+        Mint::asset_big_quantity,
+        Burn::asset_big_quantity,
+    )?;
+    test_total_quantity(
+        &test_client,
+        &accounts,
+        "fixed#wonderland",
+        AssetValueType::Fixed,
+        Fixed::try_from(1.0)?,
+        Fixed::try_from(10.0)?,
+        Fixed::try_from(5.0)?,
+        NumericValue::Fixed(Fixed::try_from(30.0)?),
+        Mint::asset_fixed,
+        Burn::asset_fixed,
+    )?;
 
     // Test for `Store` asset value type
     let definition_id: AssetDefinitionId = "store#wonderland".parse().expect("Valid");
     let asset_definition = AssetDefinition::store(definition_id.clone());
-    test_client.submit_blocking(RegisterExpr::new(asset_definition))?;
+    test_client.submit_blocking(Register::asset_definition(asset_definition))?;
 
     let asset_ids = accounts
         .iter()
@@ -168,13 +101,13 @@ fn find_asset_total_quantity() -> Result<()> {
     )?;
     assert!(initial_total_asset_quantity.is_zero_value());
 
-    let register_asset = asset_ids
+    let register_assets = asset_ids
         .iter()
         .cloned()
         .map(|asset_id| Asset::new(asset_id, Metadata::default()))
-        .map(RegisterExpr::new)
+        .map(Register::asset)
         .collect::<Vec<_>>();
-    test_client.submit_all_blocking(register_asset)?;
+    test_client.submit_all_blocking(register_assets)?;
 
     // Assert that total quantity is equal to number of registrations
     let result = test_client.request(FindTotalAssetQuantityByAssetDefinitionId::new(
@@ -182,12 +115,106 @@ fn find_asset_total_quantity() -> Result<()> {
     ))?;
     assert_eq!(NumericValue::U32(5), result);
 
-    let unregister_asset = asset_ids
+    let unregister_assets = asset_ids
+        .iter()
+        .cloned()
+        .map(Unregister::asset)
+        .collect::<Vec<_>>();
+    test_client.submit_all_blocking(unregister_assets)?;
+
+    // Assert that total asset quantity is zero after unregistering asset from all accounts
+    let total_asset_quantity = test_client.request(
+        FindTotalAssetQuantityByAssetDefinitionId::new(definition_id.clone()),
+    )?;
+    assert!(total_asset_quantity.is_zero_value());
+
+    // Unregister asset definition
+    test_client.submit_blocking(Unregister::asset_definition(definition_id.clone()))?;
+
+    // Assert that total asset quantity cleared with unregistering of asset definition
+    let result = test_client.request(FindTotalAssetQuantityByAssetDefinitionId::new(
+        definition_id,
+    ));
+    assert!(matches!(
+        result,
+        Err(ClientQueryError::Validation(ValidationFail::QueryFailed(
+            QueryExecutionFail::Find(_)
+        )))
+    ));
+
+    Ok(())
+}
+
+#[allow(clippy::too_many_arguments)]
+fn test_total_quantity<T>(
+    test_client: &Client,
+    accounts: &[AccountId; 5],
+    definition: &str,
+    asset_value_type: AssetValueType,
+    initial_value: T,
+    to_mint: T,
+    to_burn: T,
+    expected_total_asset_quantity: NumericValue,
+    mint_ctr: impl Fn(T, AssetId) -> Mint<T, Asset>,
+    burn_ctr: impl Fn(T, AssetId) -> Burn<T, Asset>,
+) -> Result<()>
+where
+    T: Copy + Into<AssetValue>,
+    Value: From<T>,
+    Mint<T, Asset>: Instruction,
+    Burn<T, Asset>: Instruction,
+{
+    // Registering new asset definition
+    let definition_id: AssetDefinitionId =
+        definition.parse().expect("Failed to parse `definition_id`");
+    let asset_definition = AssetDefinition::new(definition_id.clone(), asset_value_type);
+    test_client.submit_blocking(Register::asset_definition(asset_definition))?;
+
+    let asset_ids = accounts
+        .iter()
+        .cloned()
+        .map(|account_id| AssetId::new(definition_id.clone(), account_id))
+        .collect::<Vec<_>>();
+
+    // Assert that initial total quantity before any burns and mints is zero
+    let initial_total_asset_quantity = test_client.request(
+        FindTotalAssetQuantityByAssetDefinitionId::new(definition_id.clone()),
+    )?;
+    assert!(initial_total_asset_quantity.is_zero_value());
+
+    let register_assets = asset_ids
+        .iter()
+        .cloned()
+        .map(|asset_id| Asset::new(asset_id, initial_value))
+        .map(Register::asset)
+        .collect::<Vec<_>>();
+    test_client.submit_all_blocking(register_assets)?;
+
+    let mint_assets = asset_ids
+        .iter()
+        .cloned()
+        .map(|asset_id| mint_ctr(to_mint, asset_id));
+    test_client.submit_all_blocking(mint_assets)?;
+
+    let burn_assets = asset_ids
+        .iter()
+        .cloned()
+        .map(|asset_id| burn_ctr(to_burn, asset_id))
+        .collect::<Vec<_>>();
+    test_client.submit_all_blocking(burn_assets)?;
+
+    // Assert that total asset quantity is equal to: `n_accounts * (initial_value + to_mint - to_burn)`
+    let total_asset_quantity = test_client.request(
+        FindTotalAssetQuantityByAssetDefinitionId::new(definition_id.clone()),
+    )?;
+    assert_eq!(expected_total_asset_quantity, total_asset_quantity);
+
+    let unregister_assets = asset_ids
         .iter()
         .cloned()
-        .map(UnregisterExpr::new)
+        .map(Unregister::asset)
         .collect::<Vec<_>>();
-    test_client.submit_all_blocking(unregister_asset)?;
+    test_client.submit_all_blocking(unregister_assets)?;
 
     // Assert that total asset quantity is zero after unregistering asset from all accounts
     let total_asset_quantity = test_client.request(
@@ -196,7 +223,7 @@ fn find_asset_total_quantity() -> Result<()> {
     assert!(total_asset_quantity.is_zero_value());
 
     // Unregister asset definition
-    test_client.submit_blocking(UnregisterExpr::new(definition_id.clone()))?;
+    test_client.submit_blocking(Unregister::asset_definition(definition_id.clone()))?;
 
     // Assert that total asset quantity cleared with unregistering of asset definition
     let result = test_client.request(FindTotalAssetQuantityByAssetDefinitionId::new(
diff --git a/client/tests/integration/queries/role.rs b/client/tests/integration/queries/role.rs
index 5432dc2e547..9d18b523910 100644
--- a/client/tests/integration/queries/role.rs
+++ b/client/tests/integration/queries/role.rs
@@ -29,7 +29,7 @@ fn find_roles() -> Result<()> {
     let register_roles = role_ids
         .iter()
         .cloned()
-        .map(|role_id| RegisterExpr::new(Role::new(role_id)))
+        .map(|role_id| Register::role(Role::new(role_id)))
         .collect::<Vec<_>>();
     test_client.submit_all_blocking(register_roles)?;
 
@@ -61,7 +61,7 @@ fn find_role_ids() -> Result<()> {
     let register_roles = role_ids
         .iter()
         .cloned()
-        .map(|role_id| RegisterExpr::new(Role::new(role_id)))
+        .map(|role_id| Register::role(Role::new(role_id)))
         .collect::<Vec<_>>();
     test_client.submit_all_blocking(register_roles)?;
 
@@ -87,7 +87,7 @@ fn find_role_by_id() -> Result<()> {
     let new_role = Role::new(role_id.clone());
 
     // Registering role
-    let register_role = RegisterExpr::new(new_role.clone());
+    let register_role = Register::role(new_role.clone());
     test_client.submit_blocking(register_role)?;
 
     let found_role = test_client.request(client::role::by_id(role_id))?;
@@ -130,7 +130,7 @@ fn find_roles_by_account_id() -> Result<()> {
         .iter()
         .cloned()
         .map(|role_id| {
-            RegisterExpr::new(Role::new(role_id).add_permission(PermissionToken::new(
+            Register::role(Role::new(role_id).add_permission(PermissionToken::new(
                 "CanSetKeyValueInUserAccount".parse().unwrap(),
                 &json!({ "account_id": alice_id }),
             )))
@@ -142,7 +142,7 @@ fn find_roles_by_account_id() -> Result<()> {
     let grant_roles = role_ids
         .iter()
         .cloned()
-        .map(|role_id| GrantExpr::new(role_id, alice_id.clone()))
+        .map(|role_id| Grant::role(role_id, alice_id.clone()))
         .collect::<Vec<_>>();
     test_client.submit_all_blocking(grant_roles)?;
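For context, `client::role::by_id` above returns the full `Role`, so a register-then-query round trip is a one-liner each way. A minimal usage sketch under the same `test_network` harness assumptions (the port and role name are illustrative):

```rust
use iroha_client::{client, data_model::prelude::*};
use test_network::*;

fn register_and_fetch_role() -> eyre::Result<()> {
    let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(11_035).start_with_runtime();
    wait_for_genesis_committed(&vec![test_client.clone()], 0);

    // Register a role, then read it back through the query API.
    let role_id: RoleId = "auditor".parse()?;
    test_client.submit_blocking(Register::role(Role::new(role_id.clone())))?;
    let found = test_client.request(client::role::by_id(role_id.clone()))?;
    assert_eq!(*found.id(), role_id);
    Ok(())
}
```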
                .with_dir(temp_dir.clone())
-                .start_with_peer(&mut peer),
-        );
-        wait_for_genesis_committed(&vec![iroha_client.clone()], 0);
+        let (_rt, network, _) = Network::start_test_with_runtime(n_peers, Some(11_200));
+        wait_for_genesis_committed(&network.clients(), 0);
+        let pipeline_time = Configuration::pipeline_time();
+        let peer_clients = Network::clients(&network);
+
+        let create_asset =
+            Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone()));
+        peer_clients
+            .choose(&mut thread_rng())
+            .unwrap()
+            .submit_blocking(create_asset)?;
 
-        iroha_client.submit_blocking(create_asset)?;
-        let mint_asset = MintExpr::new(
-            quantity.to_value(),
-            IdBox::AssetId(AssetId::new(
-                asset_definition_id.clone(),
-                account_id.clone(),
-            )),
+        let mint_asset = Mint::asset_quantity(
+            quantity,
+            AssetId::new(asset_definition_id.clone(), account_id.clone()),
         );
-        iroha_client.submit_blocking(mint_asset)?;
+        peer_clients
+            .choose(&mut thread_rng())
+            .unwrap()
+            .submit_blocking(mint_asset)?;
 
-        let assets = iroha_client
+        // Wait for observing peer to get the block
+        thread::sleep(pipeline_time);
+
+        let assets = peer_clients
+            .choose(&mut thread_rng())
+            .unwrap()
             .request(client::asset::by_account_id(account_id.clone()))?
             .collect::<QueryResult<Vec<_>>>()?;
         let asset = assets
@@ -55,20 +53,29 @@ fn restarted_peer_should_have_the_same_asset_amount() -> Result<()> {
             .find(|asset| asset.id().definition_id == asset_definition_id)
             .expect("Asset not found");
         assert_eq!(AssetValue::Quantity(quantity), *asset.value());
-        peer.stop();
-    }
+        let mut all_peers: Vec<_> = core::iter::once(network.genesis)
+            .chain(network.peers.into_values())
+            .collect();
+        let removed_peer_idx = rand::thread_rng().gen_range(0..all_peers.len());
+        let mut removed_peer = all_peers.swap_remove(removed_peer_idx);
+        removed_peer.stop();
+        removed_peer
+    };
+    // All peers have been stopped here
+
+    // Restart just one peer and check if it updates itself from the blockstore
     {
         let rt = Runtime::test();
         rt.block_on(
             PeerBuilder::new()
-                .with_configuration(configuration)
-                .with_dir(temp_dir)
-                .start_with_peer(&mut peer),
+                .with_dir(removed_peer.temp_dir.as_ref().unwrap().clone())
+                .start_with_peer(&mut removed_peer),
         );
-        wait_for_genesis_committed(&vec![iroha_client.clone()], 0);
+        let removed_peer_client = Client::test(&removed_peer.api_address);
+        wait_for_genesis_committed(&vec![removed_peer_client.clone()], 0);
 
-        iroha_client.poll_request(client::asset::by_account_id(account_id), |result| {
+        removed_peer_client.poll_request(client::asset::by_account_id(account_id), |result| {
             let assets = result.collect::<QueryResult<Vec<_>>>().expect("Valid");
             iroha_logger::error!(?assets);
diff --git a/client/tests/integration/roles.rs b/client/tests/integration/roles.rs
index a89a939c69d..ef1884806a3 100644
--- a/client/tests/integration/roles.rs
+++ b/client/tests/integration/roles.rs
@@ -3,6 +3,7 @@ use std::str::FromStr as _;
 use eyre::Result;
 use iroha_client::{
     client::{self, QueryResult},
+    crypto::KeyPair,
     data_model::prelude::*,
 };
 use serde_json::json;
@@ -14,7 +15,7 @@ fn register_empty_role() -> Result<()> {
     wait_for_genesis_committed(&vec![test_client.clone()], 0);
 
     let role_id = "root".parse().expect("Valid");
-    let register_role = RegisterExpr::new(Role::new(role_id));
+    let register_role = Register::role(Role::new(role_id));
 
     test_client.submit(register_role)?;
     Ok(())
@@ -29,7 +30,7 @@ fn register_role_with_empty_token_params() -> Result<()> {
     let token = PermissionToken::new("token".parse()?, &json!(null));
     let role = Role::new(role_id).add_permission(token);
 
-    test_client.submit(RegisterExpr::new(role))?;
+    test_client.submit(Register::role(role))?;
     Ok(())
 }
 
@@ -52,8 +53,8 @@ fn register_and_grant_role_for_metadata_access() -> Result<()> {
     let mouse_id = AccountId::from_str("mouse@wonderland")?;
 
     // Registering Mouse
-    let mouse_key_pair = iroha_crypto::KeyPair::generate()?;
-    let register_mouse = RegisterExpr::new(Account::new(
+    let mouse_key_pair = KeyPair::generate()?;
+    let register_mouse = Register::account(Account::new(
         mouse_id.clone(),
         [mouse_key_pair.public_key().clone()],
     ));
@@ -70,18 +71,18 @@ fn register_and_grant_role_for_metadata_access() -> Result<()> {
         "CanRemoveKeyValueInUserAccount".parse()?,
         &json!({ "account_id": mouse_id }),
     ));
-    let register_role = RegisterExpr::new(role);
+    let register_role = Register::role(role);
     test_client.submit_blocking(register_role)?;
 
     // Mouse grants role to Alice
-    let grant_role = GrantExpr::new(role_id.clone(), alice_id.clone());
+    let grant_role = Grant::role(role_id.clone(), alice_id.clone());
     let grant_role_tx = TransactionBuilder::new(mouse_id.clone())
         .with_instructions([grant_role])
         .sign(mouse_key_pair)?;
     test_client.submit_transaction_blocking(&grant_role_tx)?;
 
     // Alice modifies Mouse's metadata
-    let set_key_value = SetKeyValueExpr::new(
+    let set_key_value = SetKeyValue::account(
         mouse_id,
         Name::from_str("key").expect("Valid"),
         Value::String("value".to_owned()),
@@ -107,11 +108,11 @@ fn unregistered_role_removed_from_account() -> Result<()> {
     let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid");
 
     // Registering Mouse
-    let register_mouse = RegisterExpr::new(Account::new(mouse_id.clone(), []));
+    let register_mouse = Register::account(Account::new(mouse_id.clone(), []));
     test_client.submit_blocking(register_mouse)?;
 
     // Register root role
-    let register_role = RegisterExpr::new(Role::new(role_id.clone()).add_permission(
+    let register_role = Register::role(Role::new(role_id.clone()).add_permission(
         PermissionToken::new(
             "CanSetKeyValueInUserAccount".parse()?,
             &json!({ "account_id": alice_id }),
@@ -120,7 +121,7 @@ fn unregistered_role_removed_from_account() -> Result<()> {
     test_client.submit_blocking(register_role)?;
 
     // Grant root role to Mouse
-    let grant_role = GrantExpr::new(role_id.clone(), mouse_id.clone());
+    let grant_role = Grant::role(role_id.clone(), mouse_id.clone());
     test_client.submit_blocking(grant_role)?;
 
     // Check that Mouse has root role
@@ -130,7 +131,7 @@ fn unregistered_role_removed_from_account() -> Result<()> {
     assert!(found_mouse_roles.contains(&role_id));
 
     // Unregister root role
-    let unregister_role = UnregisterExpr::new(role_id.clone());
+    let unregister_role = Unregister::role(role_id.clone());
     test_client.submit_blocking(unregister_role)?;
 
     // Check that Mouse doesn't have the root role
@@ -155,7 +156,7 @@ fn role_with_invalid_permissions_is_not_accepted() -> Result<()> {
     ));
 
     let err = test_client
-        .submit_blocking(RegisterExpr::new(role))
+        .submit_blocking(Register::role(role))
         .expect_err("Submitting role with invalid permission token should fail");
 
     let rejection_reason = err
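A detail worth noting in `register_and_grant_role_for_metadata_access` above: the grant is submitted as a transaction built for and signed by Mouse, mirroring the test where Mouse hands Alice access to its own metadata. A condensed sketch of that flow (same harness assumptions; IDs and port are illustrative):

```rust
use iroha_client::{crypto::KeyPair, data_model::prelude::*};
use test_network::*;

fn grant_role_as_account_owner() -> eyre::Result<()> {
    let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(11_040).start_with_runtime();
    wait_for_genesis_committed(&vec![test_client.clone()], 0);

    let alice_id: AccountId = "alice@wonderland".parse()?;
    let mouse_id: AccountId = "mouse@wonderland".parse()?;
    let mouse_key_pair = KeyPair::generate()?;
    test_client.submit_blocking(Register::account(Account::new(
        mouse_id.clone(),
        [mouse_key_pair.public_key().clone()],
    )))?;

    let role_id: RoleId = "METADATA_ACCESS".parse()?;
    test_client.submit_blocking(Register::role(Role::new(role_id.clone())))?;

    // The grant originates from Mouse, so the transaction is signed with Mouse's key.
    let grant_role_tx = TransactionBuilder::new(mouse_id)
        .with_instructions([Grant::role(role_id, alice_id)])
        .sign(mouse_key_pair)?;
    test_client.submit_transaction_blocking(&grant_role_tx)?;
    Ok(())
}
```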
diff --git a/client/tests/integration/set_parameter.rs b/client/tests/integration/set_parameter.rs
index 1308c7ea141..08012429e01 100644
--- a/client/tests/integration/set_parameter.rs
+++ b/client/tests/integration/set_parameter.rs
@@ -14,7 +14,7 @@ fn can_change_parameter_value() -> Result<()> {
     let parameter = Parameter::from_str("?BlockTime=4000")?;
     let parameter_id = ParameterId::from_str("BlockTime")?;
-    let param_box = SetParameterExpr::new(parameter);
+    let param_box = SetParameter::new(parameter);
 
     let old_params = test_client
         .request(client::parameter::all())?
@@ -46,13 +46,13 @@ fn parameter_propagated() -> Result<()> {
     wait_for_genesis_committed(&vec![test_client.clone()], 0);
 
     let too_long_domain_name: DomainId = "0".repeat(2_usize.pow(8)).parse()?;
-    let create_domain = RegisterExpr::new(Domain::new(too_long_domain_name));
+    let create_domain = Register::domain(Domain::new(too_long_domain_name));
     let _ = test_client
         .submit_blocking(create_domain.clone())
         .expect_err("Should fail before ident length limits update");
 
     let parameter = Parameter::from_str("?WSVIdentLengthLimits=1,256_LL")?;
-    let param_box = SetParameterExpr::new(parameter);
+    let param_box = SetParameter::new(parameter);
     test_client.submit_blocking(param_box)?;
 
     test_client
diff --git a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs
index 5fa18856b48..bce2802adcb 100644
--- a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs
+++ b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs
@@ -43,8 +43,10 @@ fn main(_owner: AccountId, _event: Event) {
         let account_nft_id = AssetId::new(nft_id, account.id().clone());
         let account_nft = Asset::new(account_nft_id, Metadata::new());
 
-        RegisterExpr::new(nft_definition).execute().dbg_unwrap();
-        RegisterExpr::new(account_nft).execute().dbg_unwrap();
+        Register::asset_definition(nft_definition)
+            .execute()
+            .dbg_unwrap();
+        Register::asset(account_nft).execute().dbg_unwrap();
     }
 
     iroha_trigger::log::info!("Smart contract executed successfully");
diff --git a/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs
index 1b3b6443ef8..8a950ee38cd 100644
--- a/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs
+++ b/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs
@@ -6,23 +6,22 @@
 #[cfg(not(test))]
 extern crate panic_halt;
 
-use iroha_executor::{parse, prelude::*, smart_contract};
+use iroha_executor::{parse, prelude::*};
 use lol_alloc::{FreeListAllocator, LockedAllocator};
 
 #[global_allocator]
 static ALLOC: LockedAllocator<FreeListAllocator> = LockedAllocator::new(FreeListAllocator::new());
 
-#[derive(Constructor, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)]
+#[derive(Constructor, ValidateEntrypoints, Validate, Visit)]
 #[visit(custom(visit_instruction))]
 struct Executor {
     verdict: Result,
     block_height: u64,
-    host: smart_contract::Host,
 }
 
-fn visit_instruction(executor: &mut Executor, authority: &AccountId, isi: &InstructionExpr) {
+fn visit_instruction(executor: &mut Executor, authority: &AccountId, isi: &InstructionBox) {
     if parse!("admin@admin" as AccountId) == *authority {
-        pass!(executor);
+        execute!(executor, isi);
     }
 
     iroha_executor::default::visit_instruction(executor, authority, isi);
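The executor change above is the behavioral core of this PR: `pass!` merely set a verdict, while the new `execute!` macro both authorizes and actually carries out the instruction, since there is no longer a separate expression-evaluation pass (hence the dropped `ExpressionEvaluator` derive and `host` field). A minimal sketch of a custom visitor in that style, assuming `iroha_executor`'s derive macros exactly as used in the diff (the account name is illustrative):

```rust
use iroha_executor::{parse, prelude::*};

#[derive(Constructor, ValidateEntrypoints, Validate, Visit)]
#[visit(custom(visit_instruction))]
struct Executor {
    verdict: Result,
    block_height: u64,
}

// Let a designated superuser run anything; defer everyone else to the defaults.
fn visit_instruction(executor: &mut Executor, authority: &AccountId, isi: &InstructionBox) {
    if parse!("superuser@admin" as AccountId) == *authority {
        execute!(executor, isi); // authorizes *and* executes, then returns
    }
    iroha_executor::default::visit_instruction(executor, authority, isi);
}
```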
diff --git a/client/tests/integration/smartcontracts/executor_with_custom_token/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_custom_token/src/lib.rs
index f75d0e43fed..bd9322a7f87 100644
--- a/client/tests/integration/smartcontracts/executor_with_custom_token/src/lib.rs
+++ b/client/tests/integration/smartcontracts/executor_with_custom_token/src/lib.rs
@@ -20,7 +20,7 @@ use alloc::{borrow::ToOwned, string::String};
 
 use anyhow::anyhow;
 use iroha_executor::{
-    default::default_permission_token_schema, permission::Token as _, prelude::*, smart_contract,
+    default::default_permission_token_schema, permission::Token as _, prelude::*,
 };
 use iroha_schema::IntoSchema;
 use lol_alloc::{FreeListAllocator, LockedAllocator};
@@ -54,12 +54,11 @@ mod token {
     pub struct CanControlDomainLives;
 }
 
-#[derive(Constructor, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)]
+#[derive(Constructor, ValidateEntrypoints, Validate, Visit)]
 #[visit(custom(visit_register_domain, visit_unregister_domain))]
 struct Executor {
     verdict: Result,
     block_height: u64,
-    host: smart_contract::Host,
 }
 
 impl Executor {
@@ -119,7 +118,7 @@ impl Executor {
         accounts
             .iter()
             .try_for_each(|(account, domain_id)| {
-                RevokeExpr::new(
+                Revoke::permission_token(
                     PermissionToken::new(
                         can_unregister_domain_definition_id.clone(),
                         &json!({ "domain_id": domain_id }),
@@ -138,7 +137,7 @@ impl Executor {
                 )
             })?;
 
-        GrantExpr::new(
+        Grant::permission_token(
             PermissionToken::new(
                 can_control_domain_lives_definition_id.clone(),
                 &json!(null),
@@ -170,13 +169,12 @@ impl Executor {
     }
 }
 
-// TODO (#4049): Fix unused `visit_register_domain()`
-fn visit_register_domain(executor: &mut Executor, authority: &AccountId, _isi: Register<Domain>) {
+fn visit_register_domain(executor: &mut Executor, authority: &AccountId, isi: &Register<Domain>) {
     if executor.block_height() == 0 {
-        pass!(executor)
+        execute!(executor, isi);
     }
     if token::CanControlDomainLives.is_owned_by(authority) {
-        pass!(executor);
+        execute!(executor, isi);
     }
 
     deny!(
@@ -188,13 +186,13 @@ fn visit_register_domain(executor: &mut Executor, authority: &AccountId, _isi: R
 fn visit_unregister_domain(
     executor: &mut Executor,
     authority: &AccountId,
-    _isi: Unregister<Domain>,
+    isi: &Unregister<Domain>,
 ) {
     if executor.block_height() == 0 {
-        pass!(executor);
+        execute!(executor, isi);
     }
     if token::CanControlDomainLives.is_owned_by(authority) {
-        pass!(executor);
+        execute!(executor, isi);
     }
 
     deny!(executor, "You don't have permission to unregister domain");
diff --git a/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs
index 437e01b9bdd..e603758dd1d 100644
--- a/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs
+++ b/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs
@@ -9,17 +9,16 @@ extern crate panic_halt;
 use alloc::{borrow::ToOwned as _, format};
 
 use anyhow::anyhow;
-use iroha_executor::{parse, prelude::*, smart_contract};
+use iroha_executor::{parse, prelude::*};
 use lol_alloc::{FreeListAllocator, LockedAllocator};
 
 #[global_allocator]
 static ALLOC: LockedAllocator<FreeListAllocator> = LockedAllocator::new(FreeListAllocator::new());
 
-#[derive(Constructor, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)]
+#[derive(Constructor, ValidateEntrypoints, Validate, Visit)]
 struct Executor {
     verdict: Result,
     block_height: u64,
-    host: smart_contract::Host,
 }
 
 #[entrypoint]
@@ -28,7 +27,7 @@ pub fn migrate(_block_height: u64) -> MigrationResult {
     // Registering a new domain (using ISI)
     let domain_id = parse!("failed_migration_test_domain" as DomainId);
-    RegisterExpr::new(Domain::new(domain_id))
+    Register::domain(Domain::new(domain_id))
         .execute()
         .map_err(|error| {
             format!(
diff --git a/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs b/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs
index 7dd2d5c7c0d..f794772bebd 100644
--- a/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs
+++ b/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs
@@ -20,7 +20,7 @@ fn main(owner: AccountId, _event: Event) {
         .dbg_expect("Failed to parse `rose#wonderland` asset definition id");
 
     let rose_id = AssetId::new(rose_definition_id, owner);
-    MintExpr::new(1_u32, rose_id)
+    Mint::asset_quantity(1_u32, rose_id)
         .execute()
         .dbg_expect("Failed to mint rose");
 }
diff --git a/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs b/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs
index c86e452e693..87137474596 100644
--- a/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs
+++ b/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs
@@ -26,12 +26,14 @@ fn main(owner: AccountId) {
 
     let (_batch, cursor) = asset_cursor.into_raw_parts();
 
-    SetKeyValueExpr::new(
+    SetKeyValue::account(
         owner,
         parse!("cursor" as Name),
-        serde_json::to_value(cursor)
-            .dbg_expect("Failed to convert cursor to JSON")
-            .to_string(),
+        Value::String(
+            serde_json::to_value(cursor)
+                .dbg_expect("Failed to convert cursor to JSON")
+                .to_string(),
+        ),
     )
     .execute()
     .dbg_expect("Failed to save cursor to the owner's metadata");
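As the smart-contract diffs above show, WASM code now builds the same typed instructions and runs them host-side with `.execute()`. A minimal trigger entrypoint in that style; this is a sketch that assumes the `iroha_trigger` crate layout used by these test contracts (entrypoint attribute, prelude, `dbg_expect`), with allocator and panic-handler boilerplate elided:

```rust
//! Sketch of a WASM trigger that mints one `rose#wonderland` for its owner.
#![no_std]

use iroha_trigger::prelude::*;

#[iroha_trigger::main]
fn main(owner: AccountId, _event: Event) {
    let rose_definition_id: AssetDefinitionId = "rose#wonderland"
        .parse()
        .dbg_expect("Failed to parse asset definition id");

    // Typed constructor plus host-side execution, as in the diff above.
    Mint::asset_quantity(1_u32, AssetId::new(rose_definition_id, owner))
        .execute()
        .dbg_expect("Failed to mint rose");
}
```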
diff --git a/client/tests/integration/sorting.rs b/client/tests/integration/sorting.rs
index 31b974d2b05..19f69f3b86e 100644
--- a/client/tests/integration/sorting.rs
+++ b/client/tests/integration/sorting.rs
@@ -14,6 +14,7 @@ use iroha_client::{
         query::{Pagination, Sorting},
     },
 };
+use iroha_data_model::isi::InstructionBox;
 use test_network::*;
 
 #[test]
@@ -46,8 +47,9 @@ fn correct_pagination_assets_after_creating_new_one() {
 
         assets.push(asset.clone());
 
-        let create_asset_definition = RegisterExpr::new(asset_definition);
-        let create_asset = RegisterExpr::new(asset);
+        let create_asset_definition: InstructionBox =
+            Register::asset_definition(asset_definition).into();
+        let create_asset = Register::asset(asset).into();
 
         instructions.push(create_asset_definition);
         instructions.push(create_asset);
@@ -94,8 +96,9 @@ fn correct_pagination_assets_after_creating_new_one() {
         AssetValue::Store(new_asset_metadata),
     );
 
-    let create_asset_definition = RegisterExpr::new(new_asset_definition);
-    let create_asset = RegisterExpr::new(new_asset.clone());
+    let create_asset_definition: InstructionBox =
+        Register::asset_definition(new_asset_definition).into();
+    let create_asset = Register::asset(new_asset.clone()).into();
 
     test_client
         .submit_all_blocking([create_asset_definition, create_asset])
@@ -153,7 +156,7 @@ fn correct_sorting_of_entities() {
         metadata_of_assets.push(asset_metadata);
         asset_definitions.push(asset_definition_id);
 
-        let create_asset_definition = RegisterExpr::new(asset_definition);
+        let create_asset_definition = Register::asset_definition(asset_definition);
         instructions.push(create_asset_definition);
     }
 
@@ -203,7 +206,7 @@ fn correct_sorting_of_entities() {
         accounts.push(account_id);
         metadata_of_accounts.push(account_metadata);
 
-        let create_account = RegisterExpr::new(account);
+        let create_account = Register::account(account);
         instructions.push(create_account);
     }
 
@@ -249,7 +252,7 @@ fn correct_sorting_of_entities() {
         domains.push(domain_id);
         metadata_of_domains.push(domain_metadata);
 
-        let create_account = RegisterExpr::new(domain);
+        let create_account = Register::domain(domain);
         instructions.push(create_account);
     }
 
@@ -294,7 +297,7 @@ fn correct_sorting_of_entities() {
         domains.push(domain_id);
         metadata_of_domains.push(domain_metadata);
 
-        let create_account = RegisterExpr::new(domain);
+        let create_account = Register::domain(domain);
         instructions.push(create_account);
     }
     test_client
@@ -356,7 +359,7 @@ fn sort_only_elements_which_have_sorting_key() -> Result<()> {
             account
         };
 
-        let create_account = RegisterExpr::new(account);
+        let create_account = Register::account(account);
         instructions.push(create_account);
     }
diff --git a/client/tests/integration/transfer_asset.rs b/client/tests/integration/transfer_asset.rs
index b62ba7dc216..be37310c5cd 100644
--- a/client/tests/integration/transfer_asset.rs
+++ b/client/tests/integration/transfer_asset.rs
@@ -1,19 +1,33 @@
 use iroha_client::{
     client::{self, QueryResult},
-    data_model::{prelude::*, Registered},
+    crypto::KeyPair,
+    data_model::{isi::Instruction, prelude::*, Registered},
 };
-use iroha_crypto::KeyPair;
 use iroha_primitives::fixed::Fixed;
 use test_network::*;
 
 #[test]
 fn simulate_transfer_quantity() {
-    simulate_transfer(200_u32, &20_u32, AssetDefinition::quantity, 10_710)
+    simulate_transfer(
+        200_u32,
+        &20_u32,
+        AssetDefinition::quantity,
+        Mint::asset_quantity,
+        Transfer::asset_quantity,
+        10_710,
+    )
 }
 
 #[test]
 fn simulate_transfer_big_quantity() {
-    simulate_transfer(200_u128, &20_u128, AssetDefinition::big_quantity, 10_785)
+    simulate_transfer(
+        200_u128,
+        &20_u128,
+        AssetDefinition::big_quantity,
+        Mint::asset_big_quantity,
+        Transfer::asset_big_quantity,
+        10_785,
+    )
 }
 
 #[test]
@@ -22,6 +36,8 @@ fn simulate_transfer_fixed() {
         Fixed::try_from(200_f64).expect("Valid"),
         &Fixed::try_from(20_f64).expect("Valid"),
         AssetDefinition::fixed,
+        Mint::asset_fixed,
+        Transfer::asset_fixed,
         10_790,
     )
 }
@@ -34,22 +50,24 @@ fn simulate_insufficient_funds() {
         Fixed::try_from(20_f64).expect("Valid"),
         &Fixed::try_from(200_f64).expect("Valid"),
         AssetDefinition::fixed,
+        Mint::asset_fixed,
+        Transfer::asset_fixed,
         10_800,
     )
 }
 
-// TODO add tests when the transfer uses the wrong AssetId.
-
-fn simulate_transfer<
-    T: Into<AssetValue> + Clone,
-    D: FnOnce(AssetDefinitionId) -> <AssetDefinition as Registered>::With,
->(
+fn simulate_transfer<T>(
     starting_amount: T,
     amount_to_transfer: &T,
-    value_type: D,
+    asset_definition_ctr: impl FnOnce(AssetDefinitionId) -> <AssetDefinition as Registered>::With,
+    mint_ctr: impl FnOnce(T, AssetId) -> Mint<T, Asset>,
+    transfer_ctr: impl FnOnce(AssetId, T, AccountId) -> Transfer<Asset, T, Account>,
     port_number: u16,
 ) where
+    T: std::fmt::Debug + Clone + Into<AssetValue>,
     Value: From<T>,
+    Mint<T, Asset>: Instruction,
+    Transfer<Asset, T, Account>: Instruction,
 {
     let (_rt, _peer, iroha_client) = <PeerBuilder>::new()
         .with_port(port_number)
@@ -61,15 +79,16 @@ fn simulate_transfer<
     let (bob_public_key, _) = KeyPair::generate()
         .expect("Failed to generate KeyPair")
         .into();
-    let create_mouse = RegisterExpr::new(Account::new(mouse_id.clone(), [bob_public_key]));
+    let create_mouse = Register::account(Account::new(mouse_id.clone(), [bob_public_key]));
     let asset_definition_id: AssetDefinitionId = "camomile#wonderland".parse().expect("Valid");
-    let create_asset = RegisterExpr::new(value_type(asset_definition_id.clone()));
-    let mint_asset = MintExpr::new(
-        starting_amount.to_value(),
-        IdBox::AssetId(AssetId::new(asset_definition_id.clone(), alice_id.clone())),
+    let create_asset =
+        Register::asset_definition(asset_definition_ctr(asset_definition_id.clone()));
+    let mint_asset = mint_ctr(
+        starting_amount,
+        AssetId::new(asset_definition_id.clone(), alice_id.clone()),
     );
-    let instructions: [InstructionExpr; 3] = [
+    let instructions: [InstructionBox; 3] = [
         // create_alice.into(), We don't need to register Alice, because she is created in genesis
         create_mouse.into(),
         create_asset.into(),
@@ -80,10 +99,10 @@ fn simulate_transfer<
         .expect("Failed to prepare state.");
 
     //When
-    let transfer_asset = TransferExpr::new(
-        IdBox::AssetId(AssetId::new(asset_definition_id.clone(), alice_id)),
-        amount_to_transfer.clone().to_value(),
-        IdBox::AccountId(mouse_id.clone()),
+    let transfer_asset = transfer_ctr(
+        AssetId::new(asset_definition_id.clone(), alice_id),
+        amount_to_transfer.clone(),
+        mouse_id.clone(),
     );
     iroha_client
        .submit_till(
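The reworked `simulate_transfer` illustrates what the typed ISI make possible: because `Mint` and `Transfer` are ordinary generic types, their constructors can be passed around as plain function values. A reduced sketch of the same idea (`mint_then_transfer` is illustrative, not part of the test suite; type parameters follow the diff above):

```rust
use iroha_client::data_model::{isi::Instruction, prelude::*};

// Build a mint followed by a transfer, with the concrete numeric type chosen
// by whichever constructors the caller passes in.
fn mint_then_transfer<T>(
    amount: T,
    source: AssetId,
    destination: AccountId,
    mint_ctr: impl FnOnce(T, AssetId) -> Mint<T, Asset>,
    transfer_ctr: impl FnOnce(AssetId, T, AccountId) -> Transfer<Asset, T, Account>,
) -> [InstructionBox; 2]
where
    T: Clone,
    Mint<T, Asset>: Instruction + Into<InstructionBox>,
    Transfer<Asset, T, Account>: Instruction + Into<InstructionBox>,
{
    [
        mint_ctr(amount.clone(), source.clone()).into(),
        transfer_ctr(source, amount, destination).into(),
    ]
}

// Usage, mirroring simulate_transfer_quantity:
// mint_then_transfer(200_u32, asset_id, mouse_id, Mint::asset_quantity, Transfer::asset_quantity);
```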
diff --git a/client/tests/integration/triggers/by_call_trigger.rs b/client/tests/integration/triggers/by_call_trigger.rs
index a2b03df24d7..41644169a73 100644
--- a/client/tests/integration/triggers/by_call_trigger.rs
+++ b/client/tests/integration/triggers/by_call_trigger.rs
@@ -9,6 +9,7 @@ use iroha_client::{
         transaction::Executable,
     },
 };
+use iroha_data_model::events::TriggeringFilterBox;
 use iroha_genesis::GenesisNetwork;
 use iroha_logger::info;
 use test_network::*;
@@ -25,12 +26,12 @@ fn call_execute_trigger() -> Result<()> {
     let asset_id = AssetId::new(asset_definition_id, account_id);
     let prev_value = get_asset_value(&mut test_client, asset_id.clone())?;
 
-    let instruction = MintExpr::new(1_u32, asset_id.clone());
+    let instruction = Mint::asset_quantity(1_u32, asset_id.clone());
     let register_trigger = build_register_trigger_isi(asset_id.clone(), vec![instruction.into()]);
     test_client.submit_blocking(register_trigger)?;
 
     let trigger_id = TriggerId::from_str(TRIGGER_NAME)?;
-    let call_trigger = ExecuteTriggerExpr::new(trigger_id);
+    let call_trigger = ExecuteTrigger::new(trigger_id);
     test_client.submit_blocking(call_trigger)?;
 
     let new_value = get_asset_value(&mut test_client, asset_id)?;
@@ -48,12 +49,12 @@ fn execute_trigger_should_produce_event() -> Result<()> {
     let account_id: AccountId = "alice@wonderland".parse()?;
     let asset_id = AssetId::new(asset_definition_id, account_id.clone());
 
-    let instruction = MintExpr::new(1_u32, asset_id.clone());
+    let instruction = Mint::asset_quantity(1_u32, asset_id.clone());
     let register_trigger = build_register_trigger_isi(asset_id, vec![instruction.into()]);
     test_client.submit_blocking(register_trigger)?;
 
     let trigger_id = TriggerId::from_str(TRIGGER_NAME)?;
-    let call_trigger = ExecuteTriggerExpr::new(trigger_id.clone());
+    let call_trigger = ExecuteTrigger::new(trigger_id.clone());
 
     let thread_client = test_client.clone();
     let (sender, receiver) = mpsc::channel();
@@ -83,11 +84,11 @@ fn infinite_recursion_should_produce_one_call_per_block() -> Result<()> {
     let account_id = "alice@wonderland".parse()?;
     let asset_id = AssetId::new(asset_definition_id, account_id);
     let trigger_id = TriggerId::from_str(TRIGGER_NAME)?;
-    let call_trigger = ExecuteTriggerExpr::new(trigger_id);
+    let call_trigger = ExecuteTrigger::new(trigger_id);
     let prev_value = get_asset_value(&mut test_client, asset_id.clone())?;
 
     let instructions = vec![
-        MintExpr::new(1_u32, asset_id.clone()).into(),
+        Mint::asset_quantity(1_u32, asset_id.clone()).into(),
         call_trigger.clone().into(),
     ];
     let register_trigger = build_register_trigger_isi(asset_id.clone(), instructions);
@@ -113,8 +114,8 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> {
     // Registering trigger that should fail on execution
     let bad_trigger_id = TriggerId::from_str("bad_trigger")?;
     // Invalid instruction
-    let bad_trigger_instructions = vec![MintExpr::new(1_u32, account_id.clone())];
-    let register_bad_trigger = RegisterExpr::new(Trigger::new(
+    let bad_trigger_instructions = vec![Fail::new("Bad trigger".to_owned())];
+    let register_bad_trigger = Register::trigger(Trigger::new(
         bad_trigger_id.clone(),
         Action::new(
             bad_trigger_instructions,
@@ -130,8 +131,8 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> {
 
     // Registering normal trigger
     let trigger_id = TriggerId::from_str(TRIGGER_NAME)?;
-    let trigger_instructions = vec![MintExpr::new(1_u32, asset_id.clone())];
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let trigger_instructions = vec![Mint::asset_quantity(1_u32, asset_id.clone())];
+    let register_trigger = Register::trigger(Trigger::new(
         trigger_id,
         Action::new(
             trigger_instructions,
@@ -147,7 +148,7 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> {
     let prev_asset_value = get_asset_value(&mut test_client, asset_id.clone())?;
 
     // Executing bad trigger
-    test_client.submit_blocking(ExecuteTriggerExpr::new(bad_trigger_id))?;
+    test_client.submit_blocking(ExecuteTrigger::new(bad_trigger_id))?;
 
     // Checking results
     let new_asset_value = get_asset_value(&mut test_client, asset_id)?;
@@ -165,8 +166,8 @@ fn trigger_should_not_be_executed_with_zero_repeats_count() -> Result<()> {
     let asset_id = AssetId::new(asset_definition_id, account_id.clone());
     let trigger_id = TriggerId::from_str("self_modifying_trigger")?;
 
-    let trigger_instructions = vec![MintExpr::new(1_u32, asset_id.clone())];
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let trigger_instructions = vec![Mint::asset_quantity(1_u32, asset_id.clone())];
+    let register_trigger = Register::trigger(Trigger::new(
         trigger_id.clone(),
         Action::new(
             trigger_instructions,
@@ -184,7 +185,7 @@ fn trigger_should_not_be_executed_with_zero_repeats_count() -> Result<()> {
     let prev_asset_value = get_asset_value(&mut test_client, asset_id.clone())?;
 
     // Executing trigger first time
-    let execute_trigger = ExecuteTriggerExpr::new(trigger_id.clone());
+    let execute_trigger = ExecuteTrigger::new(trigger_id.clone());
     test_client.submit_blocking(execute_trigger.clone())?;
 
     // Executing trigger second time
@@ -224,10 +225,10 @@ fn trigger_should_be_able_to_modify_its_own_repeats_count() -> Result<()> {
     let trigger_id = TriggerId::from_str("self_modifying_trigger")?;
 
     let trigger_instructions = vec![
-        MintExpr::new(1_u32, trigger_id.clone()),
-        MintExpr::new(1_u32, asset_id.clone()),
+        InstructionBox::from(Mint::trigger_repetitions(1_u32, trigger_id.clone())),
+        InstructionBox::from(Mint::asset_quantity(1_u32, asset_id.clone())),
     ];
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let register_trigger = Register::trigger(Trigger::new(
         trigger_id.clone(),
         Action::new(
             trigger_instructions,
@@ -245,7 +246,7 @@ fn trigger_should_be_able_to_modify_its_own_repeats_count() -> Result<()> {
     let prev_asset_value = get_asset_value(&mut test_client, asset_id.clone())?;
 
     // Executing trigger first time
-    let execute_trigger = ExecuteTriggerExpr::new(trigger_id);
+    let execute_trigger = ExecuteTrigger::new(trigger_id);
     test_client.submit_blocking(execute_trigger.clone())?;
 
     // Executing trigger second time
@@ -270,7 +271,7 @@ fn unregister_trigger() -> Result<()> {
     let trigger = Trigger::new(
         trigger_id.clone(),
         Action::new(
-            Vec::<InstructionExpr>::new(),
+            Vec::<InstructionBox>::new(),
             Repeats::Indefinitely,
             account_id.clone(),
             TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new(
@@ -279,12 +280,12 @@ fn unregister_trigger() -> Result<()> {
             )),
         ),
     );
-    let register_trigger = RegisterExpr::new(trigger.clone());
+    let register_trigger = Register::trigger(trigger.clone());
     test_client.submit_blocking(register_trigger)?;
 
     // Finding trigger
     let find_trigger = FindTriggerById {
-        id: trigger_id.clone().into(),
+        id: trigger_id.clone(),
     };
     let found_trigger = test_client.request(find_trigger.clone())?;
     let found_action = found_trigger.action;
@@ -303,7 +304,7 @@ fn unregister_trigger() -> Result<()> {
     assert_eq!(found_trigger, trigger);
 
     // Unregistering trigger
-    let unregister_trigger = UnregisterExpr::new(trigger_id);
+    let unregister_trigger = Unregister::trigger(trigger_id);
     test_client.submit_blocking(unregister_trigger)?;
 
     // Checking result
@@ -362,7 +363,7 @@ fn trigger_in_genesis_using_base64() -> Result<()> {
     let tx_ref = &mut genesis.transactions[0].0;
     match &mut tx_ref.payload_mut().instructions {
         Executable::Instructions(instructions) => {
-            instructions.push(RegisterExpr::new(trigger).into());
+            instructions.push(Register::trigger(trigger).into());
         }
         Executable::Wasm(_) => panic!("Expected instructions"),
     }
@@ -378,7 +379,7 @@ fn trigger_in_genesis_using_base64() -> Result<()> {
     let prev_value = get_asset_value(&mut test_client, asset_id.clone())?;
 
     // Executing trigger
-    let call_trigger = ExecuteTriggerExpr::new(trigger_id);
+    let call_trigger = ExecuteTrigger::new(trigger_id);
     test_client.submit_blocking(call_trigger)?;
 
     // Checking result
@@ -397,12 +398,11 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> {
     let account_id = AccountId::from_str("alice@wonderland")?;
     let asset_id = AssetId::new(asset_definition_id, account_id.clone());
     let trigger_id_unregister = TriggerId::from_str("unregister_other_trigger")?;
-    let trigger_id_should_be_unregistered = TriggerId::from_str("should_be_unregistered_trigger")?;
+    let trigger_id_to_be_unregistered = TriggerId::from_str("should_be_unregistered_trigger")?;
 
-    let trigger_unregister_instructions = vec![UnregisterExpr::new(
-        trigger_id_should_be_unregistered.clone(),
-    )];
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let trigger_unregister_instructions =
+        vec![Unregister::trigger(trigger_id_to_be_unregistered.clone())];
+    let register_trigger = Register::trigger(Trigger::new(
         trigger_id_unregister.clone(),
         Action::new(
             trigger_unregister_instructions,
@@ -416,15 +416,16 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> {
     ));
     test_client.submit_blocking(register_trigger)?;
 
-    let trigger_should_be_unregistered_instructions = vec![MintExpr::new(1_u32, asset_id.clone())];
-    let register_trigger = RegisterExpr::new(Trigger::new(
-        trigger_id_should_be_unregistered.clone(),
+    let trigger_should_be_unregistered_instructions =
+        vec![Mint::asset_quantity(1_u32, asset_id.clone())];
+    let register_trigger = Register::trigger(Trigger::new(
+        trigger_id_to_be_unregistered.clone(),
         Action::new(
             trigger_should_be_unregistered_instructions,
             Repeats::from(1_u32),
             account_id.clone(),
             TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new(
-                trigger_id_should_be_unregistered.clone(),
+                trigger_id_to_be_unregistered.clone(),
                 account_id,
             )),
         ),
@@ -435,9 +436,8 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> {
     let prev_asset_value = get_asset_value(&mut test_client, asset_id.clone())?;
 
     // Executing triggers
-    let execute_trigger_unregister = ExecuteTriggerExpr::new(trigger_id_unregister);
-    let execute_trigger_should_be_unregistered =
-        ExecuteTriggerExpr::new(trigger_id_should_be_unregistered);
+    let execute_trigger_unregister = ExecuteTrigger::new(trigger_id_unregister);
+    let execute_trigger_should_be_unregistered = ExecuteTrigger::new(trigger_id_to_be_unregistered);
     test_client.submit_all_blocking([
         execute_trigger_unregister,
         execute_trigger_should_be_unregistered,
@@ -461,8 +461,8 @@ fn trigger_burn_repetitions() -> Result<()> {
     let asset_id = AssetId::new(asset_definition_id, account_id.clone());
     let trigger_id = TriggerId::from_str("trigger")?;
 
-    let trigger_instructions = vec![MintExpr::new(1_u32, asset_id)];
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let trigger_instructions = vec![Mint::asset_quantity(1_u32, asset_id)];
+    let register_trigger = Register::trigger(Trigger::new(
         trigger_id.clone(),
         Action::new(
             trigger_instructions,
@@ -476,10 +476,10 @@ fn trigger_burn_repetitions() -> Result<()> {
     ));
     test_client.submit_blocking(register_trigger)?;
 
-    test_client.submit_blocking(BurnExpr::new(1_u32, trigger_id.clone()))?;
+    test_client.submit_blocking(Burn::trigger_repetitions(1_u32, trigger_id.clone()))?;
 
     // Executing trigger
-    let execute_trigger = ExecuteTriggerExpr::new(trigger_id);
+    let execute_trigger = ExecuteTrigger::new(trigger_id);
     let _err = test_client
         .submit_blocking(execute_trigger)
         .expect_err("Should fail without repetitions");
@@ -494,11 +494,11 @@ fn get_asset_value(client: &mut Client, asset_id: AssetId) -> Result<u32> {
 
 fn build_register_trigger_isi(
     asset_id: AssetId,
-    trigger_instructions: Vec<InstructionExpr>,
-) -> RegisterExpr {
+    trigger_instructions: Vec<InstructionBox>,
+) -> Register<Trigger<TriggeringFilterBox>> {
     let trigger_id: TriggerId = TRIGGER_NAME.parse().expect("Valid");
 
-    RegisterExpr::new(Trigger::new(
+    Register::trigger(Trigger::new(
         trigger_id.clone(),
         Action::new(
             trigger_instructions,
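Summarizing the trigger migration above: a trigger is registered with `Register::trigger` and fired with the `ExecuteTrigger` instruction. A compact sketch of that round trip (same harness assumptions as the tests; names and port are illustrative, and `TriggeringFilterBox` is imported from `iroha_data_model::events` as in the diff):

```rust
use iroha_client::data_model::prelude::*;
use iroha_data_model::events::TriggeringFilterBox;
use test_network::*;

fn register_and_fire_trigger() -> eyre::Result<()> {
    let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(11_050).start_with_runtime();
    wait_for_genesis_committed(&vec![test_client.clone()], 0);

    let account_id: AccountId = "alice@wonderland".parse()?;
    let asset_id = AssetId::new("rose#wonderland".parse()?, account_id.clone());
    let trigger_id: TriggerId = "mint_on_call".parse()?;

    // Register a by-call trigger that mints one unit each time it is executed.
    test_client.submit_blocking(Register::trigger(Trigger::new(
        trigger_id.clone(),
        Action::new(
            vec![Mint::asset_quantity(1_u32, asset_id)],
            Repeats::Indefinitely,
            account_id.clone(),
            TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new(
                trigger_id.clone(),
                account_id,
            )),
        ),
    )))?;

    // Fire it.
    test_client.submit_blocking(ExecuteTrigger::new(trigger_id))?;
    Ok(())
}
```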
asset_id.clone())?;
 
-    let instruction = MintExpr::new(1_u32, asset_id.clone());
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let instruction = Mint::asset_quantity(1_u32, asset_id.clone());
+    let register_trigger = Register::trigger(Trigger::new(
         "mint_rose_1".parse()?,
         Action::new(
             [instruction.clone()],
@@ -27,7 +27,7 @@ fn must_execute_both_triggers() -> Result<()> {
     ));
     test_client.submit_blocking(register_trigger)?;
 
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let register_trigger = Register::trigger(Trigger::new(
         "mint_rose_2".parse()?,
         Action::new(
             [instruction],
@@ -40,11 +40,11 @@ fn must_execute_both_triggers() -> Result<()> {
     ));
     test_client.submit_blocking(register_trigger)?;
 
-    test_client.submit_blocking(RegisterExpr::new(Account::new(
+    test_client.submit_blocking(Register::account(Account::new(
         "bunny@wonderland".parse()?,
         [],
     )))?;
-    test_client.submit_blocking(RegisterExpr::new(Domain::new("neverland".parse()?)))?;
+    test_client.submit_blocking(Register::domain(Domain::new("neverland".parse()?)))?;
 
     let new_value = get_asset_value(&test_client, asset_id)?;
     assert_eq!(new_value, prev_value + 2);
@@ -57,18 +57,19 @@ fn domain_scoped_trigger_must_be_executed_only_on_events_in_its_domain() -> Resu
     let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_655).start_with_runtime();
     wait_for_genesis_committed(&[test_client.clone()], 0);
 
-    let create_neverland_domain = RegisterExpr::new(Domain::new("neverland".parse()?));
+    let create_neverland_domain: InstructionBox =
+        Register::domain(Domain::new("neverland".parse()?)).into();
 
     let account_id: AccountId = "sapporo@neverland".parse()?;
-    let create_sapporo_account = RegisterExpr::new(Account::new(account_id.clone(), []));
+    let create_sapporo_account = Register::account(Account::new(account_id.clone(), [])).into();
 
     let asset_definition_id: AssetDefinitionId = "sakura#neverland".parse()?;
     let create_sakura_asset_definition =
-        RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()));
+        Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())).into();
 
     let asset_id = AssetId::new(asset_definition_id, account_id.clone());
     let create_sakura_asset =
-        RegisterExpr::new(Asset::new(asset_id.clone(), AssetValue::Quantity(0)));
+        Register::asset(Asset::new(asset_id.clone(), AssetValue::Quantity(0))).into();
 
     test_client.submit_all_blocking([
         create_neverland_domain,
@@ -79,10 +80,10 @@ fn domain_scoped_trigger_must_be_executed_only_on_events_in_its_domain() -> Resu
 
     let prev_value = get_asset_value(&test_client, asset_id.clone())?;
 
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let register_trigger = Register::trigger(Trigger::new(
         "mint_sakura$neverland".parse()?,
         Action::new(
-            [MintExpr::new(1_u32, asset_id.clone())],
+            [Mint::asset_quantity(1_u32, asset_id.clone())],
             Repeats::Indefinitely,
             account_id,
             TriggeringFilterBox::Data(BySome(DataEntityFilter::ByAccount(BySome(
         )),
     ));
     test_client.submit_blocking(register_trigger)?;
 
-    test_client.submit_blocking(RegisterExpr::new(Account::new(
+    test_client.submit_blocking(Register::account(Account::new(
         "asahi@wonderland".parse()?,
         [],
     )))?;
 
-    test_client.submit_blocking(RegisterExpr::new(Account::new(
+    test_client.submit_blocking(Register::account(Account::new(
         "asahi@neverland".parse()?,
         [],
     )))?;
diff --git a/client/tests/integration/triggers/event_trigger.rs b/client/tests/integration/triggers/event_trigger.rs
index 8b438d47fb9..8269a244ad4 100644
--- a/client/tests/integration/triggers/event_trigger.rs
+++ b/client/tests/integration/triggers/event_trigger.rs
@@ -17,8 +17,8 @@ fn test_mint_asset_when_new_asset_definition_created() -> Result<()> {
     let asset_id = AssetId::new(asset_definition_id, account_id.clone());
     let prev_value = get_asset_value(&mut test_client, asset_id.clone())?;
 
-    let instruction = MintExpr::new(1_u32, asset_id.clone());
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let instruction = Mint::asset_quantity(1_u32, asset_id.clone());
+    let register_trigger = Register::trigger(Trigger::new(
         "mint_rose".parse()?,
         Action::new(
             vec![instruction],
@@ -35,7 +35,8 @@ fn test_mint_asset_when_new_asset_definition_created() -> Result<()> {
     test_client.submit(register_trigger)?;
 
     let tea_definition_id = "tea#wonderland".parse()?;
-    let register_tea_definition = RegisterExpr::new(AssetDefinition::quantity(tea_definition_id));
+    let register_tea_definition =
+        Register::asset_definition(AssetDefinition::quantity(tea_definition_id));
     test_client.submit_blocking(register_tea_definition)?;
 
     let new_value = get_asset_value(&mut test_client, asset_id)?;
diff --git a/client/tests/integration/triggers/time_trigger.rs b/client/tests/integration/triggers/time_trigger.rs
index 2a150f336c4..9b9c76d3fe6 100644
--- a/client/tests/integration/triggers/time_trigger.rs
+++ b/client/tests/integration/triggers/time_trigger.rs
@@ -44,8 +44,8 @@ fn time_trigger_execution_count_error_should_be_less_than_15_percent() -> Result
     let schedule =
         TimeSchedule::starting_at(start_time).with_period(Duration::from_millis(PERIOD_MS));
 
-    let instruction = MintExpr::new(1_u32, asset_id.clone());
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let instruction = Mint::asset_quantity(1_u32, asset_id.clone());
+    let register_trigger = Register::trigger(Trigger::new(
         "mint_rose".parse()?,
         Action::new(
             vec![instruction],
@@ -98,8 +98,8 @@ fn change_asset_metadata_after_1_sec() -> Result<()> {
     let schedule = TimeSchedule::starting_at(start_time + Duration::from_millis(PERIOD_MS));
     let instruction =
-        SetKeyValueExpr::new(asset_definition_id.clone(), key.clone(), 3_u32.to_value());
-    let register_trigger = RegisterExpr::new(Trigger::new(
+        SetKeyValue::asset_definition(asset_definition_id.clone(), key.clone(), 3_u32.to_value());
+    let register_trigger = Register::trigger(Trigger::new(
         "change_rose_metadata".parse().expect("Valid"),
         Action::new(
             vec![instruction],
@@ -119,8 +119,8 @@ fn change_asset_metadata_after_1_sec() -> Result<()> {
 
     let value = test_client
         .request(FindAssetDefinitionKeyValueByIdAndKey {
-            id: asset_definition_id.into(),
-            key: key.into(),
+            id: asset_definition_id,
+            key,
         })?
         .into();
     assert!(matches!(value, Value::Numeric(NumericValue::U32(3_u32))));
@@ -144,8 +144,8 @@ fn pre_commit_trigger_should_be_executed() -> Result<()> {
     // Start listening BEFORE submitting any transaction not to miss any block committed event
     let event_listener = get_block_committed_event_listener(&test_client)?;
 
-    let instruction = MintExpr::new(1_u32, asset_id.clone());
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let instruction = Mint::asset_quantity(1_u32, asset_id.clone());
+    let register_trigger = Register::trigger(Trigger::new(
         "mint_rose".parse()?,
         Action::new(
             vec![instruction],
@@ -162,7 +162,7 @@ fn pre_commit_trigger_should_be_executed() -> Result<()> {
         prev_value = new_value;
 
         // ISI just to create a new block
-        let sample_isi = SetKeyValueExpr::new(
+        let sample_isi = SetKeyValue::account(
             account_id.clone(),
             "key".parse::<Name>()?,
             String::from("value"),
@@ -196,7 +196,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> {
         .iter()
         .skip(1) // Alice has already been registered in genesis
         .cloned()
-        .map(|account_id| RegisterExpr::new(Account::new(account_id, [])))
+        .map(|account_id| Register::account(Account::new(account_id, [])))
         .collect::<Vec<_>>();
     test_client.submit_all_blocking(register_accounts)?;
 
@@ -220,7 +220,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> {
     let start_time = current_time();
     let schedule =
         TimeSchedule::starting_at(start_time).with_period(Duration::from_millis(TRIGGER_PERIOD_MS));
-    let register_trigger = RegisterExpr::new(Trigger::new(
+    let register_trigger = Register::trigger(Trigger::new(
         "mint_nft_for_all".parse()?,
         Action::new(
             WasmSmartContract::from_compiled(wasm),
@@ -295,7 +295,7 @@ fn submit_sample_isi_on_every_block_commit(
     for _ in block_committed_event_listener.take(times) {
         std::thread::sleep(timeout);
         // ISI just to create a new block
-        let sample_isi = SetKeyValueExpr::new(
+        let sample_isi = SetKeyValue::account(
             account_id.clone(),
             "key".parse::<Name>()?,
             String::from("value"),
client.submit_blocking(register_trigger); - let call_trigger = ExecuteTriggerExpr::new(trigger_id); + let call_trigger = ExecuteTrigger::new(trigger_id); client.submit_blocking(call_trigger)?; //Then diff --git a/client/tests/integration/tx_history.rs b/client/tests/integration/tx_history.rs index 15c2ef3c3e1..4d26d32fe19 100644 --- a/client/tests/integration/tx_history.rs +++ b/client/tests/integration/tx_history.rs @@ -9,10 +9,9 @@ use iroha_client::{ client::{transaction, QueryResult}, data_model::{prelude::*, query::Pagination}, }; +use iroha_config::iroha::Configuration; use test_network::*; -use super::Configuration; - #[ignore = "ignore, more in #2851"] #[test] fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> { @@ -24,19 +23,20 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> // Given let account_id = AccountId::from_str("alice@wonderland")?; let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland")?; - let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = + Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())); client.submit_blocking(create_asset)?; //When let quantity: u32 = 200; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let mint_existed_asset = MintExpr::new(quantity.to_value(), IdBox::AssetId(asset_id)); - let mint_not_existed_asset = MintExpr::new( - quantity.to_value(), - IdBox::AssetId(AssetId::new( + let mint_existed_asset = Mint::asset_quantity(quantity, asset_id); + let mint_not_existed_asset = Mint::asset_quantity( + quantity, + AssetId::new( AssetDefinitionId::from_str("foo#wonderland")?, account_id.clone(), - )), + ), ); let transactions_count = 100; @@ -47,7 +47,7 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> } else { &mint_not_existed_asset }; - let instructions: Vec = vec![mint_asset.clone().into()]; + let instructions: Vec = vec![mint_asset.clone().into()]; let transaction = client.build_transaction(instructions, UnlimitedMetadata::new())?; client.submit_transaction(&transaction)?; } diff --git a/client/tests/integration/tx_rollback.rs b/client/tests/integration/tx_rollback.rs index a8b1b918dc9..0c04bbec3a8 100644 --- a/client/tests/integration/tx_rollback.rs +++ b/client/tests/integration/tx_rollback.rs @@ -16,16 +16,13 @@ fn client_sends_transaction_with_invalid_instruction_should_not_see_any_changes( let account_id = AccountId::from_str("alice@wonderland")?; let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland")?; let wrong_asset_definition_id = AssetDefinitionId::from_str("ksor#wonderland")?; - let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)); + let create_asset = Register::asset_definition(AssetDefinition::quantity(asset_definition_id)); let quantity: u32 = 200; - let mint_asset = MintExpr::new( - quantity.to_value(), - IdBox::AssetId(AssetId::new( - wrong_asset_definition_id.clone(), - account_id.clone(), - )), + let mint_asset = Mint::asset_quantity( + quantity, + AssetId::new(wrong_asset_definition_id.clone(), account_id.clone()), ); - let instructions: [InstructionExpr; 2] = [create_asset.into(), mint_asset.into()]; + let instructions: [InstructionBox; 2] = [create_asset.into(), mint_asset.into()]; let _ = client.submit_all_blocking(instructions); //Then diff --git a/client/tests/integration/unregister_peer.rs b/client/tests/integration/unregister_peer.rs index 
845f938de16..3121bf1de76 100644 --- a/client/tests/integration/unregister_peer.rs +++ b/client/tests/integration/unregister_peer.rs @@ -3,16 +3,15 @@ use std::thread; use eyre::Result; use iroha_client::{ client::{self, QueryResult}, + crypto::KeyPair, data_model::{ parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, prelude::*, }, }; -use iroha_crypto::KeyPair; +use iroha_config::iroha::Configuration; use test_network::*; -use super::Configuration; - // Note the test is marked as `unstable`, not the network. #[ignore = "ignore, more in #2851"] #[test] @@ -34,7 +33,7 @@ fn unstable_network_stable_after_add_and_after_remove_peer() -> Result<()> { // Then the new peer should already have the mint result. check_assets(&peer_client, &account_id, &asset_definition_id, 100); // Also, when a peer is unregistered - let remove_peer = UnregisterExpr::new(IdBox::PeerId(peer.id.clone())); + let remove_peer = Unregister::peer(peer.id.clone()); genesis_client.submit(remove_peer)?; thread::sleep(pipeline_time * 2); // We can mint without error. @@ -82,12 +81,9 @@ fn mint( pipeline_time: std::time::Duration, quantity: u32, ) -> Result { - let mint_asset = MintExpr::new( - quantity.to_value(), - IdBox::AssetId(AssetId::new( - asset_definition_id.clone(), - account_id.clone(), - )), + let mint_asset = Mint::asset_quantity( + quantity, + AssetId::new(asset_definition_id.clone(), account_id.clone()), ); client.submit(mint_asset)?; thread::sleep(pipeline_time * 5); @@ -103,24 +99,27 @@ fn init() -> Result<( AccountId, AssetDefinitionId, )> { - let (rt, network, client) = ::start_test_with_runtime(4, Some(10_925)); + let (rt, network, client) = Network::start_test_with_runtime(4, Some(10_925)); let pipeline_time = Configuration::pipeline_time(); iroha_logger::info!("Started"); let parameters = ParametersBuilder::new() .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? 
.into_set_parameters(); - let create_domain = RegisterExpr::new(Domain::new("domain".parse()?)); + let create_domain = Register::domain(Domain::new("domain".parse()?)); let account_id: AccountId = "account@domain".parse()?; let (public_key, _) = KeyPair::generate()?.into(); - let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key])); + let create_account = Register::account(Account::new(account_id.clone(), [public_key])); let asset_definition_id: AssetDefinitionId = "xor#domain".parse()?; - let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); - let instructions: [InstructionExpr; 4] = [ - parameters.into(), - create_domain.into(), - create_account.into(), - create_asset.into(), - ]; + let create_asset = + Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())); + let instructions = parameters.into_iter().chain( + [ + create_domain.into(), + create_account.into(), + create_asset.into(), + ] + .into_iter(), + ); client.submit_all_blocking(instructions)?; iroha_logger::info!("Init"); Ok(( diff --git a/client/tests/integration/unstable_network.rs b/client/tests/integration/unstable_network.rs index d0df5179a2c..84d1b2d9762 100644 --- a/client/tests/integration/unstable_network.rs +++ b/client/tests/integration/unstable_network.rs @@ -5,12 +5,11 @@ use iroha_client::{ client::{self, Client, QueryResult}, data_model::{prelude::*, Level}, }; +use iroha_config::iroha::Configuration; use rand::seq::SliceRandom; use test_network::*; use tokio::runtime::Runtime; -use super::Configuration; - const MAX_TRANSACTIONS_IN_BLOCK: u32 = 5; #[test] @@ -55,12 +54,12 @@ fn unstable_network( let (network, iroha_client) = rt.block_on(async { let mut configuration = Configuration::test(); configuration.sumeragi.max_transactions_in_block = MAX_TRANSACTIONS_IN_BLOCK; - configuration.logger.max_log_level = Level::INFO.into(); + configuration.logger.level = Level::INFO; #[cfg(debug_assertions)] { configuration.sumeragi.debug_force_soft_fork = force_soft_fork; } - let network = ::new_with_offline_peers( + let network = Network::new_with_offline_peers( Some(configuration), n_peers + n_offline_peers, 0, @@ -77,7 +76,8 @@ fn unstable_network( let account_id: AccountId = "alice@wonderland".parse().expect("Valid"); let asset_definition_id: AssetDefinitionId = "camomile#wonderland".parse().expect("Valid"); - let register_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + let register_asset = + Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())); iroha_client .submit_blocking(register_asset) .expect("Failed to register asset"); @@ -99,12 +99,9 @@ fn unstable_network( } let quantity = 1; - let mint_asset = MintExpr::new( - quantity.to_value(), - IdBox::AssetId(AssetId::new( - asset_definition_id.clone(), - account_id.clone(), - )), + let mint_asset = Mint::asset_quantity( + quantity, + AssetId::new(asset_definition_id.clone(), account_id.clone()), ); iroha_client .submit(mint_asset) diff --git a/client/tests/integration/upgrade.rs b/client/tests/integration/upgrade.rs index 5c1de0736c4..3ec49a84600 100644 --- a/client/tests/integration/upgrade.rs +++ b/client/tests/integration/upgrade.rs @@ -3,9 +3,9 @@ use std::{path::Path, str::FromStr as _}; use eyre::Result; use iroha_client::{ client::{self, Client, QueryResult}, + crypto::KeyPair, data_model::prelude::*, }; -use iroha_crypto::KeyPair; use iroha_logger::info; use serde_json::json; use test_network::*; @@ -17,19 
+17,19 @@ fn executor_upgrade_should_work() -> Result<()> { // Register `admin` domain and account let admin_domain = Domain::new("admin".parse()?); - let register_admin_domain = RegisterExpr::new(admin_domain); + let register_admin_domain = Register::domain(admin_domain); client.submit_blocking(register_admin_domain)?; let admin_id: AccountId = "admin@admin".parse()?; let admin_keypair = KeyPair::generate()?; let admin_account = Account::new(admin_id.clone(), [admin_keypair.public_key().clone()]); - let register_admin_account = RegisterExpr::new(admin_account); + let register_admin_account = Register::account(admin_account); client.submit_blocking(register_admin_account)?; // Check that admin isn't allowed to transfer alice's rose by default let alice_rose: AssetId = "rose##alice@wonderland".parse()?; let admin_rose: AccountId = "admin@admin".parse()?; - let transfer_alice_rose = TransferExpr::new(alice_rose, NumericValue::U32(1), admin_rose); + let transfer_alice_rose = Transfer::asset_quantity(alice_rose, 1_u32, admin_rose); let transfer_rose_tx = TransactionBuilder::new(admin_id.clone()) .with_instructions([transfer_alice_rose.clone()]) .sign(admin_keypair.clone())?; @@ -152,7 +152,7 @@ fn upgrade_executor(client: &Client, executor: impl AsRef) -> Result<()> { info!("WASM size is {} bytes", wasm.len()); - let upgrade_executor = UpgradeExpr::new(Executor::new(WasmSmartContract::from_compiled(wasm))); + let upgrade_executor = Upgrade::new(Executor::new(WasmSmartContract::from_compiled(wasm))); client.submit_blocking(upgrade_executor)?; Ok(()) diff --git a/client_cli/Cargo.toml b/client_cli/Cargo.toml index 12d6ffa40c5..c05c17decb2 100644 --- a/client_cli/Cargo.toml +++ b/client_cli/Cargo.toml @@ -25,8 +25,6 @@ maintenance = { status = "actively-developed" } [dependencies] iroha_client = { workspace = true } iroha_primitives = { workspace = true } -iroha_crypto = { workspace = true } -iroha_config = { workspace = true } color-eyre = { workspace = true } # TODO: migrate to clap v4 (and use the workspace dependency) diff --git a/client_cli/src/main.rs b/client_cli/src/main.rs index c16c633fd56..83f380d63a8 100644 --- a/client_cli/src/main.rs +++ b/client_cli/src/main.rs @@ -16,9 +16,9 @@ use dialoguer::Confirm; use erased_serde::Serialize; use iroha_client::{ client::{Client, QueryResult}, + config::{path::Path as ConfigPath, Configuration as ClientConfiguration}, data_model::prelude::*, }; -use iroha_config::{client::Configuration as ClientConfiguration, path::Path as ConfigPath}; use iroha_primitives::addr::SocketAddr; /// Metadata wrapper, which can be captured from cli arguments (from user supplied file). 
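The hunks above are representative of the API migration running through this diff: the `*Expr` wrappers (`RegisterExpr`, `MintExpr`, `TransferExpr`, `UnregisterExpr`) and `InstructionExpr` are replaced by typed constructors on the instruction kinds themselves, with `InstructionBox` unifying heterogeneous instruction lists. A minimal sketch of the new style (the account and asset names are illustrative placeholders, not values from this diff):

```rust
use iroha_client::data_model::prelude::*;

fn sample_instructions() -> eyre::Result<Vec<InstructionBox>> {
    let account_id: AccountId = "alice@wonderland".parse()?;
    let definition_id: AssetDefinitionId = "rose#wonderland".parse()?;
    let asset_id = AssetId::new(definition_id.clone(), account_id);

    // Previously `RegisterExpr::new(AssetDefinition::quantity(..))`:
    let register = Register::asset_definition(AssetDefinition::quantity(definition_id));
    // Previously `MintExpr::new(1_u32.to_value(), IdBox::AssetId(..))`; the
    // quantity and the destination are now plain typed arguments:
    let mint = Mint::asset_quantity(1_u32, asset_id);

    // Different instruction kinds unify into `InstructionBox` via `.into()`.
    Ok(vec![register.into(), mint.into()])
}
```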
@@ -391,7 +391,7 @@ mod domain { id, metadata: Metadata(metadata), } = self; - let create_domain = RegisterExpr::new(Domain::new(id)); + let create_domain = iroha_client::data_model::isi::Register::domain(Domain::new(id)); submit([create_domain], metadata, context).wrap_err("Failed to create domain") } } @@ -449,7 +449,7 @@ mod domain { to, metadata: Metadata(metadata), } = self; - let transfer_domain = TransferExpr::new(from, id, to); + let transfer_domain = iroha_client::data_model::isi::Transfer::domain(from, id, to); submit([transfer_domain], metadata, context).wrap_err("Failed to transfer domain") } } @@ -512,7 +512,8 @@ mod account { key, metadata: Metadata(metadata), } = self; - let create_account = RegisterExpr::new(Account::new(id, [key])); + let create_account = + iroha_client::data_model::isi::Register::account(Account::new(id, [key])); submit([create_account], metadata, context).wrap_err("Failed to register account") } } @@ -558,12 +559,12 @@ mod account { impl RunArgs for SignatureCondition { fn run(self, context: &mut dyn RunContext) -> Result<()> { - let account = Account::new(context.configuration().account_id.clone(), []); + let account_id = context.configuration().account_id.clone(); let Self { condition: Signature(condition), metadata: Metadata(metadata), } = self; - let mint_box = MintExpr::new(account, EvaluatesTo::new_unchecked(condition)); + let mint_box = Mint::account_signature_check_condition(condition, account_id); submit([mint_box], metadata, context).wrap_err("Failed to set signature condition") } } @@ -634,7 +635,7 @@ mod account { permission, metadata: Metadata(metadata), } = self; - let grant = GrantExpr::new(permission.0, id); + let grant = iroha_client::data_model::isi::Grant::permission_token(permission.0, id); submit([grant], metadata, context) .wrap_err("Failed to grant the permission to the account") } @@ -727,7 +728,8 @@ mod asset { if unmintable { asset_definition = asset_definition.mintable_once(); } - let create_asset_definition = RegisterExpr::new(asset_definition); + let create_asset_definition = + iroha_client::data_model::isi::Register::asset_definition(asset_definition); submit([create_asset_definition], metadata, context) .wrap_err("Failed to register asset") } @@ -758,9 +760,9 @@ mod asset { quantity, metadata: Metadata(metadata), } = self; - let mint_asset = MintExpr::new( - quantity.to_value(), - IdBox::AssetId(AssetId::new(asset, account)), + let mint_asset = iroha_client::data_model::isi::Mint::asset_quantity( + quantity, + AssetId::new(asset, account), ); submit([mint_asset], metadata, context) .wrap_err("Failed to mint asset of type `NumericValue::U32`") @@ -792,9 +794,9 @@ mod asset { quantity, metadata: Metadata(metadata), } = self; - let burn_asset = BurnExpr::new( - quantity.to_value(), - IdBox::AssetId(AssetId::new(asset, account)), + let burn_asset = iroha_client::data_model::isi::Burn::asset_quantity( + quantity, + AssetId::new(asset, account), ); submit([burn_asset], metadata, context) .wrap_err("Failed to burn asset of type `NumericValue::U32`") @@ -830,10 +832,10 @@ mod asset { quantity, metadata: Metadata(metadata), } = self; - let transfer_asset = TransferExpr::new( - IdBox::AssetId(AssetId::new(asset_id, from)), - quantity.to_value(), - IdBox::AccountId(to), + let transfer_asset = iroha_client::data_model::isi::Transfer::asset_quantity( + AssetId::new(asset_id, from), + quantity, + to, ); submit([transfer_asset], metadata, context).wrap_err("Failed to transfer asset") } @@ -934,7 +936,9 @@ mod peer { key, metadata: 
Metadata(metadata), } = self; - let register_peer = RegisterExpr::new(Peer::new(PeerId::new(&address, &key))); + let register_peer = iroha_client::data_model::isi::Register::peer(Peer::new( + PeerId::new(&address, &key), + )); submit([register_peer], metadata, context).wrap_err("Failed to register peer") } } @@ -960,7 +964,8 @@ mod peer { key, metadata: Metadata(metadata), } = self; - let unregister_peer = UnregisterExpr::new(IdBox::PeerId(PeerId::new(&address, &key))); + let unregister_peer = + iroha_client::data_model::isi::Unregister::peer(PeerId::new(&address, &key)); submit([unregister_peer], metadata, context).wrap_err("Failed to unregister peer") } } @@ -1017,7 +1022,7 @@ mod json { reader.read_to_end(&mut raw_content)?; let string_content = String::from_utf8(raw_content)?; - let instructions: Vec = json5::from_str(&string_content)?; + let instructions: Vec = json5::from_str(&string_content)?; submit(instructions, UnlimitedMetadata::new(), context) .wrap_err("Failed to submit parsed instructions") } diff --git a/config/Cargo.toml b/config/Cargo.toml index e4caf3da84d..2f548f60946 100644 --- a/config/Cargo.toml +++ b/config/Cargo.toml @@ -35,6 +35,7 @@ once_cell = { workspace = true } [dev-dependencies] proptest = "1.3.1" stacker = "0.1.15" +expect-test = { workspace = true } [features] tokio-console = [] diff --git a/config/base/derive/src/documented.rs b/config/base/derive/src/documented.rs deleted file mode 100644 index 604399061fa..00000000000 --- a/config/base/derive/src/documented.rs +++ /dev/null @@ -1,263 +0,0 @@ -use proc_macro::TokenStream; -use proc_macro2::Span; -use quote::quote; -use syn::{parse_quote, Lit, LitStr, Meta, Path}; - -use super::utils::{get_inner_type, StructWithFields}; - -pub fn impl_documented(ast: &StructWithFields) -> TokenStream { - let name = &ast.ident; - let docs = gen_docs(ast); - - let get_docs = impl_get_docs(docs.clone(), ast); - let get_inner_docs = impl_get_inner_docs(docs.clone(), ast); - let get_doc_recursive = impl_get_doc_recursive(docs, ast); - - let get_recursive = impl_get_recursive(ast); - - let out = quote! { - impl ::iroha_config_base::proxy::Documented for #name { - type Error = ::iroha_config_base::derive::Error; - - #get_recursive - #get_doc_recursive - #get_docs - #get_inner_docs - } - }; - out.into() -} - -fn impl_get_doc_recursive(docs: Vec, ast: &StructWithFields) -> proc_macro2::TokenStream { - if ast.fields.is_empty() { - return quote! { - fn get_doc_recursive<'a>( - inner_field: impl AsRef<[&'a str]>, - ) -> core::result::Result, ::iroha_config_base::derive::Error> - { - Err(::iroha_config_base::derive::Error::UnknownField( - ::iroha_config_base::derive::Field( - inner_field.as_ref().iter().map(ToString::to_string).collect() - ))) - } - }; - } - - let variants = ast.fields - .iter() - .zip(docs) - .map(|(field, documentation)| { - let ty = &field.ty; - let ident = &field.ident; - let documented_trait: Path = parse_quote! { iroha_config_base::proxy::Documented }; - if field.has_inner && field.has_option { - let inner_ty = get_inner_type("Option", &field.ty); - quote! { - [stringify!(#ident)] => { - let curr_doc = #documentation; - let inner_docs = <#inner_ty as #documented_trait>::get_inner_docs(); - let total_docs = format!("{}\n\nHas following fields:\n\n{}\n", curr_doc, inner_docs); - Some(total_docs) - }, - [stringify!(#ident), rest @ ..] => <#inner_ty as #documented_trait>::get_doc_recursive(rest)?, - } - } else if field.has_inner { - quote! 
{ - [stringify!(#ident)] => { - let curr_doc = #documentation; - let inner_docs = <#ty as #documented_trait>::get_inner_docs(); - let total_docs = format!("{}\n\nHas following fields:\n\n{}\n", curr_doc, inner_docs); - Some(total_docs) - }, - [stringify!(#ident), rest @ ..] => <#ty as #documented_trait>::get_doc_recursive(rest)?, - } - } else { - quote! { [stringify!(#ident)] => Some(#documentation.to_owned()), } - } - }); - - quote! { - fn get_doc_recursive<'a>( - inner_field: impl AsRef<[&'a str]>, - ) -> core::result::Result, ::iroha_config_base::derive::Error> - { - let inner_field = inner_field.as_ref(); - let doc = match inner_field { - #(#variants)* - field => return Err(::iroha_config_base::derive::Error::UnknownField( - ::iroha_config_base::derive::Field( - field.iter().map(ToString::to_string).collect() - ))), - }; - Ok(doc) - } - } -} - -fn impl_get_inner_docs(docs: Vec, ast: &StructWithFields) -> proc_macro2::TokenStream { - let inserts = ast.fields.iter().zip(docs).map(|(field, documentation)| { - let ty = &field.ty; - let ident = &field.ident; - let documented_trait: Path = parse_quote! { ::iroha_config_base::proxy::Documented }; - let doc = if field.has_inner && field.has_option { - let inner_ty = get_inner_type("Option", &field.ty); - quote! { - <#inner_ty as #documented_trait>::get_inner_docs().as_str() - } - } else if field.has_inner { - quote! { <#ty as #documented_trait>::get_inner_docs().as_str() } - } else { - quote! { #documentation.into() } - }; - - quote! { - inner_docs.push_str(stringify!(#ident)); - inner_docs.push_str(": "); - inner_docs.push_str(#doc); - inner_docs.push_str("\n\n"); - } - }); - - quote! { - fn get_inner_docs() -> String { - let mut inner_docs = String::new(); - #(#inserts)* - inner_docs - } - } -} - -fn impl_get_docs(docs: Vec, ast: &StructWithFields) -> proc_macro2::TokenStream { - let inserts = ast.fields.iter().zip(docs).map(|(field, documentation)| { - let ident = &field.ident; - let ty = &field.ty; - let documented_trait: Path = parse_quote! { iroha_config_base::proxy::Documented }; - let doc = if field.has_inner && field.has_option { - let inner_ty = get_inner_type("Option", &field.ty); - quote! { <#inner_ty as #documented_trait>::get_docs().into() } - } else if field.has_inner { - quote! { <#ty as #documented_trait>::get_docs().into() } - } else { - quote! { #documentation.into() } - }; - - quote! { map.insert(stringify!(#ident).to_owned(), #doc); } - }); - - quote! { - fn get_docs() -> serde_json::Value { - let mut map = serde_json::Map::new(); - #(#inserts)* - map.into() - } - } -} - -fn impl_get_recursive(ast: &StructWithFields) -> proc_macro2::TokenStream { - if ast.fields.is_empty() { - return quote! { - fn get_recursive<'a, T>( - &self, - inner_field: T, - ) -> ::iroha_config_base::BoxedFuture<'a, core::result::Result> - where - T: AsRef<[&'a str]> + Send + 'a, - { - Err(::iroha_config_base::derive::Error::UnknownField( - ::iroha_config_base::derive::Field( - inner_field.as_ref().iter().map(ToString::to_string).collect() - ))) - } - }; - } - - let variants = ast.fields - .iter() - .map(|field | { - let ident = &field.ident; - let l_value = &field.lvalue_read; - let inner_thing2 = if field.has_inner && field.has_option { - let inner_ty = get_inner_type("Option", &field.ty); - let documented_trait: Path = parse_quote! { iroha_config_base::proxy::Documented }; - quote! { - [stringify!(#ident), rest @ ..] => { - <#inner_ty as #documented_trait>::get_recursive(#l_value.as_ref().expect("Should be instantiated"), rest)? 
- }, - } - } else if field.has_inner { - quote! { - [stringify!(#ident), rest @ ..] => { - #l_value.get_recursive(rest)? - }, - } - } else { - quote! {} - }; - quote! { - [stringify!(#ident)] => { - serde_json::to_value(&#l_value) - .map_err( - |error| - ::iroha_config_base::derive::Error::field_deserialization_from_json( - stringify!(#ident), - &error - ) - )? - } - #inner_thing2 - } - }); - - quote! { - fn get_recursive<'a, T>( - &self, - inner_field: T, - ) -> core::result::Result - where - T: AsRef<[&'a str]> + Send + 'a, - { - let inner_field = inner_field.as_ref(); - let value = match inner_field { - #(#variants)* - field => return Err(::iroha_config_base::derive::Error::UnknownField( - ::iroha_config_base::derive::Field( - field.iter().map(ToString::to_string).collect() - ))), - }; - Ok(value) - } - } -} - -/// Generate documentation for all fields based on their type and already existing documentation -pub fn gen_docs(ast: &StructWithFields) -> Vec { - ast.fields - .iter() - .map(|field| { - let field_ty = &field.ty; - let env = &field.env_str; - let real_doc = field - .attrs - .iter() - .filter_map(|attr| attr.parse_meta().ok()) - .find_map(|metadata| { - if let Meta::NameValue(meta) = metadata { - if meta.path.is_ident("doc") { - if let Lit::Str(s) = meta.lit { - return Some(s); - } - } - } - None - }); - let real_doc = real_doc.map(|doc| doc.value() + "\n\n").unwrap_or_default(); - let docs = format!( - "{}Has type `{}`[^1]. Can be configured via environment variable `{}`", - real_doc, - quote! { #field_ty }.to_string().replace(' ', ""), - env - ); - LitStr::new(&docs, Span::mixed_site()) - }) - .collect::>() -} diff --git a/config/base/derive/src/lib.rs b/config/base/derive/src/lib.rs index f86d6af896b..0cd24e4e345 100644 --- a/config/base/derive/src/lib.rs +++ b/config/base/derive/src/lib.rs @@ -2,13 +2,12 @@ use proc_macro::TokenStream; -pub(crate) mod documented; pub(crate) mod proxy; pub(crate) mod utils; pub(crate) mod view; /// Derive for config loading. More details in `iroha_config_base` reexport -#[proc_macro_derive(Override)] +#[proc_macro_derive(Override, attributes(config))] pub fn override_derive(input: TokenStream) -> TokenStream { let ast = syn::parse_macro_input!(input as utils::StructWithFields); proxy::impl_override(&ast) @@ -37,19 +36,12 @@ pub fn load_from_disk_derive(input: TokenStream) -> TokenStream { } /// Derive for config querying and setting. More details in `iroha_config_base` reexport -#[proc_macro_derive(Proxy)] +#[proc_macro_derive(Proxy, attributes(config))] pub fn proxy_derive(input: TokenStream) -> TokenStream { let ast = syn::parse_macro_input!(input as utils::StructWithFields); proxy::impl_proxy(ast) } -/// Derive for config querying and setting. More details in `iroha_config_base` reexport -#[proc_macro_derive(Documented, attributes(config))] -pub fn documented_derive(input: TokenStream) -> TokenStream { - let ast = syn::parse_macro_input!(input as utils::StructWithFields); - documented::impl_documented(&ast) -} - /// Generate view for given struct and convert from type to its view. /// More details in `iroha_config_base` reexport. #[proc_macro] diff --git a/config/base/derive/src/proxy.rs b/config/base/derive/src/proxy.rs index 7a1e170f2e5..dafef4c6145 100644 --- a/config/base/derive/src/proxy.rs +++ b/config/base/derive/src/proxy.rs @@ -14,7 +14,6 @@ pub fn impl_proxy(ast: StructWithFields) -> TokenStream { let disk_derive = quote! { ::iroha_config_base::derive::LoadFromDisk }; let builder_derive = quote! 
{ ::iroha_config_base::derive::Builder }; let override_derive = quote! { ::iroha_config_base::derive::Override }; - let documented_derive = quote! { ::iroha_config_base::derive::Documented }; quote! { /// Proxy configuration structure to be used as an intermediate /// for configuration loading. Both loading from disk and @@ -24,8 +23,7 @@ pub fn impl_proxy(ast: StructWithFields) -> TokenStream { #builder_derive, #loadenv_derive, #disk_derive, - #override_derive, - #documented_derive + #override_derive )] #[builder(parent = #parent_ty)] #proxy_struct diff --git a/config/base/src/lib.rs b/config/base/src/lib.rs index bee2b692efc..7ea61d35ddb 100644 --- a/config/base/src/lib.rs +++ b/config/base/src/lib.rs @@ -2,7 +2,6 @@ use std::{fmt::Debug, path::Path}; use serde::{de::DeserializeOwned, Deserialize, Deserializer, Serialize}; -use serde_json::Value; pub mod derive { //! Derives for configuration entities @@ -100,34 +99,6 @@ pub mod derive { /// ``` pub use iroha_config_derive::Builder; /// Derive macro for implementing the trait - /// [`iroha_config::base::proxy::Documented`](`crate::proxy::Documented`) - /// for config structures. - /// - /// Even though this macro doesn't own any attributes, as of now - /// it relies on the `#[config]` attribute defined by the - /// [`iroha_config::base::derive::Override`](`crate::derive::Override`) - /// macro. As such, `#[config(env_prefix = ...)]` is required for - /// generating documentation, and `#[config(inner)]` for getting - /// inner fields recursively. - /// - /// # Examples - /// - /// ```rust - /// use iroha_config_base::derive::Documented; - /// use iroha_config_base::proxy::Documented as _; - /// - /// #[derive(serde::Deserialize, serde::Serialize, Documented)] - /// struct Outer { #[config(inner)] inner: Inner } - /// - /// #[derive(serde::Deserialize, serde::Serialize, Documented)] - /// struct Inner { b: String } - /// - /// let outer = Outer { inner: Inner { b: "a".to_owned() }}; - /// - /// assert_eq!(outer.get_recursive(["inner", "b"]).unwrap(), "a"); - /// ``` - pub use iroha_config_derive::Documented; - /// Derive macro for implementing the trait /// [`iroha_config::base::proxy::LoadFromDisk`](`crate::proxy::LoadFromDisk`) /// trait for config structures. /// @@ -272,38 +243,10 @@ pub mod derive { /// (via [`iroha_config_base::proxy::Builder`](`crate::proxy::Builder`) /// trait) and ways to combine two proxies together (via /// [`iroha_config_base::proxy::Override`](`crate::proxy::Override`)). 
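With the `Documented` derive gone, the `#[config]` attribute is now owned by the remaining derives (note the `attributes(config)` additions to the `Override` and `Proxy` derive macros earlier in this diff). A sketch of a proxy-based definition after this change, assuming the derive behaviour described in the surrounding docs is otherwise unchanged:

```rust
use iroha_config_base::derive::Proxy;
use serde::{Deserialize, Serialize};

// `Proxy` still expands to an `OuterProxy` with `Option`-wrapped fields and the
// `Builder`, `Override`, `LoadFromEnv` and `LoadFromDisk` derives; only the
// `Documented` derive is dropped from the generated code.
#[derive(Debug, Clone, Deserialize, Serialize, Proxy)]
#[config(env_prefix = "OUTER_")]
struct Outer {
    #[config(inner)]
    inner: Inner,
}

#[derive(Debug, Clone, Deserialize, Serialize, Proxy)]
#[config(env_prefix = "OUTER_INNER_")]
struct Inner {
    b: String,
}
```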
- /// - /// # Examples - /// - /// ```rust - /// use iroha_config_base::derive::{Documented, Proxy}; - /// - /// // Need `Documented` here as it owns the `#[config]` attribute - /// #[derive(serde::Deserialize, serde::Serialize, Documented, Proxy)] - /// struct Outer { #[config(inner)] inner: Inner } - /// - /// #[derive(serde::Deserialize, serde::Serialize, Documented, Proxy)] - /// struct Inner { b: String } - /// - /// // Will generate something like this - /// // #[derive(Debug, Clone, serde::Deserialize, serde::Serialize, - /// // Builder, Override, Documented, LoadFromEnv, LoadFromDisk)] - /// // #[builder(parent = Outer)] - /// // struct OuterProxy { #[config(inner)] inner: Option } - /// - /// // #[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize, - /// // Builder, Override, Documented, LoadFromEnv, LoadFromDisk)] - /// // struct InnerProxy { b: Option } - /// ``` pub use iroha_config_derive::Proxy; use serde::Deserialize; use thiserror::Error; - // TODO: use VERGEN to point to LTS reference on LTS branch - /// Reference to the current Dev branch configuration - pub static CONFIG_REFERENCE: &str = - "https://github.com/hyperledger/iroha/blob/iroha2-dev/docs/source/references/config.md"; - /// Represents a path to a nested field in a config structure #[derive(Debug, Deserialize)] #[serde(transparent)] @@ -322,14 +265,9 @@ pub mod derive { #[ignore_extra_doc_attributes] #[allow(clippy::enum_variant_names)] pub enum Error { - /// Got unknown field: `{0}` - /// - /// Used in [`Documented`] trait for wrong query errors - UnknownField(Field), - /// Failed to deserialize the field `{field}` /// - /// Used in [`Documented`] and [`super::proxy::LoadFromEnv`] trait for deserialization + /// Used in [`super::proxy::LoadFromEnv`] trait for deserialization /// errors #[serde(skip)] FieldDeserialization { @@ -408,18 +346,8 @@ pub mod derive { } } } - - #[test] - fn unknown_field_fmt() { - assert_eq!( - Error::UnknownField(Field(vec!["a".into(), "b".into()])).to_string(), - "Got unknown field: `a.b`" - ); - } } -pub mod runtime_upgrades; - pub mod view { //! 
Module for view related traits and structs @@ -452,81 +380,6 @@ pub mod proxy { use super::*; - /// Trait for dynamic and asynchronous configuration via - /// maintenance endpoint for Rust structures - pub trait Documented: Serialize + DeserializeOwned { - /// Error type returned by methods of this trait - type Error; - - /// Return documentation for all fields in a form of a JSON object - fn get_docs() -> Value; - - /// Get inner documentation for non-leaf fields - fn get_inner_docs() -> String; - - /// Return the JSON value of a given field - /// - /// # Errors - /// Fails if field was unknown - #[inline] - fn get(&self, field: &'_ str) -> Result { - self.get_recursive([field]) - } - - /// Get documentation of a given field - /// - /// # Errors - /// Fails if field was unknown - #[inline] - fn get_doc(field: &str) -> Result, Self::Error> { - Self::get_doc_recursive([field]) - } - - /// Return the JSON value of a given inner field of arbitrary - /// inner depth - /// - /// # Errors - /// Fails if field was unknown - fn get_recursive<'tl, T>(&self, inner_field: T) -> Result - where - T: AsRef<[&'tl str]> + Send + 'tl; - - #[allow(single_use_lifetimes)] // Unstable - /// Get documentation of a given inner field of arbitrary depth - /// - /// # Errors - /// Fails if field was unknown - fn get_doc_recursive<'tl>( - field: impl AsRef<[&'tl str]>, - ) -> Result, Self::Error>; - } - - impl Documented for Box { - type Error = T::Error; - - fn get_docs() -> Value { - T::get_docs() - } - - fn get_inner_docs() -> String { - T::get_inner_docs() - } - - fn get_recursive<'tl, U>(&self, inner_field: U) -> Result - where - U: AsRef<[&'tl str]> + Send + 'tl, - { - T::get_recursive(self, inner_field) - } - - #[allow(single_use_lifetimes)] // False-positive - fn get_doc_recursive<'tl>( - field: impl AsRef<[&'tl str]>, - ) -> Result, Self::Error> { - T::get_doc_recursive(field) - } - } - /// Trait for combining two configuration instances pub trait Override: Serialize + DeserializeOwned + Sized { /// If any of the fields in `other` are filled, they diff --git a/config/base/src/runtime_upgrades.rs b/config/base/src/runtime_upgrades.rs deleted file mode 100644 index 95b69e0e13d..00000000000 --- a/config/base/src/runtime_upgrades.rs +++ /dev/null @@ -1,362 +0,0 @@ -//! Module handling runtime upgrade logic. -pub use serde::{Deserialize, Serialize}; -use thiserror::*; - -type Result = core::result::Result; - -/// Error which occurs when reloading a configuration fails. -#[derive(Clone, Copy, Debug, Error)] -pub enum ReloadError { - /// The resource held by the handle was poisoned by a panic in - /// another thread. - #[error("Resource poisoned.")] - Poisoned, - /// The resource held by the handle was dropped. - #[error("Resource dropped.")] - Dropped, - /// If the reload handle wasn't properly initialized (using - /// [`handle::Singleton::set`]), there's nothing to reload with. - #[error("Cannot reload an uninitialized handle.")] - NotInitialized, - /// Error not specified by the implementer of the [`Reload`] - /// traits. Use as last resort. - #[error("Unspecified reload failure.")] - Other, -} - -/// The field needs to be mutably borrowed to be reloaded. -pub trait ReloadMut { - /// Reload `self` using provided `item`. - /// - /// # Errors - /// Fails with an appropriate variant of - /// [`ReloadError`]. [`ReloadError::Other`] can be used as a - /// **temporary** placeholder. - fn reload(&mut self, item: T) -> Result<()>; -} - -/// The field can be immutably borrowed and reloaded. 
-pub trait Reload { - /// Reload `self` using provided `item`. - /// - /// # Errors - /// Fails with an appropriate variant of [`ReloadError`]. - /// [`ReloadError::Other`] can be used as a **temporary** placeholder. - fn reload(&self, item: T) -> Result<()>; -} - -/// Contains [`handle`] types: opaque wrappers around a reloadable -/// configuration, used to embed reloading functionality into -/// various [`iroha_config_derive::Documented`] types. -/// -/// # Architecture. -/// -/// ## Desired behaviour -/// -/// Given a value of type (`` in this module), need to -/// -/// - Embed a handle into the configuration options, replacing a Value -/// of type with a handle. -/// -/// - The handle gets (de)serialized as if it were ``: no extra -/// fields, no extra initialisation. -/// -/// - The configuration as a whole is immutable. This is to ensure -/// that you don't accidentally re-assign the handle. -/// -/// - The last object that got instantiated from the configuration -/// file is modified when we call [`Reload::reload`]. -/// -/// - The value used to [`Reload::reload`] the value, must be reflected in the -/// configuration. -/// -/// ## Additional considerations -/// -/// - The handle might have internal mutable state, and be passed -/// along several threads in both a `sync` and `async` context. -/// -/// - The handle's state can be a global mutable static value behind a -/// wrapper. -/// -/// - The handle is almost never read. All interactions with the -/// handle are writes. -/// -/// - The handle can retain a reference to different types, depending -/// on the configuration options. The types might not all be known -/// ahead of time, or be impractically long (both true for -/// `tracting_subscriber::reload::Handle`). -/// -/// # Usage -/// -/// Embed a `SyncValue>`, in your -/// configuration options. When using the configuration to initialise -/// components, call [`handle::SyncValue::set_handle`], on a value that -/// implements [`ReloadMut`] (which you defined earlier). Call -/// [`handle::SyncValue::reload`] to change the configuration at run-time. -/// -/// If the type stored in `H` is a single simple type, it is -/// recommended to use a custom tuple `struct`, and `impl` -/// [`Reload`] for it. -/// -/// If the types are too varied, or generic in arguments that change -/// depending on run-time values, (as in -/// e.g. `tracing_subscriber::reload::Handle`), it is recommended to -/// instead use the provided opaque wrapper [`handle::Singleton`]. -/// -/// **NOTE** you shouldn't normally need to use either -/// [`handle::Singleton`] or [`handle::Value`] directly. 
-/// -/// # Examples -/// -/// ```ignore -/// use iroha_config_derive::Documented; -/// use serde::{Deserialize, Serialize}; -/// use iroha_config::runtime_upgrades::{handle, Reload, ReloadMut, ReloadError}; -/// use tracing::Level; -/// use tracing_subscriber::{reload::Handle, filter::LevelFilter}; -/// use std::fmt::Debug; -/// -/// struct Logger; -/// -/// #[derive(Clone, Deserialize, Serialize, Debug, Documented)] -/// struct Configuration { -/// pub max_log_level: handle::SyncValue>, -/// pub log_file_path: Option, -/// } -/// -/// fn init(config: &Configuration) -> Logger { -/// let level = config.max_log_level.value(); -/// let level_filter = tracing_subscriber::filter::LevelFilter::from_level(level); -/// let (filter, handle) = reload::Layer::new(level_filter); -/// config.max_log_level.set_handle(iroha_config::logger::ReloadHandle(handle)).unwrap(); -/// } -/// -/// impl ReloadMut for Handle { -/// fn reload(&mut self, level: Level) -> Result<(), ReloadError> { -/// let level_filter = LevelFilter::from_level(level); -/// Handle::reload(self, level_filter).map_err(|_todo| ReloadError::Dropped) -/// } -/// } -/// ``` - -pub mod handle { - use std::{ - fmt::{Debug, Formatter}, - sync::Arc, - }; - - use crossbeam::atomic::AtomicCell; - use parking_lot::Mutex; - use serde::{Deserialize, Serialize}; - - use super::{Reload, ReloadError, ReloadMut, Result}; - // ----------------------------------------------------------------- - - /// An opaque handle for arbitrary [`super::ReloadMut`], useful - /// when it is either impossible or impractical to specify a - /// single `enum` or generic type. You shouldn't embed this into - /// your configuration, and instead use [`SyncValue`]. - #[derive(Clone, Serialize, Deserialize)] - pub struct Singleton { - #[serde(skip)] - inner: Arc + Send + Sync>>>>, - } - - impl Default for Singleton { - fn default() -> Self { - Self { - inner: Arc::new(Mutex::new(None)), - } - } - } - - impl Singleton { - /// Set and/or initialize the [`Self`] to a non-empty value. - /// Reloading before calling this `fn` should cause - /// [`ReloadError::NotInitialized`]. - /// - /// # Errors - /// [`ReloadError::Poisoned`] When the [`Mutex`] storing the reload handle is poisoned. - pub fn set(&self, handle: impl ReloadMut + Send + Sync + 'static) { - *self.inner.lock() = Some(Box::new(handle)); - } - } - - impl Debug for Singleton { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - f.debug_struct("Handle with side effect").finish() - } - } - - impl Reload for Singleton { - fn reload(&self, item: T) -> Result<()> { - match &mut *self.inner.lock() { - Some(handle) => { - handle.reload(item)?; - Ok(()) - } - None => Err(ReloadError::NotInitialized), - } - } - } - - // --------------------------------------------------------------- - - /// A run-time reloadable configuration option with - /// value-semantics. This means that reloading a [`Value`] only - /// affects the [`Value`] itself. It's useful when you want to - /// keep a configuration immutable, but retain thread-safe - /// interior mutability, which is preferable to making the entire - /// configuration `mut`. 
- /// - /// # Examples - /// - /// ```ignore - /// use serde::{Serialize, Deserialize}; - /// use iroha_config_base::runtime_upgrades::{handle::Value, Reload}; - /// - /// #[derive(iroha_config_base::derive::Combine, Serialize, Deserialize)] - /// pub struct Config { option: Value } - /// - /// fn main() { - /// let c = Config { option: true.into() }; - /// - /// c.option.reload(false); - /// } - /// ``` - /// - /// If you wish to perform validation on the value, consider using - /// a thin wrapper `tuple` struct. - /// - #[derive(Debug)] - pub struct Value(pub AtomicCell); - - impl Clone for Value { - fn clone(&self) -> Self { - Self(AtomicCell::new(self.0.load())) - } - } - - impl From for Value { - fn from(value: T) -> Self { - Self(AtomicCell::new(value)) - } - } - - impl Default for Value { - fn default() -> Self { - Self(AtomicCell::default()) - } - } - - impl Reload for Value { - fn reload(&self, item: T) -> Result<()> { - self.0.swap(item); - Ok(()) - } - } - - impl<'de, T: Deserialize<'de> + Copy + Clone> Deserialize<'de> for Value { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - Ok(Self(AtomicCell::new(T::deserialize(deserializer)?))) - } - } - - impl Serialize for Value { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - (self.0.load()).serialize(serializer) - } - } - - // ----------------------------------------------------------------------- - - /// Structure that encapsulates a configuration value as well as a - /// handle for reloading other parts of the program. This is the - /// `struct` that you want to use 99% of the time. - /// - /// It handles automatic synchronisation of the current value from - /// the reload, as well as proper (de)serialization: namely the - /// handle doesn't pollute your configuration options. - pub struct SyncValue>(Value, H); - - impl> SyncValue { - /// Getter for the wrapped [`Value`] - pub fn value(&self) -> T { - self.0 .0.load() - } - } - - impl SyncValue> { - /// Set the handle - /// - /// # Errors - /// If [`Singleton::set`] fails. 
- pub fn set_handle(&self, other: impl ReloadMut + Send + Sync + 'static) { - self.1.set(other); - } - } - - impl + Clone> Clone for SyncValue { - fn clone(&self) -> Self { - Self(self.0.clone(), self.1.clone()) - } - } - - impl + Debug> Debug for SyncValue { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - f.debug_tuple("Reconfigure") - .field(&self.0) - .field(&self.1) - .finish() - } - } - - impl Default for SyncValue> - where - T: Default + Clone + Copy + Send + Sync + Debug, - { - fn default() -> Self { - Self(Value::default(), Singleton::default()) - } - } - - impl + Default> From for SyncValue { - fn from(value: T) -> Self { - Self(Value(AtomicCell::new(value)), H::default()) - } - } - - impl> Serialize for SyncValue { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - // We only want the actual (simple) value to be part of the serializing - self.0.serialize(serializer) - } - } - - impl<'de, T: Deserialize<'de> + Copy + Clone, H: Reload + Default> Deserialize<'de> - for SyncValue - { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - Ok(Self(Value::::deserialize(deserializer)?, H::default())) - } - } - - impl> Reload for SyncValue { - fn reload(&self, item: T) -> Result<()> { - self.1.reload(item)?; - self.0.reload(item) - } - } -} diff --git a/config/base/tests/simple.rs b/config/base/tests/simple.rs deleted file mode 100644 index 4de93ca3632..00000000000 --- a/config/base/tests/simple.rs +++ /dev/null @@ -1,206 +0,0 @@ -use std::{collections::HashMap, env::VarError, ffi::OsStr}; - -use iroha_config_base::{ - derive::{Documented, LoadFromEnv, Override}, - proxy::{Documented as _, FetchEnv, LoadFromEnv as _, Override as _}, -}; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Deserialize, Serialize, LoadFromEnv, Override)] -#[config(env_prefix = "CONF_")] -struct ConfigurationProxy { - /// Inner structure - #[config(inner)] - inner: Option, - #[config(serde_as_str)] - pub string_wrapper: Option, - pub string: Option, - pub data: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize, Documented)] -#[config(env_prefix = "CONF_")] -struct Configuration { - /// Inner structure - #[config(inner)] - inner: InnerConfiguration, - #[config(serde_as_str)] - pub string_wrapper: StringWrapper, - pub string: String, - pub data: Data, -} - -impl ConfigurationProxy { - fn new_with_placeholders() -> Self { - Self { - inner: Some(InnerConfigurationProxy { - a: Some("string".to_owned()), - b: Some(42), - }), - string_wrapper: Some(StringWrapper("string".to_owned())), - string: Some("cool string".to_owned()), - data: Some(Data { - key: "key".to_owned(), - value: 34, - }), - } - } - - fn new_with_none() -> Self { - Self { - inner: None, - string_wrapper: None, - string: None, - data: None, - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, LoadFromEnv, Override)] -#[config(env_prefix = "CONF_INNER_")] -struct InnerConfigurationProxy { - pub a: Option, - // From expression - /// Docs from b - pub b: Option, -} - -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, Documented)] -#[config(env_prefix = "CONF_INNER_")] -struct InnerConfiguration { - pub a: String, - // From expression - /// Docs from b - pub b: i32, -} -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] -struct Data { - key: String, - value: u64, -} - -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] -struct StringWrapper(String); - -#[test] -fn test_docs() { - assert_eq!( - 
Configuration::get_doc_recursive(["inner", "b"]).unwrap(), - Some(" Docs from b\n\nHas type `i32`[^1]. Can be configured via environment variable `CONF_INNER_B`".to_owned()) - ); - assert_eq!( - Configuration::get_doc_recursive(["inner", "a"]).unwrap(), - Some( - "Has type `String`[^1]. Can be configured via environment variable `CONF_INNER_A`" - .to_owned() - ) - ); - assert_eq!( - Configuration::get_doc_recursive(["inner"]).unwrap(), - Some(" Inner structure\n\nHas type `InnerConfiguration`[^1]. Can be configured via environment variable `CONF_INNER`\n\nHas following fields:\n\na: Has type `String`[^1]. Can be configured via environment variable `CONF_INNER_A`\n\nb: Docs from b\n\nHas type `i32`[^1]. Can be configured via environment variable `CONF_INNER_B`\n\n\n".to_owned()) - ); -} - -struct TestEnv { - map: HashMap, -} - -impl TestEnv { - fn new() -> Self { - Self { - map: HashMap::new(), - } - } - - fn set_var(&mut self, key: impl AsRef, value: impl AsRef) { - self.map - .insert(key.as_ref().to_owned(), value.as_ref().to_owned()); - } - - fn remove_var(&mut self, key: impl AsRef) { - self.map.remove(key.as_ref()); - } -} - -impl FetchEnv for TestEnv { - fn fetch>(&self, key: K) -> Result { - self.map - .get( - key.as_ref() - .to_str() - .ok_or_else(|| VarError::NotUnicode(key.as_ref().to_owned()))?, - ) - .ok_or(VarError::NotPresent) - .map(Clone::clone) - } -} - -fn test_env_factory() -> TestEnv { - let string_wrapper_json = "string"; - let string = "cool string"; - let data_json = r#"{"key": "key", "value": 34}"#; - let inner_json = r#"{"a": "", "b": 0}"#; - let mut env = TestEnv::new(); - env.set_var("CONF_STRING_WRAPPER", string_wrapper_json); - env.set_var("CONF_STRING", string); - env.set_var("CONF_DATA", data_json); - env.set_var("CONF_OPTIONAL_STRING_WRAPPER", string_wrapper_json); - env.set_var("CONF_OPTIONAL_STRING", string); - env.set_var("CONF_OPTIONAL_DATA", data_json); - env.set_var("CONF_OPTIONAL_INNER", inner_json); - env.set_var("CONF_INNER_A", "string"); - env.set_var("CONF_INNER_B", "42"); - env -} - -#[test] -fn test_proxy_load_from_env() { - let config = ConfigurationProxy::new_with_placeholders(); - let env_config = ConfigurationProxy::from_env(&test_env_factory()).expect("valid env"); - assert_eq!(&env_config.data, &config.data); - assert_eq!(&env_config.string_wrapper, &config.string_wrapper); - assert_eq!(&env_config.string, &config.string); - assert_eq!(&env_config.inner, &config.inner); -} - -#[test] -fn test_can_load_inner_without_the_wrapping_config() { - let mut env = test_env_factory(); - env.remove_var("CONF_OPTIONAL_INNER"); - let config = ConfigurationProxy::new_with_placeholders(); - let env_config = ConfigurationProxy::from_env(&env).expect("valid env"); - assert_eq!(&env_config.inner, &config.inner); -} - -#[test] -fn test_proxy_combine_does_not_overload_with_none() { - let config = ConfigurationProxy::new_with_none(); - let env_config = ConfigurationProxy::from_env(&test_env_factory()).expect("valid env"); - let combine_config = env_config.clone().override_with(config); - assert_eq!(&env_config.data, &combine_config.data); -} - -#[test] -fn configuration_proxy_from_env_returns_err_on_parsing_error() { - #[derive(LoadFromEnv, Debug)] - #[config(env_prefix = "")] - struct Target { - #[allow(dead_code)] - foo: Option, - } - - struct Env; - - impl FetchEnv for Env { - fn fetch>(&self, key: K) -> Result { - match key.as_ref().to_str().unwrap() { - "FOO" => Ok("not u64 for sure".to_owned()), - _ => Err(VarError::NotPresent), - } - } - } - - let err = 
Target::from_env(&Env).expect_err("Must not be parsed");
-    let err = eyre::Report::new(err);
-    assert_eq!(format!("{err:?}"), "Failed to deserialize the field `FOO`\n\nCaused by:\n JSON5: --> 1:1\n |\n 1 | not u64 for sure\n | ^---\n |\n = expected array, boolean, null, number, object, or string\n\nLocation:\n config/base/tests/simple.rs:204:15");
-}
diff --git a/config/iroha_test_config.json b/config/iroha_test_config.json
index 80f61607c38..6ebbf417a26 100644
--- a/config/iroha_test_config.json
+++ b/config/iroha_test_config.json
@@ -57,11 +57,8 @@
     "FUTURE_THRESHOLD_MS": 1000
   },
   "LOGGER": {
-    "MAX_LOG_LEVEL": "INFO",
-    "TELEMETRY_CAPACITY": 1000,
-    "COMPACT_MODE": false,
-    "LOG_FILE_PATH": null,
-    "TERMINAL_COLORS": true,
+    "LEVEL": "INFO",
+    "FORMAT": "full",
     "TOKIO_CONSOLE_ADDR": "127.0.0.1:5555"
   },
   "GENESIS": {
diff --git a/config/src/block_sync.rs b/config/src/block_sync.rs
index 6802fcce9c9..dd927df3ece 100644
--- a/config/src/block_sync.rs
+++ b/config/src/block_sync.rs
@@ -1,5 +1,5 @@
 //! Module for `BlockSynchronizer`-related configuration and structs.
-use iroha_config_base::derive::{Documented, Proxy};
+use iroha_config_base::derive::Proxy;
 use serde::{Deserialize, Serialize};
 
 const DEFAULT_BLOCK_BATCH_SIZE: u32 = 4;
@@ -7,7 +7,7 @@ const DEFAULT_GOSSIP_PERIOD_MS: u64 = 10000;
 const DEFAULT_ACTOR_CHANNEL_CAPACITY: u32 = 100;
 
 /// Configuration for `BlockSynchronizer`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Documented, Proxy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Proxy)]
 #[serde(rename_all = "UPPERCASE")]
 #[config(env_prefix = "BLOCK_SYNC_")]
 pub struct Configuration {
diff --git a/config/src/client.rs b/config/src/client.rs
index 312bb2a1737..a9238879cac 100644
--- a/config/src/client.rs
+++ b/config/src/client.rs
@@ -4,7 +4,7 @@ use std::num::NonZeroU64;
 
 use derive_more::Display;
 use eyre::{Result, WrapErr};
-use iroha_config_base::derive::{Documented, Error as ConfigError, Proxy};
+use iroha_config_base::derive::{Error as ConfigError, Proxy};
 use iroha_crypto::prelude::*;
 use iroha_data_model::{prelude::*, transaction::TransactionLimits};
 use iroha_primitives::small::SmallStr;
@@ -56,7 +56,7 @@ impl<'de> Deserialize<'de> for WebLogin {
 }
 
 /// Basic Authentication credentials
-#[derive(Clone, Deserialize, Serialize, Debug, Documented, PartialEq, Eq)]
+#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)]
 pub struct BasicAuth {
     /// Login for Basic Authentication
     pub web_login: WebLogin,
@@ -65,7 +65,7 @@
 }
 
 /// `Configuration` provides an ability to define client parameters such as `TORII_URL`.
-#[derive(Debug, Clone, Deserialize, Serialize, Proxy, Documented, PartialEq, Eq)]
+#[derive(Debug, Clone, Deserialize, Serialize, Proxy, PartialEq, Eq)]
 #[serde(rename_all = "UPPERCASE")]
 #[config(env_prefix = "IROHA_")]
 pub struct Configuration {
diff --git a/config/src/client_api.rs b/config/src/client_api.rs
new file mode 100644
index 00000000000..030edb8523a
--- /dev/null
+++ b/config/src/client_api.rs
@@ -0,0 +1,70 @@
+//! Functionality related to working with the configuration through client API.
+//!
+//! Intended usage:
+//!
+//! - Create [`ConfigurationDTO`] from [`crate::iroha::Configuration`] and serialize it for the client
+//! - Deserialize [`ConfigurationDTO`] from the client and use [`ConfigurationDTO::apply_update()`] to update the configuration
+// TODO: Currently the logic here is not generalised and handles only the `logger.level` parameter. In the future, when
+// other parts of the configuration are refactored and there is a solid foundation, e.g. a general
+// configuration-related crate, this part should be rewritten in a clean way.
+// Track configuration refactoring here: https://github.com/hyperledger/iroha/issues/2585
+
+use iroha_data_model::Level;
+use serde::{Deserialize, Serialize};
+
+use super::{iroha::Configuration as BaseConfiguration, logger::Configuration as BaseLogger};
+
+/// Subset of [`super::iroha`] configuration.
+#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
+pub struct ConfigurationDTO {
+    #[allow(missing_docs)]
+    pub logger: Logger,
+}
+
+impl From<&'_ BaseConfiguration> for ConfigurationDTO {
+    fn from(value: &'_ BaseConfiguration) -> Self {
+        Self {
+            logger: value.logger.as_ref().into(),
+        }
+    }
+}
+
+/// Subset of [`super::logger`] configuration.
+#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
+pub struct Logger {
+    #[allow(missing_docs)]
+    pub level: Level,
+}
+
+impl From<&'_ BaseLogger> for Logger {
+    fn from(value: &'_ BaseLogger) -> Self {
+        Self { level: value.level }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn snapshot_serialized_form() {
+        let value = ConfigurationDTO {
+            logger: Logger {
+                level: Level::TRACE,
+            },
+        };
+
+        let actual = serde_json::to_string_pretty(&value).expect("The value is a valid JSON");
+
+        // NOTE: whenever this is updated, make sure to update the documentation accordingly:
+        // https://hyperledger.github.io/iroha-2-docs/reference/torii-endpoints.html
+        // -> Configuration endpoints
+        let expected = expect_test::expect![[r#"
+            {
+              "logger": {
+                "level": "TRACE"
+              }
+            }"#]];
+        expected.assert_eq(&actual);
+    }
+}
diff --git a/config/src/genesis.rs b/config/src/genesis.rs
index fe51c5e33a3..2bb9e8d892b 100644
--- a/config/src/genesis.rs
+++ b/config/src/genesis.rs
@@ -1,12 +1,12 @@
 //! Module with genesis configuration logic.
-use iroha_config_base::derive::{view, Documented, Proxy};
+use iroha_config_base::derive::{view, Proxy};
 use iroha_crypto::{PrivateKey, PublicKey};
 use serde::{Deserialize, Serialize};
 
 // Generate `ConfigurationView` without the private key
 view! {
     /// Configuration of the genesis block and the process of its submission.
-    #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Documented, Proxy)]
+    #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy)]
     #[serde(rename_all = "UPPERCASE")]
     #[config(env_prefix = "IROHA_GENESIS_")]
     pub struct Configuration {
diff --git a/config/src/iroha.rs b/config/src/iroha.rs
index 6ed054b6593..ffa28eddc2e 100644
--- a/config/src/iroha.rs
+++ b/config/src/iroha.rs
@@ -1,7 +1,7 @@
 //! This module contains [`struct@Configuration`] structure and related implementation.
 use std::fmt::Debug;
 
-use iroha_config_base::derive::{view, Documented, Error as ConfigError, Proxy};
+use iroha_config_base::derive::{view, Error as ConfigError, Proxy};
 use iroha_crypto::prelude::*;
 use serde::{Deserialize, Serialize};
 
@@ -10,7 +10,7 @@ use super::*;
 
 // Generate `ConfigurationView` without the private key
 view!
{ /// Configuration parameters for a peer - #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy, Documented)] + #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "IROHA_")] pub struct Configuration { @@ -144,11 +144,8 @@ impl ConfigurationProxy { message: "Torii config should have at least `p2p_addr` provided for sumeragi finalisation", }); } - // Finally, if trusted peers were not supplied, we can fall back to inserting itself as - // the only trusted one - if sumeragi_proxy.trusted_peers.is_none() { - sumeragi_proxy.insert_self_as_trusted_peers() - } + + sumeragi_proxy.insert_self_as_trusted_peers() } Ok(()) @@ -262,9 +259,9 @@ mod tests { #[test] fn example_json_proxy_builds() { - ConfigurationProxy::from_path(CONFIGURATION_PATH).build().unwrap_or_else(|_| panic!("`ConfigurationProxy` specified in {CONFIGURATION_PATH} \ + ConfigurationProxy::from_path(CONFIGURATION_PATH).build().unwrap_or_else(|err| panic!("`ConfigurationProxy` specified in {CONFIGURATION_PATH} \ failed to build. This probably means that some of the fields there were not updated \ - properly with new changes.")); + properly with new changes. Error: {err}")); } #[test] diff --git a/config/src/kura.rs b/config/src/kura.rs index 9eaed6f19d3..5ce29c4ce95 100644 --- a/config/src/kura.rs +++ b/config/src/kura.rs @@ -1,16 +1,13 @@ //! Module for kura-related configuration and structs -use std::{num::NonZeroU64, path::Path}; -use eyre::{eyre, Result}; -use iroha_config_base::derive::{Documented, Proxy}; +use eyre::Result; +use iroha_config_base::derive::Proxy; use serde::{Deserialize, Serialize}; -const DEFAULT_BLOCKS_PER_STORAGE_FILE: u64 = 1000_u64; const DEFAULT_BLOCK_STORE_PATH: &str = "./storage"; -const DEFAULT_ACTOR_CHANNEL_CAPACITY: u32 = 100; /// `Kura` configuration. -#[derive(Clone, Deserialize, Serialize, Debug, Documented, Proxy, PartialEq, Eq)] +#[derive(Clone, Deserialize, Serialize, Debug, Proxy, PartialEq, Eq)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "KURA_")] pub struct Configuration { @@ -18,10 +15,6 @@ pub struct Configuration { pub init_mode: Mode, /// Path to the existing block store folder or path to create new folder. pub block_store_path: String, - /// Maximum number of blocks to write into a single storage file. - pub blocks_per_storage_file: NonZeroU64, - /// Default buffer capacity of actor's MPSC channel. - pub actor_channel_capacity: u32, /// Whether or not new blocks be outputted to a file called blocks.json. pub debug_output_new_blocks: bool, } @@ -30,31 +23,12 @@ impl Default for ConfigurationProxy { fn default() -> Self { Self { init_mode: Some(Mode::default()), - block_store_path: Some(DEFAULT_BLOCK_STORE_PATH.to_owned()), - blocks_per_storage_file: Some( - NonZeroU64::new(DEFAULT_BLOCKS_PER_STORAGE_FILE) - .expect("BLOCKS_PER_STORAGE cannot be set to a non-positive value."), - ), - actor_channel_capacity: Some(DEFAULT_ACTOR_CHANNEL_CAPACITY), + block_store_path: Some(DEFAULT_BLOCK_STORE_PATH.into()), debug_output_new_blocks: Some(false), } } } -impl Configuration { - /// Set `block_store_path` configuration parameter. Will overwrite the existing one. - /// - /// # Errors - /// Fails if the path is not valid - pub fn block_store_path(&mut self, path: &Path) -> Result<()> { - self.block_store_path = path - .to_str() - .ok_or_else(|| eyre!("Failed to yield slice from path"))? - .to_owned(); - Ok(()) - } -} - /// Kura initialization mode. 
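With `blocks_per_storage_file`, `actor_channel_capacity`, and the `block_store_path` setter removed, the Kura section boils down to three fields. A minimal sketch of overriding just the store path, assuming the proxy fields stay public and `Builder` comes from `iroha_config_base` as in the benches further below:

```rust
use iroha_config::base::proxy::Builder;
use iroha_config::kura::{Configuration, ConfigurationProxy};

// Sketch: override only the block store path; `init_mode` and
// `debug_output_new_blocks` fall back to the proxy defaults.
fn kura_config(store: &str) -> Configuration {
    ConfigurationProxy {
        block_store_path: Some(store.to_owned()),
        ..ConfigurationProxy::default()
    }
    .build()
    .expect("defaults cover the remaining fields")
}
```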
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] @@ -77,12 +51,10 @@ pub mod tests { ( init_mode in prop::option::of(Just(Mode::default())), block_store_path in prop::option::of(Just(DEFAULT_BLOCK_STORE_PATH.into())), - blocks_per_storage_file in prop::option::of(Just(NonZeroU64::new(DEFAULT_BLOCKS_PER_STORAGE_FILE).expect("Cannot be set to a negative value"))), - actor_channel_capacity in prop::option::of(Just(DEFAULT_ACTOR_CHANNEL_CAPACITY)), debug_output_new_blocks in prop::option::of(Just(false)) ) -> ConfigurationProxy { - ConfigurationProxy { init_mode, block_store_path, blocks_per_storage_file, actor_channel_capacity, debug_output_new_blocks } + ConfigurationProxy { init_mode, block_store_path, debug_output_new_blocks } } } } diff --git a/config/src/lib.rs b/config/src/lib.rs index 6e80c5e1c88..423e5a8dd19 100644 --- a/config/src/lib.rs +++ b/config/src/lib.rs @@ -1,9 +1,9 @@ //! Aggregate configuration for different Iroha modules. pub use iroha_config_base as base; -use serde::{Deserialize, Serialize}; pub mod block_sync; pub mod client; +pub mod client_api; pub mod genesis; pub mod iroha; pub mod kura; @@ -18,35 +18,3 @@ pub mod telemetry; pub mod torii; pub mod wasm; pub mod wsv; - -/// Json config for getting configuration -#[derive(Clone, Debug, Deserialize, Serialize)] -pub enum GetConfiguration { - /// Getting docs of specific field - /// - /// Top-level fields must be enclosed in an array (of strings). This array - /// provides the fully qualified path to the fields. - /// - /// # Examples - /// - /// To get the top-level configuration docs for `iroha_core::Torii` - /// `curl -X GET -H 'content-type: application/json' http://127.0.0.1:8080/configuration -d '{"Docs" : ["torii"]} ' -i` - /// - /// To get the documentation on the [`Logger::config::Configuration.max_log_level`] - /// `curl -X GET -H 'content-type: application/json' http://127.0.0.1:8080/configuration -d '{"Docs" : ["logger", "max_log_level"]}' -i` - Docs(Vec), - /// Get the original Value of the full configuration. - Value, -} - -/// Message acceptable for `POST` requests to the configuration endpoint. -#[derive(Clone, Debug, Deserialize, Serialize, Copy)] -pub enum PostConfiguration { - /// Change the maximum logging level of logger. - /// - /// # Examples - /// - /// To silence all logging events that aren't `ERROR`s - /// `curl -X POST -H 'content-type: application/json' http://127.0.0.1:8080/configuration -d '{"LogLevel": "ERROR"}' -i` - LogLevel(iroha_data_model::Level), -} diff --git a/config/src/live_query_store.rs b/config/src/live_query_store.rs index 79382fee2ca..de8b2a31ec2 100644 --- a/config/src/live_query_store.rs +++ b/config/src/live_query_store.rs @@ -2,7 +2,7 @@ use std::num::NonZeroU64; -use iroha_config_base::derive::{Documented, Proxy}; +use iroha_config_base::derive::Proxy; use serde::{Deserialize, Serialize}; /// Default max time a query can remain in the store unaccessed @@ -10,7 +10,7 @@ pub static DEFAULT_QUERY_IDLE_TIME_MS: once_cell::sync::Lazy = once_cell::sync::Lazy::new(|| NonZeroU64::new(30_000).unwrap()); /// Configuration for `QueryService`. 
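The removed `GetConfiguration`/`PostConfiguration` messages (and their curl examples) are superseded by the `client_api::ConfigurationDTO` introduced above. A sketch of the payload a client would now send to the `/configuration` endpoint instead of `{"LogLevel": "ERROR"}`:

```rust
use iroha_config::client_api::{ConfigurationDTO, Logger};
use iroha_data_model::Level;

// Sketch: serializes to {"logger":{"level":"ERROR"}}, matching the
// snapshot test in `client_api.rs`.
fn update_body() -> String {
    let dto = ConfigurationDTO {
        logger: Logger { level: Level::ERROR },
    };
    serde_json::to_string(&dto).expect("the DTO is always serializable")
}
```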
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize, Documented, Proxy)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "LIVE_QUERY_STORE_")] pub struct Configuration { diff --git a/config/src/logger.rs b/config/src/logger.rs index bee27fda3df..6d5e4e9d5e6 100644 --- a/config/src/logger.rs +++ b/config/src/logger.rs @@ -2,21 +2,14 @@ //! configuration, as well as run-time reloading of the log-level. use core::fmt::Debug; -use derive_more::{Deref, DerefMut, From}; -use iroha_config_base::{ - derive::{Documented, Proxy}, - runtime_upgrades::{handle, ReloadError, ReloadMut}, -}; -use iroha_data_model::Level; +use iroha_config_base::derive::Proxy; +pub use iroha_data_model::Level; +#[cfg(feature = "tokio-console")] +use iroha_primitives::addr::{socket_addr, SocketAddr}; use serde::{Deserialize, Serialize}; -use tracing::Subscriber; -use tracing_subscriber::{filter::LevelFilter, reload::Handle}; -const TELEMETRY_CAPACITY: u32 = 1000; -const DEFAULT_COMPACT_MODE: bool = false; -const DEFAULT_TERMINAL_COLORS: bool = true; #[cfg(feature = "tokio-console")] -const DEFAULT_TOKIO_CONSOLE_ADDR: &str = "127.0.0.1:5555"; +const DEFAULT_TOKIO_CONSOLE_ADDR: SocketAddr = socket_addr!(127.0.0.1:5555); /// Convert [`Level`] into [`tracing::Level`] pub fn into_tracing_level(level: Level) -> tracing::Level { @@ -29,77 +22,50 @@ pub fn into_tracing_level(level: Level) -> tracing::Level { } } -/// Wrapper for [`Handle`] to implement [`ReloadMut`] -#[derive(From)] -pub struct ReloadHandle(pub Handle); - -impl ReloadMut for ReloadHandle { - fn reload(&mut self, level: Level) -> Result<(), ReloadError> { - let level_filter = - tracing_subscriber::filter::LevelFilter::from_level(into_tracing_level(level)); - - Handle::reload(&self.0, level_filter).map_err(|err| { - if err.is_dropped() { - ReloadError::Dropped - } else { - ReloadError::Poisoned - } - }) - } -} - -/// Wrapper around [`Level`] for runtime upgrades. -#[derive(Debug, Clone, Default, Deref, DerefMut, Deserialize, Serialize)] -#[repr(transparent)] -#[serde(transparent)] -pub struct SyncLevel(handle::SyncValue>); - -impl From for SyncLevel { - fn from(level: Level) -> Self { - Self(level.into()) - } -} - -impl PartialEq for SyncLevel { - fn eq(&self, other: &Self) -> bool { - self.0.value() == other.0.value() - } -} - -impl Eq for SyncLevel {} - /// 'Logger' configuration. -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy, Documented)] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[serde(rename_all = "UPPERCASE")] +#[config(env_prefix = "LOG_")] +// `tokio_console_addr` is not `Copy`, but warning appears without `tokio-console` feature +#[allow(missing_copy_implementations)] pub struct Configuration { - /// Maximum log level + /// Level of logging verbosity #[config(serde_as_str)] - pub max_log_level: SyncLevel, - /// Capacity (or batch size) for telemetry channel - pub telemetry_capacity: u32, - /// Compact mode (no spans from telemetry) - pub compact_mode: bool, - /// If provided, logs will be copied to said file in the - /// format readable by [bunyan](https://lib.rs/crates/bunyan) - #[config(serde_as_str)] - pub log_file_path: Option, - /// Enable ANSI terminal colors for formatted output. 
- pub terminal_colors: bool, + pub level: Level, + /// Output format + pub format: Format, #[cfg(feature = "tokio-console")] /// Address of tokio console (only available under "tokio-console" feature) - pub tokio_console_addr: String, + pub tokio_console_addr: SocketAddr, +} + +/// Reflects formatters in [`tracing_subscriber::fmt::format`] +#[derive(Debug, Copy, Clone, Eq, PartialEq, Deserialize, Serialize)] +#[serde(rename_all = "lowercase")] +pub enum Format { + /// See [`tracing_subscriber::fmt::format::Full`] + Full, + /// See [`tracing_subscriber::fmt::format::Compact`] + Compact, + /// See [`tracing_subscriber::fmt::format::Pretty`] + Pretty, + /// See [`tracing_subscriber::fmt::format::Json`] + Json, +} + +impl Default for Format { + fn default() -> Self { + Self::Full + } } impl Default for ConfigurationProxy { fn default() -> Self { Self { - max_log_level: Some(SyncLevel::default()), - telemetry_capacity: Some(TELEMETRY_CAPACITY), - compact_mode: Some(DEFAULT_COMPACT_MODE), - log_file_path: Some(None), - terminal_colors: Some(DEFAULT_TERMINAL_COLORS), + level: Some(Level::default()), + format: Some(Format::default()), #[cfg(feature = "tokio-console")] - tokio_console_addr: Some(DEFAULT_TOKIO_CONSOLE_ADDR.into()), + tokio_console_addr: Some(DEFAULT_TOKIO_CONSOLE_ADDR), } } } @@ -113,22 +79,23 @@ pub mod tests { #[must_use = "strategies do nothing unless used"] pub fn arb_proxy() -> impl proptest::strategy::Strategy { let strat = ( - (prop::option::of(Just(SyncLevel::default()))), - (prop::option::of(Just(TELEMETRY_CAPACITY))), - (prop::option::of(Just(DEFAULT_COMPACT_MODE))), - (prop::option::of(Just(None))), - (prop::option::of(Just(DEFAULT_TERMINAL_COLORS))), + (prop::option::of(Just(Level::default()))), + (prop::option::of(Just(Format::default()))), #[cfg(feature = "tokio-console")] - (prop::option::of(Just(DEFAULT_TOKIO_CONSOLE_ADDR.to_string()))), + (prop::option::of(Just(DEFAULT_TOKIO_CONSOLE_ADDR))), ); proptest::strategy::Strategy::prop_map(strat, move |strat| ConfigurationProxy { - max_log_level: strat.0, - telemetry_capacity: strat.1, - compact_mode: strat.2, - log_file_path: strat.3, - terminal_colors: strat.4, + level: strat.0, + format: strat.1, #[cfg(feature = "tokio-console")] - tokio_console_addr: strat.5, + tokio_console_addr: strat.2, }) } + + #[test] + fn serialize_pretty_format_in_lowercase() { + let value = Format::Pretty; + let actual = serde_json::to_string(&value).unwrap(); + assert_eq!("\"pretty\"", actual); + } } diff --git a/config/src/network.rs b/config/src/network.rs index e5c5ec48e41..845743fac42 100644 --- a/config/src/network.rs +++ b/config/src/network.rs @@ -1,11 +1,11 @@ //! Module for network-related configuration and structs -use iroha_config_base::derive::{Documented, Proxy}; +use iroha_config_base::derive::Proxy; use serde::{Deserialize, Serialize}; const DEFAULT_ACTOR_CHANNEL_CAPACITY: u32 = 100; /// Network Configuration parameters -#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Documented, Proxy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "IROHA_NETWORK_")] pub struct Configuration { diff --git a/config/src/queue.rs b/config/src/queue.rs index 3dde85d60d1..5803e90ed7c 100644 --- a/config/src/queue.rs +++ b/config/src/queue.rs @@ -1,5 +1,5 @@ //! Module for `Queue`-related configuration and structs. 
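Since `Format` serializes in lowercase and the logger now uses `env_prefix = "LOG_"`, the same four values work in `config.json` (`"FORMAT": "full"`) and, presumably, in a `LOG_FORMAT` environment variable. A sketch of the round-trip that the `serialize_pretty_format_in_lowercase` test above relies on:

```rust
use iroha_config::logger::Format;

// Sketch: "full" | "compact" | "pretty" | "json" are the only
// accepted spellings, mirroring the lowercase serde rename.
fn parse_format(raw: &str) -> serde_json::Result<Format> {
    serde_json::from_str(&format!("\"{raw}\""))
}

#[test]
fn json_variant_parses() {
    assert!(matches!(parse_format("json"), Ok(Format::Json)));
}
```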
-use iroha_config_base::derive::{Documented, Proxy}; +use iroha_config_base::derive::Proxy; use serde::{Deserialize, Serialize}; const DEFAULT_MAX_TRANSACTIONS_IN_QUEUE: u32 = 2_u32.pow(16); @@ -8,7 +8,7 @@ const DEFAULT_TRANSACTION_TIME_TO_LIVE_MS: u64 = 24 * 60 * 60 * 1000; // 24 hour const DEFAULT_FUTURE_THRESHOLD_MS: u64 = 1000; /// `Queue` configuration. -#[derive(Copy, Clone, Deserialize, Serialize, Debug, Documented, Proxy, PartialEq, Eq)] +#[derive(Copy, Clone, Deserialize, Serialize, Debug, Proxy, PartialEq, Eq)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "QUEUE_")] pub struct Configuration { diff --git a/config/src/snapshot.rs b/config/src/snapshot.rs index 484dfb7de3e..ea949340767 100644 --- a/config/src/snapshot.rs +++ b/config/src/snapshot.rs @@ -1,6 +1,6 @@ //! Module for `SnapshotMaker`-related configuration and structs. -use iroha_config_base::derive::{Documented, Proxy}; +use iroha_config_base::derive::Proxy; use serde::{Deserialize, Serialize}; const DEFAULT_SNAPSHOT_PATH: &str = "./storage"; @@ -9,7 +9,7 @@ const DEFAULT_SNAPSHOT_CREATE_EVERY_MS: u64 = 1000 * 60; const DEFAULT_ENABLED: bool = true; /// Configuration for `SnapshotMaker`. -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Documented, Proxy)] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "SNAPSHOT_")] pub struct Configuration { diff --git a/config/src/sumeragi.rs b/config/src/sumeragi.rs index c6929d441d6..a4eb7760069 100644 --- a/config/src/sumeragi.rs +++ b/config/src/sumeragi.rs @@ -2,7 +2,7 @@ use std::{fmt::Debug, fs::File, io::BufReader, path::Path}; use eyre::{Result, WrapErr}; -use iroha_config_base::derive::{view, Documented, Proxy}; +use iroha_config_base::derive::{view, Proxy}; use iroha_crypto::prelude::*; use iroha_data_model::prelude::*; use iroha_primitives::{unique_vec, unique_vec::UniqueVec}; @@ -36,7 +36,7 @@ view! { /// `Sumeragi` configuration. /// [`struct@Configuration`] provides an ability to define parameters such as `BLOCK_TIME_MS` /// and a list of `TRUSTED_PEERS`. - #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy, Documented)] + #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "SUMERAGI_")] pub struct Configuration { @@ -94,11 +94,16 @@ impl ConfigurationProxy { pub fn insert_self_as_trusted_peers(&mut self) { let peer_id = self .peer_id - .clone() + .as_ref() .expect("Insertion of `self` as `trusted_peers` implies that `peer_id` field should be initialized"); - self.trusted_peers = Some(TrustedPeers { - peers: unique_vec![peer_id], - }); + self.trusted_peers = if let Some(mut trusted_peers) = self.trusted_peers.take() { + trusted_peers.peers.push(peer_id.clone()); + Some(trusted_peers) + } else { + Some(TrustedPeers { + peers: unique_vec![peer_id.clone()], + }) + }; } } diff --git a/config/src/telemetry.rs b/config/src/telemetry.rs index d347df8b050..b7ce10f9ee4 100644 --- a/config/src/telemetry.rs +++ b/config/src/telemetry.rs @@ -1,10 +1,12 @@ //! Module for telemetry-related configuration and structs. 
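`insert_self_as_trusted_peers` (changed above) now merges the peer into an existing `TRUSTED_PEERS` list rather than only filling in a missing one. A rough sketch, assuming the sumeragi proxy fields remain public:

```rust
use iroha_config::sumeragi::{ConfigurationProxy, TrustedPeers};
use iroha_data_model::peer::PeerId;
use iroha_primitives::unique_vec;

// Sketch: `self` is appended to, not substituted for, the operator's list.
fn merge_self(mut proxy: ConfigurationProxy, self_id: PeerId, other: PeerId) -> ConfigurationProxy {
    proxy.peer_id = Some(self_id);
    proxy.trusted_peers = Some(TrustedPeers {
        peers: unique_vec![other],
    });
    proxy.insert_self_as_trusted_peers();
    // `trusted_peers` now holds both peers; previously the list was
    // touched only when it was `None`.
    proxy
}
```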
-use iroha_config_base::derive::{Documented, Proxy}; +use std::path::PathBuf; + +use iroha_config_base::derive::Proxy; use serde::{Deserialize, Serialize}; use url::Url; /// Configuration parameters container -#[derive(Clone, Deserialize, Serialize, Debug, Proxy, Documented, PartialEq, Eq)] +#[derive(Clone, Deserialize, Serialize, Debug, Proxy, PartialEq, Eq)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "TELEMETRY_")] pub struct Configuration { @@ -20,7 +22,57 @@ pub struct Configuration { pub max_retry_delay_exponent: u8, /// The filepath that to write dev-telemetry to #[config(serde_as_str)] - pub file: Option, + pub file: Option, +} + +/// Complete configuration needed to start regular telemetry. +pub struct RegularTelemetryConfig { + #[allow(missing_docs)] + pub name: String, + #[allow(missing_docs)] + pub url: Url, + #[allow(missing_docs)] + pub min_retry_period: u64, + #[allow(missing_docs)] + pub max_retry_delay_exponent: u8, +} + +/// Complete configuration needed to start dev telemetry. +pub struct DevTelemetryConfig { + #[allow(missing_docs)] + pub file: PathBuf, +} + +impl Configuration { + /// Parses user-provided configuration into stronger typed structures + /// + /// Should be refactored with [#3500](https://github.com/hyperledger/iroha/issues/3500) + pub fn parse(&self) -> (Option, Option) { + let Self { + ref name, + ref url, + max_retry_delay_exponent, + min_retry_period, + ref file, + } = *self; + + let regular = if let (Some(name), Some(url)) = (name, url) { + Some(RegularTelemetryConfig { + name: name.clone(), + url: url.clone(), + max_retry_delay_exponent, + min_retry_period, + }) + } else { + None + }; + + let dev = file + .as_ref() + .map(|file| DevTelemetryConfig { file: file.clone() }); + + (regular, dev) + } } impl Default for ConfigurationProxy { diff --git a/config/src/torii.rs b/config/src/torii.rs index 1c2b801e981..7dea529aa54 100644 --- a/config/src/torii.rs +++ b/config/src/torii.rs @@ -1,6 +1,6 @@ //! `Torii` configuration as well as the default values for the URLs used for the main endpoints: `p2p`, `telemetry`, but not `api`. -use iroha_config_base::derive::{Documented, Proxy}; +use iroha_config_base::derive::Proxy; use iroha_primitives::addr::{socket_addr, SocketAddr}; use serde::{Deserialize, Serialize}; @@ -14,7 +14,7 @@ pub const DEFAULT_TORII_MAX_CONTENT_LENGTH: u32 = 2_u32.pow(12) * 4000; /// Structure that defines the configuration parameters of `Torii` which is the routing module. /// For example the `p2p_addr`, which is used for consensus and block-synchronisation purposes, /// as well as `max_transaction_size`. -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Documented, Proxy)] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "TORII_")] pub struct Configuration { diff --git a/config/src/wasm.rs b/config/src/wasm.rs index 0528da996ed..9e49f8d9391 100644 --- a/config/src/wasm.rs +++ b/config/src/wasm.rs @@ -1,5 +1,5 @@ //! Module for wasm-related configuration and structs. -use iroha_config_base::derive::{Documented, Proxy}; +use iroha_config_base::derive::Proxy; use serde::{Deserialize, Serialize}; use self::default::*; @@ -13,7 +13,7 @@ pub mod default { } /// `WebAssembly Runtime` configuration. 
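`Configuration::parse` (added above) splits the single user-facing telemetry section into the two typed configs the subsystems actually need; a short usage sketch:

```rust
use iroha_config::telemetry::{Configuration, DevTelemetryConfig, RegularTelemetryConfig};

// Sketch: one config section, two independent start-up decisions.
fn plan_telemetry(cfg: &Configuration) {
    let (regular, dev) = cfg.parse();
    if let Some(RegularTelemetryConfig { name, url, .. }) = regular {
        println!("reporting as {name} to {url}");
    }
    if let Some(DevTelemetryConfig { file }) = dev {
        println!("writing dev telemetry to {}", file.display());
    }
}
```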
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Documented, Proxy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[config(env_prefix = "WASM_")] #[serde(rename_all = "UPPERCASE")] pub struct Configuration { diff --git a/config/src/wsv.rs b/config/src/wsv.rs index aacc58734be..dcb23b23d85 100644 --- a/config/src/wsv.rs +++ b/config/src/wsv.rs @@ -1,6 +1,6 @@ //! Module for `WorldStateView`-related configuration and structs. use default::*; -use iroha_config_base::derive::{Documented, Proxy}; +use iroha_config_base::derive::Proxy; use iroha_data_model::{prelude::*, transaction::TransactionLimits}; use serde::{Deserialize, Serialize}; @@ -26,7 +26,7 @@ pub mod default { } /// `WorldStateView` configuration. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Proxy, Documented)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Proxy)] #[config(env_prefix = "WSV_")] #[serde(rename_all = "UPPERCASE")] pub struct Configuration { diff --git a/configs/peer/config.json b/configs/peer/config.json index 51cc9c5a45a..11d5b354ce8 100644 --- a/configs/peer/config.json +++ b/configs/peer/config.json @@ -5,8 +5,6 @@ "KURA": { "INIT_MODE": "strict", "BLOCK_STORE_PATH": "./storage", - "BLOCKS_PER_STORAGE_FILE": 1000, - "ACTOR_CHANNEL_CAPACITY": 100, "DEBUG_OUTPUT_NEW_BLOCKS": false }, "SUMERAGI": { @@ -38,11 +36,8 @@ "FUTURE_THRESHOLD_MS": 1000 }, "LOGGER": { - "MAX_LOG_LEVEL": "INFO", - "TELEMETRY_CAPACITY": 1000, - "COMPACT_MODE": false, - "LOG_FILE_PATH": null, - "TERMINAL_COLORS": true + "LEVEL": "INFO", + "FORMAT": "full" }, "GENESIS": { "ACCOUNT_PUBLIC_KEY": null, diff --git a/configs/peer/executor.wasm b/configs/peer/executor.wasm index 2ef54f969b6..48c0f25b41c 100644 Binary files a/configs/peer/executor.wasm and b/configs/peer/executor.wasm differ diff --git a/configs/peer/genesis.json b/configs/peer/genesis.json index 2ca5d0365ed..a915d22f4f8 100644 --- a/configs/peer/genesis.json +++ b/configs/peer/genesis.json @@ -3,7 +3,7 @@ [ { "Register": { - "NewDomain": { + "Domain": { "id": "wonderland", "logo": null, "metadata": { @@ -16,7 +16,7 @@ }, { "Register": { - "NewAccount": { + "Account": { "id": "alice@wonderland", "signatories": [ "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" @@ -31,7 +31,7 @@ }, { "Register": { - "NewAccount": { + "Account": { "id": "bob@wonderland", "signatories": [ "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" @@ -46,7 +46,7 @@ }, { "Register": { - "NewAssetDefinition": { + "AssetDefinition": { "id": "rose#wonderland", "value_type": "Quantity", "mintable": "Infinitely", @@ -57,7 +57,7 @@ }, { "Register": { - "NewDomain": { + "Domain": { "id": "garden_of_live_flowers", "logo": null, "metadata": {} @@ -66,7 +66,7 @@ }, { "Register": { - "NewAccount": { + "Account": { "id": "carpenter@garden_of_live_flowers", "signatories": [ "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" @@ -77,7 +77,7 @@ }, { "Register": { - "NewAssetDefinition": { + "AssetDefinition": { "id": "cabbage#garden_of_live_flowers", "value_type": "Quantity", "mintable": "Infinitely", @@ -88,95 +88,71 @@ }, { "Mint": { - "object": "13_u32", - "destination_id": { - "AssetId": "rose##alice@wonderland" + "Asset": { + "Quantity": { + "object": 13, + "destination_id": "rose##alice@wonderland" + } } } }, { "Mint": { - "object": "44_u32", - "destination_id": { - "AssetId": "cabbage#garden_of_live_flowers#alice@wonderland" + "Asset": { + "Quantity": { + 
"object": 44, + "destination_id": "cabbage#garden_of_live_flowers#alice@wonderland" + } } } }, { "Grant": { - "object": { - "PermissionToken": { + "PermissionToken": { + "object": { "definition_id": "CanSetParameters", "payload": null - } - }, - "destination_id": { - "AccountId": "alice@wonderland" + }, + "destination_id": "alice@wonderland" } } }, { - "Sequence": [ - { - "NewParameter": { - "Parameter": "?MaxTransactionsInBlock=512" - } - }, - { - "NewParameter": { - "Parameter": "?BlockTime=2000" - } - }, - { - "NewParameter": { - "Parameter": "?CommitTimeLimit=4000" - } - }, - { - "NewParameter": { - "Parameter": "?TransactionLimits=4096,4194304_TL" - } - }, - { - "NewParameter": { - "Parameter": "?WSVAssetMetadataLimits=1048576,4096_ML" - } - }, - { - "NewParameter": { - "Parameter": "?WSVAssetDefinitionMetadataLimits=1048576,4096_ML" - } - }, - { - "NewParameter": { - "Parameter": "?WSVAccountMetadataLimits=1048576,4096_ML" - } - }, - { - "NewParameter": { - "Parameter": "?WSVDomainMetadataLimits=1048576,4096_ML" - } - }, - { - "NewParameter": { - "Parameter": "?WSVIdentLengthLimits=1,128_LL" - } - }, - { - "NewParameter": { - "Parameter": "?WASMFuelLimit=23000000" - } - }, - { - "NewParameter": { - "Parameter": "?WASMMaxMemory=524288000" - } - } - ] + "NewParameter": "?MaxTransactionsInBlock=512" + }, + { + "NewParameter": "?BlockTime=2000" + }, + { + "NewParameter": "?CommitTimeLimit=4000" + }, + { + "NewParameter": "?TransactionLimits=4096,4194304_TL" + }, + { + "NewParameter": "?WSVAssetMetadataLimits=1048576,4096_ML" + }, + { + "NewParameter": "?WSVAssetDefinitionMetadataLimits=1048576,4096_ML" + }, + { + "NewParameter": "?WSVAccountMetadataLimits=1048576,4096_ML" + }, + { + "NewParameter": "?WSVDomainMetadataLimits=1048576,4096_ML" + }, + { + "NewParameter": "?WSVIdentLengthLimits=1,128_LL" + }, + { + "NewParameter": "?WASMFuelLimit=23000000" + }, + { + "NewParameter": "?WASMMaxMemory=524288000" }, { "Register": { - "NewRole": { + "Role": { "id": "ALICE_METADATA_ACCESS", "permissions": [ { diff --git a/configs/peer/lts/executor.wasm b/configs/peer/lts/executor.wasm index b74e020ea15..544c9e29dfa 100644 Binary files a/configs/peer/lts/executor.wasm and b/configs/peer/lts/executor.wasm differ diff --git a/configs/peer/stable/executor.wasm b/configs/peer/stable/executor.wasm index b74e020ea15..544c9e29dfa 100644 Binary files a/configs/peer/stable/executor.wasm and b/configs/peer/stable/executor.wasm differ diff --git a/core/Cargo.toml b/core/Cargo.toml index 73a9f5c63b8..def91180650 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -68,7 +68,9 @@ displaydoc = { workspace = true } wasmtime = { workspace = true } parking_lot = { workspace = true, features = ["deadlock_detection"] } derive_more = { workspace = true } + uuid = { version = "1.4.1", features = ["v4"] } +indexmap = "2.1.0" [dev-dependencies] criterion = { workspace = true } diff --git a/core/benches/blocks/apply_blocks_oneshot.rs b/core/benches/blocks/apply_blocks_oneshot.rs index 4c8bdd6e389..f16a5bf5e57 100644 --- a/core/benches/blocks/apply_blocks_oneshot.rs +++ b/core/benches/blocks/apply_blocks_oneshot.rs @@ -8,23 +8,10 @@ mod apply_blocks; use apply_blocks::WsvApplyBlocks; -use iroha_config::base::proxy::Builder; -use iroha_data_model::Level; -use iroha_logger::{Configuration, ConfigurationProxy}; #[tokio::main] async fn main() { - let log_config = Configuration { - max_log_level: Level::INFO.into(), - compact_mode: false, - ..ConfigurationProxy::default() - .build() - .expect("Default logger config should 
always build") - }; - // Can't use logger because it's failed to initialize. - if let Err(err) = iroha_logger::init(&log_config) { - eprintln!("Failed to initialize logger: {err}"); - } + iroha_logger::test_logger(); iroha_logger::info!("Starting..."); let bench = WsvApplyBlocks::setup().expect("Failed to setup benchmark"); WsvApplyBlocks::measure(&bench).expect("Failed to execute benchmark"); diff --git a/core/benches/blocks/common.rs b/core/benches/blocks/common.rs index f4f412eb633..7aef12edd2d 100644 --- a/core/benches/blocks/common.rs +++ b/core/benches/blocks/common.rs @@ -12,7 +12,7 @@ use iroha_data_model::{ account::Account, asset::{AssetDefinition, AssetDefinitionId}, domain::Domain, - isi::InstructionExpr, + isi::InstructionBox, prelude::*, transaction::TransactionLimits, }; @@ -22,7 +22,7 @@ use serde_json::json; /// Create block pub fn create_block( wsv: &mut WorldStateView, - instructions: Vec, + instructions: Vec, account_id: AccountId, key_pair: KeyPair, ) -> CommittedBlock { @@ -57,13 +57,13 @@ pub fn populate_wsv( accounts_per_domain: usize, assets_per_domain: usize, owner_id: &AccountId, -) -> Vec { - let mut instructions: Vec = Vec::new(); +) -> Vec { + let mut instructions: Vec = Vec::new(); for i in 0..domains { let domain_id = construct_domain_id(i); let domain = Domain::new(domain_id.clone()); - instructions.push(RegisterExpr::new(domain).into()); - let can_unregister_domain = GrantExpr::new( + instructions.push(Register::domain(domain).into()); + let can_unregister_domain = Grant::permission_token( PermissionToken::new( "CanUnregisterDomain".parse().unwrap(), &json!({ "domain_id": domain_id.clone() }), @@ -74,8 +74,8 @@ pub fn populate_wsv( for j in 0..accounts_per_domain { let account_id = construct_account_id(j, domain_id.clone()); let account = Account::new(account_id.clone(), []); - instructions.push(RegisterExpr::new(account).into()); - let can_unregister_account = GrantExpr::new( + instructions.push(Register::account(account).into()); + let can_unregister_account = Grant::permission_token( PermissionToken::new( "CanUnregisterAccount".parse().unwrap(), &json!({ "account_id": account_id.clone() }), @@ -90,8 +90,8 @@ pub fn populate_wsv( asset_definition_id.clone(), iroha_data_model::asset::AssetValueType::Quantity, ); - instructions.push(RegisterExpr::new(asset_definition).into()); - let can_unregister_asset_definition = GrantExpr::new( + instructions.push(Register::asset_definition(asset_definition).into()); + let can_unregister_asset_definition = Grant::permission_token( PermissionToken::new( "CanUnregisterAssetDefinition".parse().unwrap(), &json!({ "asset_definition_id": asset_definition_id }), @@ -109,23 +109,23 @@ pub fn delete_every_nth( accounts_per_domain: usize, assets_per_domain: usize, nth: usize, -) -> Vec { - let mut instructions: Vec = Vec::new(); +) -> Vec { + let mut instructions: Vec = Vec::new(); for i in 0..domains { let domain_id = construct_domain_id(i); if i % nth == 0 { - instructions.push(UnregisterExpr::new(domain_id.clone()).into()); + instructions.push(Unregister::domain(domain_id.clone()).into()); } else { for j in 0..accounts_per_domain { if j % nth == 0 { let account_id = construct_account_id(j, domain_id.clone()); - instructions.push(UnregisterExpr::new(account_id.clone()).into()); + instructions.push(Unregister::account(account_id.clone()).into()); } } for k in 0..assets_per_domain { if k % nth == 0 { let asset_definition_id = construct_asset_definition_id(k, domain_id.clone()); - 
instructions.push(UnregisterExpr::new(asset_definition_id).into()); + instructions.push(Unregister::asset_definition(asset_definition_id).into()); } } } @@ -138,19 +138,19 @@ pub fn restore_every_nth( accounts_per_domain: usize, assets_per_domain: usize, nth: usize, -) -> Vec { - let mut instructions: Vec = Vec::new(); +) -> Vec { + let mut instructions: Vec = Vec::new(); for i in 0..domains { let domain_id = construct_domain_id(i); if i % nth == 0 { let domain = Domain::new(domain_id.clone()); - instructions.push(RegisterExpr::new(domain).into()); + instructions.push(Register::domain(domain).into()); } for j in 0..accounts_per_domain { if j % nth == 0 || i % nth == 0 { let account_id = construct_account_id(j, domain_id.clone()); let account = Account::new(account_id.clone(), []); - instructions.push(RegisterExpr::new(account).into()); + instructions.push(Register::account(account).into()); } } for k in 0..assets_per_domain { @@ -160,7 +160,7 @@ pub fn restore_every_nth( asset_definition_id, iroha_data_model::asset::AssetValueType::Quantity, ); - instructions.push(RegisterExpr::new(asset_definition).into()); + instructions.push(Register::asset_definition(asset_definition).into()); } } } @@ -186,7 +186,7 @@ pub fn build_wsv(account_id: &AccountId, key_pair: &KeyPair) -> WorldStateView { let wasm = std::fs::read(&path_to_executor) .unwrap_or_else(|_| panic!("Failed to read file: {}", path_to_executor.display())); let executor = Executor::new(WasmSmartContract::from_compiled(wasm)); - UpgradeExpr::new(executor) + Upgrade::new(executor) .execute(account_id, &mut wsv) .expect("Failed to load executor"); } diff --git a/core/benches/blocks/validate_blocks.rs b/core/benches/blocks/validate_blocks.rs index 6a6d0bc585d..f39e7eb288e 100644 --- a/core/benches/blocks/validate_blocks.rs +++ b/core/benches/blocks/validate_blocks.rs @@ -1,6 +1,6 @@ use eyre::Result; use iroha_core::prelude::*; -use iroha_data_model::{isi::InstructionExpr, prelude::*}; +use iroha_data_model::{isi::InstructionBox, prelude::*}; #[path = "./common.rs"] mod common; @@ -10,7 +10,7 @@ use common::*; #[derive(Clone)] pub struct WsvValidateBlocks { wsv: WorldStateView, - instructions: Vec>, + instructions: Vec>, key_pair: KeyPair, account_id: AccountId, } diff --git a/core/benches/blocks/validate_blocks_oneshot.rs b/core/benches/blocks/validate_blocks_oneshot.rs index bcdeb20a519..403adbd0a22 100644 --- a/core/benches/blocks/validate_blocks_oneshot.rs +++ b/core/benches/blocks/validate_blocks_oneshot.rs @@ -7,23 +7,10 @@ mod validate_blocks; -use iroha_config::base::proxy::Builder; -use iroha_data_model::Level; -use iroha_logger::{Configuration, ConfigurationProxy}; use validate_blocks::WsvValidateBlocks; fn main() { - let log_config = Configuration { - max_log_level: Level::INFO.into(), - compact_mode: false, - ..ConfigurationProxy::default() - .build() - .expect("Default logger config should always build") - }; - // Can't use logger because it's failed to initialize. 
- if let Err(err) = iroha_logger::init(&log_config) { - eprintln!("Failed to initialize logger: {err}"); - } + iroha_logger::test_logger(); iroha_logger::info!("Starting..."); let bench = WsvValidateBlocks::setup().expect("Failed to setup benchmark"); WsvValidateBlocks::measure(bench).expect("Failed to execute bnechmark"); diff --git a/core/benches/kura.rs b/core/benches/kura.rs index c0371201191..a47f731e31d 100644 --- a/core/benches/kura.rs +++ b/core/benches/kura.rs @@ -4,6 +4,7 @@ use std::str::FromStr as _; use byte_unit::Byte; use criterion::{criterion_group, criterion_main, Criterion}; +use iroha_config::kura::Configuration; use iroha_core::{ block::*, kura::{BlockStore, LockStatus}, @@ -22,11 +23,7 @@ async fn measure_block_size_for_n_executors(n_executors: u32) { let bob_id = AccountId::from_str("bob@test").expect("tested"); let xor_id = AssetDefinitionId::from_str("xor#test").expect("tested"); let alice_xor_id = AssetId::new(xor_id, alice_id); - let transfer = TransferExpr::new( - IdBox::AssetId(alice_xor_id), - 10_u32.to_value(), - IdBox::AccountId(bob_id), - ); + let transfer = Transfer::asset_quantity(alice_xor_id, 10_u32, bob_id); let keypair = KeyPair::generate().expect("Failed to generate KeyPair."); let tx = TransactionBuilder::new(AccountId::from_str("alice@wonderland").expect("checked")) .with_instructions([transfer]) @@ -39,8 +36,12 @@ async fn measure_block_size_for_n_executors(n_executors: u32) { let tx = AcceptedTransaction::accept(tx, &transaction_limits) .expect("Failed to accept Transaction."); let dir = tempfile::tempdir().expect("Could not create tempfile."); - let kura = - iroha_core::kura::Kura::new(iroha_config::kura::Mode::Strict, dir.path(), false).unwrap(); + let cfg = Configuration { + init_mode: iroha_config::kura::Mode::Strict, + debug_output_new_blocks: false, + block_store_path: dir.path().to_str().unwrap().into(), + }; + let kura = iroha_core::kura::Kura::new(&cfg).unwrap(); let _thread_handle = iroha_core::kura::Kura::start(kura.clone()); let query_handle = LiveQueryStore::test().start(); diff --git a/core/benches/validation.rs b/core/benches/validation.rs index 0a474ab3ea0..3a5bcaefe23 100644 --- a/core/benches/validation.rs +++ b/core/benches/validation.rs @@ -12,7 +12,7 @@ use iroha_core::{ tx::TransactionExecutor, wsv::World, }; -use iroha_data_model::{prelude::*, transaction::TransactionLimits}; +use iroha_data_model::{isi::InstructionBox, prelude::*, transaction::TransactionLimits}; use iroha_primitives::unique_vec::UniqueVec; const START_DOMAIN: &str = "start"; @@ -26,23 +26,25 @@ const TRANSACTION_LIMITS: TransactionLimits = TransactionLimits { fn build_test_transaction(keys: KeyPair) -> SignedTransaction { let domain_name = "domain"; let domain_id = DomainId::from_str(domain_name).expect("does not panic"); - let create_domain = RegisterExpr::new(Domain::new(domain_id)); + let create_domain: InstructionBox = Register::domain(Domain::new(domain_id)).into(); let account_name = "account"; let (public_key, _) = KeyPair::generate() .expect("Failed to generate KeyPair.") .into(); - let create_account = RegisterExpr::new(Account::new( + let create_account = Register::account(Account::new( AccountId::new( account_name.parse().expect("Valid"), domain_name.parse().expect("Valid"), ), [public_key], - )); + )) + .into(); let asset_definition_id = AssetDefinitionId::new( "xor".parse().expect("Valid"), domain_name.parse().expect("Valid"), ); - let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)); + let create_asset = + 
Register::asset_definition(AssetDefinition::quantity(asset_definition_id)).into(); let instructions = [create_domain, create_account, create_asset]; TransactionBuilder::new(AccountId::new( @@ -82,7 +84,7 @@ fn build_test_and_transient_wsv(keys: KeyPair) -> WorldStateView { .unwrap_or_else(|_| panic!("Failed to read file: {}", path_to_executor.display())); let executor = Executor::new(WasmSmartContract::from_compiled(wasm)); let authority = "genesis@genesis".parse().expect("Valid"); - UpgradeExpr::new(executor) + Upgrade::new(executor) .execute(&authority, &mut wsv) .expect("Failed to load executor"); } diff --git a/core/clippy.toml b/core/clippy.toml new file mode 100644 index 00000000000..ad9bd114bed --- /dev/null +++ b/core/clippy.toml @@ -0,0 +1 @@ +disallowed-types = ["std::collections::HashMap", "std::collections::HashSet"] diff --git a/core/src/block.rs b/core/src/block.rs index 9322d16400d..164dc6b5456 100644 --- a/core/src/block.rs +++ b/core/src/block.rs @@ -267,27 +267,28 @@ mod valid { topology: &Topology, wsv: &mut WorldStateView, ) -> Result { - let actual_commit_topology = &block.payload().commit_topology; - let expected_commit_topology = &topology.ordered_peers; - - if actual_commit_topology != expected_commit_topology { - let actual_commit_topology = actual_commit_topology.clone(); - - return Err(( - block, - BlockValidationError::TopologyMismatch { - expected: expected_commit_topology.clone(), - actual: actual_commit_topology, - }, - )); - } + if !block.payload().header.is_genesis() { + let actual_commit_topology = &block.payload().commit_topology; + let expected_commit_topology = &topology.ordered_peers; + + if actual_commit_topology != expected_commit_topology { + let actual_commit_topology = actual_commit_topology.clone(); + + return Err(( + block, + BlockValidationError::TopologyMismatch { + expected: expected_commit_topology.clone(), + actual: actual_commit_topology, + }, + )); + } - if !block.payload().header.is_genesis() - && topology + if topology .filter_signatures_by_roles(&[Role::Leader], block.signatures()) .is_empty() - { - return Err((block, SignatureVerificationError::LeaderMissing.into())); + { + return Err((block, SignatureVerificationError::LeaderMissing.into())); + } } let expected_block_height = wsv.height() + 1; @@ -735,7 +736,7 @@ mod tests { // Creating an instruction let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset_definition = - RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)); + Register::asset_definition(AssetDefinition::quantity(asset_definition_id)); // Making two transactions that have the same instruction let transaction_limits = &wsv.transaction_executor().transaction_limits; @@ -778,7 +779,7 @@ mod tests { // Creating an instruction let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset_definition = - RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone())); // Making two transactions that have the same instruction let transaction_limits = &wsv.transaction_executor().transaction_limits; @@ -791,14 +792,14 @@ mod tests { let quantity: u32 = 200; let fail_quantity: u32 = 20; - let fail_mint = MintExpr::new( - fail_quantity.to_value(), - IdBox::AssetId(AssetId::new(asset_definition_id.clone(), alice_id.clone())), + let fail_mint = Mint::asset_quantity( + fail_quantity, + AssetId::new(asset_definition_id.clone(), 
alice_id.clone()),
         );
-        let succeed_mint = MintExpr::new(
-            quantity.to_value(),
-            IdBox::AssetId(AssetId::new(asset_definition_id, alice_id.clone())),
+        let succeed_mint = Mint::asset_quantity(
+            quantity,
+            AssetId::new(asset_definition_id, alice_id.clone()),
         );
 
         let tx0 = TransactionBuilder::new(alice_id.clone())
@@ -848,14 +849,15 @@ mod tests {
         let transaction_limits = &wsv.transaction_executor().transaction_limits;
 
         let domain_id = DomainId::from_str("domain").expect("Valid");
-        let create_domain = RegisterExpr::new(Domain::new(domain_id));
+        let create_domain = Register::domain(Domain::new(domain_id));
         let asset_definition_id = AssetDefinitionId::from_str("coin#domain").expect("Valid");
-        let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id));
-        let instructions_fail: [InstructionExpr; 2] = [
+        let create_asset =
+            Register::asset_definition(AssetDefinition::quantity(asset_definition_id));
+        let instructions_fail: [InstructionBox; 2] = [
             create_domain.clone().into(),
-            Fail::new("Always fail").into(),
+            Fail::new("Always fail".to_owned()).into(),
         ];
-        let instructions_accept: [InstructionExpr; 2] = [create_domain.into(), create_asset.into()];
+        let instructions_accept: [InstructionBox; 2] = [create_domain.into(), create_asset.into()];
         let tx_fail = TransactionBuilder::new(alice_id.clone())
             .with_instructions(instructions_fail)
             .sign(alice_keys.clone())
diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs
index 22adcfc2ef8..bf74dfcbbfd 100644
--- a/core/src/block_sync.rs
+++ b/core/src/block_sync.rs
@@ -84,6 +84,7 @@ impl BlockSynchronizer {
     }
 
     /// Get a random online peer.
+    #[allow(clippy::disallowed_types)]
     pub fn random_peer(peers: &std::collections::HashSet<PeerId>) -> Option<PeerId> {
         use rand::{seq::IteratorRandom, SeedableRng};
 
diff --git a/core/src/executor.rs b/core/src/executor.rs
index 971f7dd7867..62af571fe49 100644
--- a/core/src/executor.rs
+++ b/core/src/executor.rs
@@ -4,7 +4,7 @@ use derive_more::DebugCustom;
 use iroha_data_model::{
     account::AccountId,
     executor as data_model_executor,
-    isi::InstructionExpr,
+    isi::InstructionBox,
     query::QueryBox,
     transaction::{Executable, SignedTransaction},
     ValidationFail,
@@ -181,7 +181,7 @@ impl Executor {
         &self,
         wsv: &mut WorldStateView,
         authority: &AccountId,
-        instruction: InstructionExpr,
+        instruction: InstructionBox,
     ) -> Result<(), ValidationFail> {
         trace!("Running instruction validation");
 
diff --git a/core/src/gossiper.rs b/core/src/gossiper.rs
index 5856dfd4a1b..365ebb7ac7a 100644
--- a/core/src/gossiper.rs
+++ b/core/src/gossiper.rs
@@ -100,7 +100,6 @@ impl TransactionGossiper {
             .n_random_transactions(self.gossip_batch_size, &self.wsv);
 
         if txs.is_empty() {
-            iroha_logger::debug!("Nothing to gossip");
             return;
         }
 
diff --git a/core/src/kiso.rs b/core/src/kiso.rs
new file mode 100644
index 00000000000..cb6d98bf05b
--- /dev/null
+++ b/core/src/kiso.rs
@@ -0,0 +1,206 @@
+//! Actor responsible for configuration state and its dynamic updates.
+//!
+//! Currently the API exposed by [`KisoHandle`] works only with [`ConfigurationDTO`], because
+//! no other part of Iroha is interested in the whole state yet. However, the API could be
+//! extended in the future.
+//!
+//! The updates mechanism is implemented via subscriptions to [`tokio::sync::watch`] channels.
+//! For now, only the `logger.level` field is dynamic; it can be tracked with
+//! [`KisoHandle::subscribe_on_log_level()`].
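A sketch of how a consumer (e.g. the logger reload task) might use the handle defined below; the surrounding runtime wiring is illustrative:

```rust
// Sketch: subscribe once, then react to every `logger.level` update.
async fn follow_log_level(config: iroha_config::iroha::Configuration) -> eyre::Result<()> {
    let kiso = KisoHandle::new(config);
    let mut level_rx = kiso.subscribe_on_log_level().await?;
    tokio::spawn(async move {
        while level_rx.changed().await.is_ok() {
            let new_level = *level_rx.borrow_and_update();
            // hand `new_level` to the logger's reload handle here
            let _ = new_level;
        }
    });
    Ok(())
}
```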
+
+use eyre::Result;
+use iroha_config::{
+    client_api::{ConfigurationDTO, Logger as LoggerDTO},
+    iroha::Configuration,
+};
+use iroha_logger::Level;
+use tokio::sync::{mpsc, oneshot, watch};
+
+const DEFAULT_CHANNEL_SIZE: usize = 32;
+
+/// Handle to work with the actor.
+///
+/// The actor will shut down when all its handles are dropped.
+#[derive(Clone)]
+pub struct KisoHandle {
+    actor: mpsc::Sender<Message>,
+}
+
+impl KisoHandle {
+    /// Spawn a new actor
+    pub fn new(state: Configuration) -> Self {
+        let (actor_sender, actor_receiver) = mpsc::channel(DEFAULT_CHANNEL_SIZE);
+        let (log_level_update, _) = watch::channel(state.logger.level);
+        let mut actor = Actor {
+            handle: actor_receiver,
+            state,
+            log_level_update,
+        };
+        tokio::spawn(async move { actor.run().await });
+
+        Self {
+            actor: actor_sender,
+        }
+    }
+
+    /// Fetch the [`ConfigurationDTO`] from the actor's state.
+    ///
+    /// # Errors
+    /// If communication with the actor fails.
+    pub async fn get_dto(&self) -> Result<ConfigurationDTO, Error> {
+        let (tx, rx) = oneshot::channel();
+        let msg = Message::GetDTO { respond_to: tx };
+        let _ = self.actor.send(msg).await;
+        let dto = rx.await?;
+        Ok(dto)
+    }
+
+    /// Update the configuration state and notify subscribers.
+    ///
+    /// Works in a fire-and-forget way, i.e. completion of this call doesn't mean that updates
+    /// are applied. However, a subsequent call of [`Self::get_dto()`] will return an updated state.
+    ///
+    /// # Errors
+    /// If communication with the actor fails.
+    pub async fn update_with_dto(&self, dto: ConfigurationDTO) -> Result<(), Error> {
+        let (tx, rx) = oneshot::channel();
+        let msg = Message::UpdateWithDTO {
+            dto,
+            respond_to: tx,
+        };
+        let _ = self.actor.send(msg).await;
+        rx.await?
+    }
+
+    /// Subscribe to updates of the `logger.level` parameter.
+    ///
+    /// # Errors
+    /// If communication with the actor fails.
+    pub async fn subscribe_on_log_level(&self) -> Result<watch::Receiver<Level>, Error> {
+        let (tx, rx) = oneshot::channel();
+        let msg = Message::SubscribeOnLogLevel { respond_to: tx };
+        let _ = self.actor.send(msg).await;
+        let receiver = rx.await?;
+        Ok(receiver)
+    }
+}
+
+enum Message {
+    GetDTO {
+        respond_to: oneshot::Sender<ConfigurationDTO>,
+    },
+    UpdateWithDTO {
+        dto: ConfigurationDTO,
+        respond_to: oneshot::Sender<Result<(), Error>>,
+    },
+    SubscribeOnLogLevel {
+        respond_to: oneshot::Sender<watch::Receiver<Level>>,
+    },
+}
+
+/// Possible errors that might occur while working with [`KisoHandle`]
+#[derive(thiserror::Error, displaydoc::Display, Debug)]
+pub enum Error {
+    /// Failed to get the actor's response
+    Communication(#[from] oneshot::error::RecvError),
+}
+
+struct Actor {
+    handle: mpsc::Receiver<Message>,
+    state: Configuration,
+    // Current implementation is somewhat not scalable in terms of code writing: any
+    // future dynamic parameter will require its own `subscribe_on_` function in [`KisoHandle`],
+    // a new channel here, and a new [`Message`] variant. If boilerplate expands, a more general
+    // solution will be required. However, as of now a single manually written implementation
+    // seems optimal.
+    log_level_update: watch::Sender<Level>,
+}
+
+impl Actor {
+    async fn run(&mut self) {
+        while let Some(msg) = self.handle.recv().await {
+            self.handle_message(msg)
+        }
+    }
+
+    fn handle_message(&mut self, msg: Message) {
+        match msg {
+            Message::GetDTO { respond_to } => {
+                let dto = ConfigurationDTO::from(&self.state);
+                let _ = respond_to.send(dto);
+            }
+            Message::UpdateWithDTO {
+                dto:
+                    ConfigurationDTO {
+                        logger: LoggerDTO { level: new_level },
+                    },
+                respond_to,
+            } => {
+                let _ = self.log_level_update.send(new_level);
+                self.state.logger.level = new_level;
+
+                let _ = respond_to.send(Ok(()));
+            }
+            Message::SubscribeOnLogLevel { respond_to } => {
+                let _ = respond_to.send(self.log_level_update.subscribe());
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+#[allow(unused)]
+mod tests {
+    use std::time::Duration;
+
+    use iroha_config::{
+        base::proxy::LoadFromDisk,
+        client_api::{ConfigurationDTO, Logger as LoggerDTO},
+        iroha::{Configuration, ConfigurationProxy},
+    };
+
+    use super::*;
+
+    fn test_config() -> Configuration {
+        // FIXME Specifying the path here might break! Moreover, if the file is not found,
+        // the error will say that `public_key` is missing!
+        // Hopefully this will change: https://github.com/hyperledger/iroha/issues/2585
+        ConfigurationProxy::from_path("../config/iroha_test_config.json")
+            .build()
+            .unwrap()
+    }
+
+    #[tokio::test]
+    async fn subscription_on_log_level_works() {
+        const INIT_LOG_LEVEL: Level = Level::WARN;
+        const NEW_LOG_LEVEL: Level = Level::DEBUG;
+        const WATCH_LAG_MILLIS: u64 = 30;
+
+        let mut config = test_config();
+        config.logger.level = INIT_LOG_LEVEL;
+        let kiso = KisoHandle::new(config);
+
+        let mut recv = kiso
+            .subscribe_on_log_level()
+            .await
+            .expect("Subscription should be fine");
+
+        let _err = tokio::time::timeout(Duration::from_millis(WATCH_LAG_MILLIS), recv.changed())
+            .await
+            .expect_err("Watcher should not be active initially");
+
+        kiso.update_with_dto(ConfigurationDTO {
+            logger: LoggerDTO {
+                level: NEW_LOG_LEVEL,
+            },
+        })
+        .await
+        .expect("Update should work fine");
+
+        let () = tokio::time::timeout(Duration::from_millis(WATCH_LAG_MILLIS), recv.changed())
+            .await
+            .expect("Watcher should resolve within timeout")
+            .expect("Watcher should not be closed");
+
+        let value = *recv.borrow_and_update();
+        assert_eq!(value, NEW_LOG_LEVEL);
+    }
+}
diff --git a/core/src/kura.rs b/core/src/kura.rs
index cede4d491b3..11dbf2c5192 100644
--- a/core/src/kura.rs
+++ b/core/src/kura.rs
@@ -10,7 +10,7 @@ use std::{
     sync::Arc,
 };
 
-use iroha_config::kura::Mode;
+use iroha_config::kura::{Configuration, Mode};
 use iroha_crypto::{Hash, HashOf};
 use iroha_data_model::block::SignedBlock;
 use iroha_logger::prelude::*;
@@ -50,22 +50,19 @@ impl Kura {
     /// Fails if there are filesystem errors when trying
     /// to access the block store indicated by the provided
     /// path.
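Call sites now hand `Kura::new` the whole `Configuration` instead of three loose arguments; a sketch mirroring the `strict_init_kura` test below and the kura bench above:

```rust
use iroha_config::kura::{Configuration, Mode};
use iroha_core::kura::Kura;

// Sketch: the former `mode`, `path`, and `debug` parameters travel
// inside one config value.
fn open_store(path: &str) -> eyre::Result<()> {
    let cfg = Configuration {
        init_mode: Mode::Strict,
        block_store_path: path.to_owned(),
        debug_output_new_blocks: false,
    };
    let kura = Kura::new(&cfg)?;
    let _thread_handle = Kura::start(kura.clone());
    Ok(())
}
```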
-    pub fn new(
-        mode: Mode,
-        block_store_path: &Path,
-        debug_output_new_blocks: bool,
-    ) -> Result<Arc<Self>> {
+    pub fn new(config: &Configuration) -> Result<Arc<Self>> {
+        let block_store_path = Path::new(&config.block_store_path);
         let mut block_store = BlockStore::new(block_store_path, LockStatus::Unlocked);
         block_store.create_files_if_they_do_not_exist()?;
 
-        let block_plain_text_path = debug_output_new_blocks.then(|| {
+        let block_plain_text_path = config.debug_output_new_blocks.then(|| {
             let mut path_buf = block_store_path.to_path_buf();
             path_buf.push("blocks.json");
             path_buf
         });
 
         let kura = Arc::new(Self {
-            mode,
+            mode: config.init_mode,
             block_store: Mutex::new(block_store),
             block_data: Mutex::new(Vec::new()),
             block_plain_text_path,
@@ -1054,9 +1051,13 @@ mod tests {
     #[tokio::test]
     async fn strict_init_kura() {
         let temp_dir = TempDir::new().unwrap();
-        Kura::new(Mode::Strict, temp_dir.path(), false)
-            .unwrap()
-            .init()
-            .unwrap();
+        Kura::new(&Configuration {
+            init_mode: Mode::Strict,
+            block_store_path: temp_dir.path().to_str().unwrap().into(),
+            debug_output_new_blocks: false,
+        })
+        .unwrap()
+        .init()
+        .unwrap();
     }
 }
diff --git a/core/src/lib.rs b/core/src/lib.rs
index e0c6109e31f..3d17d7a16e7 100644
--- a/core/src/lib.rs
+++ b/core/src/lib.rs
@@ -4,6 +4,7 @@ pub mod block;
 pub mod block_sync;
 pub mod executor;
 pub mod gossiper;
+pub mod kiso;
 pub mod kura;
 pub mod modules;
 pub mod query;
@@ -15,9 +16,10 @@ pub mod tx;
 pub mod wsv;
 
 use core::time::Duration;
-use std::collections::{BTreeSet, HashMap, HashSet};
+use std::collections::BTreeSet;
 
 use gossiper::TransactionGossip;
+use indexmap::{IndexMap, IndexSet};
 use iroha_data_model::{permission::Permissions, prelude::*};
 use iroha_primitives::unique_vec::UniqueVec;
 use parity_scale_codec::{Decode, Encode};
@@ -38,16 +40,16 @@ pub type IrohaNetwork = iroha_p2p::NetworkHandle<NetworkMessage>;
 
 pub type PeersIds = UniqueVec<PeerId>;
 
 /// Parameters set.
-pub type Parameters = HashSet<Parameter>;
+pub type Parameters = IndexSet<Parameter>;
 
 /// API to work with collections of [`DomainId`] : [`Domain`] mappings.
-pub type DomainsMap = HashMap<DomainId, Domain>;
+pub type DomainsMap = IndexMap<DomainId, Domain>;
 
 /// API to work with collections of [`RoleId`]: [`Role`] mappings.
-pub type RolesMap = HashMap<RoleId, Role>;
+pub type RolesMap = IndexMap<RoleId, Role>;
 
 /// API to work with collections of [`AccountId`] [`Permissions`] mappings.
-pub type PermissionTokensMap = HashMap<AccountId, Permissions>;
+pub type PermissionTokensMap = IndexMap<AccountId, Permissions>;
 
 /// API to work with collections of [`AccountId`] to [`RoleId`] mappings.
 pub type AccountRolesSet = BTreeSet<RoleIdWithOwner>;
diff --git a/core/src/query/store.rs b/core/src/query/store.rs
index 92684de1e09..ae1957da793 100644
--- a/core/src/query/store.rs
+++ b/core/src/query/store.rs
@@ -2,11 +2,11 @@
 use std::{
     cmp::Ordering,
-    collections::HashMap,
     num::NonZeroU64,
     time::{Duration, Instant},
 };
 
+use indexmap::IndexMap;
 use iroha_config::live_query_store::Configuration;
 use iroha_data_model::{
     asset::AssetValue,
@@ -67,7 +67,7 @@ type LiveQuery = Batched<Vec<Value>>;
 
 /// Clients can handle their queries using [`LiveQueryStoreHandle`]
 #[derive(Debug)]
 pub struct LiveQueryStore {
-    queries: HashMap<QueryId, LiveQuery>,
+    queries: IndexMap<QueryId, LiveQuery>,
     query_idle_time: Duration,
 }
 
@@ -75,7 +75,7 @@ impl LiveQueryStore {
     /// Construct [`LiveQueryStore`] from configuration.
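The switch from `std` maps to `indexmap` (enforced by the new `core/clippy.toml`) trades a little memory for a deterministic iteration order, which matters when peers must walk these collections identically. A minimal illustration:

```rust
use indexmap::IndexMap;

// Sketch: iteration follows insertion order, unlike `HashMap`,
// so map-walking logic behaves the same on every peer.
fn ordered_keys() -> Vec<&'static str> {
    let mut map = IndexMap::new();
    map.insert("wonderland", 13);
    map.insert("garden_of_live_flowers", 44);
    map.keys().copied().collect() // always ["wonderland", "garden_of_live_flowers"]
}
```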
pub fn from_configuration(cfg: Configuration) -> Self { Self { - queries: HashMap::default(), + queries: IndexMap::new(), query_idle_time: Duration::from_millis(cfg.query_idle_time_ms.into()), } } diff --git a/core/src/queue.rs b/core/src/queue.rs index bc3d860cd1f..2872ebc9365 100644 --- a/core/src/queue.rs +++ b/core/src/queue.rs @@ -1,10 +1,10 @@ //! Module with queue actor use core::time::Duration; -use std::collections::HashSet; use crossbeam_queue::ArrayQueue; use dashmap::{mapref::entry::Entry, DashMap}; use eyre::{Report, Result}; +use indexmap::IndexSet; use iroha_config::queue::Configuration; use iroha_crypto::HashOf; use iroha_data_model::{account::AccountId, transaction::prelude::*}; @@ -326,7 +326,7 @@ impl Queue { self.pop_from_queue(&mut seen_queue, wsv, &mut expired_transactions_queue) }); - let transactions_hashes: HashSet> = + let transactions_hashes: IndexSet> = transactions.iter().map(|tx| tx.payload().hash()).collect(); let txs = txs_from_queue .filter(|tx| !transactions_hashes.contains(&tx.payload().hash())) @@ -512,7 +512,7 @@ mod tests { .build() .expect("Default queue config should always build") }); - let instructions: [InstructionExpr; 0] = []; + let instructions: [InstructionBox; 0] = []; let tx = TransactionBuilder::new("alice@wonderland".parse().expect("Valid")) .with_instructions(instructions); let tx_limits = TransactionLimits { diff --git a/core/src/smartcontracts/isi/account.rs b/core/src/smartcontracts/isi/account.rs index 2c1e67ca25c..eff4dbaebbc 100644 --- a/core/src/smartcontracts/isi/account.rs +++ b/core/src/smartcontracts/isi/account.rs @@ -478,10 +478,9 @@ pub mod isi { /// Account-related [`Query`] instructions. pub mod query { - use eyre::{Result, WrapErr}; + use eyre::Result; use iroha_data_model::{ account::Account, - evaluate::ExpressionEvaluator, permission::PermissionToken, query::{error::QueryExecutionFail as Error, MetadataValue}, }; @@ -494,13 +493,10 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let account_id = wsv - .evaluate(&self.id) - .wrap_err("Failed to evaluate account id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let account_id = &self.id; iroha_logger::trace!(%account_id, roles=?wsv.world.roles); - wsv.account(&account_id)?; - Ok(Box::new(wsv.account_roles(&account_id).cloned())) + wsv.account(account_id)?; + Ok(Box::new(wsv.account_roles(account_id).cloned())) } } @@ -510,13 +506,10 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let account_id = wsv - .evaluate(&self.id) - .wrap_err("Failed to evaluate account id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let account_id = &self.id; iroha_logger::trace!(%account_id, accounts=?wsv.world.domains); Ok(Box::new( - wsv.account_permission_tokens(&account_id)?.cloned(), + wsv.account_permission_tokens(account_id)?.cloned(), )) } } @@ -539,12 +532,9 @@ pub mod query { impl ValidQuery for FindAccountById { #[metrics(+"find_account_by_id")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to evaluate id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; iroha_logger::trace!(%id); - wsv.map_account(&id, Clone::clone).map_err(Into::into) + wsv.map_account(id, Clone::clone).map_err(Into::into) } } @@ -554,10 +544,7 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let name = wsv - .evaluate(&self.name) - .wrap_err("Failed to evaluate account name") - .map_err(|e| 
Error::Evaluate(e.to_string()))?; + let name = self.name.clone(); iroha_logger::trace!(%name); Ok(Box::new( wsv.domains() @@ -581,30 +568,21 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let id = wsv - .evaluate(&self.domain_id) - .wrap_err("Failed to evaluate domain id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.domain_id; iroha_logger::trace!(%id); - Ok(Box::new(wsv.domain(&id)?.accounts.values().cloned())) + Ok(Box::new(wsv.domain(id)?.accounts.values().cloned())) } } impl ValidQuery for FindAccountKeyValueByIdAndKey { #[metrics(+"find_account_key_value_by_id_and_key")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to evaluate account id") - .map_err(|e| Error::Evaluate(e.to_string()))?; - let key = wsv - .evaluate(&self.key) - .wrap_err("Failed to evaluate key") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; + let key = &self.key; iroha_logger::trace!(%id, %key); - wsv.map_account(&id, |account| account.metadata.get(&key).map(Clone::clone))? - .ok_or_else(|| FindError::MetadataKey(key).into()) + wsv.map_account(id, |account| account.metadata.get(key).map(Clone::clone))? + .ok_or_else(|| FindError::MetadataKey(key.clone()).into()) .map(Into::into) } } @@ -615,10 +593,7 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let asset_definition_id = wsv - .evaluate(&self.asset_definition_id) - .wrap_err("Failed to evaluate asset id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let asset_definition_id = self.asset_definition_id.clone(); iroha_logger::trace!(%asset_definition_id); Ok(Box::new( diff --git a/core/src/smartcontracts/isi/asset.rs b/core/src/smartcontracts/isi/asset.rs index 6d5fd0ccda3..4aaf3e19168 100644 --- a/core/src/smartcontracts/isi/asset.rs +++ b/core/src/smartcontracts/isi/asset.rs @@ -416,7 +416,7 @@ pub mod isi { /// Asset-related query implementations. 
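With expression evaluation dropped from queries, fields such as `id` now hold concrete values, so the `Error::Evaluate` paths above disappear entirely. A sketch of constructing such a query directly (the exact constructor signature is assumed):

```rust
use iroha_data_model::prelude::*;

// Sketch: the query carries the `AccountId` itself rather than an
// `EvaluatesTo<AccountId>` expression, so execution cannot fail on
// an evaluation step.
fn find_alice() -> FindAccountById {
    let id: AccountId = "alice@wonderland".parse().expect("valid account id");
    FindAccountById::new(id)
}
```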
pub mod query { - use eyre::{Result, WrapErr as _}; + use eyre::Result; use iroha_data_model::{ asset::{Asset, AssetDefinition}, query::{ @@ -464,12 +464,9 @@ pub mod query { impl ValidQuery for FindAssetById { #[metrics(+"find_asset_by_id")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to get asset id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; iroha_logger::trace!(%id); - wsv.asset(&id).map_err(|asset_err| { + wsv.asset(id).map_err(|asset_err| { if let Err(definition_err) = wsv.asset_definition(&id.definition_id) { definition_err.into() } else { @@ -482,12 +479,9 @@ pub mod query { impl ValidQuery for FindAssetDefinitionById { #[metrics(+"find_asset_defintion_by_id")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to get asset definition id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; - let entry = wsv.asset_definition(&id).map_err(Error::from)?; + let entry = wsv.asset_definition(id).map_err(Error::from)?; Ok(entry) } @@ -499,10 +493,7 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let name = wsv - .evaluate(&self.name) - .wrap_err("Failed to get asset name") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let name = self.name.clone(); iroha_logger::trace!(%name); Ok(Box::new( wsv.domains() @@ -530,12 +521,9 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let id = wsv - .evaluate(&self.account_id) - .wrap_err("Failed to get account id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.account_id; iroha_logger::trace!(%id); - Ok(Box::new(wsv.account_assets(&id)?.cloned())) + Ok(Box::new(wsv.account_assets(id)?.cloned())) } } @@ -545,10 +533,7 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let id = wsv - .evaluate(&self.asset_definition_id) - .wrap_err("Failed to get asset definition id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = self.asset_definition_id.clone(); iroha_logger::trace!(%id); Ok(Box::new( wsv.domains() @@ -576,13 +561,10 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let id = wsv - .evaluate(&self.domain_id) - .wrap_err("Failed to get domain id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.domain_id; iroha_logger::trace!(%id); Ok(Box::new( - wsv.domain(&id)? + wsv.domain(id)? 
.accounts .values() .flat_map(|account| account.assets.values()) @@ -597,14 +579,8 @@ pub mod query { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, Error> { - let domain_id = wsv - .evaluate(&self.domain_id) - .wrap_err("Failed to get domain id") - .map_err(|e| Error::Evaluate(e.to_string()))?; - let asset_definition_id = wsv - .evaluate(&self.asset_definition_id) - .wrap_err("Failed to get asset definition id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let domain_id = self.domain_id.clone(); + let asset_definition_id = self.asset_definition_id.clone(); let domain = wsv.domain(&domain_id)?; let _definition = domain .asset_definitions @@ -632,13 +608,10 @@ pub mod query { impl ValidQuery for FindAssetQuantityById { #[metrics(+"find_asset_quantity_by_id")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to get asset id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; iroha_logger::trace!(%id); let value = wsv - .asset(&id) + .asset(id) .map_err(|asset_err| { if let Err(definition_err) = wsv.asset_definition(&id.definition_id) { Error::Find(definition_err) @@ -656,12 +629,9 @@ pub mod query { impl ValidQuery for FindTotalAssetQuantityByAssetDefinitionId { #[metrics(+"find_total_asset_quantity_by_asset_definition_id")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to get asset definition id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; iroha_logger::trace!(%id); - let asset_value = wsv.asset_total_amount(&id)?; + let asset_value = wsv.asset_total_amount(id)?; Ok(asset_value) } } @@ -669,15 +639,9 @@ pub mod query { impl ValidQuery for FindAssetKeyValueByIdAndKey { #[metrics(+"find_asset_key_value_by_id_and_key")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to get asset id") - .map_err(|e| Error::Evaluate(e.to_string()))?; - let key = wsv - .evaluate(&self.key) - .wrap_err("Failed to get key") - .map_err(|e| Error::Evaluate(e.to_string()))?; - let asset = wsv.asset(&id).map_err(|asset_err| { + let id = &self.id; + let key = &self.key; + let asset = wsv.asset(id).map_err(|asset_err| { if let Err(definition_err) = wsv.asset_definition(&id.definition_id) { Error::Find(definition_err) } else { @@ -691,8 +655,8 @@ pub mod query { .map_err(eyre::Error::from) .map_err(|e| Error::Conversion(e.to_string()))?; store - .get(&key) - .ok_or_else(|| Error::Find(FindError::MetadataKey(key))) + .get(key) + .ok_or_else(|| Error::Find(FindError::MetadataKey(key.clone()))) .cloned() .map(Into::into) } diff --git a/core/src/smartcontracts/isi/block.rs b/core/src/smartcontracts/isi/block.rs index 4f241372ef1..08f4af0fb6e 100644 --- a/core/src/smartcontracts/isi/block.rs +++ b/core/src/smartcontracts/isi/block.rs @@ -1,8 +1,7 @@ //! 
This module contains trait implementations related to block queries -use eyre::{Result, WrapErr}; +use eyre::Result; use iroha_data_model::{ block::{BlockHeader, SignedBlock}, - evaluate::ExpressionEvaluator, query::{ block::FindBlockHeaderByHash, error::{FindError, QueryExecutionFail}, @@ -43,10 +42,7 @@ impl ValidQuery for FindAllBlockHeaders { impl ValidQuery for FindBlockHeaderByHash { #[metrics(+"find_block_header")] fn execute(&self, wsv: &WorldStateView) -> Result { - let hash = wsv - .evaluate(&self.hash) - .wrap_err("Failed to evaluate hash") - .map_err(|e| QueryExecutionFail::Evaluate(e.to_string()))?; + let hash = self.hash; let block = wsv .all_blocks() diff --git a/core/src/smartcontracts/isi/domain.rs b/core/src/smartcontracts/isi/domain.rs index b7930106a04..81855321e7c 100644 --- a/core/src/smartcontracts/isi/domain.rs +++ b/core/src/smartcontracts/isi/domain.rs @@ -301,7 +301,7 @@ pub mod isi { /// Query module provides [`Query`] Domain related implementations. pub mod query { - use eyre::{Result, WrapErr}; + use eyre::Result; use iroha_data_model::{ domain::Domain, query::{error::QueryExecutionFail as Error, MetadataValue}, @@ -322,29 +322,20 @@ pub mod query { impl ValidQuery for FindDomainById { #[metrics(+"find_domain_by_id")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to get domain id") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; iroha_logger::trace!(%id); - Ok(wsv.domain(&id)?.clone()) + Ok(wsv.domain(id)?.clone()) } } impl ValidQuery for FindDomainKeyValueByIdAndKey { #[metrics(+"find_domain_key_value_by_id_and_key")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to get domain id") - .map_err(|e| Error::Evaluate(e.to_string()))?; - let key = wsv - .evaluate(&self.key) - .wrap_err("Failed to get key") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; + let key = &self.key; iroha_logger::trace!(%id, %key); - wsv.map_domain(&id, |domain| domain.metadata.get(&key).map(Clone::clone))? - .ok_or_else(|| FindError::MetadataKey(key).into()) + wsv.map_domain(id, |domain| domain.metadata.get(key).map(Clone::clone))? + .ok_or_else(|| FindError::MetadataKey(key.clone()).into()) .map(Into::into) } } @@ -352,20 +343,14 @@ pub mod query { impl ValidQuery for FindAssetDefinitionKeyValueByIdAndKey { #[metrics(+"find_asset_definition_key_value_by_id_and_key")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .wrap_err("Failed to get asset definition id") - .map_err(|e| Error::Evaluate(e.to_string()))?; - let key = wsv - .evaluate(&self.key) - .wrap_err("Failed to get key") - .map_err(|e| Error::Evaluate(e.to_string()))?; + let id = &self.id; + let key = &self.key; iroha_logger::trace!(%id, %key); Ok(wsv - .asset_definition(&id)? + .asset_definition(id)? .metadata - .get(&key) - .ok_or(FindError::MetadataKey(key)) + .get(key) + .ok_or(FindError::MetadataKey(key.clone())) .cloned() .map(Into::into)?) 
} diff --git a/core/src/smartcontracts/isi/mod.rs b/core/src/smartcontracts/isi/mod.rs index 7f80bbfade1..a0bf424f1ef 100644 --- a/core/src/smartcontracts/isi/mod.rs +++ b/core/src/smartcontracts/isi/mod.rs @@ -12,12 +12,10 @@ pub mod world; use eyre::Result; use iroha_data_model::{ - evaluate::ExpressionEvaluator, isi::{error::InstructionExecutionError as Error, *}, prelude::*, }; -use iroha_logger::prelude::{Span, *}; -use iroha_primitives::fixed::Fixed; +use iroha_logger::prelude::*; use super::Execute; use crate::{prelude::*, wsv::WorldStateView}; @@ -31,343 +29,167 @@ pub trait Registrable { fn build(self, authority: &AccountId) -> Self::Target; } -impl Execute for InstructionExpr { +impl Execute for InstructionBox { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { iroha_logger::debug!(isi=%self, "Executing"); - macro_rules! match_all { - ($($isi:ident),+ $(,)?) => { - - match self { $( - InstructionExpr::$isi(isi) => isi.execute(authority, wsv), )+ - } - }; - } - - match_all! { - Register, - Unregister, - Mint, - Burn, - Transfer, - If, - Pair, - Sequence, - Fail, - SetKeyValue, - RemoveKeyValue, - Grant, - Revoke, - ExecuteTrigger, - SetParameter, - NewParameter, - Upgrade, - Log, + match self { + Self::Register(isi) => isi.execute(authority, wsv), + Self::Unregister(isi) => isi.execute(authority, wsv), + Self::Mint(isi) => isi.execute(authority, wsv), + Self::Burn(isi) => isi.execute(authority, wsv), + Self::Transfer(isi) => isi.execute(authority, wsv), + Self::Fail(isi) => isi.execute(authority, wsv), + Self::SetKeyValue(isi) => isi.execute(authority, wsv), + Self::RemoveKeyValue(isi) => isi.execute(authority, wsv), + Self::Grant(isi) => isi.execute(authority, wsv), + Self::Revoke(isi) => isi.execute(authority, wsv), + Self::ExecuteTrigger(isi) => isi.execute(authority, wsv), + Self::SetParameter(isi) => isi.execute(authority, wsv), + Self::NewParameter(isi) => isi.execute(authority, wsv), + Self::Upgrade(isi) => isi.execute(authority, wsv), + Self::Log(isi) => isi.execute(authority, wsv), } } } -impl Execute for RegisterExpr { +impl Execute for RegisterBox { #[iroha_logger::log(name = "register", skip_all, fields(id))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let object_id = wsv.evaluate(&self.object)?; - Span::current().record("id", &object_id.to_string()); - match object_id { - RegistrableBox::Peer(object) => Register:: { object }.execute(authority, wsv), - RegistrableBox::Domain(object) => Register:: { object }.execute(authority, wsv), - RegistrableBox::Account(object) => { - Register:: { object }.execute(authority, wsv) - } - RegistrableBox::AssetDefinition(object) => { - Register:: { object }.execute(authority, wsv) - } - RegistrableBox::Asset(object) => Register:: { object }.execute(authority, wsv), - RegistrableBox::Trigger(object) => { - Register::> { object }.execute(authority, wsv) - } - RegistrableBox::Role(object) => Register:: { object }.execute(authority, wsv), + match self { + Self::Peer(isi) => isi.execute(authority, wsv), + Self::Domain(isi) => isi.execute(authority, wsv), + Self::Account(isi) => isi.execute(authority, wsv), + Self::AssetDefinition(isi) => isi.execute(authority, wsv), + Self::Asset(isi) => isi.execute(authority, wsv), + Self::Role(isi) => isi.execute(authority, wsv), + Self::Trigger(isi) => isi.execute(authority, wsv), } } } -impl Execute for UnregisterExpr { +impl Execute for UnregisterBox { #[iroha_logger::log(name = "unregister", skip_all, fields(id))] fn 
execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let object_id = wsv.evaluate(&self.object_id)?; - Span::current().record("id", &object_id.to_string()); - match object_id { - IdBox::AccountId(object_id) => { - Unregister:: { object_id }.execute(authority, wsv) - } - IdBox::AssetId(object_id) => Unregister:: { object_id }.execute(authority, wsv), - IdBox::AssetDefinitionId(object_id) => { - Unregister:: { object_id }.execute(authority, wsv) - } - IdBox::DomainId(object_id) => { - Unregister:: { object_id }.execute(authority, wsv) - } - IdBox::PeerId(object_id) => Unregister:: { object_id }.execute(authority, wsv), - IdBox::RoleId(object_id) => Unregister:: { object_id }.execute(authority, wsv), - IdBox::TriggerId(object_id) => { - Unregister::> { object_id }.execute(authority, wsv) - } - IdBox::PermissionTokenId(_) | IdBox::ParameterId(_) => { - Err(Error::Evaluate(InstructionType::Unregister.into())) - } + match self { + Self::Peer(isi) => isi.execute(authority, wsv), + Self::Domain(isi) => isi.execute(authority, wsv), + Self::Account(isi) => isi.execute(authority, wsv), + Self::AssetDefinition(isi) => isi.execute(authority, wsv), + Self::Asset(isi) => isi.execute(authority, wsv), + Self::Role(isi) => isi.execute(authority, wsv), + Self::Trigger(isi) => isi.execute(authority, wsv), } } } -impl Execute for MintExpr { +impl Execute for MintBox { #[iroha_logger::log(name = "Mint", skip_all, fields(destination))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let destination_id = wsv.evaluate(&self.destination_id)?; - let object = wsv.evaluate(&self.object)?; - Span::current().record("destination", &destination_id.to_string()); - iroha_logger::trace!(?object, %authority); - match (destination_id, object) { - (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::U32(object))) => { - Mint:: { - object, - destination_id, - } - .execute(authority, wsv) - } - (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::U128(object))) => { - Mint:: { - object, - destination_id, - } - .execute(authority, wsv) - } - (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::Fixed(object))) => { - Mint:: { - object, - destination_id, - } - .execute(authority, wsv) - } - (IdBox::AccountId(destination_id), Value::PublicKey(object)) => { - Mint:: { - object, - destination_id, - } - .execute(authority, wsv) - } - (IdBox::AccountId(destination_id), Value::SignatureCheckCondition(object)) => { - Mint:: { - object, - destination_id, - } - .execute(authority, wsv) - } - (IdBox::TriggerId(destination_id), Value::Numeric(NumericValue::U32(object))) => { - Mint::> { - object, - destination_id, - } - .execute(authority, wsv) - } - _ => Err(Error::Evaluate(InstructionType::Mint.into())), + match self { + Self::Account(isi) => isi.execute(authority, wsv), + Self::Asset(isi) => isi.execute(authority, wsv), + Self::TriggerRepetitions(isi) => isi.execute(authority, wsv), } } } -impl Execute for BurnExpr { - #[iroha_logger::log(name = "burn", skip_all, fields(destination))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let destination_id = wsv.evaluate(&self.destination_id)?; - let object = wsv.evaluate(&self.object)?; - Span::current().record("destination", &destination_id.to_string()); - iroha_logger::trace!(?object, %authority); - match (destination_id, object) { - (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::U32(object))) => { - Burn:: { - object, - 
destination_id, - } - .execute(authority, wsv) - } - (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::U128(object))) => Burn { - object, - destination_id, - } - .execute(authority, wsv), - (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::Fixed(object))) => Burn { - object, - destination_id, - } - .execute(authority, wsv), - (IdBox::AccountId(destination_id), Value::PublicKey(object)) => Burn { - object, - destination_id, - } - .execute(authority, wsv), - (IdBox::TriggerId(destination_id), Value::Numeric(NumericValue::U32(object))) => { - Burn::> { - object, - destination_id, - } - .execute(authority, wsv) - } - // TODO: Not implemented yet. - // (IdBox::AccountId(account_id), Value::SignatureCheckCondition(condition)) => { - // Burn::{condition, account_id}.execute(authority, wsv) - // } - _ => Err(Error::Evaluate(InstructionType::Burn.into())), +impl Execute for AccountMintBox { + fn execute( + self, + authority: &AccountId, + wsv: &mut WorldStateView, + ) -> std::prelude::v1::Result<(), Error> { + match self { + Self::PublicKey(isi) => isi.execute(authority, wsv), + Self::SignatureCheckCondition(isi) => isi.execute(authority, wsv), } } } -impl Execute for TransferExpr { - #[iroha_logger::log(name = "transfer", skip_all, fields(from, to))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let source_id = wsv.evaluate(&self.source_id)?; - let destination_id = wsv.evaluate(&self.destination_id)?; - let object = wsv.evaluate(&self.object)?; - iroha_logger::trace!(%object, %authority); - Span::current().record("from", source_id.to_string()); - Span::current().record("to", destination_id.to_string()); - - match (source_id, object, destination_id) { - ( - IdBox::AssetId(source_id), - Value::Numeric(value), - IdBox::AccountId(destination_id), - ) => match value { - NumericValue::U32(object) => Transfer { - source_id, - object, - destination_id, - } - .execute(authority, wsv), - NumericValue::U128(object) => Transfer { - source_id, - object, - destination_id, - } - .execute(authority, wsv), - NumericValue::Fixed(object) => Transfer { - source_id, - object, - destination_id, - } - .execute(authority, wsv), - _ => Err(Error::Evaluate(InstructionType::Transfer.into())), - }, - ( - IdBox::AccountId(source_id), - Value::Id(IdBox::AssetDefinitionId(object)), - IdBox::AccountId(destination_id), - ) => Transfer { - source_id, - object, - destination_id, - } - .execute(authority, wsv), - ( - IdBox::AccountId(source_id), - Value::Id(IdBox::DomainId(object)), - IdBox::AccountId(destination_id), - ) => Transfer { - source_id, - object, - destination_id, - } - .execute(authority, wsv), - _ => Err(Error::Evaluate(InstructionType::Transfer.into())), +impl Execute for AssetMintBox { + fn execute( + self, + authority: &AccountId, + wsv: &mut WorldStateView, + ) -> std::prelude::v1::Result<(), Error> { + match self { + Self::Quantity(isi) => isi.execute(authority, wsv), + Self::BigQuantity(isi) => isi.execute(authority, wsv), + Self::Fixed(isi) => isi.execute(authority, wsv), } } } -impl Execute for SetKeyValueExpr { +impl Execute for BurnBox { + #[iroha_logger::log(name = "burn", skip_all, fields(destination))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let key = wsv.evaluate(&self.key)?; - let value = wsv.evaluate(&self.value)?; - iroha_logger::trace!(?key, ?value, %authority); - match wsv.evaluate(&self.object_id)? 
{ - IdBox::AssetId(object_id) => SetKeyValue:: { - object_id, - key, - value, - } - .execute(authority, wsv), - IdBox::AssetDefinitionId(object_id) => SetKeyValue:: { - object_id, - key, - value, - } - .execute(authority, wsv), - IdBox::AccountId(object_id) => SetKeyValue:: { - object_id, - key, - value, - } - .execute(authority, wsv), - IdBox::DomainId(object_id) => SetKeyValue:: { - object_id, - key, - value, - } - .execute(authority, wsv), - _ => Err(Error::Evaluate(InstructionType::SetKeyValue.into())), + match self { + Self::AccountPublicKey(isi) => isi.execute(authority, wsv), + Self::Asset(isi) => isi.execute(authority, wsv), + Self::TriggerRepetitions(isi) => isi.execute(authority, wsv), } } } -impl Execute for RemoveKeyValueExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let key = wsv.evaluate(&self.key)?; - iroha_logger::trace!(?key, %authority); - match wsv.evaluate(&self.object_id)? { - IdBox::AssetId(object_id) => { - RemoveKeyValue:: { object_id, key }.execute(authority, wsv) - } - IdBox::AssetDefinitionId(object_id) => { - RemoveKeyValue:: { object_id, key }.execute(authority, wsv) - } - IdBox::AccountId(object_id) => { - RemoveKeyValue:: { object_id, key }.execute(authority, wsv) - } - IdBox::DomainId(object_id) => { - RemoveKeyValue:: { object_id, key }.execute(authority, wsv) - } - _ => Err(Error::Evaluate(InstructionType::RemoveKeyValue.into())), +impl Execute for AssetBurnBox { + fn execute( + self, + authority: &AccountId, + wsv: &mut WorldStateView, + ) -> std::prelude::v1::Result<(), Error> { + match self { + Self::Quantity(isi) => isi.execute(authority, wsv), + Self::BigQuantity(isi) => isi.execute(authority, wsv), + Self::Fixed(isi) => isi.execute(authority, wsv), } } } -impl Execute for ConditionalExpr { +impl Execute for TransferBox { + #[iroha_logger::log(name = "transfer", skip_all, fields(from, to))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - iroha_logger::trace!(?self); - if wsv.evaluate(&self.condition)? 
{ - self.then.execute(authority, wsv)?; - } else if let Some(otherwise) = self.otherwise { - otherwise.execute(authority, wsv)?; + match self { + Self::Domain(isi) => isi.execute(authority, wsv), + Self::AssetDefinition(isi) => isi.execute(authority, wsv), + Self::Asset(isi) => isi.execute(authority, wsv), } - Ok(()) } } -impl Execute for PairExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - iroha_logger::trace!(?self); +impl Execute for AssetTransferBox { + fn execute( + self, + authority: &AccountId, + wsv: &mut WorldStateView, + ) -> std::prelude::v1::Result<(), Error> { + match self { + Self::Quantity(isi) => isi.execute(authority, wsv), + Self::BigQuantity(isi) => isi.execute(authority, wsv), + Self::Fixed(isi) => isi.execute(authority, wsv), + } + } +} - self.left_instruction.execute(authority, wsv)?; - self.right_instruction.execute(authority, wsv)?; - Ok(()) +impl Execute for SetKeyValueBox { + fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + match self { + Self::Domain(isi) => isi.execute(authority, wsv), + Self::Account(isi) => isi.execute(authority, wsv), + Self::AssetDefinition(isi) => isi.execute(authority, wsv), + Self::Asset(isi) => isi.execute(authority, wsv), + } } } -impl Execute for SequenceExpr { - #[iroha_logger::log(skip_all, name = "Sequence", fields(count))] +impl Execute for RemoveKeyValueBox { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - Span::current().record("count", self.instructions.len()); - for instruction in self.instructions { - iroha_logger::trace!(%instruction); - instruction.execute(authority, wsv)?; + match self { + Self::Domain(isi) => isi.execute(authority, wsv), + Self::Account(isi) => isi.execute(authority, wsv), + Self::AssetDefinition(isi) => isi.execute(authority, wsv), + Self::Asset(isi) => isi.execute(authority, wsv), } - Ok(()) } } @@ -379,86 +201,26 @@ impl Execute for Fail { } } -impl Execute for GrantExpr { +impl Execute for GrantBox { #[iroha_logger::log(name = "grant", skip_all, fields(object))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let destination_id = wsv.evaluate(&self.destination_id)?; - let object = wsv.evaluate(&self.object)?; - Span::current().record("object", &object.to_string()); - iroha_logger::trace!(%destination_id, %authority); - match object { - Value::PermissionToken(object) => Grant:: { - object, - destination_id, - } - .execute(authority, wsv), - Value::Id(IdBox::RoleId(object)) => Grant:: { - object, - destination_id, - } - .execute(authority, wsv), - _ => Err(Error::Evaluate(InstructionType::Grant.into())), + match self { + Self::PermissionToken(sub_isi) => sub_isi.execute(authority, wsv), + Self::Role(sub_isi) => sub_isi.execute(authority, wsv), } } } -impl Execute for RevokeExpr { +impl Execute for RevokeBox { #[iroha_logger::log(name = "revoke", skip_all, fields(object))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let destination_id = wsv.evaluate(&self.destination_id)?; - let object = wsv.evaluate(&self.object)?; - Span::current().record("object", &object.to_string()); - iroha_logger::trace!(?destination_id, ?object, %authority); - match object { - Value::PermissionToken(object) => Revoke:: { - object, - destination_id, - } - .execute(authority, wsv), - Value::Id(IdBox::RoleId(object)) => Revoke:: { - object, - destination_id, - } - .execute(authority, wsv), - _ => 
Err(Error::Evaluate(InstructionType::Revoke.into())), + match self { + Self::PermissionToken(sub_isi) => sub_isi.execute(authority, wsv), + Self::Role(sub_isi) => sub_isi.execute(authority, wsv), } } } -impl Execute for SetParameterExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let parameter = wsv.evaluate(&self.parameter)?; - SetParameter { parameter }.execute(authority, wsv) - } -} - -impl Execute for NewParameterExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let parameter = wsv.evaluate(&self.parameter)?; - NewParameter { parameter }.execute(authority, wsv) - } -} - -impl Execute for UpgradeExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let object = wsv.evaluate(&self.object)?; - match object { - UpgradableBox::Executor(object) => { - Upgrade:: { object }.execute(authority, wsv) - } - } - } -} - -impl Execute for LogExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let level = wsv.evaluate(&self.level)?; - let msg = wsv.evaluate(&self.msg)?; - - Log { level, msg }.execute(authority, wsv) - } -} - pub mod prelude { //! Re-export important traits and types for glob import `(::*)` pub use super::*; @@ -483,11 +245,11 @@ mod tests { let account_id = AccountId::from_str("alice@wonderland")?; let (public_key, _) = KeyPair::generate()?.into(); let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; - RegisterExpr::new(Domain::new(DomainId::from_str("wonderland")?)) + Register::domain(Domain::new(DomainId::from_str("wonderland")?)) .execute(&genesis_account_id, &mut wsv)?; - RegisterExpr::new(Account::new(account_id, [public_key])) + Register::account(Account::new(account_id, [public_key])) .execute(&genesis_account_id, &mut wsv)?; - RegisterExpr::new(AssetDefinition::store(asset_definition_id)) + Register::asset_definition(AssetDefinition::store(asset_definition_id)) .execute(&genesis_account_id, &mut wsv)?; Ok(wsv) } @@ -499,8 +261,8 @@ mod tests { let account_id = AccountId::from_str("alice@wonderland")?; let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - SetKeyValueExpr::new( - IdBox::from(asset_id.clone()), + SetKeyValue::asset( + asset_id.clone(), Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], ) @@ -526,8 +288,8 @@ mod tests { let kura = Kura::blank_kura_for_testing(); let mut wsv = wsv_with_test_domains(&kura)?; let account_id = AccountId::from_str("alice@wonderland")?; - SetKeyValueExpr::new( - IdBox::from(account_id.clone()), + SetKeyValue::account( + account_id.clone(), Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], ) @@ -555,8 +317,8 @@ mod tests { let mut wsv = wsv_with_test_domains(&kura)?; let definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let account_id = AccountId::from_str("alice@wonderland")?; - SetKeyValueExpr::new( - IdBox::from(definition_id.clone()), + SetKeyValue::asset_definition( + definition_id.clone(), Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], ) @@ -583,8 +345,8 @@ mod tests { let mut wsv = wsv_with_test_domains(&kura)?; let domain_id = DomainId::from_str("wonderland")?; let account_id = AccountId::from_str("alice@wonderland")?; - SetKeyValueExpr::new( - IdBox::from(domain_id.clone()), + SetKeyValue::domain( + domain_id.clone(), Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], ) @@ -613,7 +375,7 @@ mod 
tests { let trigger_id = TriggerId::from_str("test_trigger_id")?; assert!(matches!( - ExecuteTriggerExpr::new(trigger_id) + ExecuteTrigger::new(trigger_id) .execute(&account_id, &mut wsv) .expect_err("Error expected"), Error::Find(_) @@ -635,14 +397,14 @@ mod tests { .expect("Failed to generate KeyPair") .into(); let register_account = - RegisterExpr::new(Account::new(fake_account_id.clone(), [public_key])); + Register::account(Account::new(fake_account_id.clone(), [public_key])); register_account.execute(&account_id, &mut wsv)?; // register the trigger - let register_trigger = RegisterExpr::new(Trigger::new( + let register_trigger = Register::trigger(Trigger::new( trigger_id.clone(), Action::new( - Vec::::new(), + Vec::::new(), Repeats::Indefinitely, account_id.clone(), TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new( @@ -655,11 +417,11 @@ mod tests { register_trigger.execute(&account_id, &mut wsv)?; // execute with the valid account - ExecuteTriggerExpr::new(trigger_id.clone()).execute(&account_id, &mut wsv)?; + ExecuteTrigger::new(trigger_id.clone()).execute(&account_id, &mut wsv)?; // execute with the fake account assert!(matches!( - ExecuteTriggerExpr::new(trigger_id) + ExecuteTrigger::new(trigger_id) .execute(&fake_account_id, &mut wsv) .expect_err("Error expected"), Error::InvariantViolation(_) diff --git a/core/src/smartcontracts/isi/query.rs b/core/src/smartcontracts/isi/query.rs index 19671e06587..d14ed740d0b 100644 --- a/core/src/smartcontracts/isi/query.rs +++ b/core/src/smartcontracts/isi/query.rs @@ -265,14 +265,14 @@ mod tests { wsv.config.transaction_limits = limits; let valid_tx = { - let instructions: [InstructionExpr; 0] = []; + let instructions: [InstructionBox; 0] = []; let tx = TransactionBuilder::new(ALICE_ID.clone()) .with_instructions(instructions) .sign(ALICE_KEYS.clone())?; AcceptedTransaction::accept(tx, &limits)? }; let invalid_tx = { - let isi = Fail::new("fail"); + let isi = Fail::new("fail".to_owned()); let tx = TransactionBuilder::new(ALICE_ID.clone()) .with_instructions([isi.clone(), isi]) .sign(ALICE_KEYS.clone())?; @@ -413,7 +413,7 @@ mod tests { let query_handle = LiveQueryStore::test().start(); let mut wsv = WorldStateView::new(world_with_test_domains(), kura.clone(), query_handle); - let instructions: [InstructionExpr; 0] = []; + let instructions: [InstructionBox; 0] = []; let tx = TransactionBuilder::new(ALICE_ID.clone()) .with_instructions(instructions) .sign(ALICE_KEYS.clone())?; @@ -432,9 +432,7 @@ mod tests { kura.store_block(vcb); let unapplied_tx = TransactionBuilder::new(ALICE_ID.clone()) - .with_instructions([UnregisterExpr::new( - "account@domain".parse::().unwrap(), - )]) + .with_instructions([Unregister::account("account@domain".parse().unwrap())]) .sign(ALICE_KEYS.clone())?; let wrong_hash = unapplied_tx.hash(); let not_found = FindTransactionByHash::new(wrong_hash).execute(&wsv); diff --git a/core/src/smartcontracts/isi/triggers/mod.rs b/core/src/smartcontracts/isi/triggers/mod.rs index 7c814b6fe47..2b8da8ed19f 100644 --- a/core/src/smartcontracts/isi/triggers/mod.rs +++ b/core/src/smartcontracts/isi/triggers/mod.rs @@ -1,9 +1,7 @@ //! This module contains implementations of smart-contract traits and //! instructions for triggers in Iroha. 
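The `isi/mod.rs` hunks above replace the removed `match_all!` macro and the `*Expr` wrappers with plain enum dispatch. A minimal sketch of the pattern with stand-in types (the `Fail`/`Log` payloads here are illustrative, not the crate's real definitions): every `InstructionBox` variant wraps an already-concrete payload, so `execute` is one static `match` with no expression-evaluation step in front of it.

```rust
// Illustrative sketch only -- simplified stand-ins for the crate's types.
trait Execute {
    fn execute(self) -> Result<(), String>;
}

struct Fail { message: String }

impl Execute for Fail {
    fn execute(self) -> Result<(), String> {
        Err(self.message)
    }
}

struct Log { msg: String }

impl Execute for Log {
    fn execute(self) -> Result<(), String> {
        println!("{}", self.msg);
        Ok(())
    }
}

enum InstructionBox {
    Fail(Fail),
    Log(Log),
}

impl Execute for InstructionBox {
    fn execute(self) -> Result<(), String> {
        // One arm per variant; no expression-evaluation step remains.
        match self {
            Self::Fail(isi) => isi.execute(),
            Self::Log(isi) => isi.execute(),
        }
    }
}

fn main() {
    let isi = InstructionBox::Log(Log { msg: "hello".to_owned() });
    assert!(isi.execute().is_ok());
    let fail = InstructionBox::Fail(Fail { message: "boom".to_owned() });
    assert!(fail.execute().is_err());
}
```

The same shape repeats in the diff for `RegisterBox`, `UnregisterBox`, `MintBox`, `GrantBox`, and the rest: the outer enum only routes to the statically typed inner instruction.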
-use iroha_data_model::{ - evaluate::ExpressionEvaluator, isi::error::MathError, prelude::*, query::error::FindError, -}; +use iroha_data_model::{isi::error::MathError, prelude::*, query::error::FindError}; use iroha_telemetry::metrics; pub mod set; @@ -154,13 +152,13 @@ pub mod isi { } } - impl Execute for ExecuteTriggerExpr { + impl Execute for ExecuteTrigger { #[metrics(+"execute_trigger")] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let id = wsv.evaluate(&self.trigger_id)?; + let id = &self.trigger_id; wsv.triggers() - .inspect_by_id(&id, |action| -> Result<(), Error> { + .inspect_by_id(id, |action| -> Result<(), Error> { let allow_execute = if let TriggeringFilterBox::ExecuteTrigger(filter) = action.clone_and_box().filter { @@ -186,7 +184,7 @@ pub mod isi { .ok_or_else(|| Error::Find(Box::new(FindError::Trigger(id.clone())))) .and_then(core::convert::identity)?; - wsv.execute_trigger(id, authority); + wsv.execute_trigger(id.clone(), authority); Ok(()) } @@ -217,43 +215,37 @@ pub mod query { impl ValidQuery for FindTriggerById { #[metrics(+"find_trigger_by_id")] fn execute(&self, wsv: &WorldStateView) -> Result, Error> { - let id = wsv - .evaluate(&self.id) - .map_err(|e| Error::Evaluate(format!("Failed to evaluate trigger id. {e}")))?; + let id = &self.id; iroha_logger::trace!(%id); // Can't use just `LoadedActionTrait::clone_and_box` cause this will trigger lifetime mismatch #[allow(clippy::redundant_closure_for_method_calls)] let loaded_action = wsv .triggers() - .inspect_by_id(&id, |action| action.clone_and_box()) + .inspect_by_id(id, |action| action.clone_and_box()) .ok_or_else(|| Error::Find(FindError::Trigger(id.clone())))?; let action = wsv.triggers().get_original_action(loaded_action); // TODO: Should we redact the metadata if the account is not the authority/owner? - Ok(Trigger::new(id, action)) + Ok(Trigger::new(id.clone(), action)) } } impl ValidQuery for FindTriggerKeyValueByIdAndKey { #[metrics(+"find_trigger_key_value_by_id_and_key")] fn execute(&self, wsv: &WorldStateView) -> Result { - let id = wsv - .evaluate(&self.id) - .map_err(|e| Error::Evaluate(format!("Failed to evaluate trigger id. {e}")))?; - let key = wsv - .evaluate(&self.key) - .map_err(|e| Error::Evaluate(format!("Failed to evaluate key. {e}")))?; + let id = &self.id; + let key = &self.key; iroha_logger::trace!(%id, %key); wsv.triggers() - .inspect_by_id(&id, |action| { + .inspect_by_id(id, |action| { action .metadata() - .get(&key) + .get(key) .cloned() .ok_or_else(|| FindError::MetadataKey(key.clone()).into()) }) - .ok_or_else(|| Error::Find(FindError::Trigger(id)))? + .ok_or_else(|| Error::Find(FindError::Trigger(id.clone())))? .map(Into::into) } } @@ -265,13 +257,11 @@ pub mod query { wsv: &'wsv WorldStateView, ) -> eyre::Result> + 'wsv>, Error> { - let domain_id = wsv - .evaluate(&self.domain_id) - .map_err(|e| Error::Evaluate(format!("Failed to evaluate domain id. {e}")))?; + let domain_id = &self.domain_id; Ok(Box::new( wsv.triggers() - .inspect_by_domain_id(&domain_id, |trigger_id, action| { + .inspect_by_domain_id(domain_id, |trigger_id, action| { (trigger_id.clone(), action.clone_and_box()) }) .map(|(trigger_id, action)| { diff --git a/core/src/smartcontracts/isi/triggers/set.rs b/core/src/smartcontracts/isi/triggers/set.rs index 624fc2a6acd..57e32ae1406 100644 --- a/core/src/smartcontracts/isi/triggers/set.rs +++ b/core/src/smartcontracts/isi/triggers/set.rs @@ -9,8 +9,9 @@ //! trigger hooks. 
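Each query hunk above follows the same mechanical rewrite: the `wsv.evaluate(&self.id)` call, its `WrapErr` context, and the `Error::Evaluate` branch all disappear because query structs now store a concrete ID rather than an `EvaluatesTo` expression. A minimal sketch under that reading, with stand-in types:

```rust
// Stand-in types; the real ones live in `iroha_data_model`.
struct AccountId(String);
struct FindAccountById { id: AccountId }

impl FindAccountById {
    // After the refactor the field is already a concrete `AccountId`,
    // so execution borrows it directly -- no `evaluate`, no
    // `Error::Evaluate` plumbing left to fail.
    fn execute(&self) -> Result<&AccountId, String> {
        Ok(&self.id)
    }
}

fn main() {
    let query = FindAccountById { id: AccountId("alice@wonderland".to_owned()) };
    let id = query.execute().expect("a concrete id needs no evaluation");
    println!("found account: {}", id.0);
}
```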
use core::cmp::min; -use std::{collections::HashMap, fmt}; +use std::fmt; +use indexmap::IndexMap; use iroha_crypto::HashOf; use iroha_data_model::{ events::Filter as EventFilter, @@ -138,17 +139,17 @@ impl + Clone> LoadedActionTrait for Loaded #[derive(Debug, Default)] pub struct Set { /// Triggers using [`DataEventFilter`] - data_triggers: HashMap>, + data_triggers: IndexMap>, /// Triggers using [`PipelineEventFilter`] - pipeline_triggers: HashMap>, + pipeline_triggers: IndexMap>, /// Triggers using [`TimeEventFilter`] - time_triggers: HashMap>, + time_triggers: IndexMap>, /// Triggers using [`ExecuteTriggerEventFilter`] - by_call_triggers: HashMap>, + by_call_triggers: IndexMap>, /// Trigger ids with type of events they process - ids: HashMap, + ids: IndexMap, /// Original [`WasmSmartContract`]s by [`TriggerId`] for querying purposes. - original_contracts: HashMap, WasmSmartContract>, + original_contracts: IndexMap, WasmSmartContract>, /// List of actions that should be triggered by events provided by `handle_*` methods. /// Vector is used to save the exact triggers order. matched_ids: Vec<(Event, TriggerId)>, @@ -157,14 +158,14 @@ pub struct Set { /// Helper struct for serializing triggers. struct TriggersWithContext<'s, F> { /// Triggers being serialized - triggers: &'s HashMap>, + triggers: &'s IndexMap>, /// Containing Set, used for looking up original [`WasmSmartContract`]s /// during serialization. set: &'s Set, } impl<'s, F> TriggersWithContext<'s, F> { - fn new(triggers: &'s HashMap>, set: &'s Set) -> Self { + fn new(triggers: &'s IndexMap>, set: &'s Set) -> Self { Self { triggers, set } } } @@ -236,7 +237,7 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> { while let Some(key) = map.next_key::()? { match key.as_str() { "data_triggers" => { - let triggers: HashMap> = + let triggers: IndexMap> = map.next_value()?; for (id, action) in triggers { set.add_data_trigger(self.loader.engine, Trigger::new(id, action)) @@ -244,7 +245,7 @@ } } "pipeline_triggers" => { - let triggers: HashMap> = + let triggers: IndexMap> = map.next_value()?; for (id, action) in triggers { set.add_pipeline_trigger( @@ -255,7 +256,7 @@ } } "time_triggers" => { - let triggers: HashMap> = + let triggers: IndexMap> = map.next_value()?; for (id, action) in triggers { set.add_time_trigger(self.loader.engine, Trigger::new(id, action)) @@ -263,7 +264,7 @@ } } "by_call_triggers" => { - let triggers: HashMap> = + let triggers: IndexMap> = map.next_value()?; for (id, action) in triggers { set.add_by_call_trigger( @@ -387,7 +388,7 @@ impl Set { engine: &wasmtime::Engine, trigger: Trigger, event_type: TriggeringEventType, - map: impl FnOnce(&mut Self) -> &mut HashMap>, + map: impl FnOnce(&mut Self) -> &mut IndexMap>, ) -> Result { if self.contains(trigger.id()) { return Ok(false); @@ -816,8 +817,8 @@ impl Set { /// Remove actions with zero execution count from `triggers` fn remove_zeros( - ids: &mut HashMap, - triggers: &mut HashMap>, + ids: &mut IndexMap, + triggers: &mut IndexMap>, ) { let to_remove: Vec = triggers .iter() @@ -861,7 +862,7 @@ pub enum LoadedExecutable { /// Loaded WASM Wasm(LoadedWasm), /// Vector of ISI - Instructions(Vec), + Instructions(Vec), } impl core::fmt::Debug for LoadedExecutable { diff --git a/core/src/smartcontracts/isi/tx.rs b/core/src/smartcontracts/isi/tx.rs index b33fa69f7f5..f103853a9d5 100644 --- 
a/core/src/smartcontracts/isi/tx.rs +++ b/core/src/smartcontracts/isi/tx.rs @@ -2,11 +2,10 @@ use std::sync::Arc; -use eyre::{Result, WrapErr}; +use eyre::Result; use iroha_crypto::HashOf; use iroha_data_model::{ block::SignedBlock, - evaluate::ExpressionEvaluator, prelude::*, query::{ error::{FindError, QueryExecutionFail}, @@ -66,7 +65,7 @@ impl ValidQuery for FindAllTransactions { .flat_map(BlockTransactionIter::new) .map(|tx| TransactionQueryOutput { block_hash: tx.block_hash(), - transaction: tx.value(), + transaction: Box::new(tx.value()), }), )) } @@ -78,10 +77,7 @@ impl ValidQuery for FindTransactionsByAccountId { &self, wsv: &'wsv WorldStateView, ) -> Result + 'wsv>, QueryExecutionFail> { - let account_id = wsv - .evaluate(&self.account_id) - .wrap_err("Failed to get account id") - .map_err(|e| QueryExecutionFail::Evaluate(e.to_string()))?; + let account_id = self.account_id.clone(); Ok(Box::new( wsv.all_blocks() @@ -89,7 +85,7 @@ impl ValidQuery for FindTransactionsByAccountId { .filter(move |tx| *tx.authority() == account_id) .map(|tx| TransactionQueryOutput { block_hash: tx.block_hash(), - transaction: tx.value(), + transaction: Box::new(tx.value()), }), )) } @@ -98,10 +94,7 @@ impl ValidQuery for FindTransactionsByAccountId { impl ValidQuery for FindTransactionByHash { #[metrics(+"find_transaction_by_hash")] fn execute(&self, wsv: &WorldStateView) -> Result { - let tx_hash = wsv - .evaluate(&self.hash) - .wrap_err("Failed to get hash") - .map_err(|e| QueryExecutionFail::Evaluate(e.to_string()))?; + let tx_hash = self.hash; iroha_logger::trace!(%tx_hash); if !wsv.has_transaction(tx_hash) { return Err(FindError::Transaction(tx_hash).into()); @@ -118,6 +111,7 @@ impl ValidQuery for FindTransactionByHash { .iter() .find(|transaction| transaction.value.hash() == tx_hash) .cloned() + .map(Box::new) .map(|transaction| TransactionQueryOutput { block_hash, transaction, diff --git a/core/src/smartcontracts/isi/world.rs b/core/src/smartcontracts/isi/world.rs index 64199fd9eb8..44ae2f2eb2e 100644 --- a/core/src/smartcontracts/isi/world.rs +++ b/core/src/smartcontracts/isi/world.rs @@ -214,10 +214,10 @@ pub mod isi { } } - impl Execute for Upgrade { + impl Execute for Upgrade { #[metrics(+"upgrade_executor")] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let raw_executor = self.object; + let raw_executor = self.executor; // Cloning executor to avoid multiple mutable borrows of `wsv`. // Also it's a cheap operation. 
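In the `tx.rs` query hunks above, `TransactionQueryOutput` now carries `Box::new(tx.value())` instead of the bare value. Presumably this keeps the output struct (and every iterator item built from it) pointer-sized no matter how large a signed transaction is; a sketch of that size effect with hypothetical stand-in types:

```rust
// Hypothetical shapes, only to show the size effect of boxing a large field.
struct SignedTx([u8; 512]); // stand-in for a large signed transaction

struct Inline { _tx: SignedTx, _block_hash: u64 }
struct Boxed { _tx: Box<SignedTx>, _block_hash: u64 }

fn main() {
    // The boxed variant stays at pointer + hash size however big the
    // transaction grows; the inline variant embeds all 512 bytes.
    assert!(std::mem::size_of::<Boxed>() < std::mem::size_of::<Inline>());
    println!(
        "inline = {} bytes, boxed = {} bytes",
        std::mem::size_of::<Inline>(),
        std::mem::size_of::<Boxed>()
    );
}
```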
@@ -303,13 +303,11 @@ pub mod query { impl ValidQuery for FindRoleByRoleId { #[metrics(+"find_role_by_role_id")] fn execute(&self, wsv: &WorldStateView) -> Result { - let role_id = wsv - .evaluate(&self.id) - .map_err(|e| Error::Evaluate(e.to_string()))?; + let role_id = &self.id; iroha_logger::trace!(%role_id); - wsv.world.roles.get(&role_id).map_or_else( - || Err(Error::Find(FindError::Role(role_id))), + wsv.world.roles.get(role_id).map_or_else( + || Err(Error::Find(FindError::Role(role_id.clone()))), |role_ref| Ok(role_ref.clone()), ) } diff --git a/core/src/smartcontracts/mod.rs b/core/src/smartcontracts/mod.rs index 05d0195defd..35587c6580b 100644 --- a/core/src/smartcontracts/mod.rs +++ b/core/src/smartcontracts/mod.rs @@ -7,15 +7,12 @@ pub mod isi; pub mod wasm; -use std::collections::BTreeMap; - use iroha_data_model::{ - evaluate::ExpressionEvaluator, isi::error::InstructionExecutionError as Error, prelude::*, - query::error::QueryExecutionFail, + isi::error::InstructionExecutionError as Error, prelude::*, query::error::QueryExecutionFail, }; pub use isi::*; -use self::query::{Lazy, LazyValue}; +use self::query::Lazy; use crate::wsv::WorldStateView; /// Trait implementations should provide actions to apply changes on [`WorldStateView`]. @@ -44,50 +41,3 @@ where wsv: &'wsv WorldStateView, ) -> Result<::Lazy<'wsv>, QueryExecutionFail>; } - -impl ExpressionEvaluator for WorldStateView { - fn evaluate( - &self, - expression: &E, - ) -> Result { - expression.evaluate(&Context::new(self)) - } -} - -#[derive(Clone)] -pub(crate) struct Context<'wsv> { - values: BTreeMap, - wsv: &'wsv WorldStateView, -} - -impl<'a> Context<'a> { - /// Create new [`Self`] - pub fn new(wsv: &'a WorldStateView) -> Self { - Self { - values: BTreeMap::new(), - wsv, - } - } -} - -impl iroha_data_model::evaluate::Context for Context<'_> { - fn query(&self, query: &QueryBox) -> Result { - query - .execute(self.wsv) - .map(|value| match value { - LazyValue::Value(value) => value, - // NOTE: This will only be executed when evaluating an expression for an - // instruction, i.e. it will only be executed from the executor. - LazyValue::Iter(iter) => Value::Vec(iter.collect()), - }) - .map_err(Into::into) - } - - fn get(&self, name: &Name) -> Option<&Value> { - self.values.get(name) - } - - fn update(&mut self, other: impl IntoIterator) { - self.values.extend(other) - } -} diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs index 0d0116dba2f..cde7fe6a624 100644 --- a/core/src/smartcontracts/wasm.rs +++ b/core/src/smartcontracts/wasm.rs @@ -13,7 +13,7 @@ use iroha_config::{ use iroha_data_model::{ account::AccountId, executor::{self, MigrationResult}, - isi::InstructionExpr, + isi::InstructionBox, permission::PermissionTokenSchema, prelude::*, query::{QueryBox, QueryId, QueryRequest, QueryWithParameters}, @@ -87,7 +87,7 @@ mod import { /// Execute `instruction` on host #[codec::wrap_trait_fn] fn execute_instruction( - instruction: InstructionExpr, + instruction: InstructionBox, state: &mut S, ) -> Result<(), ValidationFail>; } @@ -100,7 +100,7 @@ mod import { fn get_validate_transaction_payload(state: &S) -> Validate; #[codec::wrap_trait_fn] - fn get_validate_instruction_payload(state: &S) -> Validate; + fn get_validate_instruction_payload(state: &S) -> Validate; #[codec::wrap_trait_fn] fn get_validate_query_payload(state: &S) -> Validate; @@ -328,9 +328,8 @@ impl LimitsExecutor { pub mod state { //! 
All supported states for [`Runtime`](super::Runtime) - use std::collections::HashSet; - use derive_more::Constructor; + use indexmap::IndexSet; use super::*; @@ -360,7 +359,7 @@ pub mod state { pub(super) store_limits: StoreLimits, /// Span inside of which all logs are recorded for this smart contract pub(super) log_span: Span, - pub(super) executed_queries: HashSet, + pub(super) executed_queries: IndexSet, /// Borrowed [`WorldStateView`] kind pub(super) wsv: W, /// Concrete state for specific executable @@ -380,14 +379,14 @@ pub mod state { authority, store_limits: store_limits_from_config(&config), log_span, - executed_queries: HashSet::new(), + executed_queries: IndexSet::new(), wsv, specific_state, } } /// Take executed queries leaving an empty set - pub fn take_executed_queries(&mut self) -> HashSet { + pub fn take_executed_queries(&mut self) -> IndexSet { std::mem::take(&mut self.executed_queries) } } @@ -481,7 +480,7 @@ pub mod state { pub type ValidateQuery = Validate; /// State kind for executing `validate_instruction()` entrypoint of executor - pub type ValidateInstruction = Validate; + pub type ValidateInstruction = Validate; /// State kind for executing `migrate()` entrypoint of executor #[derive(Copy, Clone)] @@ -803,7 +802,7 @@ where impl<'wrld, S> Runtime, S>> { fn default_execute_instruction( - instruction: InstructionExpr, + instruction: InstructionBox, state: &mut state::CommonState, S>, ) -> Result<(), ValidationFail> { debug!(%instruction, "Executing"); @@ -913,7 +912,7 @@ impl<'wrld> import::traits::ExecuteOperations> #[codec::wrap] fn execute_instruction( - instruction: InstructionExpr, + instruction: InstructionBox, state: &mut state::SmartContract<'wrld>, ) -> Result<(), ValidationFail> { if let Some(limits_executor) = state.specific_state.limits_executor.as_mut() { @@ -985,7 +984,7 @@ impl<'wrld> import::traits::ExecuteOperations> #[codec::wrap] fn execute_instruction( - instruction: InstructionExpr, + instruction: InstructionBox, state: &mut state::Trigger<'wrld>, ) -> Result<(), ValidationFail> { Self::default_execute_instruction(instruction, state) @@ -1016,7 +1015,7 @@ where #[codec::wrap] fn execute_instruction( - instruction: InstructionExpr, + instruction: InstructionBox, state: &mut state::CommonState, S>, ) -> Result<(), ValidationFail> { debug!(%instruction, "Executing as executor"); @@ -1110,7 +1109,7 @@ impl<'wrld> import::traits::GetExecutorPayloads, - ) -> Validate { + ) -> Validate { panic!("Executor `validate_transaction()` entrypoint should not query payload for `validate_instruction()` entrypoint") } @@ -1142,7 +1141,7 @@ impl<'wrld> Runtime> { wsv: &'wrld mut WorldStateView, authority: &AccountId, module: &wasmtime::Module, - instruction: InstructionExpr, + instruction: InstructionBox, ) -> Result { let span = wasm_log_span!("Running `validate_instruction()`"); @@ -1185,7 +1184,7 @@ impl<'wrld> import::traits::GetExecutorPayloads, - ) -> Validate { + ) -> Validate { Validate { authority: state.authority.clone(), block_height: state.wsv.0.height(), @@ -1254,7 +1253,7 @@ impl<'wrld> import::traits::ExecuteOperations, ) -> Result<(), ValidationFail> { panic!("Executor `validate_query()` entrypoint should not execute instructions") @@ -1279,7 +1278,7 @@ impl<'wrld> import::traits::GetExecutorPayloads, - ) -> Validate { + ) -> Validate { panic!("Executor `validate_query()` entrypoint should not query payload for `validate_instruction()` entrypoint") } @@ -1378,7 +1377,7 @@ impl<'wrld> import::traits::GetExecutorPayloads> #[codec::wrap] fn 
get_validate_instruction_payload( _state: &state::executor::Migrate<'wrld>, - ) -> Validate { + ) -> Validate { panic!("Executor `migrate()` entrypoint should not query payload for `validate_instruction()` entrypoint") } @@ -1716,8 +1715,8 @@ mod tests { let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); - let register_isi = RegisterExpr::new(Account::new(new_authority, [])); - encode_hex(InstructionExpr::from(register_isi)) + let register_isi = Register::account(Account::new(new_authority, [])); + encode_hex(InstructionBox::from(register_isi)) }; let wat = format!( @@ -1802,8 +1801,8 @@ mod tests { let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); - let register_isi = RegisterExpr::new(Account::new(new_authority, [])); - encode_hex(InstructionExpr::from(register_isi)) + let register_isi = Register::account(Account::new(new_authority, [])); + encode_hex(InstructionBox::from(register_isi)) }; let wat = format!( @@ -1851,8 +1850,8 @@ mod tests { let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); - let register_isi = RegisterExpr::new(Account::new(new_authority, [])); - encode_hex(InstructionExpr::from(register_isi)) + let register_isi = Register::account(Account::new(new_authority, [])); + encode_hex(InstructionBox::from(register_isi)) }; let wat = format!( diff --git a/core/src/sumeragi/main_loop.rs b/core/src/sumeragi/main_loop.rs index 441c0946b2d..33e5e41515f 100644 --- a/core/src/sumeragi/main_loop.rs +++ b/core/src/sumeragi/main_loop.rs @@ -186,7 +186,7 @@ impl Sumeragi { &mut self, shutdown_receiver: &mut tokio::sync::oneshot::Receiver<()>, ) -> Result<(), EarlyReturn> { - trace!("Listen for genesis"); + info!(addr = %self.peer_id.address, "Listen for genesis"); loop { std::thread::sleep(Duration::from_millis(50)); @@ -223,6 +223,8 @@ impl Sumeragi { } }; + new_wsv.world_mut().trusted_peers_ids = + block.payload().commit_topology.clone(); self.commit_block(block, new_wsv); return Err(EarlyReturn::GenesisBlockReceivedAndCommitted); } @@ -295,7 +297,7 @@ impl Sumeragi { info!( addr=%self.peer_id.address, role=%self.current_topology.role(&self.peer_id), - block_height=%self.wsv.height(), + block_height=%block.payload().header.height, block_hash=%block.hash(), "{}", Strategy::LOG_MESSAGE, ); @@ -313,11 +315,8 @@ impl Sumeragi { // Parameters are updated before updating public copy of sumeragi self.update_params(); - let new_topology = Topology::recreate_topology( - block.as_ref(), - 0, - self.wsv.peers_ids().iter().cloned().collect(), - ); + let new_topology = + Topology::recreate_topology(block.as_ref(), 0, self.wsv.peers().cloned().collect()); let events = block.produce_events(); // https://github.com/hyperledger/iroha/issues/3396 @@ -801,10 +800,10 @@ pub(crate) fn run( }; span.exit(); - trace!( - me=%sumeragi.peer_id.public_key, + info!( + addr=%sumeragi.peer_id.address, role_in_next_round=%sumeragi.current_topology.role(&sumeragi.peer_id), - "Finished sumeragi init.", + "Sumeragi initialized", ); let mut voting_block = None; @@ -1125,7 +1124,7 @@ fn handle_block_sync( let last_committed_block = new_wsv .latest_block_ref() .expect("Not in genesis round so must have at least genesis block"); - let new_peers = new_wsv.peers_ids().clone(); + let new_peers = new_wsv.peers().cloned().collect(); let view_change_index = block.payload().header().view_change_index; Topology::recreate_topology(&last_committed_block, view_change_index, new_peers) }; @@ 
-1145,7 +1144,7 @@ fn handle_block_sync( let last_committed_block = new_wsv .latest_block_ref() .expect("Not in genesis round so must have at least genesis block"); - let new_peers = new_wsv.peers_ids().clone(); + let new_peers = new_wsv.peers().cloned().collect(); let view_change_index = block.payload().header().view_change_index; Topology::recreate_topology(&last_committed_block, view_change_index, new_peers) }; @@ -1210,7 +1209,7 @@ mod tests { // Create "genesis" block // Creating an instruction - let fail_box: InstructionExpr = Fail::new("Dummy isi").into(); + let fail_box: InstructionBox = Fail::new("Dummy isi".to_owned()).into(); // Making two transactions that have the same instruction let tx = TransactionBuilder::new(alice_id.clone()) @@ -1231,10 +1230,10 @@ mod tests { kura.store_block(genesis); // Making two transactions that have the same instruction - let create_asset_definition1 = RegisterExpr::new(AssetDefinition::quantity( + let create_asset_definition1 = Register::asset_definition(AssetDefinition::quantity( "xor1#wonderland".parse().expect("Valid"), )); - let create_asset_definition2 = RegisterExpr::new(AssetDefinition::quantity( + let create_asset_definition2 = Register::asset_definition(AssetDefinition::quantity( "xor2#wonderland".parse().expect("Valid"), )); diff --git a/core/src/sumeragi/mod.rs b/core/src/sumeragi/mod.rs index b6f3c7391f0..8c82663ee6c 100644 --- a/core/src/sumeragi/mod.rs +++ b/core/src/sumeragi/mod.rs @@ -10,7 +10,7 @@ use std::{ use eyre::{Result, WrapErr as _}; use iroha_config::sumeragi::Configuration; use iroha_crypto::{KeyPair, SignatureOf}; -use iroha_data_model::prelude::*; +use iroha_data_model::{block::SignedBlock, prelude::*}; use iroha_genesis::GenesisNetwork; use iroha_logger::prelude::*; use iroha_telemetry::metrics::Metrics; @@ -110,7 +110,10 @@ impl SumeragiHandle { pub fn update_metrics(&self) -> Result<()> { let online_peers_count: u64 = self .network - .online_peers(std::collections::HashSet::len) + .online_peers( + #[allow(clippy::disallowed_types)] + std::collections::HashSet::len, + ) .try_into() .expect("casting usize to u64"); @@ -223,6 +226,28 @@ impl SumeragiHandle { } } + fn replay_block( + block: &SignedBlock, + wsv: &mut WorldStateView, + current_topology: &Topology, + ) -> Topology { + let block = ValidBlock::validate(block.clone(), current_topology, wsv) + .expect("Kura blocks should be valid") + .commit(current_topology) + .expect("Kura blocks should be valid"); + + if block.payload().header.is_genesis() { + wsv.world_mut().trusted_peers_ids = block.payload().commit_topology.clone(); + } + + wsv.apply_without_execution(&block).expect( + "Block application in init should not fail. \ + Blocks loaded from kura assumed to be valid", + ); + + Topology::recreate_topology(block.as_ref(), 0, wsv.peers().cloned().collect()) + } + /// Start [`Sumeragi`] actor and return handle to it. /// /// # Panics @@ -251,7 +276,7 @@ impl SumeragiHandle { ) }); - let current_topology = match wsv.height() { + let mut current_topology = match wsv.height() { 0 => { assert!(!configuration.trusted_peers.peers.is_empty()); Topology::new(configuration.trusted_peers.peers.clone()) @@ -261,40 +286,21 @@ impl SumeragiHandle { "Sumeragi could not load block that was reported as present. 
\ Please check that the block storage was not disconnected.", ); - Topology::recreate_topology( - &block_ref, - 0, - wsv.peers_ids().iter().cloned().collect(), - ) + Topology::recreate_topology(&block_ref, 0, wsv.peers().cloned().collect()) } }; let block_iter_except_last = (&mut blocks_iter).take(block_count.saturating_sub(skip_block_count + 1)); for block in block_iter_except_last { - let block = ValidBlock::validate(Clone::clone(&block), ¤t_topology, &mut wsv) - .expect("Kura blocks should be valid") - .commit(¤t_topology) - .expect("Kura blocks should be valid"); - wsv.apply_without_execution(&block).expect( - "Block application in init should not fail. \ - Blocks loaded from kura assumed to be valid", - ); + current_topology = Self::replay_block(&block, &mut wsv, ¤t_topology); } // finalized_wsv is one block behind let finalized_wsv = wsv.clone(); - if let Some(latest_block) = blocks_iter.next() { - let latest_block = - ValidBlock::validate(Clone::clone(&latest_block), ¤t_topology, &mut wsv) - .expect("Kura blocks should be valid") - .commit(¤t_topology) - .expect("Kura blocks should be valid"); - wsv.apply_without_execution(&latest_block).expect( - "Block application in init should not fail. \ - Blocks loaded from kura assumed to be valid", - ); + if let Some(block) = blocks_iter.next() { + current_topology = Self::replay_block(&block, &mut wsv, ¤t_topology); } info!("Sumeragi has finished loading blocks and setting up the WSV"); diff --git a/core/src/sumeragi/network_topology.rs b/core/src/sumeragi/network_topology.rs index 05e92157d3e..4ba77806e45 100644 --- a/core/src/sumeragi/network_topology.rs +++ b/core/src/sumeragi/network_topology.rs @@ -1,7 +1,7 @@ //! Structures formalising the peer topology (e.g. which peers have which predefined roles). -use std::collections::HashSet; use derive_more::Display; +use indexmap::IndexSet; use iroha_crypto::{PublicKey, SignatureOf}; use iroha_data_model::{block::SignedBlock, prelude::PeerId}; use iroha_logger::trace; @@ -88,7 +88,7 @@ impl Topology { roles: &[Role], signatures: I, ) -> Vec> { - let mut public_keys: HashSet<&PublicKey> = HashSet::with_capacity(self.ordered_peers.len()); + let mut public_keys = IndexSet::with_capacity(self.ordered_peers.len()); for role in roles { match (role, self.is_non_empty(), self.is_consensus_required()) { (Role::Leader, Some(topology), _) => { diff --git a/core/src/sumeragi/view_change.rs b/core/src/sumeragi/view_change.rs index 11f24b90cfd..0b0ed73032c 100644 --- a/core/src/sumeragi/view_change.rs +++ b/core/src/sumeragi/view_change.rs @@ -1,9 +1,9 @@ //! Structures related to proofs and reasons of view changes. //! Where view change is a process of changing topology due to some faulty network behavior. -use std::collections::HashSet; use derive_more::{Deref, DerefMut}; use eyre::Result; +use indexmap::IndexSet; use iroha_crypto::{HashOf, KeyPair, PublicKey, SignatureOf, SignaturesOf}; use iroha_data_model::{block::SignedBlock, prelude::PeerId}; use parity_scale_codec::{Decode, Encode}; @@ -76,7 +76,7 @@ impl SignedProof { /// Verify if the proof is valid, given the peers in `topology`. 
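`network_topology.rs` and `view_change.rs` here make the same `std` to `indexmap` swap seen earlier in `queue.rs`, `set.rs`, and `wasm.rs`. A plausible reason: these sets are iterated in consensus-relevant code, and `IndexSet` iterates in insertion order on every peer and every run, whereas `HashSet` order is randomized per process. A small demonstration using the `indexmap` crate this diff already depends on:

```rust
use indexmap::IndexSet;

fn main() {
    let mut peers: IndexSet<&str> = IndexSet::new();
    peers.insert("peer_a:1337");
    peers.insert("peer_b:1338");
    peers.insert("peer_c:1339");

    // Insertion order is preserved deterministically across runs and across
    // peers, which `std::collections::HashSet` does not guarantee.
    let order: Vec<&str> = peers.iter().copied().collect();
    assert_eq!(order, ["peer_a:1337", "peer_b:1338", "peer_c:1339"]);
}
```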
fn verify(&self, peers: &[PeerId], max_faults: usize) -> bool { - let peer_public_keys: HashSet<&PublicKey> = + let peer_public_keys: IndexSet<&PublicKey> = peers.iter().map(|peer_id| &peer_id.public_key).collect(); let valid_count = self diff --git a/core/src/tx.rs b/core/src/tx.rs index 790d8942326..01ee688edcf 100644 --- a/core/src/tx.rs +++ b/core/src/tx.rs @@ -36,179 +36,6 @@ pub enum AcceptTransactionFail { UnexpectedGenesisAccountSignature, } -mod len { - use iroha_data_model::{expression::*, query::QueryBox, Value}; - - pub trait ExprLen { - fn len(&self) -> usize; - } - - impl> ExprLen for EvaluatesTo { - fn len(&self) -> usize { - self.expression.len() - } - } - - impl ExprLen for Expression { - fn len(&self) -> usize { - use Expression::*; - - match self { - Add(add) => add.len(), - Subtract(subtract) => subtract.len(), - Greater(greater) => greater.len(), - Less(less) => less.len(), - Equal(equal) => equal.len(), - Not(not) => not.len(), - And(and) => and.len(), - Or(or) => or.len(), - If(if_expression) => if_expression.len(), - Raw(raw) => raw.len(), - Query(query) => query.len(), - Contains(contains) => contains.len(), - ContainsAll(contains_all) => contains_all.len(), - ContainsAny(contains_any) => contains_any.len(), - Where(where_expression) => where_expression.len(), - ContextValue(context_value) => context_value.len(), - Multiply(multiply) => multiply.len(), - Divide(divide) => divide.len(), - Mod(modulus) => modulus.len(), - RaiseTo(raise_to) => raise_to.len(), - } - } - } - impl ExprLen for ContextValue { - fn len(&self) -> usize { - 1 - } - } - impl ExprLen for QueryBox { - fn len(&self) -> usize { - 1 - } - } - - impl ExprLen for Add { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for Subtract { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for Multiply { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for RaiseTo { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for Divide { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for Mod { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for Greater { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for Less { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for Equal { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for And { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - impl ExprLen for Or { - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } - } - - impl ExprLen for Not { - fn len(&self) -> usize { - self.expression.len() + 1 - } - } - - impl ExprLen for Contains { - fn len(&self) -> usize { - self.collection.len() + self.element.len() + 1 - } - } - impl ExprLen for ContainsAll { - fn len(&self) -> usize { - self.collection.len() + self.elements.len() + 1 - } - } - impl ExprLen for ContainsAny { - fn len(&self) -> usize { - self.collection.len() + self.elements.len() + 1 - } - } - - impl ExprLen for If { - fn len(&self) -> usize { - // TODO: This is wrong because we don't evaluate both branches - self.condition.len() + self.then.len() + self.otherwise.len() + 1 - } - } - impl ExprLen for Where { - fn len(&self) -> usize { - self.expression.len() + 
self.values.values().map(EvaluatesTo::len).sum::() + 1 - } - } -} - -fn instruction_size(isi: &InstructionExpr) -> usize { - use len::ExprLen as _; - use InstructionExpr::*; - - match isi { - Register(isi) => isi.object.len() + 1, - Unregister(isi) => isi.object_id.len() + 1, - Mint(isi) => isi.destination_id.len() + isi.object.len() + 1, - Burn(isi) => isi.destination_id.len() + isi.object.len() + 1, - Transfer(isi) => isi.destination_id.len() + isi.object.len() + isi.source_id.len() + 1, - If(isi) => { - let otherwise = isi.otherwise.as_ref().map_or(0, instruction_size); - isi.condition.len() + instruction_size(&isi.then) + otherwise + 1 - } - Pair(isi) => { - instruction_size(&isi.left_instruction) + instruction_size(&isi.right_instruction) + 1 - } - Sequence(isi) => isi.instructions.iter().map(instruction_size).sum::() + 1, - SetKeyValue(isi) => isi.object_id.len() + isi.key.len() + isi.value.len() + 1, - RemoveKeyValue(isi) => isi.object_id.len() + isi.key.len() + 1, - Grant(isi) => isi.object.len() + isi.destination_id.len() + 1, - Revoke(isi) => isi.object.len() + isi.destination_id.len() + 1, - SetParameter(isi) => isi.parameter.len() + 1, - NewParameter(isi) => isi.parameter.len() + 1, - Upgrade(isi) => isi.object.len() + 1, - Log(isi) => isi.msg.len() + isi.msg.len() + 1, - Fail(_) | ExecuteTrigger(_) => 1, - } -} - impl AcceptedTransaction { /// Accept genesis transaction. Transition from [`GenesisTransaction`] to [`AcceptedTransaction`]. pub fn accept_genesis(tx: GenesisTransaction) -> Self { @@ -230,14 +57,10 @@ impl AcceptedTransaction { match &transaction.payload().instructions { Executable::Instructions(instructions) => { - let instruction_count: u64 = instructions - .iter() - .map(instruction_size) - .sum::() - .try_into() - .expect("`usize` should always fit in `u64`"); - - if instruction_count > limits.max_instruction_number { + let instruction_count = instructions.len(); + if u64::try_from(instruction_count).expect("`usize` should always fit into `u64`") + > limits.max_instruction_number + { return Err(AcceptTransactionFail::TransactionLimit( TransactionLimitError { reason: format!( @@ -426,67 +249,3 @@ impl TransactionExecutor { }) } } - -#[cfg(test)] -mod tests { - use core::str::FromStr as _; - - use super::*; - - fn if_instruction( - c: impl Into, - then: InstructionExpr, - otherwise: Option, - ) -> InstructionExpr { - let condition: Expression = c.into(); - let condition = EvaluatesTo::new_unchecked(condition); - ConditionalExpr { - condition, - then, - otherwise, - } - .into() - } - - fn fail() -> InstructionExpr { - Fail { - message: String::default(), - } - .into() - } - - #[test] - fn len_empty_sequence() { - assert_eq!(instruction_size(&SequenceExpr::new(vec![]).into()), 1); - } - - #[test] - fn len_if_one_branch() { - let instructions = vec![if_instruction( - ContextValue { - value_name: Name::from_str("a").expect("Cannot fail."), - }, - fail(), - None, - )]; - - assert_eq!(instruction_size(&SequenceExpr::new(instructions).into()), 4); - } - - #[test] - fn len_sequence_if() { - let instructions = vec![ - fail(), - if_instruction( - ContextValue { - value_name: Name::from_str("b").expect("Cannot fail."), - }, - fail(), - Some(fail()), - ), - fail(), - ]; - - assert_eq!(instruction_size(&SequenceExpr::new(instructions).into()), 7); - } -} diff --git a/core/src/wsv.rs b/core/src/wsv.rs index 13a899845e9..0e653e6c488 100644 --- a/core/src/wsv.rs +++ b/core/src/wsv.rs @@ -1,15 +1,12 @@ //! 
This module provides the [`WorldStateView`] — an in-memory representation of the current blockchain //! state. use std::{ - borrow::Borrow, - collections::{BTreeSet, HashMap}, - fmt::Debug, - marker::PhantomData, - sync::Arc, + borrow::Borrow, collections::BTreeSet, fmt::Debug, marker::PhantomData, sync::Arc, time::Duration, }; use eyre::Result; +use indexmap::IndexMap; use iroha_config::{ base::proxy::Builder, wsv::{Configuration, ConfigurationProxy}, @@ -277,7 +274,7 @@ pub struct WorldStateView { /// Blockchain. pub block_hashes: Vec>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: HashMap, u64>, + pub transactions: IndexMap, u64>, /// Buffer containing events generated during `WorldStateView::apply`. Renewed on every block commit. #[serde(skip)] pub events_buffer: Vec, @@ -605,7 +602,7 @@ impl WorldStateView { fn process_instructions( &mut self, - instructions: impl IntoIterator, + instructions: impl IntoIterator, authority: &AccountId, ) -> Result<()> { instructions.into_iter().try_for_each(|instruction| { @@ -858,12 +855,6 @@ impl WorldStateView { &mut self.world } - /// Returns reference for trusted peer ids - #[inline] - pub fn peers_ids(&self) -> &PeersIds { - &self.world.trusted_peers_ids - } - /// Return an iterator over blockchain block hashes starting with the block of the given `height` pub fn block_hashes_from_height(&self, height: usize) -> Vec> { self.block_hashes @@ -942,7 +933,7 @@ impl WorldStateView { Self { world, config, - transactions: HashMap::new(), + transactions: IndexMap::new(), block_hashes: Vec::new(), events_buffer: Vec::new(), new_tx_amounts: Arc::new(Mutex::new(Vec::new())), diff --git a/core/test_network/src/lib.rs b/core/test_network/src/lib.rs index a5fbb690c6f..bf61826e05c 100644 --- a/core/test_network/src/lib.rs +++ b/core/test_network/src/lib.rs @@ -26,7 +26,7 @@ use iroha_primitives::{ unique_vec, unique_vec::UniqueVec, }; -use rand::seq::IteratorRandom; +use rand::{seq::IteratorRandom, thread_rng}; use serde_json::json; use tempfile::TempDir; use tokio::{ @@ -118,7 +118,7 @@ impl TestGenesis for GenesisNetwork { upgrade_executor_permission, ] { first_transaction - .append_instruction(GrantExpr::new(permission, alice_id.clone()).into()); + .append_instruction(Grant::permission_token(permission, alice_id.clone()).into()); } if submit_genesis { @@ -170,7 +170,7 @@ impl Network { start_port: Option, ) -> (Self, Client) { let mut configuration = Configuration::test(); - configuration.logger.max_log_level = Level::INFO.into(); + configuration.logger.level = Level::INFO; let network = Network::new_with_offline_peers( Some(configuration), n_peers, @@ -179,7 +179,12 @@ impl Network { ) .await .expect("Failed to init peers"); - let client = Client::test(&network.genesis.api_address); + let client = Client::test( + &Network::peers(&network) + .choose(&mut thread_rng()) + .unwrap() + .api_address, + ); (network, client) } @@ -197,7 +202,12 @@ impl Network { /// Adds peer to network and waits for it to start block /// synchronization. 
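Replacing `HashMap` with `indexmap::IndexMap` for the WSV's `transactions` index keeps every traversal of the map in insertion order, which makes state inspection and any derived output reproducible across peers. A small stand-alone illustration follows; string keys stand in for the real transaction hashes.

```rust
use indexmap::IndexMap;

fn main() {
    // Stand-in for the WSV's `transactions` map:
    // transaction hash -> height of the block that stores it.
    let mut transactions: IndexMap<&str, u64> = IndexMap::new();
    transactions.insert("tx-aaa", 1);
    transactions.insert("tx-bbb", 1);
    transactions.insert("tx-ccc", 2);

    // Iteration follows insertion order on every run and every peer,
    // which a std HashMap does not guarantee.
    let heights: Vec<u64> = transactions.values().copied().collect();
    assert_eq!(heights, vec![1, 1, 2]);
}
```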
pub async fn add_peer(&self) -> (Peer, Client) { - let genesis_client = Client::test(&self.genesis.api_address); + let client = Client::test( + &Network::peers(self) + .choose(&mut thread_rng()) + .unwrap() + .api_address, + ); let mut config = Configuration::test(); config.sumeragi.trusted_peers.peers = @@ -211,14 +221,11 @@ impl Network { time::sleep(Configuration::pipeline_time() + Configuration::block_sync_gossip_time()).await; - let add_peer = RegisterExpr::new(DataModelPeer::new(peer.id.clone())); - genesis_client - .submit(add_peer) - .expect("Failed to add new peer."); - - let client = Client::test(&peer.api_address); + let add_peer = Register::peer(DataModelPeer::new(peer.id.clone())); + client.submit(add_peer).expect("Failed to add new peer."); - (peer, client) + let peer_client = Client::test(&peer.api_address); + (peer, peer_client) } /// Creates new network with some offline peers @@ -359,7 +366,7 @@ pub struct Peer { pub iroha: Option, /// Temporary directory // Note: last field to be dropped after Iroha (struct fields drops in FIFO RFC 1857) - temp_dir: Option>, + pub temp_dir: Option>, } impl From for Box { @@ -414,22 +421,18 @@ impl Peer { temp_dir: Arc, ) { let mut configuration = self.get_config(configuration); - configuration - .kura - .block_store_path(temp_dir.path()) - .expect("block store path not readable"); + configuration.kura.block_store_path = temp_dir.path().to_str().unwrap().into(); let info_span = iroha_logger::info_span!( "test-peer", p2p_addr = %self.p2p_address, api_addr = %self.api_address, ); - let telemetry = - iroha_logger::init(&configuration.logger).expect("Failed to initialize telemetry"); + let logger = iroha_logger::test_logger(); let (sender, receiver) = std::sync::mpsc::sync_channel(1); let handle = task::spawn( async move { - let mut iroha = Iroha::with_genesis(genesis, configuration, telemetry) + let mut iroha = Iroha::with_genesis(genesis, configuration, logger) .await .expect("Failed to start iroha"); let job_handle = iroha.start_as_task().unwrap(); @@ -560,7 +563,7 @@ impl PeerBuilder { /// Set Iroha configuration #[must_use] pub fn with_configuration(mut self, configuration: Configuration) -> Self { - self.configuration.replace(configuration); + self.configuration = Some(configuration); self } @@ -708,7 +711,7 @@ pub trait TestClient: Sized { /// If predicate is not satisfied, after maximum retries. fn submit_all_till( &self, - instructions: Vec, + instructions: Vec, request: R, f: impl Fn(::Target) -> bool, ) -> eyre::Result<()> @@ -840,7 +843,7 @@ impl TestClient for Client { fn submit_all_till( &self, - instructions: Vec, + instructions: Vec, request: R, f: impl Fn(::Target) -> bool, ) -> eyre::Result<()> diff --git a/data_model/clippy.toml b/data_model/clippy.toml new file mode 100644 index 00000000000..ad9bd114bed --- /dev/null +++ b/data_model/clippy.toml @@ -0,0 +1 @@ +disallowed-types = ["std::collections::HashMap", "std::collections::HashSet"] diff --git a/data_model/src/evaluate.rs b/data_model/src/evaluate.rs deleted file mode 100644 index b566bf4e0b9..00000000000 --- a/data_model/src/evaluate.rs +++ /dev/null @@ -1,869 +0,0 @@ -//! Implementations for Expression evaluation for different expressions. 
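The new `data_model/clippy.toml` above bans `std::collections::HashMap` and `std::collections::HashSet` crate-wide, so unordered containers cannot quietly reappear; the few call sites that genuinely need one opt out with an explicit `#[allow]`, as `update_metrics` does earlier in this diff. A minimal sketch of that pattern, with an illustrative function and data:

```rust
// Lint config, as added above in data_model/clippy.toml:
//     disallowed-types = ["std::collections::HashMap", "std::collections::HashSet"]

fn distinct_count(values: &[u32]) -> usize {
    // A deliberate, visible exception to the crate-wide ban, mirroring the
    // `#[allow(clippy::disallowed_types)]` around `HashSet::len` in `update_metrics`.
    #[allow(clippy::disallowed_types)]
    let set: std::collections::HashSet<&u32> = values.iter().collect();
    set.len()
}

fn main() {
    assert_eq!(distinct_count(&[1, 2, 2, 3]), 3);
}
```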
- -#[cfg(not(feature = "std"))] -use alloc::{ - boxed::Box, - collections::BTreeMap, - format, - string::{String, ToString}, - vec::Vec, -}; -#[cfg(feature = "std")] -use std::collections::BTreeMap; - -use iroha_data_model_derive::model; -use iroha_macro::FromVariant; -use iroha_schema::IntoSchema; - -pub use self::model::*; -use crate::{ - expression::{prelude::*, Expression}, - isi::error::{BinaryOpIncompatibleNumericValueTypesError, MathError}, - prelude::*, -}; - -/// Expression evaluator -pub trait ExpressionEvaluator { - /// Evaluates expression against current state of the underlying system - /// - /// # Errors - /// - /// - if expression is malformed - fn evaluate(&self, expression: &E) -> Result; -} - -/// Context of expression evaluation, holding (name, value) pairs for resolving identifiers. -/// Context comes into play because of [`Where`] and [`Query`] expressions. -/// -/// # Example -/// -/// Say you have an expression such as: `SELECT name FROM table WHERE name = "alice"`. This -/// compound expression is made up of two basic expressions, namely `SELECT FROM` and `WHERE`. -/// To evaluate any expression you have to substitute concrete values for variable names. -/// In this case, `WHERE` should be evaluated first which would place `name = "alice"` -/// inside the context. This context will then be used to evaluate `SELECT FROM`. -/// Starting expression would then be evaluated to `SELECT "alice" FROM table` -pub trait Context: Clone { - /// Execute query against the current state of `Iroha` - /// - /// # Errors - /// - /// If query execution fails - fn query(&self, query: &QueryBox) -> Result; - - /// Return a reference to the [`Value`] corresponding to the [`Name`]. - fn get(&self, name: &Name) -> Option<&Value>; - - /// Update this context with given values. - fn update(&mut self, other: impl IntoIterator); -} - -/// Calculate the result of the expression without mutating the state. -#[allow(clippy::len_without_is_empty)] // NOTE: Evaluate cannot be empty -pub trait Evaluate { - /// The resulting type of the expression. - type Value; - - /// Calculate result. - /// - /// # Errors - /// Concrete to each implementer. - fn evaluate(&self, context: &C) -> Result; -} - -impl> Evaluate for EvaluatesTo -where - V::Error: ToString, -{ - type Value = V; - - fn evaluate(&self, context: &C) -> Result { - let expr = self.expression.evaluate(context)?; - - V::try_from(expr).map_err(|error| EvaluationError::Conversion(error.to_string())) - } -} - -impl Evaluate for Expression { - type Value = Value; - - fn evaluate(&self, context: &C) -> Result { - macro_rules! match_evals { - ($($non_value: ident),+ $(,)?) 
=> { - match self { $( - $non_value(expr) => expr.evaluate(context).map(Into::into)?, )+ - Raw(value) => value.clone(), - } - }; - } - - use Expression::*; - let result = match_evals!( - // numeric - Add, - Subtract, - Greater, - Less, - Multiply, - Divide, - Mod, - RaiseTo, - // logical - Equal, - Not, - And, - Or, - Contains, - ContainsAll, - ContainsAny, - // value - If, - Where, - Query, - ContextValue, - ); - - Ok(result) - } -} - -impl Evaluate for ContextValue { - type Value = Value; - - fn evaluate(&self, context: &C) -> Result { - context - .get(&self.value_name) - .cloned() - .ok_or_else(|| EvaluationError::Find(self.value_name.to_string())) - } -} - -mod numeric { - use super::*; - - impl Evaluate for Add { - type Value = NumericValue; - - fn evaluate(&self, context: &C) -> Result { - use NumericValue::*; - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - - let result = match (left, right) { - (U32(left), U32(right)) => left - .checked_add(right) - .ok_or(MathError::Overflow) - .map(NumericValue::from)?, - (U128(left), U128(right)) => left - .checked_add(right) - .ok_or(MathError::Overflow) - .map(NumericValue::from)?, - (Fixed(left), Fixed(right)) => left - .checked_add(right) - .map(NumericValue::from) - .map_err(MathError::from)?, - (left, right) => Err(MathError::from( - BinaryOpIncompatibleNumericValueTypesError { left, right }, - ))?, - }; - - Ok(result) - } - } - - impl Evaluate for Subtract { - type Value = NumericValue; - - fn evaluate(&self, context: &C) -> Result { - use NumericValue::*; - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - - let result = match (left, right) { - (U32(left), U32(right)) => left - .checked_sub(right) - .ok_or(MathError::NotEnoughQuantity) - .map(NumericValue::from)?, - (U128(left), U128(right)) => left - .checked_sub(right) - .ok_or(MathError::NotEnoughQuantity) - .map(NumericValue::from)?, - (Fixed(left), Fixed(right)) => left - .checked_sub(right) - .map(NumericValue::from) - .map_err(MathError::from)?, - (left, right) => Err(MathError::from( - BinaryOpIncompatibleNumericValueTypesError { left, right }, - ))?, - }; - - Ok(result) - } - } - - impl Evaluate for Multiply { - type Value = NumericValue; - - fn evaluate(&self, context: &C) -> Result { - use NumericValue::*; - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - - let result = match (left, right) { - (U32(left), U32(right)) => left - .checked_mul(right) - .ok_or(MathError::Overflow) - .map(NumericValue::from)?, - (U128(left), U128(right)) => left - .checked_mul(right) - .ok_or(MathError::Overflow) - .map(NumericValue::from)?, - (Fixed(left), Fixed(right)) => left - .checked_mul(right) - .map(NumericValue::from) - .map_err(MathError::from)?, - (left, right) => Err(MathError::from( - BinaryOpIncompatibleNumericValueTypesError { left, right }, - ))?, - }; - - Ok(result) - } - } - - impl Evaluate for RaiseTo { - type Value = NumericValue; - - fn evaluate(&self, context: &C) -> Result { - use NumericValue::*; - let value = self.left.evaluate(context)?; - let exp = self.right.evaluate(context)?; - - let result = match (value, exp) { - (U32(value), U32(exp)) => value - .checked_pow(exp) - .ok_or(MathError::Overflow) - .map(NumericValue::from)?, - (U128(value), U32(exp)) => value - .checked_pow(exp) - .ok_or(MathError::Overflow) - .map(NumericValue::from)?, - // TODO (#2945): Extend `RaiseTo` to support `Fixed` - (left, right) => Err(MathError::from( - 
BinaryOpIncompatibleNumericValueTypesError { left, right }, - ))?, - }; - - Ok(result) - } - } - - impl Evaluate for Divide { - type Value = NumericValue; - - fn evaluate(&self, context: &C) -> Result { - use NumericValue::*; - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - - let result = match (left, right) { - (U32(left), U32(right)) => left - .checked_div(right) - .ok_or(MathError::DivideByZero) - .map(NumericValue::from)?, - (U128(left), U128(right)) => left - .checked_div(right) - .ok_or(MathError::DivideByZero) - .map(NumericValue::from)?, - (Fixed(left), Fixed(right)) => left - .checked_div(right) - .map(NumericValue::from) - .map_err(MathError::from)?, - (left, right) => Err(MathError::from( - BinaryOpIncompatibleNumericValueTypesError { left, right }, - ))?, - }; - - Ok(result) - } - } - - impl Evaluate for Mod { - type Value = NumericValue; - - fn evaluate(&self, context: &C) -> Result { - use NumericValue::*; - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - - let result = match (left, right) { - (U32(left), U32(right)) => left - .checked_rem(right) - .ok_or(MathError::DivideByZero) - .map(NumericValue::from)?, - (U128(left), U128(right)) => left - .checked_rem(right) - .ok_or(MathError::DivideByZero) - .map(NumericValue::from)?, - (left, right) => Err(MathError::from( - BinaryOpIncompatibleNumericValueTypesError { left, right }, - ))?, - }; - - Ok(result) - } - } -} - -mod logical { - use super::*; - - impl Evaluate for Greater { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - use NumericValue::*; - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - - let result = match (left, right) { - (U32(left), U32(right)) => left > right, - (U128(left), U128(right)) => left > right, - (Fixed(left), Fixed(right)) => left > right, - (left, right) => Err(MathError::from( - BinaryOpIncompatibleNumericValueTypesError { left, right }, - ))?, - }; - - Ok(result) - } - } - - impl Evaluate for Less { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - use NumericValue::*; - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - - let result = match (left, right) { - (U32(left), U32(right)) => left < right, - (U128(left), U128(right)) => left < right, - (Fixed(left), Fixed(right)) => left < right, - (left, right) => Err(MathError::from( - BinaryOpIncompatibleNumericValueTypesError { left, right }, - ))?, - }; - - Ok(result) - } - } - - impl Evaluate for Equal { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - Ok(left == right) - } - } - - impl Evaluate for And { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - Ok(left && right) - } - } - - impl Evaluate for Or { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - Ok(left || right) - } - } - - impl Evaluate for Not { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - let expression = self.expression.evaluate(context)?; - Ok(!expression) - } - } - - impl Evaluate for Contains { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - let collection = self.collection.evaluate(context)?; - let element = 
self.element.evaluate(context)?; - Ok(collection.contains(&element)) - } - } - - impl Evaluate for ContainsAll { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - let collection = self.collection.evaluate(context)?; - let elements = self.elements.evaluate(context)?; - Ok(elements.iter().all(|element| collection.contains(element))) - } - } - - impl Evaluate for ContainsAny { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - let collection = self.collection.evaluate(context)?; - let elements = self.elements.evaluate(context)?; - Ok(elements.iter().any(|element| collection.contains(element))) - } - } -} - -impl Evaluate for If { - type Value = Value; - - fn evaluate(&self, context: &C) -> Result { - let condition = self.condition.evaluate(context)?; - if condition { - self.then.evaluate(context) - } else { - self.otherwise.evaluate(context) - } - } -} - -impl Evaluate for Where { - type Value = Value; - - fn evaluate(&self, context: &C) -> Result { - let additional_context: Result, EvaluationError> = self - .values - .clone() - .into_iter() - .map(|(value_name, expression)| { - expression - .evaluate(context) - .map(|expression_result| (value_name, expression_result)) - }) - .collect(); - - let mut combined_context = context.clone(); - combined_context.update(additional_context?); - self.expression.evaluate(&combined_context) - } -} - -impl Evaluate for QueryBox { - type Value = Value; - - fn evaluate(&self, context: &C) -> Result { - context - .query(self) - .map_err(|err| EvaluationError::Validation(Box::new(err))) - } -} - -#[model] -pub mod model { - #[cfg(not(feature = "std"))] - use alloc::boxed::Box; - - use parity_scale_codec::{Decode, Encode}; - use serde::{Deserialize, Serialize}; - - use super::*; - - /// Expression evaluation error - #[derive( - Debug, - displaydoc::Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - FromVariant, - Deserialize, - Serialize, - Decode, - Encode, - IntoSchema, - )] - #[cfg_attr(feature = "std", derive(thiserror::Error))] - // TODO: Only temporarily opaque because of problems with FFI - #[ffi_type(opaque)] - pub enum EvaluationError { - /// Failed due to math exception - Math(#[cfg_attr(feature = "std", source)] MathError), - /// Validation failed - Validation(#[cfg_attr(feature = "std", source)] Box), - /// `{0}`: Value not found in the context - Find( - #[skip_from] - #[skip_try_from] - String, - ), - /// Conversion evaluation error: {0} - Conversion( - #[skip_from] - #[skip_try_from] - String, - ), - } -} - -pub mod prelude { - //! Prelude: re-export of most commonly used traits, structs and macros in this crate. 
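With the expression machinery deleted here, transaction weighting no longer needs the recursive `ExprLen`/`instruction_size` bookkeeping removed in the `core/src/tx.rs` hunk earlier; acceptance now simply compares `instructions.len()` against the configured limit. A self-contained sketch of that check, where `Limits` and the error text are illustrative stand-ins for the real config and error types:

```rust
struct Limits {
    max_instruction_number: u64,
}

fn check_instruction_count(instruction_count: usize, limits: &Limits) -> Result<(), String> {
    // Mirrors the flattened check in `AcceptedTransaction::accept`.
    let count = u64::try_from(instruction_count).expect("`usize` should always fit into `u64`");
    if count > limits.max_instruction_number {
        return Err(format!(
            "too many instructions: limit is {}, got {}",
            limits.max_instruction_number, count
        ));
    }
    Ok(())
}

fn main() {
    let limits = Limits { max_instruction_number: 4096 };
    assert!(check_instruction_count(3, &limits).is_ok());
    assert!(check_instruction_count(10_000, &limits).is_err());
}
```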
- - pub use super::Evaluate; -} - -#[cfg(test)] -mod tests { - use core::{fmt::Debug, str::FromStr as _}; - - use iroha_crypto::KeyPair; - use iroha_primitives::fixed::Fixed; - use parity_scale_codec::{DecodeAll, Encode}; - - use super::*; - use crate::val_vec; - - /// Context of expression evaluation - #[derive(Clone)] - #[repr(transparent)] - struct TestContext { - values: BTreeMap, - } - - impl TestContext { - fn new() -> Self { - Self { - values: BTreeMap::new(), - } - } - } - - impl super::Context for TestContext { - fn query(&self, _: &QueryBox) -> Result { - unimplemented!("This has to be tested on iroha_core") - } - - fn get(&self, name: &Name) -> Option<&Value> { - self.values.get(name) - } - - fn update(&mut self, other: impl IntoIterator) { - self.values.extend(other) - } - } - - /// Example taken from [whitepaper](https://github.com/hyperledger/iroha/blob/iroha2-dev/docs/source/iroha_2_whitepaper.md#261-multisignature-transactions) - #[test] - #[allow(clippy::too_many_lines)] - fn conditional_multisignature_quorum() -> Result<(), EvaluationError> { - let asset_quantity_high = 750_u32.to_value(); - let asset_quantity_low = 300_u32.to_value(); - let (public_key_teller_1, _) = KeyPair::generate().expect("Valid").into(); - let (public_key_teller_2, _) = KeyPair::generate().expect("Valid").into(); - let (manager_public_key, _) = KeyPair::generate().expect("Valid").into(); - let teller_signatory_set = vec![ - Value::PublicKey(public_key_teller_1.clone()), - Value::PublicKey(public_key_teller_2), - ]; - let one_teller_set = Value::Vec(vec![Value::PublicKey(public_key_teller_1)]); - let manager_signatory = Value::PublicKey(manager_public_key); - let manager_signatory_set = Value::Vec(vec![manager_signatory.clone()]); - let condition = If::new( - And::new( - Greater::new( - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str("usd_quantity").expect("Can't fail."), - )), - 500_u32, - ), - Less::new( - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str("usd_quantity").expect("Can't fail."), - )), - 1000_u32, - ), - ), - EvaluatesTo::new_evaluates_to_value(Or::new( - ContainsAll::new( - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str("signatories").expect("Can't fail."), - )), - teller_signatory_set.clone(), - ), - Contains::new( - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str("signatories").expect("Can't fail."), - )), - manager_signatory, - ), - )), - true, - ); - // Signed by all tellers - let expression = Where::new(EvaluatesTo::new_evaluates_to_value(condition.clone())) - .with_value( - //TODO: use query to get the actual quantity of an asset from WSV - // in that case this test should be moved to iroha_core - Name::from_str("usd_quantity").expect("Can't fail."), - asset_quantity_high.clone(), - ) - .with_value( - Name::from_str("signatories").expect("Can't fail."), - teller_signatory_set, - ); - assert_eq!(expression.evaluate(&TestContext::new())?, Value::Bool(true)); - // Signed by manager - let expression = Where::new(EvaluatesTo::new_evaluates_to_value(condition.clone())) - .with_value( - Name::from_str("usd_quantity").expect("Can't fail."), - asset_quantity_high.clone(), - ) - .with_value( - Name::from_str("signatories").expect("Can't fail."), - manager_signatory_set, - ); - assert_eq!(expression.evaluate(&TestContext::new())?, Value::Bool(true)); - // Signed by one teller - let expression = Where::new(EvaluatesTo::new_evaluates_to_value(condition.clone())) - .with_value( - Name::from_str("usd_quantity").expect("Can't fail."), 
- asset_quantity_high, - ) - .with_value( - Name::from_str("signatories").expect("Can't fail."), - one_teller_set.clone(), - ); - assert_eq!( - expression.evaluate(&TestContext::new())?, - Value::Bool(false) - ); - // Signed by one teller with less value - let expression = Where::new(EvaluatesTo::new_evaluates_to_value(condition)) - .with_value( - Name::from_str("usd_quantity").expect("Can't fail."), - asset_quantity_low, - ) - .with_value( - Name::from_str("signatories").expect("Can't fail."), - one_teller_set, - ); - assert_eq!(expression.evaluate(&TestContext::new())?, Value::Bool(true)); - Ok(()) - } - - #[test] - fn where_expression() -> Result<(), EvaluationError> { - assert_eq!( - Where::new(EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str("test_value").expect("Can't fail.") - ))) - .with_value( - Name::from_str("test_value").expect("Can't fail."), - EvaluatesTo::new_evaluates_to_value(Add::new(2_u32, 3_u32)) - ) - .evaluate(&TestContext::new())?, - 5_u32.to_value() - ); - Ok(()) - } - - #[test] - fn nested_where_expression() -> Result<(), EvaluationError> { - let expression = Where::new(EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str("a").expect("Can't fail."), - ))) - .with_value(Name::from_str("a").expect("Can't fail."), 2_u32); - let outer_expression = Where::new(EvaluatesTo::new_evaluates_to_value(Add::new( - EvaluatesTo::new_unchecked(expression), - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str("b").expect("Can't fail."), - )), - ))) - .with_value(Name::from_str("b").expect("Can't fail."), 4_u32); - assert_eq!( - outer_expression.evaluate(&TestContext::new())?, - 6_u32.to_value() - ); - Ok(()) - } - - #[test] - fn if_condition_branches_correctly() -> Result<(), EvaluationError> { - assert_eq!( - If::new(true, 1_u32, 2_u32).evaluate(&TestContext::new())?, - 1_u32.to_value() - ); - assert_eq!( - If::new(false, 1_u32, 2_u32).evaluate(&TestContext::new())?, - 2_u32.to_value() - ); - Ok(()) - } - - #[test] - #[allow(clippy::unnecessary_wraps)] - fn incorrect_types_are_caught() -> Result<(), EvaluationError> { - fn assert_eval(inst: &I, err_msg: &str) - where - I: Evaluate + Debug, - I::Value: Debug, - { - let result: Result<_, _> = inst.evaluate(&TestContext::new()); - let _err = result.expect_err(err_msg); - } - - assert_eval( - &And::new( - EvaluatesTo::new_unchecked(1_u32), - EvaluatesTo::new_unchecked(Vec::::new()), - ), - "Should not be possible to apply logical and to int and vec.", - ); - assert_eval( - &Or::new( - EvaluatesTo::new_unchecked(1_u32), - EvaluatesTo::new_unchecked(Vec::::new()), - ), - "Should not be possible to apply logical or to int and vec.", - ); - assert_eval( - &Greater::new( - EvaluatesTo::new_unchecked(1_u32), - EvaluatesTo::new_unchecked(Vec::::new()), - ), - "Should not be possible to apply greater sign to int and vec.", - ); - assert_eval( - &Less::new( - EvaluatesTo::new_unchecked(1_u32), - EvaluatesTo::new_unchecked(Vec::::new()), - ), - "Should not be possible to apply greater sign to int and vec.", - ); - assert_eval( - &If::new(EvaluatesTo::new_unchecked(1_u32), 2_u32, 3_u32), - "If condition should be bool", - ); - assert_eval( - &Add::new(10_u32, 1_u128), - "Should not be possible to add `u32` and `u128`", - ); - assert_eval( - &Subtract::new(Fixed::try_from(10.2_f64).map_err(MathError::from)?, 1_u128), - "Should not be possible to subtract `Fixed` and `u128`", - ); - assert_eval( - &Multiply::new(0_u32, Fixed::try_from(1.0_f64).map_err(MathError::from)?), - "Should not be possible to multiply 
`u32` and `Fixed`", - ); - Ok(()) - } - - #[test] - fn operations_are_correctly_calculated() -> Result<(), EvaluationError> { - assert_eq!( - Add::new(1_u32, 2_u32).evaluate(&TestContext::new())?, - 3_u32.into() - ); - assert_eq!( - Add::new(1_u128, 2_u128).evaluate(&TestContext::new())?, - 3_u128.into(), - ); - assert_eq!( - Add::new( - Fixed::try_from(1.17_f64).map_err(MathError::from)?, - Fixed::try_from(2.13_f64).map_err(MathError::from)? - ) - .evaluate(&TestContext::new())?, - 3.30_f64.try_into().map_err(MathError::from)? - ); - assert_eq!( - Subtract::new(7_u32, 2_u32).evaluate(&TestContext::new())?, - 5_u32.into() - ); - assert_eq!( - Subtract::new(7_u128, 2_u128).evaluate(&TestContext::new())?, - 5_u128.into() - ); - assert_eq!( - Subtract::new( - Fixed::try_from(7.250_f64).map_err(MathError::from)?, - Fixed::try_from(2.125_f64).map_err(MathError::from)? - ) - .evaluate(&TestContext::new())?, - 5.125_f64.try_into().map_err(MathError::from)? - ); - assert!(!Greater::new(1_u32, 2_u32).evaluate(&TestContext::new())?); - assert!(Greater::new(2_u32, 1_u32).evaluate(&TestContext::new())?); - assert!(Less::new(1_u32, 2_u32).evaluate(&TestContext::new())?); - assert!(!Less::new(2_u32, 1_u32).evaluate(&TestContext::new())?); - assert!(!Equal::new(1_u32, 2_u32).evaluate(&TestContext::new())?); - assert!( - Equal::new(vec![1_u32, 3_u32, 5_u32], vec![1_u32, 3_u32, 5_u32]) - .evaluate(&TestContext::new())? - ); - assert!(Contains::new(val_vec![1_u32, 3_u32, 5_u32], 3_u32).evaluate(&TestContext::new())?); - assert!(!Contains::new(val_vec![1_u32, 3_u32, 5_u32], 7_u32).evaluate(&TestContext::new())?); - assert!( - ContainsAll::new(val_vec![1_u32, 3_u32, 5_u32], val_vec![1_u32, 5_u32]) - .evaluate(&TestContext::new())? - ); - assert!( - !ContainsAll::new(val_vec![1_u32, 3_u32, 5_u32], val_vec![1_u32, 5_u32, 7_u32]) - .evaluate(&TestContext::new())? - ); - Ok(()) - } - - #[test] - #[ignore = "Stack overflow"] - fn serde_serialization_works() { - let expression: Expression = Add::new(1_u32, Subtract::new(7_u32, 4_u32)).into(); - let serialized_expression = - serde_json::to_string(&expression).expect("Failed to serialize."); - let deserialized_expression: Expression = - serde_json::from_str(&serialized_expression).expect("Failed to de-serialize."); - assert_eq!( - expression - .evaluate(&TestContext::new()) - .expect("Failed to calculate."), - deserialized_expression - .evaluate(&TestContext::new()) - .expect("Failed to calculate.") - ) - } - - #[test] - fn scale_codec_serialization_works() { - let expression: Expression = Add::new(1_u32, Subtract::new(7_u32, 4_u32)).into(); - let serialized_expression: Vec = expression.encode(); - let deserialized_expression: Expression = - DecodeAll::decode_all(&mut serialized_expression.as_slice()) - .expect("Failed to decode."); - assert_eq!( - expression - .evaluate(&TestContext::new()) - .expect("Failed to calculate."), - deserialized_expression - .evaluate(&TestContext::new()) - .expect("Failed to calculate.") - ) - } -} diff --git a/data_model/src/events/data/events.rs b/data_model/src/events/data/events.rs index 8bf07a8e49b..90f1caf2cf8 100644 --- a/data_model/src/events/data/events.rs +++ b/data_model/src/events/data/events.rs @@ -566,6 +566,7 @@ mod executor { #[derive( Debug, + Copy, Clone, PartialEq, Eq, @@ -586,6 +587,23 @@ mod executor { } /// Filter for [`ExecutorEvent`]. 
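The new `ExecutorFilter` defined just below is marked `#[serde(untagged)]`, with a note that a single unit variant is unaffected by the untagged-deserialization problem referenced by #3330. A stand-alone sketch of how such an enum round-trips through JSON; the type here is illustrative, not the Iroha one:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
enum Filter {
    // The only variant, so untagged (de)serialization stays unambiguous.
    Upgraded,
}

fn main() {
    // An untagged unit variant serializes as JSON `null`...
    let json = serde_json::to_string(&Filter::Upgraded).expect("serializes");
    assert_eq!(json, "null");
    // ...and deserializes back without needing a tag.
    let filter: Filter = serde_json::from_str(&json).expect("deserializes");
    assert_eq!(filter, Filter::Upgraded);
}
```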
+ #[derive( + Debug, + Copy, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + parity_scale_codec::Decode, + parity_scale_codec::Encode, + serde::Deserialize, + serde::Serialize, + iroha_schema::IntoSchema, + )] + #[non_exhaustive] + #[serde(untagged)] // Unaffected by #3330, as single unit variant + #[repr(transparent)] pub enum ExecutorFilter { Upgraded, } diff --git a/data_model/src/expression.rs b/data_model/src/expression.rs deleted file mode 100644 index b5e75ce26bc..00000000000 --- a/data_model/src/expression.rs +++ /dev/null @@ -1,703 +0,0 @@ -//! Expressions to use inside of ISIs. - -#![allow( - // Because of `codec(skip)` - clippy::default_trait_access, - // Because of length on instructions and expressions (can't be 0) - clippy::len_without_is_empty, - // Because of length on instructions and expressions (XXX: Should it be trait?) - clippy::unused_self -)] - -#[cfg(not(feature = "std"))] -use alloc::{boxed::Box, collections::btree_map, format, string::String, vec, vec::Vec}; -use core::marker::PhantomData; -#[cfg(feature = "std")] -use std::collections::btree_map; - -use derive_more::{Constructor, DebugCustom, Display}; -use getset::Getters; -use iroha_data_model_derive::{model, PartiallyTaggedDeserialize, PartiallyTaggedSerialize}; -use iroha_macro::FromVariant; -use iroha_schema::{IntoSchema, TypeId}; -use operation::*; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; - -pub use self::model::*; -use super::{query::QueryBox, Name, Value}; -use crate::NumericValue; - -/// Generate expression structure and basic impls for it. -/// -/// # Syntax -/// -/// Basic syntax: -/// -/// ```ignore -/// gen_expr_and_impls! { -/// /// Comment -/// #[derive(Derives)] -/// pub Expr(param1: Type1, param2: Type2, param3: Type3, ...) -> OutputType -/// } -/// ``` -/// -/// The macro has three syntax forms to specify parameters: -/// - One unnamed parameter. In that case, the parameter name will be `expression`. -/// - Two unnamed parameters. -/// In that case, the parameter names will be `left` and `right` respectively. -/// - Any number of named parameters. -/// -/// The macro has two syntax forms to specify result: -/// - With the actual result type after the arrow (`->`). -/// In that case, `impl From<$i> for EvaluatesTo<$result_type>` will be generated. -/// - With `?` sign as a result type. -/// In that case `impl From<$i> for EvaluatesTo<$result_type>` **won't** be generated. -/// -/// See the example and further usage for more details. -/// -/// # Example -/// -/// ```ignore -/// gen_expr_and_impls! { -/// /// Evaluates to the sum of left and right expressions. -/// #[derive(Debug)] -/// pub Add(u32, u32) -> u32 -/// } -/// -/// // Will generate the following code: -/// -/// /// Evaluates to the sum of left and right expressions. -/// iroha_data_model_derive::model_single! { -/// #[derive(Debug)] -/// pub struct Add { -/// #[allow(missing_docs)] -/// pub left: EvaluatesTo, -/// #[allow(missing_docs)] -/// pub right: EvaluatesTo, -/// } -/// } -/// -/// impl Add { -/// /// Construct new [`Add`] expression -/// pub fn new(left: impl Into>, right: impl Into>) -> Self { -/// Self { -/// left: left.into(), -/// right: right.into(), -/// } -/// } -/// } -/// -/// impl From for EvaluatesTo { -/// fn from(expression: Add) -> Self { -/// EvaluatesTo::new_unchecked(expression) -/// } -/// } -/// ``` -macro_rules! gen_expr_and_impls { - // Case: one unnamed parameter - ($(#[$me:meta])* $v:vis $i:ident($first_type:ty $(,)?) 
-> $($result:tt)*) => { - gen_expr_and_impls!($(#[$me])* $v $i(expression: $first_type) -> $($result)*); - }; - // Case: two unnamed parameters - ($(#[$me:meta])* $v:vis $i:ident($first_type:ty, $second_type:ty $(,)?) -> $($result:tt)*) => { - gen_expr_and_impls!($(#[$me])* $v $i(left: $first_type, right: $second_type) -> $($result)*); - }; - // Case: any number of named parameters - ($(#[$me:meta])* $v:vis $i:ident($($param_name:ident: $param_type:ty),* $(,)?) -> $($result:tt)*) => { - gen_expr_and_impls!(impl_basic $(#[$me])* $v $i($($param_name: $param_type),*)); - gen_expr_and_impls!(impl_extra_convert $i $($result)*); - }; - // Internal usage: generate basic code for the expression - (impl_basic $(#[$me:meta])* $v:vis $i:ident($($param_name:ident: $param_type:ty),* $(,)?)) => { - iroha_data_model_derive::model_single! { - #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, Decode, Encode, Deserialize, Serialize, IntoSchema)] - #[getset(get = "pub")] - $(#[$me])* - $v struct $i { $( - /// - #[allow(missing_docs)] - pub $param_name: EvaluatesTo<$param_type>, )* - } - } - - impl $i { - #[doc = concat!(" Construct new [`", stringify!($i), "`] expression")] - pub fn new( - $($param_name: impl Into>),* - ) -> Self { - Self { - $($param_name: $param_name.into()),* - } - } - } - }; - // Internal usage: do nothing for expressions with unknown result type - (impl_extra_convert $i:ident ?) => { - }; - // Internal usage: generate extra `From` impl for expressions with known result type - (impl_extra_convert $i:ident $result_type:ty) => { - impl From<$i> for EvaluatesTo<$result_type> { - fn from(expression: $i) -> Self { - EvaluatesTo::new_unchecked(expression) - } - } - }; -} - -#[model] -pub mod model { - use super::*; - - /// Struct for type checking and converting expression results. - #[derive( - DebugCustom, - Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Decode, - Encode, - Deserialize, - Serialize, - TypeId, - )] - // As this structure exists only for type checking - // it makes sense to display `expression` directly - #[display(fmt = "{expression}")] - #[debug(fmt = "{expression:?}")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `EvaluatesTo` has no trap representation in `Box` - #[ffi_type(unsafe {robust})] - pub struct EvaluatesTo { - /// Expression. - #[serde(flatten)] - pub expression: Box, - #[codec(skip)] - pub(super) _value_type: PhantomData, - } - - /// Represents all possible expressions. - #[derive( - DebugCustom, - Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - FromVariant, - Decode, - Encode, - PartiallyTaggedDeserialize, - PartiallyTaggedSerialize, - IntoSchema, - )] - #[ffi_type(opaque)] - pub enum Expression { - /// Add expression. - Add(Add), - /// Subtract expression. - Subtract(Subtract), - /// Multiply expression. - Multiply(Multiply), - /// Divide expression. - Divide(Divide), - /// Module expression. - Mod(Mod), - /// Raise to power expression. - RaiseTo(RaiseTo), - /// Greater expression. - Greater(Greater), - /// Less expression. - Less(Less), - /// Equal expression. - Equal(Equal), - /// Not expression. - Not(Not), - /// And expression. - And(And), - /// Or expression. - Or(Or), - /// If expression. - If(If), - /// Raw value. - #[serde_partially_tagged(untagged)] - #[debug(fmt = "{_0:?}")] - Raw(#[skip_from] Value), - /// Query to Iroha state. - Query(QueryBox), - /// Contains expression for vectors. - Contains(Contains), - /// Contains all expression for vectors. 
- ContainsAll(ContainsAll), - /// Contains any expression for vectors. - ContainsAny(ContainsAny), - /// Where expression to supply temporary values to local context. - Where(Where), - /// Get a temporary value by name - ContextValue(ContextValue), - } - - /// Get a temporary value by name. The values are brought into [`Context`] by [`Where`] expression. - // NOTE: Can't use `gen_expr_and_impls!` here because we need special type for `value_name` - #[derive( - Debug, - Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Getters, - Constructor, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "CONTEXT `{value_name}`")] - #[getset(get = "pub")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `ContextValue` has no trap representation in `Name` - #[ffi_type(unsafe {robust})] - pub struct ContextValue { - /// Name bound to the value. - pub value_name: Name, - } - - gen_expr_and_impls! { - /// Evaluates to the multiplication of left and right expressions. - #[derive(Display)] - #[display(fmt = "{}*{}", // Keep without spaces - "self.left.parenthesise(Operation::Multiply)", - "self.right.parenthesise(Operation::Multiply)" - )] - #[ffi_type] - pub Multiply(NumericValue, NumericValue) -> NumericValue - } - - gen_expr_and_impls! { - /// Evaluates to the left expression divided by the right expression. - #[derive(Display)] - #[display(fmt = "{}/{}", // Keep without spaces - "self.left.parenthesise(Operation::Divide)", - "self.right.parenthesise(Operation::Divide)" - )] - #[ffi_type] - pub Divide(NumericValue, NumericValue) -> NumericValue - } - - gen_expr_and_impls! { - /// Evaluates to the left expression modulo the right expression. - #[derive(Display)] - #[display(fmt = "{} % {}", - "self.left.parenthesise(Operation::Mod)", - "self.right.parenthesise(Operation::Mod)" - )] - #[ffi_type] - pub Mod(NumericValue, NumericValue) -> NumericValue - } - - gen_expr_and_impls! { - /// Evaluates to the left expression in the power of right expression. - /// Currently does not support [`NumericValue::Fixed`]. - #[derive(Display)] - #[display(fmt = "{}**{}", - "self.left.parenthesise(Operation::RaiseTo)", - "self.right.parenthesise(Operation::RaiseTo)" - )] - #[ffi_type] - pub RaiseTo(NumericValue, NumericValue) -> NumericValue - } - - gen_expr_and_impls! { - /// Evaluates to the sum of left and right expressions. - #[derive(Display)] - #[display(fmt = "{}+{}", - "self.left.parenthesise(Operation::Add)", - "self.right.parenthesise(Operation::Add)" - )] - #[ffi_type] - pub Add(NumericValue, NumericValue) -> NumericValue - } - - gen_expr_and_impls! { - /// Evaluates to the left expression minus the right expression. - #[derive(Display)] - #[display(fmt = "{}-{}", - "self.left.parenthesise(Operation::Subtract)", - "self.right.parenthesise(Operation::Subtract)" - )] - #[ffi_type] - pub Subtract(NumericValue, NumericValue) -> NumericValue - } - - gen_expr_and_impls! { - /// Returns whether the `left` expression is greater than the `right`. - #[derive(Display)] - #[display(fmt = "{} > {}", - "self.left.parenthesise(Operation::Greater)", - "self.right.parenthesise(Operation::Greater)" - )] - #[ffi_type] - pub Greater(NumericValue, NumericValue) -> bool - } - - gen_expr_and_impls! { - /// Returns whether the `left` expression is less than the `right`. 
- #[derive(Display)] - #[display(fmt = "{} < {}", - "self.left.parenthesise(Operation::Less)", - "self.right.parenthesise(Operation::Less)" - )] - #[ffi_type] - pub Less(NumericValue, NumericValue) -> bool - } - - gen_expr_and_impls! { - /// Negates the result of the `expression`. - /// Works only for `Value::Bool`. - #[derive(Display)] - #[display(fmt = "!{}", "self.expression.parenthesise(Operation::Not)")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `Not` has no trap representation in `bool` - #[ffi_type(unsafe {robust})] - pub Not(bool) -> bool - } - - gen_expr_and_impls! { - /// Applies the logical `and` to two `Value::Bool` operands. - #[derive(Display)] - #[display(fmt = "{} && {}", - "self.left.parenthesise(Operation::And)", - "self.right.parenthesise(Operation::And)" - )] - #[ffi_type] - pub And(bool, bool) -> bool - } - - gen_expr_and_impls! { - /// Applies the logical `or` to two `Value::Bool` operands. - #[derive(Display)] - #[display(fmt = "{} || {}", - "self.left.parenthesise(Operation::Or)", - "self.right.parenthesise(Operation::Or)" - )] - #[ffi_type] - pub Or(bool, bool) -> bool - } - - gen_expr_and_impls! { - /// If expression. Based on the `condition`, returns the result of either `then` or `otherwise`. - #[derive(Display)] - #[display(fmt = "if {condition} {{ {then} }} else {{ {otherwise} }}")] - #[ffi_type] - pub If(condition: bool, then: Value, otherwise: Value) -> ? - } - - gen_expr_and_impls! { - /// `Contains` expression. - /// Returns `true` if `collection` contains an `element`, `false` otherwise. - #[derive(Display)] - #[display(fmt = "{}.contains({})", "collection.parenthesise(Operation::MethodCall)", "element")] - #[ffi_type] - pub Contains(collection: Vec, element: Value) -> bool - } - - gen_expr_and_impls! { - /// `ContainsAll` expression. - /// Returns `true` if `collection` contains all `elements`, `false` otherwise. - #[derive(Display)] - #[display(fmt = "{}.contains_all({})", "collection.parenthesise(Operation::MethodCall)", "elements")] - #[ffi_type] - pub ContainsAll(collection: Vec, elements: Vec) -> bool - } - - gen_expr_and_impls! { - /// `ContainsAny` expression. - /// Returns `true` if `collection` contains any element out of the `elements`, `false` otherwise. - #[derive(Display)] - #[display(fmt = "{}.contains_any({})", "collection.parenthesise(Operation::MethodCall)", "elements")] - #[ffi_type] - pub ContainsAny(collection: Vec, elements: Vec) -> bool - } - - gen_expr_and_impls! { - /// Returns `true` if `left` operand is equal to the `right` operand. - #[derive(Display)] - #[display(fmt = "{} == {}", - "self.left.parenthesise(Operation::Equal)", - "self.right.parenthesise(Operation::Equal)" - )] - #[ffi_type] - pub Equal(Value, Value) -> bool - } - - /// Adds a local context of `values` for the `expression`. - /// It is similar to **where** syntax in *Haskell* although evaluated eagerly. - // NOTE: Can't use `gen_expr_and_impls!` here because we need special type for `values` - #[derive( - Debug, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Getters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type] - pub struct Where { - /// Expression to be evaluated. - #[getset(get = "pub")] - pub expression: EvaluatesTo, - /// Context values for the context bonded to their `String` names. 
- pub values: btree_map::BTreeMap>, - } -} - -impl> From for Expression { - fn from(value: V) -> Self { - Self::Raw(value.into()) - } -} - -impl, E: Into + Into> From for EvaluatesTo { - fn from(expression: E) -> Self { - Self::new_unchecked(expression) - } -} - -impl EvaluatesTo { - /// Expression - #[inline] - // NOTE: getset would return &Box - pub fn expression(&self) -> &Expression { - &self.expression - } - - /// Construct new [`EvaluatesTo`] from [`Expression`] without type checking. - /// - /// # Warning - /// Prefer using [`Into`] conversions rather than this method, - /// because it does not check the value type at compile-time. - #[inline] - pub fn new_unchecked(expression: impl Into) -> Self { - Self { - expression: Box::new(expression.into()), - _value_type: PhantomData, - } - } - - fn operation(&self) -> Operation { - use Expression::*; - - match self.expression.as_ref() { - Add(_) => Operation::Add, - Subtract(_) => Operation::Subtract, - Multiply(_) => Operation::Multiply, - Divide(_) => Operation::Divide, - Mod(_) => Operation::Mod, - RaiseTo(_) => Operation::RaiseTo, - Greater(_) => Operation::Greater, - Less(_) => Operation::Less, - Equal(_) => Operation::Equal, - Not(_) => Operation::Not, - And(_) => Operation::And, - Or(_) => Operation::Or, - Contains(_) | ContainsAll(_) | ContainsAny(_) => Operation::MethodCall, - If(_) | Raw(_) | Query(_) | Where(_) | ContextValue(_) => Operation::Other, - } - } - - /// Wrap expression into parentheses depending on `operation` and get the resulting string. - fn parenthesise(&self, operation: Operation) -> String { - if self.operation().priority() < operation.priority() - && !matches!(self.expression.as_ref(), Expression::Raw(_)) - { - format!("({})", self.expression) - } else { - format!("{}", self.expression) - } - } -} - -impl EvaluatesTo { - /// Construct `EvaluatesTo` from any `expression` - /// because all of them evaluate to [`Value`]. - #[inline] - pub fn new_evaluates_to_value(expression: impl Into) -> Self { - Self::new_unchecked(expression) - } -} - -impl + IntoSchema> IntoSchema for EvaluatesTo { - fn type_name() -> String { - format!("EvaluatesTo<{}>", V::type_name()) - } - fn update_schema_map(map: &mut iroha_schema::MetaMap) { - const EXPRESSION: &str = "expression"; - - if !map.contains_key::() { - map.insert::(iroha_schema::Metadata::Struct( - iroha_schema::NamedFieldsMeta { - declarations: vec![iroha_schema::Declaration { - name: String::from(EXPRESSION), - ty: core::any::TypeId::of::(), - }], - }, - )); - - Expression::update_schema_map(map); - } - } -} - -impl core::fmt::Display for Where { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "`{} where: [", self.expression)?; - - let mut first = true; - for (key, value) in &self.values { - if !first { - write!(f, ", ")?; - } - first = false; - write!(f, "`{key}` : `{value}`")?; - } - - write!(f, "]") - } -} - -impl Where { - /// Construct [`Where`] expression - #[must_use] - pub fn new(expression: impl Into>) -> Self { - Self { - expression: expression.into(), - values: Default::default(), - } - } - - /// Get an iterator over the values of [`Where`] clause - #[inline] - pub fn values(&self) -> impl ExactSizeIterator)> { - self.values.iter() - } - - /// Binds `expression` result to a `value_name`, by which it will be reachable from the main expression. 
- #[must_use] - pub fn with_value>>( - mut self, - value_name: Name, - expression: E, - ) -> Self { - self.values.insert(value_name, expression.into()); - self - } -} - -mod operation { - //! Module containing operations and their priorities. - - /// Type of expression operation. - #[derive(Clone, Copy, PartialEq, Eq)] - pub enum Operation { - MethodCall, - RaiseTo, - Multiply, - Divide, - Mod, - Add, - Subtract, - Greater, - Less, - Equal, - Not, - And, - Or, - Other, - } - - /// Priority of operation. - /// - /// [`First`](Operation::First) is the highest priority - /// and [`Ninth`](Operation::Ninth) is the lowest. - #[derive(Debug, Clone, Copy, PartialEq, Eq)] - pub enum Priority { - First = 1, - Second = 2, - Third = 3, - Fourth = 4, - Fifth = 5, - Sixth = 6, - Seventh = 7, - Eighth = 8, - Ninth = 9, - } - - impl Operation { - /// Get the priority of the operation. - /// - /// Ordering is the same as in Python code. - /// See [`here`](https://docs.python.org/3/reference/expressions.html#operator-precedence) - /// for more details. - pub fn priority(self) -> Priority { - use Operation::*; - - match self { - MethodCall => Priority::First, - RaiseTo => Priority::Second, - Multiply | Divide | Mod => Priority::Third, - Add | Subtract => Priority::Fourth, - Greater | Less | Equal => Priority::Fifth, - Not => Priority::Sixth, - And => Priority::Seventh, - Or => Priority::Eighth, - Other => Priority::Ninth, - } - } - } - - impl PartialOrd for Priority { - #[inline] - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } - } - - impl Ord for Priority { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - use core::cmp::Ordering::*; - - let lhs = *self as u8; - let rhs = *other as u8; - - match lhs.cmp(&rhs) { - Less => Greater, - Equal => Equal, - Greater => Less, - } - } - } -} - -/// The prelude re-exports most commonly used traits, structs and macros from this crate. -pub mod prelude { - pub use super::{ - Add, And, Contains, ContainsAll, ContainsAny, ContextValue, Divide, Equal, EvaluatesTo, - Expression, Greater, If, Less, Mod, Multiply, Not, Or, RaiseTo, Subtract, Where, - }; -} diff --git a/data_model/src/isi.rs b/data_model/src/isi.rs index c0b2fd41da1..0dd3b178d9e 100644 --- a/data_model/src/isi.rs +++ b/data_model/src/isi.rs @@ -4,36 +4,25 @@ #[cfg(not(feature = "std"))] use alloc::{boxed::Box, format, string::String, vec::Vec}; -use core::fmt::Debug; +use core::fmt::{Debug, Display}; -use derive_more::{DebugCustom, Display}; +use derive_more::{Constructor, DebugCustom, Display}; use iroha_data_model_derive::model; -use iroha_macro::FromVariant; use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; use strum::EnumDiscriminants; -pub use self::model::*; -use super::{expression::EvaluatesTo, prelude::*, IdBox, RegistrableBox, Value}; +pub use self::{model::*, transparent::*}; +use super::{prelude::*, Value}; use crate::{seal, Level, Registered}; -/// Marker trait designating instruction -pub trait Instruction: Into + seal::Sealed {} - -macro_rules! isi { - ($($meta:meta)* $item:item) => { - iroha_data_model_derive::model_single! { - #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, getset::Getters)] - #[derive(parity_scale_codec::Decode, parity_scale_codec::Encode)] - #[derive(serde::Deserialize, serde::Serialize)] - #[derive(iroha_schema::IntoSchema)] - #[getset(get = "pub")] - $($meta)* - $item - } - }; -} +/// Marker trait designating instruction. 
+///
+/// Instructions allow changing the state of `Iroha`.
+/// All possible instructions are implementors of this trait, excluding
+/// [`InstructionBox`], which is just a wrapper.
+pub trait Instruction: Into<InstructionBox> + seal::Sealed {}

 #[model]
 pub mod model {
@@ -42,6 +31,10 @@ pub mod model {
     use super::*;

     /// Sized structure for all possible Instructions.
+    ///
+    /// Note that [`InstructionBox`] is not a self-sufficient instruction,
+    /// but just a wrapper to pass instructions back and forth.
+    /// If you are a client SDK user, you likely don't need to use this type directly.
     #[derive(
         DebugCustom,
         Display,
@@ -50,7 +43,6 @@
         Eq,
         PartialOrd,
         Ord,
-        FromVariant,
         EnumDiscriminants,
         Decode,
         Encode,
@@ -79,790 +71,1151 @@
     )]
     #[ffi_type(opaque)]
     #[allow(missing_docs)]
-    pub enum InstructionExpr {
-        #[debug(fmt = "{_0:?}")]
-        Register(RegisterExpr),
+    pub enum InstructionBox {
         #[debug(fmt = "{_0:?}")]
-        Unregister(UnregisterExpr),
+        Register(RegisterBox),
         #[debug(fmt = "{_0:?}")]
-        Mint(MintExpr),
+        Unregister(UnregisterBox),
         #[debug(fmt = "{_0:?}")]
-        Burn(BurnExpr),
+        Mint(MintBox),
         #[debug(fmt = "{_0:?}")]
-        Transfer(TransferExpr),
+        Burn(BurnBox),
         #[debug(fmt = "{_0:?}")]
-        If(Box<ConditionalExpr>),
+        Transfer(TransferBox),
         #[debug(fmt = "{_0:?}")]
-        Pair(Box<PairExpr>),
+        SetKeyValue(SetKeyValueBox),
         #[debug(fmt = "{_0:?}")]
-        Sequence(SequenceExpr),
+        RemoveKeyValue(RemoveKeyValueBox),
         #[debug(fmt = "{_0:?}")]
-        SetKeyValue(SetKeyValueExpr),
+        Grant(GrantBox),
         #[debug(fmt = "{_0:?}")]
-        RemoveKeyValue(RemoveKeyValueExpr),
+        Revoke(RevokeBox),
         #[debug(fmt = "{_0:?}")]
-        Grant(GrantExpr),
+        ExecuteTrigger(ExecuteTrigger),
         #[debug(fmt = "{_0:?}")]
-        Revoke(RevokeExpr),
+        SetParameter(SetParameter),
         #[debug(fmt = "{_0:?}")]
-        ExecuteTrigger(ExecuteTriggerExpr),
+        NewParameter(NewParameter),
         #[debug(fmt = "{_0:?}")]
-        SetParameter(SetParameterExpr),
+        Upgrade(Upgrade),
         #[debug(fmt = "{_0:?}")]
-        NewParameter(NewParameterExpr),
-        Upgrade(UpgradeExpr),
-        /// `Log` variant.
-        Log(LogExpr),
+        Log(Log),
         #[debug(fmt = "{_0:?}")]
         Fail(Fail),
     }
 
-    impl Instruction for InstructionExpr {}
-
-    impl Instruction for SetKeyValueExpr {}
-    impl Instruction for RemoveKeyValueExpr {}
-    impl Instruction for RegisterExpr {}
-    impl Instruction for UnregisterExpr {}
-    impl Instruction for MintExpr {}
-    impl Instruction for BurnExpr {}
-    impl Instruction for TransferExpr {}
-    impl Instruction for GrantExpr {}
-    impl Instruction for RevokeExpr {}
-    impl Instruction for SetParameterExpr {}
-    impl Instruction for NewParameterExpr {}
-    impl Instruction for UpgradeExpr {}
-    impl Instruction for ExecuteTriggerExpr {}
-    impl Instruction for LogExpr {}
+    impl Instruction for InstructionBox {}
+
+    impl Instruction for SetKeyValue<Domain> {}
+    impl Instruction for SetKeyValue<Account> {}
+    impl Instruction for SetKeyValue<AssetDefinition> {}
+    impl Instruction for SetKeyValue<Asset> {}
+
+    impl Instruction for RemoveKeyValue<Domain> {}
+    impl Instruction for RemoveKeyValue<Account> {}
+    impl Instruction for RemoveKeyValue<AssetDefinition> {}
+    impl Instruction for RemoveKeyValue<Asset> {}
+
+    impl Instruction for Register<Peer> {}
+    impl Instruction for Register<Domain> {}
+    impl Instruction for Register<Account> {}
+    impl Instruction for Register<AssetDefinition> {}
+    impl Instruction for Register<Asset> {}
+    impl Instruction for Register<Role> {}
+    impl Instruction for Register<Trigger<TriggeringFilterBox>> {}
+
+    impl Instruction for Unregister<Peer> {}
+    impl Instruction for Unregister<Domain> {}
+    impl Instruction for Unregister<Account> {}
+    impl Instruction for Unregister<AssetDefinition> {}
+    impl Instruction for Unregister<Asset> {}
+    impl Instruction for Unregister<Role> {}
+    impl Instruction for Unregister<Trigger<TriggeringFilterBox>> {}
+
+    impl Instruction for Mint<PublicKey, Account> {}
+    impl Instruction for Mint<SignatureCheckCondition, Account> {}
+    impl Instruction for Mint<u32, Asset> {}
+    impl Instruction for Mint<u128, Asset> {}
+    impl Instruction for Mint<Fixed, Asset> {}
+    impl Instruction for Mint<u32, Trigger<TriggeringFilterBox>> {}
+
+    impl Instruction for Burn<PublicKey, Account> {}
+    impl Instruction for Burn<u32, Asset> {}
+    impl Instruction for Burn<u128, Asset> {}
+    impl Instruction for Burn<Fixed, Asset> {}
+    impl Instruction for Burn<u32, Trigger<TriggeringFilterBox>> {}
+
+    impl Instruction for Transfer<Account, DomainId, Account> {}
+    impl Instruction for Transfer<Account, AssetDefinitionId, Account> {}
+    impl Instruction for Transfer<Asset, u32, Account> {}
+    impl Instruction for Transfer<Asset, u128, Account> {}
+    impl Instruction for Transfer<Asset, Fixed, Account> {}
+
+    impl Instruction for Grant<PermissionToken> {}
+    impl Instruction for Grant<RoleId> {}
+
+    impl Instruction for Revoke<PermissionToken> {}
+    impl Instruction for Revoke<RoleId> {}
+
+    impl Instruction for SetParameter {}
+    impl Instruction for NewParameter {}
+    impl Instruction for Upgrade {}
+    impl Instruction for ExecuteTrigger {}
+    impl Instruction for Log {}
     impl Instruction for Fail {}
-
-    // Composite instructions
-    impl Instruction for ConditionalExpr {}
-    impl Instruction for SequenceExpr {}
-    impl Instruction for PairExpr {}
 }
 
 mod transparent {
-    // NOTE: instructions in this module don't have to be made opaque with `model!`
-    // because they are never shared between client and server(http)/host(wasm)
-
     use super::*;
-    use crate::executor::Executor;
-
-    /// Generic instruction to set key value at the object.
-    #[derive(Debug, Clone)]
-    pub struct SetKeyValue<O: Identifiable> {
-        /// Where to set key value.
-        pub object_id: O::Id,
-        /// Key.
-        pub key: Name,
-        /// Value.
-        pub value: Value,
-    }
-
-    /// Generic instruction to remove key value at the object.
-    #[derive(Debug, Clone)]
-    pub struct RemoveKeyValue<O: Identifiable> {
-        /// From where to remove key value.
-        pub object_id: O::Id,
-        /// Key of the pair to remove.
-        pub key: Name,
-    }
-
-    /// Generic instruction for a registration of an object to the identifiable destination.
-    #[derive(Debug, Clone)]
-    pub struct Register<O: Registered> {
-        /// The object that should be registered, should be uniquely identifiable by its id.
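// For illustration, a minimal sketch of the reworked trait in use. `Fail::new` is
// generated by the `Constructor` derive added later in this patch; the error text
// is a made-up example:

    let fail = Fail::new("deliberate failure".to_owned());
    // Every concrete instruction converts into the wrapper via `Into<InstructionBox>`:
    let boxed: InstructionBox = fail.into();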
- pub object: O::With, + use crate::{account::NewAccount, domain::NewDomain}; + + macro_rules! isi { + ($($meta:meta)* $item:item) => { + iroha_data_model_derive::model_single! { + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + #[derive(getset::Getters)] + #[derive(parity_scale_codec::Decode, parity_scale_codec::Encode)] + #[derive(serde::Deserialize, serde::Serialize)] + #[derive(iroha_schema::IntoSchema)] + #[getset(get = "pub")] + $($meta)* + $item + } + }; + } + + macro_rules! impl_display { + ( + $ty:ident $(< $($generic:tt),+ >)? + $(where + $( $lt:path $( : $clt:tt $(< $inner_generic:tt >)? $(+ $dlt:tt )* )? ),+ $(,)?)? + => $fmt:literal, $($args:ident),* $(,)? + ) => { + impl $(< $($generic),+ >)? ::core::fmt::Display for $ty $(< $($generic),+ >)? + $(where + $( $lt $( : $clt $(< $inner_generic >)? $(+ $dlt )* )? ),+)? + { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + write!( + f, + $fmt, + $(self.$args),* + ) + } + } + } } - /// Generic instruction for an unregistration of an object from the identifiable destination. - #[derive(Debug, Clone)] - pub struct Unregister { - /// [`Identifiable::Id`] of the object which should be unregistered. - pub object_id: O::Id, + macro_rules! impl_into_box { + ( + $($isi:ident $(< $($generic:ident $(< $nested_generic:ident >)?),+ >)?)|* + ==> $boxed:ident :: $variant:ident + ) => {$( + impl From<$isi $(< $($generic $(< $nested_generic >)?),+ >)? > for $boxed { + fn from(instruction: $isi $(< $($generic $(< $nested_generic >)?),+ >)?) -> Self { + Self::$variant(instruction) + } + } + )*}; + ( + $($isi:ident $(< $($generic:ident $(< $nested_generic:ident >)?),+ >)?)|* + => $middle:ident ==> $boxed:ident :: $variant:ident + ) => {$( + impl From<$isi $(< $($generic $(< $nested_generic >)?),+ >)? > for $boxed { + fn from(instruction: $isi $(< $($generic $(< $nested_generic >)?),+ >)?) -> Self { + Self::$variant($middle::from(instruction)) + } + } + )*}; + } + + isi! { + /// Generic instruction for setting a chain-wide config parameter. + #[derive(Constructor, Display)] + #[display(fmt = "SET `{parameter}`")] + #[serde(transparent)] + #[repr(transparent)] + pub struct SetParameter { + /// The configuration parameter being changed. + #[serde(flatten)] + pub parameter: Parameter, + } } - /// Generic instruction for a mint of an object to the identifiable destination. - #[derive(Debug, Clone)] - pub struct Mint, D: Identifiable> { - /// Object which should be minted. - pub object: O, - /// Destination object [`Identifiable::Id`]. - pub destination_id: D::Id, + impl_into_box!(SetParameter ==> InstructionBox::SetParameter); + + isi! { + /// Sized structure for all possible on-chain configuration parameters when they are first created. + /// Generic instruction for setting a chain-wide config parameter. + #[derive(Constructor, Display)] + #[display(fmt = "SET `{parameter}`")] + #[serde(transparent)] + #[repr(transparent)] + pub struct NewParameter { + /// Parameter to be changed. + #[serde(flatten)] + pub parameter: Parameter, + } } - /// Generic instruction for a burn of an object to the identifiable destination. - #[derive(Debug, Clone)] - pub struct Burn, D: Identifiable> { - /// Object which should be burned. - pub object: O, - /// Destination object [`Identifiable::Id`]. - pub destination_id: D::Id, + impl_into_box!(NewParameter ==> InstructionBox::NewParameter); + + isi! { + /// Generic instruction to set key value at the object. 
+ #[schema(bounds = "O: Identifiable, O::Id: IntoSchema")] + pub struct SetKeyValue { + /// Where to set key value. + #[serde(flatten)] + pub object_id: O::Id, + /// Key. + pub key: Name, + /// Value. + pub value: Value, + } } - /// Generic instruction for a transfer of an object from the identifiable source to the identifiable destination. - #[derive(Debug, Clone)] - pub struct Transfer, D: Identifiable> { - /// Source object `Id`. - pub source_id: S::Id, - /// Object which should be transferred. - pub object: O, - /// Destination object `Id`. - pub destination_id: D::Id, + impl SetKeyValue { + /// Constructs a new [`SetKeyValue`] for a [`Domain`] with the given `key` and `value`. + pub fn domain(domain_id: DomainId, key: Name, value: impl Into) -> Self { + Self { + object_id: domain_id, + key, + value: value.into(), + } + } } - /// Generic instruction for granting permission to an entity. - #[derive(Debug, Clone)] - pub struct Grant> { - /// Object to grant. - pub object: O, - /// Entity to which to grant this token. - pub destination_id: AccountId, + impl SetKeyValue { + /// Constructs a new [`SetKeyValue`] for an [`Account`] with the given `key` and `value`. + pub fn account(account_id: AccountId, key: Name, value: impl Into) -> Self { + Self { + object_id: account_id, + key, + value: value.into(), + } + } } - /// Generic instruction for revoking permission from an entity. - #[derive(Debug, Clone)] - pub struct Revoke> { - /// Object to revoke. - pub object: O, - /// Entity which is being revoked this token from. - pub destination_id: AccountId, + impl SetKeyValue { + /// Constructs a new [`SetKeyValue`] for an [`AssetDefinition`] with the given `key` and `value`. + pub fn asset_definition( + asset_definition_id: AssetDefinitionId, + key: Name, + value: impl Into, + ) -> Self { + Self { + object_id: asset_definition_id, + key, + value: value.into(), + } + } } - /// Generic instruction for setting a chain-wide config parameter. - #[derive(Debug, Clone)] - pub struct SetParameter { - /// Parameter to be changed. - pub parameter: Parameter, + impl SetKeyValue { + /// Constructs a new [`SetKeyValue`] for an [`Asset`] with the given `key` and `value`. + pub fn asset(asset_id: AssetId, key: Name, value: impl Into) -> Self { + Self { + object_id: asset_id, + key, + value: value.into(), + } + } } - /// Generic instruction for setting a chain-wide config parameter. - #[derive(Debug, Clone)] - pub struct NewParameter { - /// Parameter to be changed. - pub parameter: Parameter, + impl_display! { + SetKeyValue + where + O: Identifiable, + O::Id: Display, + => + "SET `{}` = `{}` IN `{}`", + key, value, object_id, + } + + impl_into_box! { + SetKeyValue | + SetKeyValue | + SetKeyValue | + SetKeyValue => SetKeyValueBox ==> InstructionBox::SetKeyValue + } + + isi! { + /// Generic instruction to remove key value at the object. + #[schema(bounds = "O: Identifiable, O::Id: IntoSchema")] + pub struct RemoveKeyValue { + /// From where to remove key value. + #[serde(flatten)] + pub object_id: O::Id, + /// Key of the pair to remove. + pub key: Name, + } } - /// Generic instruction for upgrading runtime objects. - #[derive(Debug, Clone)] - pub struct Upgrade> { - /// Object to upgrade. - pub object: O, + impl RemoveKeyValue { + /// Constructs a new [`RemoveKeyValue`] for a [`Domain`] with the given `key`. 
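// For illustration, the typed constructors above in use (the domain id, key, and
// value are hypothetical; `String` is assumed to satisfy `impl Into<Value>`):

    let set = SetKeyValue::domain(
        "wonderland".parse().unwrap(),
        "contact".parse().unwrap(),
        "alice@wonderland.io".to_owned(),
    );
    let _: InstructionBox = set.into();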
+ pub fn domain(domain_id: DomainId, key: Name) -> Self { + Self { + object_id: domain_id, + key, + } + } + } + + impl RemoveKeyValue { + /// Constructs a new [`RemoveKeyValue`] for an [`Account`] with the given `key`. + pub fn account(account_id: AccountId, key: Name) -> Self { + Self { + object_id: account_id, + key, + } + } + } + + impl RemoveKeyValue { + /// Constructs a new [`RemoveKeyValue`] for an [`AssetDefinition`] with the given `key`. + pub fn asset_definition(asset_definition_id: AssetDefinitionId, key: Name) -> Self { + Self { + object_id: asset_definition_id, + key, + } + } + } + + impl RemoveKeyValue { + /// Constructs a new [`RemoveKeyValue`] for an [`Asset`] with the given `key`. + pub fn asset(asset_id: AssetId, key: Name) -> Self { + Self { + object_id: asset_id, + key, + } + } + } + + impl_display! { + RemoveKeyValue + where + O: Identifiable, + O::Id: Display, + => + "REMOVE `{}` from `{}`", + key, object_id, + } + + impl_into_box! { + RemoveKeyValue | + RemoveKeyValue | + RemoveKeyValue | + RemoveKeyValue => RemoveKeyValueBox ==> InstructionBox::RemoveKeyValue + } + + isi! { + /// Generic instruction for a registration of an object to the identifiable destination. + #[schema(bounds = "O: Registered, O::With: IntoSchema")] + #[serde(transparent)] + pub struct Register { + /// The object that should be registered, should be uniquely identifiable by its id. + pub object: O::With, + } } - /// Generic instruction for executing specified trigger - #[derive(Debug, Clone)] - pub struct ExecuteTrigger { - /// Id of a trigger to execute - pub trigger_id: TriggerId, + impl Register { + /// Constructs a new [`Register`] for a [`Peer`]. + pub fn peer(new_peer: Peer) -> Self { + Self { object: new_peer } + } } - /// Generic instruction for logging messages - #[derive(Debug, Clone)] - pub struct Log { - /// Log level of the message - pub level: Level, - /// Message to be logged - pub msg: String, + impl Register { + /// Constructs a new [`Register`] for a [`Domain`]. + pub fn domain(new_domain: NewDomain) -> Self { + Self { object: new_domain } + } } - impl From> for SetKeyValueExpr { - fn from(source: SetKeyValue) -> Self { - Self::new(source.object_id.into(), source.key, source.value) + impl Register { + /// Constructs a new [`Register`] for an [`Account`]. + pub fn account(new_account: NewAccount) -> Self { + Self { + object: new_account, + } } } - impl From> for RemoveKeyValueExpr { - fn from(source: RemoveKeyValue) -> Self { - Self::new(source.object_id.into(), source.key) + impl Register { + /// Constructs a new [`Register`] for an [`AssetDefinition`]. + pub fn asset_definition(new_asset_definition: NewAssetDefinition) -> Self { + Self { + object: new_asset_definition, + } } } - impl From> for RegisterExpr { - fn from(source: Register) -> Self { - Self::new(source.object.into()) + impl Register { + /// Constructs a new [`Register`] for an [`Asset`]. + pub fn asset(new_asset: Asset) -> Self { + Self { object: new_asset } } } - impl From> for UnregisterExpr { - fn from(source: Unregister) -> Self { - Self::new(source.object_id.into()) + impl Register { + /// Constructs a new [`Register`] for a [`Role`]. + pub fn role(new_role: NewRole) -> Self { + Self { object: new_role } } } - impl, D: Identifiable> From> for MintExpr { - fn from(source: Mint) -> Self { - Self::new(source.object, source.destination_id.into()) + impl Register> { + /// Constructs a new [`Register`] for a [`Trigger`]. 
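// For illustration, a sketch pairing `Register` with `RemoveKeyValue` (ids are
// hypothetical, and `Domain::new` is assumed to return the `NewDomain` builder
// that `Register::domain` expects):

    let register = Register::domain(Domain::new("wonderland".parse().unwrap()));
    let remove = RemoveKeyValue::domain(
        "wonderland".parse().unwrap(),
        "contact".parse().unwrap(),
    );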
+ pub fn trigger(new_trigger: Trigger) -> Self { + Self { + object: new_trigger, + } } } - impl, D: Identifiable> From> for BurnExpr { - fn from(source: Burn) -> Self { - Self::new(source.object, source.destination_id.into()) + impl_display! { + Register + where + O: Registered, + O::With: Display, + => + "REGISTER `{}`", + object, + } + + impl_into_box! { + Register | + Register | + Register | + Register | + Register | + Register | + Register > => RegisterBox ==> InstructionBox::Register + } + + isi! { + /// Generic instruction for an unregistration of an object from the identifiable destination. + #[schema(bounds = "O: Identifiable, O::Id: IntoSchema")] + pub struct Unregister { + /// [`Identifiable::Id`] of the object which should be unregistered. + pub object_id: O::Id, } } - impl, D: Identifiable> From> for TransferExpr { - fn from(source: Transfer) -> Self { - Self::new( - source.source_id.into(), - source.object, - source.destination_id.into(), - ) + impl_display! { + Unregister + where + O: Identifiable, + O::Id: Display, + => + "UNREGISTER `{}`", + object_id, + } + + impl_into_box! { + Unregister | + Unregister | + Unregister | + Unregister | + Unregister | + Unregister | + Unregister > => UnregisterBox ==> InstructionBox::Unregister + } + + impl Unregister { + /// Constructs a new [`Unregister`] for a [`Peer`]. + pub fn peer(peer_id: PeerId) -> Self { + Self { object_id: peer_id } } } - impl> From> for GrantExpr { - fn from(source: Grant) -> Self { - Self::new(source.object, source.destination_id) + impl Unregister { + /// Constructs a new [`Unregister`] for a [`Domain`]. + pub fn domain(domain_id: DomainId) -> Self { + Self { + object_id: domain_id, + } } } - impl> From> for RevokeExpr { - fn from(source: Revoke) -> Self { - Self::new(source.object, source.destination_id) + impl Unregister { + /// Constructs a new [`Unregister`] for an [`Account`]. + pub fn account(account_id: AccountId) -> Self { + Self { + object_id: account_id, + } } } - impl From for SetParameterExpr { - fn from(source: SetParameter) -> Self { - Self::new(source.parameter) + impl Unregister { + /// Constructs a new [`Unregister`] for an [`AssetDefinition`]. + pub fn asset_definition(asset_definition_id: AssetDefinitionId) -> Self { + Self { + object_id: asset_definition_id, + } } } - impl From for NewParameterExpr { - fn from(source: NewParameter) -> Self { - Self::new(source.parameter) + impl Unregister { + /// Constructs a new [`Unregister`] for an [`Asset`]. + pub fn asset(asset_id: AssetId) -> Self { + Self { + object_id: asset_id, + } } } - impl From> for UpgradeExpr { - fn from(source: Upgrade) -> Self { - Self::new(source.object) + impl Unregister { + /// Constructs a new [`Unregister`] for a [`Role`]. + pub fn role(role_id: RoleId) -> Self { + Self { object_id: role_id } } } - impl From for ExecuteTriggerExpr { - fn from(source: ExecuteTrigger) -> Self { - Self::new(source.trigger_id) + impl Unregister> { + /// Constructs a new [`Unregister`] for a [`Trigger`]. + pub fn trigger(trigger_id: TriggerId) -> Self { + Self { + object_id: trigger_id, + } } } - impl From for LogExpr { - fn from(source: Log) -> Self { - Self::new(source.level, source.msg) + isi! { + /// Generic instruction for a mint of an object to the identifiable destination. + #[schema(bounds = "O: Into + IntoSchema, D: Identifiable, D::Id: IntoSchema")] + pub struct Mint, D: Identifiable> { + /// Object which should be minted. + pub object: O, + /// Destination object [`Identifiable::Id`]. + pub destination_id: D::Id, } } -} -isi! 
{ - /// Sized structure for all possible on-chain configuration parameters. - #[derive(Display)] - #[display(fmt = "SET `{parameter}`")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `SetParameterExpr` has no trap representation in `EvaluatesTo` - #[ffi_type(unsafe {robust})] - pub struct SetParameterExpr { - /// The configuration parameter being changed. - #[serde(flatten)] - pub parameter: EvaluatesTo, + impl Mint { + /// Constructs a new [`Mint`] for a [`PublicKey`] for [`Account`]. + pub fn account_public_key(public_key: PublicKey, account_id: AccountId) -> Self { + Self { + object: public_key, + destination_id: account_id, + } + } } -} -isi! { - /// Sized structure for all possible on-chain configuration parameters when they are first created. - #[derive(Display)] - #[display(fmt = "SET `{parameter}`")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `NewParameterExpr` has no trap representation in `EvaluatesTo` - #[ffi_type(unsafe {robust})] - pub struct NewParameterExpr { - /// The configuration parameter being created. - #[serde(flatten)] - pub parameter: EvaluatesTo, + impl Mint { + /// Constructs a new [`Mint`] for a [`SignatureCheckCondition`] for [`Account`]. + pub fn account_signature_check_condition( + signature_check_condition: SignatureCheckCondition, + account_id: AccountId, + ) -> Self { + Self { + object: signature_check_condition, + destination_id: account_id, + } + } } -} -isi! { - /// Sized structure for all possible key value set instructions. - #[derive(Display)] - #[display(fmt = "SET `{key}` = `{value}` IN `{object_id}`")] - #[ffi_type] - pub struct SetKeyValueExpr { - /// Where to set this key value. - #[serde(flatten)] - pub object_id: EvaluatesTo, - /// Key string. - pub key: EvaluatesTo, - /// Object to set as a value. - pub value: EvaluatesTo, + impl Mint { + /// Constructs a new [`Mint`] for an [`Asset`] of [`Quantity`] type. + pub fn asset_quantity(quantity: u32, asset_id: AssetId) -> Self { + Self { + object: quantity, + destination_id: asset_id, + } + } } -} -isi! { - /// Sized structure for all possible key value pair remove instructions. - #[derive(Display)] - #[display(fmt = "REMOVE `{key}` from `{object_id}`")] - #[ffi_type] - pub struct RemoveKeyValueExpr { - /// From where to remove this key value. - #[serde(flatten)] - pub object_id: EvaluatesTo, - /// Key string. - pub key: EvaluatesTo, + impl Mint { + /// Constructs a new [`Mint`] for an [`Asset`] of [`BigQuantity`] type. + pub fn asset_big_quantity(big_quantity: u128, asset_id: AssetId) -> Self { + Self { + object: big_quantity, + destination_id: asset_id, + } + } } -} -isi! { - /// Sized structure for all possible Registers. - #[derive(Display)] - #[display(fmt = "REGISTER `{object}`")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `RegisterExpr` has no trap representation in `EvaluatesTo` - #[ffi_type(unsafe {robust})] - pub struct RegisterExpr { - /// The object that should be registered, should be uniquely identifiable by its id. - pub object: EvaluatesTo, + impl Mint { + /// Constructs a new [`Mint`] for an [`Asset`] of [`Fixed`] type. + pub fn asset_fixed(fixed: Fixed, asset_id: AssetId) -> Self { + Self { + object: fixed, + destination_id: asset_id, + } + } } -} -isi! { - /// Sized structure for all possible Unregisters. 
- #[derive(Display)] - #[display(fmt = "UNREGISTER `{object_id}`")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `UnregisterExpr` has no trap representation in `EvaluatesTo` - #[ffi_type(unsafe {robust})] - pub struct UnregisterExpr { - /// The id of the object that should be unregistered. - pub object_id: EvaluatesTo, + impl Mint> { + /// Constructs a new [`Mint`] for repetition count of [`Trigger`]. + pub fn trigger_repetitions(repetitions: u32, trigger_id: TriggerId) -> Self { + Self { + object: repetitions, + destination_id: trigger_id, + } + } } -} -isi! { - /// Sized structure for all possible Mints. - #[derive(Display)] - #[display(fmt = "MINT `{object}` TO `{destination_id}`")] - #[ffi_type] - pub struct MintExpr { - /// Object to mint. - pub object: EvaluatesTo, - /// Entity to mint to. - pub destination_id: EvaluatesTo, + impl_display! { + Mint + where + O: Into + Display, + D: Identifiable, + D::Id: Display, + => + "MINT `{}` TO `{}`", + object, + destination_id, } -} -isi! { - /// Sized structure for all possible Burns. - #[derive(Display)] - #[display(fmt = "BURN `{object}` FROM `{destination_id}`")] - #[ffi_type] - pub struct BurnExpr { - /// Object to burn. - pub object: EvaluatesTo, - /// Entity to burn from. - pub destination_id: EvaluatesTo, + impl_into_box! { + Mint | + Mint => AccountMintBox ==> MintBox::Account } -} -isi! { - /// Sized structure for all possible Transfers. - #[derive(Display)] - #[display(fmt = "TRANSFER `{object}` FROM `{source_id}` TO `{destination_id}`")] - #[ffi_type] - pub struct TransferExpr { - /// Entity to transfer from. - pub source_id: EvaluatesTo, - /// Object to transfer. - pub object: EvaluatesTo, - /// Entity to transfer to. - pub destination_id: EvaluatesTo, + impl_into_box! { + Mint | + Mint | + Mint => AssetMintBox ==> MintBox::Asset } -} -isi! { - /// Composite instruction for a pair of instructions. - #[derive(Display)] - #[display(fmt = "(`{left_instruction}`, `{right_instruction}`)")] - #[ffi_type] - pub struct PairExpr { - /// Left instruction - pub left_instruction: InstructionExpr, - /// Right instruction - pub right_instruction: InstructionExpr, + impl_into_box! { + Mint | + Mint | + Mint | + Mint | + Mint | + Mint > => MintBox ==> InstructionBox::Mint } -} -isi! { - /// Composite instruction for a sequence of instructions. - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `SequenceExpr` has no trap representation in `Vec` - #[ffi_type(unsafe {robust})] - pub struct SequenceExpr { - /// Sequence of Iroha Special Instructions to execute. - pub instructions: Vec, + isi! { + /// Generic instruction for a burn of an object to the identifiable destination. + #[schema(bounds = "O: Into + IntoSchema, D: Identifiable, D::Id: IntoSchema")] + pub struct Burn, D: Identifiable> { + /// Object which should be burned. + pub object: O, + /// Destination object [`Identifiable::Id`]. + pub destination_id: D::Id, + } } -} -impl core::fmt::Display for SequenceExpr { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "SEQUENCE [")?; - let mut first = true; - for instruction in &self.instructions { - if !first { - write!(f, ", ")?; + impl Burn { + /// Constructs a new [`Burn`] for a [`PublicKey`] for [`Account`]. 
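// For illustration, the `Mint` constructors together with the `impl_into_box!`
// chains above (the asset id is hypothetical):

    let mint = Mint::asset_quantity(42_u32, "rose##alice@wonderland".parse().unwrap());
    // `Mint<u32, Asset>` -> `AssetMintBox` -> `MintBox` -> `InstructionBox`:
    let _: InstructionBox = mint.into();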
+ pub fn account_public_key(public_key: PublicKey, account_id: AccountId) -> Self { + Self { + object: public_key, + destination_id: account_id, } - first = false; + } + } - write!(f, "`{instruction}`")?; + impl Burn { + /// Constructs a new [`Burn`] for an [`Asset`] of [`Quantity`] type. + pub fn asset_quantity(quantity: u32, asset_id: AssetId) -> Self { + Self { + object: quantity, + destination_id: asset_id, + } } - write!(f, "]") } -} -isi! { - /// Composite instruction for a conditional execution of other instructions. - #[ffi_type] - pub struct ConditionalExpr { - /// Condition to be checked. - pub condition: EvaluatesTo, - /// Instruction to be executed if condition pass. - pub then: InstructionExpr, - /// Optional instruction to be executed if condition fail. - pub otherwise: Option, + impl Burn { + /// Constructs a new [`Burn`] for an [`Asset`] of [`BigQuantity`] type. + pub fn asset_big_quantity(big_quantity: u128, asset_id: AssetId) -> Self { + Self { + object: big_quantity, + destination_id: asset_id, + } + } } -} -impl core::fmt::Display for ConditionalExpr { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "IF `{}` THEN `{}`", self.condition, self.then)?; - if let Some(otherwise) = &self.otherwise { - write!(f, " ELSE `{otherwise}`")?; + impl Burn { + /// Constructs a new [`Burn`] for an [`Asset`] of [`Fixed`] type. + pub fn asset_fixed(fixed: Fixed, asset_id: AssetId) -> Self { + Self { + object: fixed, + destination_id: asset_id, + } } + } - Ok(()) + impl Burn> { + /// Constructs a new [`Burn`] for repetition count of [`Trigger`]. + pub fn trigger_repetitions(repetitions: u32, trigger_id: TriggerId) -> Self { + Self { + object: repetitions, + destination_id: trigger_id, + } + } } -} -isi! { - /// Utilitary instruction to fail execution and submit an error `message`. - #[derive(Display)] - #[display(fmt = "FAIL `{message}`")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `Fail` has no trap representation in `String` - #[ffi_type(unsafe {robust})] - pub struct Fail { - /// Message to submit. - pub message: String, + impl_display! { + Burn + where + O: Into + Display, + D: Identifiable, + D::Id: Display, + => + "BURN `{}` FROM `{}`", + object, + destination_id, + } + + impl_into_box! { + Burn | + Burn | + Burn => AssetBurnBox ==> BurnBox::Asset + } + + impl_into_box! { + Burn | + Burn | + Burn | + Burn | + Burn > => BurnBox ==> InstructionBox::Burn + } + + isi! { + /// Generic instruction for a transfer of an object from the identifiable source to the identifiable destination. + #[schema(bounds = "S: Identifiable, S::Id: IntoSchema, \ + O: Into + IntoSchema, \ + D: Identifiable, D::Id: IntoSchema")] + pub struct Transfer, D: Identifiable> { + /// Source object `Id`. + pub source_id: S::Id, + /// Object which should be transferred. + pub object: O, + /// Destination object `Id`. + pub destination_id: D::Id, + } } -} -isi! { - /// Sized structure for all possible Grants. - #[derive(Display)] - #[display(fmt = "GRANT `{object}` TO `{destination_id}`")] - #[ffi_type] - pub struct GrantExpr { - /// Object to grant. - pub object: EvaluatesTo, - /// Account to which to grant this object. - pub destination_id: EvaluatesTo, + impl Transfer { + /// Constructs a new [`Transfer`] for a [`Domain`]. + pub fn domain(from: AccountId, domain_id: DomainId, to: AccountId) -> Self { + Self { + source_id: from, + object: domain_id, + destination_id: to, + } + } } -} -isi! { - /// Sized structure for all possible Grants. 
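// For illustration, the corresponding `Burn` constructor (hypothetical asset id):

    let burn = Burn::asset_quantity(7_u32, "rose##alice@wonderland".parse().unwrap());
    let _: InstructionBox = burn.into();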
- #[derive(Display)] - #[display(fmt = "REVOKE `{object}` FROM `{destination_id}`")] - #[ffi_type] - pub struct RevokeExpr { - /// Object to revoke. - pub object: EvaluatesTo, - /// Account to which to revoke this object from. - pub destination_id: EvaluatesTo, + impl Transfer { + /// Constructs a new [`Transfer`] for an [`AssetDefinition`]. + pub fn asset_definition( + from: AccountId, + asset_definition_id: AssetDefinitionId, + to: AccountId, + ) -> Self { + Self { + source_id: from, + object: asset_definition_id, + destination_id: to, + } + } } -} -isi! { - /// Instruction to execute specified trigger - #[derive(Display)] - #[display(fmt = "EXECUTE `{trigger_id}`")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `ExecuteTriggerExpr` has no trap representation in `TriggerId` - #[ffi_type(unsafe {robust})] - pub struct ExecuteTriggerExpr { - /// Id of a trigger to execute - pub trigger_id: EvaluatesTo, + impl Transfer { + /// Constructs a new [`Transfer`] for an [`Asset`] of [`Quantity`] type. + pub fn asset_quantity(asset_id: AssetId, quantity: u32, to: AccountId) -> Self { + Self { + source_id: asset_id, + object: quantity, + destination_id: to, + } + } } -} -isi! { - /// Sized structure for all possible Upgrades. - #[derive(Display)] - #[display(fmt = "UPGRADE `{object}`")] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `UpgradeExpr` has no trap representation in `EvaluatesTo` - #[ffi_type(unsafe {robust})] - pub struct UpgradeExpr { - /// The object to upgrade. - pub object: EvaluatesTo, + impl Transfer { + /// Constructs a new [`Transfer`] for an [`Asset`] of [`BigQuantity`] type. + pub fn asset_big_quantity(asset_id: AssetId, big_quantity: u128, to: AccountId) -> Self { + Self { + source_id: asset_id, + object: big_quantity, + destination_id: to, + } + } } -} -isi! { - /// Instruction to print logs - #[derive(Display)] - #[display(fmt = "LOG({level}): {msg}")] - #[ffi_type] - pub struct LogExpr { - /// Message log level - #[serde(flatten)] - pub level: EvaluatesTo, - /// Msg to be logged - pub msg: EvaluatesTo, + impl Transfer { + /// Constructs a new [`Transfer`] for an [`Asset`] of [`Fixed`] type. + pub fn asset_fixed(asset_id: AssetId, fixed: Fixed, to: AccountId) -> Self { + Self { + source_id: asset_id, + object: fixed, + destination_id: to, + } + } } -} -impl ExecuteTriggerExpr { - /// Construct [`ExecuteTriggerExpr`] - pub fn new(trigger_id: I) -> Self - where - I: Into>, - { - Self { - trigger_id: trigger_id.into(), + impl_display! { + Transfer + where + S: Identifiable, + S::Id: Display, + O: Into + Display, + D: Identifiable, + D::Id: Display, + => + "TRANSFER `{}` FROM `{}` TO `{}`", + object, + source_id, + destination_id, + } + + impl_into_box! { + Transfer | + Transfer | + Transfer => AssetTransferBox ==> TransferBox::Asset + } + + impl_into_box! { + Transfer | + Transfer | + Transfer | + Transfer | + Transfer => TransferBox ==> InstructionBox::Transfer + } + + isi! { + /// Utilitary instruction to fail execution and submit an error `message`. + #[derive(Constructor, Display)] + #[display(fmt = "FAIL `{message}`")] + #[serde(transparent)] + #[repr(transparent)] + pub struct Fail { + /// Message to submit. + pub message: String, } } -} -impl RevokeExpr { - /// Generic constructor. - pub fn new>, I: Into>>( - object: P, - destination_id: I, - ) -> Self { - Self { - destination_id: destination_id.into(), - object: object.into(), + impl_into_box!(Fail ==> InstructionBox::Fail); + + isi! 
{ + /// Generic instruction for granting permission to an entity. + pub struct Grant> { + /// Object to grant. + pub object: O, + /// Entity to which to grant this token. + pub destination_id: AccountId, } } -} -impl GrantExpr { - /// Constructor. - pub fn new>, I: Into>>( - object: P, - destination_id: I, - ) -> Self { - Self { - destination_id: destination_id.into(), - object: object.into(), + impl Grant { + /// Constructs a new [`Grant`] for a [`PermissionToken`]. + pub fn permission_token(permission_token: PermissionToken, to: AccountId) -> Self { + Self { + object: permission_token, + destination_id: to, + } } } -} -impl SetKeyValueExpr { - /// Construct [`SetKeyValueExpr`]. - pub fn new< - I: Into>, - K: Into>, - V: Into>, - >( - object_id: I, - key: K, - value: V, - ) -> Self { - Self { - object_id: object_id.into(), - key: key.into(), - value: value.into(), + impl Grant { + /// Constructs a new [`Grant`] for a [`Role`]. + pub fn role(role_id: RoleId, to: AccountId) -> Self { + Self { + object: role_id, + destination_id: to, + } } } -} -impl RemoveKeyValueExpr { - /// Construct [`RemoveKeyValueExpr`]. - pub fn new>, K: Into>>( - object_id: I, - key: K, - ) -> Self { - Self { - object_id: object_id.into(), - key: key.into(), + impl_display! { + Grant + where + O: Into + Display, + => + "GRANT `{}` TO `{}`", + object, + destination_id, + } + + impl_into_box! { + Grant | + Grant => GrantBox ==> InstructionBox::Grant + } + + isi! { + /// Generic instruction for revoking permission from an entity. + pub struct Revoke> { + /// Object to revoke. + pub object: O, + /// Entity which is being revoked this token from. + pub destination_id: AccountId, } } -} -impl RegisterExpr { - /// Construct [`Register`]. - pub fn new>>(object: O) -> Self { - Self { - object: object.into(), + impl Revoke { + /// Constructs a new [`Revoke`] for a [`PermissionToken`]. + pub fn permission_token(permission_token: PermissionToken, from: AccountId) -> Self { + Self { + object: permission_token, + destination_id: from, + } } } -} -impl UnregisterExpr { - /// Construct [`Unregister`]. - pub fn new>>(object_id: O) -> Self { - Self { - object_id: object_id.into(), + impl Revoke { + /// Constructs a new [`Revoke`] for a [`Role`]. + pub fn role(role_id: RoleId, from: AccountId) -> Self { + Self { + object: role_id, + destination_id: from, + } } } -} -impl MintExpr { - /// Construct [`Mint`]. - pub fn new>, D: Into>>( - object: O, - destination_id: D, - ) -> Self { - Self { - object: object.into(), - destination_id: destination_id.into(), + impl_display! { + Revoke + where + O: Into + Display, + => + "REVOKE `{}` FROM `{}`", + object, + destination_id, + } + + impl_into_box! { + Revoke | + Revoke => RevokeBox ==> InstructionBox::Revoke + } + + isi! { + /// Instruction to execute specified trigger + #[derive(Constructor, Display)] + #[display(fmt = "EXECUTE `{trigger_id}`")] + #[serde(transparent)] + #[repr(transparent)] + pub struct ExecuteTrigger { + /// Id of a trigger to execute + pub trigger_id: TriggerId, } } -} -impl BurnExpr { - /// Construct [`Burn`]. - pub fn new>, D: Into>>( - object: O, - destination_id: D, - ) -> Self { - Self { - object: object.into(), - destination_id: destination_id.into(), + impl_into_box!(ExecuteTrigger ==> InstructionBox::ExecuteTrigger); + + isi! { + /// Generic instruction for upgrading runtime objects. + #[derive(Constructor, Display)] + #[display(fmt = "UPGRADE")] + #[serde(transparent)] + #[repr(transparent)] + pub struct Upgrade { + /// Object to upgrade. 
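// For illustration, a sketch of the permission-related constructors above (the
// role, account, and trigger ids are hypothetical):

    let grant = Grant::role("moderator".parse().unwrap(), "alice@wonderland".parse().unwrap());
    let revoke = Revoke::role("moderator".parse().unwrap(), "alice@wonderland".parse().unwrap());
    let execute = ExecuteTrigger::new("my_trigger".parse().unwrap());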
+ pub executor: Executor, } } -} -impl TransferExpr { - /// Construct [`Transfer`]. - pub fn new< - S: Into>, - O: Into>, - D: Into>, - >( - source_id: S, - object: O, - destination_id: D, - ) -> Self { - Self { - source_id: source_id.into(), - object: object.into(), - destination_id: destination_id.into(), + impl_into_box!(Upgrade ==> InstructionBox::Upgrade); + + isi! { + /// Instruction to print logs + #[derive(Constructor, Display)] + #[display(fmt = "LOG({level}): {msg}")] + pub struct Log { + /// Message log level + #[serde(flatten)] + pub level: Level, + #[getset(skip)] // TODO: Fix this by addressing ffi issues + /// Msg to be logged + pub msg: String, } } + + impl_into_box!(Log ==> InstructionBox::Log); } -impl PairExpr { - /// Construct [`Pair`]. - pub fn new, RI: Into>( - left_instruction: LI, - right_instruction: RI, - ) -> Self { - PairExpr { - left_instruction: left_instruction.into(), - right_instruction: right_instruction.into(), - } +macro_rules! isi_box { + ($($meta:meta)* $item:item) => { + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Display, + parity_scale_codec::Decode, + parity_scale_codec::Encode, + serde::Deserialize, + serde::Serialize, + iroha_schema::IntoSchema, + derive_more::From, + )] + $($meta)* + $item + }; +} + +isi_box! { + /// Enum with all supported [`SetKeyValue`] instructions. + pub enum SetKeyValueBox { + /// Set key value for [`Domain`]. + Domain(SetKeyValue), + /// Set key value for [`Account`]. + Account(SetKeyValue), + /// Set key value for [`AssetDefinition`]. + AssetDefinition(SetKeyValue), + /// Set key value for [`Asset`]. + Asset(SetKeyValue), } } -impl SequenceExpr { - /// Construct [`SequenceExpr`]. - pub fn new(instructions: impl IntoIterator) -> Self { - Self { - instructions: instructions.into_iter().collect(), - } +isi_box! { + /// Enum with all supported [`RemoveKeyValue`] instructions. + pub enum RemoveKeyValueBox { + /// Remove key value from [`Domain`]. + Domain(RemoveKeyValue), + /// Remove key value from [`Account`]. + Account(RemoveKeyValue), + /// Remove key value from [`AssetDefinition`]. + AssetDefinition(RemoveKeyValue), + /// Remove key value from [`Asset`]. + Asset(RemoveKeyValue), } } -impl ConditionalExpr { - /// Construct [`If`]. - pub fn new>, T: Into>( - condition: C, - then: T, - ) -> Self { - Self { - condition: condition.into(), - then: then.into(), - otherwise: None, - } - } - /// [`If`] constructor with `Otherwise` instruction. - pub fn with_otherwise< - C: Into>, - T: Into, - O: Into, - >( - condition: C, - then: T, - otherwise: O, - ) -> Self { - Self { - condition: condition.into(), - then: then.into(), - otherwise: Some(otherwise.into()), - } +isi_box! { + /// Enum with all supported [`Register`] instructions. + pub enum RegisterBox { + /// Register [`Peer`]. + Peer(Register), + /// Register [`Domain`]. + Domain(Register), + /// Register [`Account`]. + Account(Register), + /// Register [`AssetDefinition`]. + AssetDefinition(Register), + /// Register [`Asset`]. + Asset(Register), + /// Register [`Role`]. + Role(Register), + /// Register [`Trigger`]. + Trigger(Register>) } } -impl Fail { - /// Construct [`Fail`]. - pub fn new(message: &str) -> Self { - Self { - message: String::from(message), - } +isi_box! { + /// Enum with all supported [`Unregister`] instructions. + pub enum UnregisterBox { + /// Unregister [`Peer`]. + Peer(Unregister), + /// Unregister [`Domain`]. + Domain(Unregister), + /// Unregister [`Account`]. + Account(Unregister), + /// Unregister [`AssetDefinition`]. 
+ AssetDefinition(Unregister), + /// Unregister [`Asset`]. + Asset(Unregister), + /// Unregister [`Role`]. + Role(Unregister), + /// Unregister [`Trigger`]. + Trigger(Unregister>) } } -impl SetParameterExpr { - /// Construct [`SetParameterExpr`]. - pub fn new>>(parameter: P) -> Self { - Self { - parameter: parameter.into(), - } +isi_box! { + /// Enum with all supported [`Mint`] instructions. + pub enum MintBox { + /// Mint for [`Account`]. + Account(AccountMintBox), + /// Mint for [`Asset`]. + Asset(AssetMintBox), + /// Mint [`Trigger`] repetitions. + TriggerRepetitions(Mint>), } } -impl NewParameterExpr { - /// Construct [`NewParameterExpr`]. - pub fn new>>(parameter: P) -> Self { - Self { - parameter: parameter.into(), - } +isi_box! { + /// Enum with all supported [`Mint`] instructions related to [`Account`]. + pub enum AccountMintBox { + /// Mint [`PublicKey`]. + PublicKey(Mint), + /// Mint [`SignatureCheckCondition`]. + SignatureCheckCondition(Mint), } } -impl UpgradeExpr { - /// Construct [`UpgradeExpr`]. - pub fn new>>(object: O) -> Self { - Self { - object: object.into(), - } +isi_box! { + /// Enum with all supported [`Mint`] instructions related to [`Asset`]. + pub enum AssetMintBox { + /// Mint [`Asset`] of [`Quantity`] type. + Quantity(Mint), + /// Mint [`Asset`] of [`BigQuantity`] type. + BigQuantity(Mint), + /// Mint [`Asset`] of [`Fixed`] type. + Fixed(Mint), } } -impl LogExpr { - /// Construct [`LogExpr`] - pub fn new>, M: Into>>( - level: L, - msg: M, - ) -> Self { - Self { - level: level.into(), - msg: msg.into(), - } +isi_box! { + /// Enum with all supported [`Burn`] instructions. + pub enum BurnBox { + /// Burn [`PublicKey`] for [`Account`]. + AccountPublicKey(Burn), + /// Burn [`Asset`]. + Asset(AssetBurnBox), + /// Burn [`Trigger`] repetitions. + TriggerRepetitions(Burn>), + } +} + +isi_box! { + /// Enum with all supported [`Burn`] instructions related to [`Asset`]. + pub enum AssetBurnBox { + /// Burn [`Asset`] of [`Quantity`] type. + Quantity(Burn), + /// Burn [`Asset`] of [`BigQuantity`] type. + BigQuantity(Burn), + /// Burn [`Asset`] of [`Fixed`] type. + Fixed(Burn), + } +} + +isi_box! { + /// Enum with all supported [`Transfer`] instructions. + pub enum TransferBox { + /// Transfer [`Domain`] to another [`Account`]. + Domain(Transfer), + /// Transfer [`AssetDefinition`] to another [`Account`]. + AssetDefinition(Transfer), + /// Transfer [`Asset`] to another [`Account`]. + Asset(AssetTransferBox), + } +} + +isi_box! { + /// Enum with all supported [`Transfer`] instructions related to [`Asset`]. + pub enum AssetTransferBox { + /// Transfer [`Asset`] of [`Quantity`] type. + Quantity(Transfer), + /// Transfer [`Asset`] of [`BigQuantity`] type. + BigQuantity(Transfer), + /// Transfer [`Asset`] of [`Fixed`] type. + Fixed(Transfer), + } +} + +isi_box! { + /// Enum with all supported [`Grant`] instructions. + pub enum GrantBox { + /// Grant [`PermissionToken`] to [`Account`]. + PermissionToken(Grant), + /// Grant [`Role`] to [`Account`]. + Role(Grant), + } +} + +isi_box! { + /// Enum with all supported [`Revoke`] instructions. + pub enum RevokeBox { + /// Revoke [`PermissionToken`] from [`Account`]. + PermissionToken(Revoke), + /// Revoke [`Role`] from [`Account`]. 
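// For illustration: the generated `From` impls make the nesting invisible at call
// sites, while consumers can still match on the box layers (sketch, hypothetical
// asset id):

    let boxed: MintBox = Mint::asset_quantity(1_u32, "rose##alice@wonderland".parse().unwrap()).into();
    if let MintBox::Asset(AssetMintBox::Quantity(mint)) = boxed {
        // `mint.object` is the quantity, `mint.destination_id` the `AssetId`
    }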
+ Role(Revoke), } } @@ -884,9 +1237,9 @@ pub mod error { use super::InstructionType; use crate::{ asset::AssetValueType, - evaluate, metadata, + metadata, query::error::{FindError, QueryExecutionFail}, - IdBox, NumericValue, Value, + IdBox, Value, }; #[model] @@ -975,8 +1328,6 @@ pub mod error { // TODO: Only temporarily opaque because of problems with FFI #[ffi_type(opaque)] pub enum InstructionEvaluationError { - /// Failed to evaluate expression - Expression(#[cfg_attr(feature = "std", source)] evaluate::EvaluationError), /// Unsupported parameter type for instruction of type `{0}` Unsupported(InstructionType), /// Failed to find parameter in a permission: {0} @@ -1074,38 +1425,10 @@ pub mod error { /// /// No actual function should ever return this if possible Unknown, - /// Encountered incompatible type of arguments - BinaryOpIncompatibleNumericValueTypes( - #[cfg_attr(feature = "std", source)] BinaryOpIncompatibleNumericValueTypesError, - ), /// Conversion failed: {0} FixedPointConversion(String), } - #[derive( - Debug, - Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Deserialize, - Serialize, - Decode, - Encode, - IntoSchema, - )] - #[display( - fmt = "Binary operation does not support provided combination of arguments ({left}, {right})" - )] - #[cfg_attr(feature = "std", derive(thiserror::Error))] - #[ffi_type] - pub struct BinaryOpIncompatibleNumericValueTypesError { - pub left: NumericValue, - pub right: NumericValue, - } - /// Mintability logic error #[derive( Debug, @@ -1160,9 +1483,10 @@ pub mod error { NameLength, } + /// Repetition of of `{instruction_type}` for id `{id}` #[derive( Debug, - Display, + displaydoc::Display, Clone, PartialEq, Eq, @@ -1174,11 +1498,12 @@ pub mod error { Encode, IntoSchema, )] - #[display(fmt = "Repetition of of `{instruction_type}` for id `{id}`")] #[cfg_attr(feature = "std", derive(thiserror::Error))] #[ffi_type] pub struct RepetitionError { + /// Instruction type pub instruction_type: InstructionType, + /// Id of the object being repeated pub id: IdBox, } } @@ -1188,11 +1513,6 @@ pub mod error { Self::Evaluate(InstructionEvaluationError::Type(err)) } } - impl From for InstructionExecutionError { - fn from(err: evaluate::EvaluationError) -> Self { - Self::Evaluate(InstructionEvaluationError::Expression(err)) - } - } impl From for MathError { fn from(err: FixedPointOperationError) -> Self { match err { @@ -1215,10 +1535,9 @@ pub mod error { /// The prelude re-exports most commonly used traits, structs and macros from this crate. 
pub mod prelude { pub use super::{ - Burn, BurnExpr, ConditionalExpr, ExecuteTrigger, ExecuteTriggerExpr, Fail, Grant, - GrantExpr, InstructionExpr, Log, LogExpr, Mint, MintExpr, NewParameter, NewParameterExpr, - PairExpr, Register, RegisterExpr, RemoveKeyValue, RemoveKeyValueExpr, Revoke, RevokeExpr, - SequenceExpr, SetKeyValue, SetKeyValueExpr, SetParameter, SetParameterExpr, Transfer, - TransferExpr, Unregister, UnregisterExpr, Upgrade, UpgradeExpr, + AccountMintBox, AssetBurnBox, AssetMintBox, AssetTransferBox, Burn, BurnBox, + ExecuteTrigger, Fail, Grant, GrantBox, InstructionBox, Log, Mint, MintBox, NewParameter, + Register, RegisterBox, RemoveKeyValue, RemoveKeyValueBox, Revoke, RevokeBox, SetKeyValue, + SetKeyValueBox, SetParameter, Transfer, TransferBox, Unregister, UnregisterBox, Upgrade, }; } diff --git a/data_model/src/lib.rs b/data_model/src/lib.rs index ff1920331da..0852f58d05f 100644 --- a/data_model/src/lib.rs +++ b/data_model/src/lib.rs @@ -56,10 +56,8 @@ pub mod account; pub mod asset; pub mod block; pub mod domain; -pub mod evaluate; pub mod events; pub mod executor; -pub mod expression; pub mod ipfs; pub mod isi; pub mod metadata; @@ -77,39 +75,76 @@ pub mod trigger; pub mod visit; mod seal { - use crate::{isi::prelude::*, query::prelude::*}; + use crate::prelude::*; pub trait Sealed {} macro_rules! impl_sealed { - ($($ident:ident),+ $(,)?) => { $( - impl Sealed for $ident {} )+ + ($($ident:ident $(< $($generic:ident $(< $inner_generic:ident >)?),+ >)?),+ $(,)?) => { $( + impl Sealed for $ident $(< $($generic $(< $inner_generic >)?),+ >)? {} )+ }; } impl_sealed! { // Boxed instructions - InstructionExpr, - SetKeyValueExpr, - RemoveKeyValueExpr, - RegisterExpr, - UnregisterExpr, - MintExpr, - BurnExpr, - TransferExpr, - GrantExpr, - RevokeExpr, - SetParameterExpr, - NewParameterExpr, - UpgradeExpr, - ExecuteTriggerExpr, - LogExpr, - - // Composite instructions - SequenceExpr, - ConditionalExpr, - PairExpr, - + InstructionBox, + + SetKeyValue, + SetKeyValue, + SetKeyValue, + SetKeyValue, + + RemoveKeyValue, + RemoveKeyValue, + RemoveKeyValue, + RemoveKeyValue, + + Register, + Register, + Register, + Register, + Register, + Register, + Register >, + + Unregister, + Unregister, + Unregister, + Unregister, + Unregister, + Unregister, + Unregister >, + + Mint, + Mint, + Mint, + Mint, + Mint, + Mint >, + + Burn, + Burn, + Burn, + Burn, + Burn >, + + Transfer, + Transfer, + Transfer, + Transfer, + Transfer, + + Grant, + Grant, + + Revoke, + Revoke, + + SetParameter, + NewParameter, + Upgrade, + ExecuteTrigger, + Log, Fail, // Boxed queries @@ -227,6 +262,7 @@ pub mod parameter { pub use self::model::*; use super::*; + use crate::isi::InstructionBox; /// Set of parameter names currently used by iroha #[allow(missing_docs)] @@ -450,27 +486,21 @@ pub mod parameter { } /// Create sequence isi for setting parameters - pub fn into_set_parameters(self) -> isi::SequenceExpr { - isi::SequenceExpr { - instructions: self - .parameters - .into_iter() - .map(isi::SetParameterExpr::new) - .map(Into::into) - .collect(), - } + pub fn into_set_parameters(self) -> Vec { + self.parameters + .into_iter() + .map(isi::SetParameter::new) + .map(Into::into) + .collect() } /// Create sequence isi for creating parameters - pub fn into_create_parameters(self) -> isi::SequenceExpr { - isi::SequenceExpr { - instructions: self - .parameters - .into_iter() - .map(isi::NewParameterExpr::new) - .map(Into::into) - .collect(), - } + pub fn into_create_parameters(self) -> Vec { + self.parameters + .into_iter() + 
.map(isi::NewParameter::new) + .map(Into::into) + .collect() } } @@ -1828,12 +1858,12 @@ pub mod prelude { #[cfg(feature = "std")] pub use super::current_time; pub use super::{ - account::prelude::*, asset::prelude::*, domain::prelude::*, evaluate::prelude::*, - events::prelude::*, executor::prelude::*, expression::prelude::*, isi::prelude::*, - metadata::prelude::*, name::prelude::*, parameter::prelude::*, peer::prelude::*, - permission::prelude::*, query::prelude::*, role::prelude::*, transaction::prelude::*, - trigger::prelude::*, EnumTryAsError, HasMetadata, IdBox, Identifiable, IdentifiableBox, - LengthLimits, NumericValue, PredicateTrait, RegistrableBox, ToValue, TryAsMut, TryAsRef, - TryToValue, UpgradableBox, ValidationFail, Value, + account::prelude::*, asset::prelude::*, domain::prelude::*, events::prelude::*, + executor::prelude::*, isi::prelude::*, metadata::prelude::*, name::prelude::*, + parameter::prelude::*, peer::prelude::*, permission::prelude::*, query::prelude::*, + role::prelude::*, transaction::prelude::*, trigger::prelude::*, EnumTryAsError, + HasMetadata, IdBox, Identifiable, IdentifiableBox, LengthLimits, NumericValue, + PredicateTrait, RegistrableBox, ToValue, TryAsMut, TryAsRef, TryToValue, UpgradableBox, + ValidationFail, Value, }; } diff --git a/data_model/src/query/mod.rs b/data_model/src/query/mod.rs index d5df0b85016..6d96d52634f 100644 --- a/data_model/src/query/mod.rs +++ b/data_model/src/query/mod.rs @@ -88,6 +88,7 @@ macro_rules! queries { #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(parity_scale_codec::Decode, parity_scale_codec::Encode)] #[derive(serde::Deserialize, serde::Serialize)] + #[derive(derive_more::Constructor)] #[derive(iroha_schema::IntoSchema)] $($meta)* $item )+ @@ -184,7 +185,7 @@ pub mod model { /// The hash of the block to which `tx` belongs to pub block_hash: HashOf, /// Transaction - pub transaction: TransactionValue, + pub transaction: Box, } /// Type returned from [`Metadata`] queries @@ -220,9 +221,13 @@ pub mod model { )] #[getset(get = "pub")] pub struct QueryWithParameters { + /// The actual query. pub query: Q, + /// Sorting of the query results. pub sorting: Sorting, + /// Pagination of the query results. pub pagination: Pagination, + /// Amount of results to fetch. pub fetch_size: FetchSize, } } @@ -323,7 +328,7 @@ pub mod role { #[ffi_type(unsafe {robust})] pub struct FindRoleByRoleId { /// `Id` of the [`Role`] to find - pub id: EvaluatesTo, + pub id: RoleId, } /// [`FindRolesByAccountId`] Iroha Query finds all [`Role`]s for a specified account. @@ -334,7 +339,7 @@ pub mod role { #[ffi_type(unsafe {robust})] pub struct FindRolesByAccountId { /// `Id` of an account to find. - pub id: EvaluatesTo, + pub id: AccountId, } } @@ -354,22 +359,6 @@ pub mod role { type Output = Role; } - impl FindRoleByRoleId { - /// Construct [`FindRoleByRoleId`]. - pub fn new(id: impl Into>) -> Self { - Self { id: id.into() } - } - } - - impl FindRolesByAccountId { - /// Construct [`FindRolesByAccountId`]. - pub fn new(account_id: impl Into>) -> Self { - Self { - id: account_id.into(), - } - } - } - /// The prelude re-exports most commonly used traits, structs and macros from this module. pub mod prelude { pub use super::{FindAllRoleIds, FindAllRoles, FindRoleByRoleId, FindRolesByAccountId}; @@ -405,7 +394,7 @@ pub mod permission { #[ffi_type(unsafe {robust})] pub struct FindPermissionTokensByAccountId { /// `Id` of an account to find. 
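// For illustration: with `derive_more::Constructor` now applied in the `queries!`
// macro, the hand-written `new` impls removed below become redundant, and ids are
// passed as plain types (hypothetical account id):

    let query = FindRolesByAccountId::new("alice@wonderland".parse().unwrap());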
- pub id: EvaluatesTo, + pub id: AccountId, } } @@ -417,15 +406,6 @@ pub mod permission { type Output = Vec; } - impl FindPermissionTokensByAccountId { - /// Construct [`FindPermissionTokensByAccountId`]. - pub fn new(account_id: impl Into>) -> Self { - Self { - id: account_id.into(), - } - } - } - /// The prelude re-exports most commonly used traits, structs and macros from this module. pub mod prelude { pub use super::{FindPermissionTokenSchema, FindPermissionTokensByAccountId}; @@ -459,7 +439,7 @@ pub mod account { #[ffi_type(unsafe {robust})] pub struct FindAccountById { /// `Id` of an account to find. - pub id: EvaluatesTo, + pub id: AccountId, } /// [`FindAccountKeyValueByIdAndKey`] Iroha Query finds a [`Value`] @@ -469,9 +449,9 @@ pub mod account { #[ffi_type] pub struct FindAccountKeyValueByIdAndKey { /// `Id` of an account to find. - pub id: EvaluatesTo, + pub id: AccountId, /// Key of the specific key-value in the Account's metadata. - pub key: EvaluatesTo, + pub key: Name, } /// [`FindAccountsByName`] Iroha Query gets [`Account`]s name as input and @@ -483,7 +463,7 @@ pub mod account { #[ffi_type(unsafe {robust})] pub struct FindAccountsByName { /// `name` of accounts to find. - pub name: EvaluatesTo, + pub name: Name, } @@ -496,7 +476,7 @@ pub mod account { #[ffi_type(unsafe {robust})] pub struct FindAccountsByDomainId { /// `Id` of the domain under which accounts should be found. - pub domain_id: EvaluatesTo, + pub domain_id: DomainId, } /// [`FindAccountsWithAsset`] Iroha Query gets [`AssetDefinition`]s id as input and @@ -508,7 +488,7 @@ pub mod account { #[ffi_type(unsafe {robust})] pub struct FindAccountsWithAsset { /// `Id` of the definition of the asset which should be stored in founded accounts. - pub asset_definition_id: EvaluatesTo, + pub asset_definition_id: AssetDefinitionId, } } @@ -536,51 +516,6 @@ pub mod account { type Output = Vec; } - impl FindAccountById { - /// Construct [`FindAccountById`]. - pub fn new(id: impl Into>) -> Self { - Self { id: id.into() } - } - } - - impl FindAccountKeyValueByIdAndKey { - /// Construct [`FindAccountById`]. - pub fn new( - id: impl Into>, - key: impl Into>, - ) -> Self { - Self { - id: id.into(), - key: key.into(), - } - } - } - - impl FindAccountsByName { - /// Construct [`FindAccountsByName`]. - pub fn new(name: impl Into>) -> Self { - Self { name: name.into() } - } - } - - impl FindAccountsByDomainId { - /// Construct [`FindAccountsByDomainId`]. - pub fn new(domain_id: impl Into>) -> Self { - Self { - domain_id: domain_id.into(), - } - } - } - - impl FindAccountsWithAsset { - /// Construct [`FindAccountsWithAsset`]. - pub fn new(asset_definition_id: impl Into>) -> Self { - Self { - asset_definition_id: asset_definition_id.into(), - } - } - } - /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { pub use super::{ @@ -627,7 +562,7 @@ pub mod asset { #[ffi_type(unsafe {robust})] pub struct FindAssetById { /// `Id` of an [`Asset`] to find. - pub id: EvaluatesTo, + pub id: AssetId, } /// [`FindAssetDefinitionById`] Iroha Query finds an [`AssetDefinition`] by it's identification in Iroha [`Peer`]. @@ -638,7 +573,7 @@ pub mod asset { #[ffi_type(unsafe {robust})] pub struct FindAssetDefinitionById { /// `Id` of an [`AssetDefinition`] to find. 
- pub id: EvaluatesTo, + pub id: AssetDefinitionId, } /// [`FindAssetsByName`] Iroha Query gets [`Asset`]s name as input and @@ -650,7 +585,7 @@ pub mod asset { #[ffi_type(unsafe {robust})] pub struct FindAssetsByName { /// [`Name`] of [`Asset`]s to find. - pub name: EvaluatesTo, + pub name: Name, } /// [`FindAssetsByAccountId`] Iroha Query gets [`AccountId`] as input and find all [`Asset`]s @@ -662,7 +597,7 @@ pub mod asset { #[ffi_type(unsafe {robust})] pub struct FindAssetsByAccountId { /// [`AccountId`] under which assets should be found. - pub account_id: EvaluatesTo, + pub account_id: AccountId, } /// [`FindAssetsByAssetDefinitionId`] Iroha Query gets [`AssetDefinitionId`] as input and @@ -674,7 +609,7 @@ pub mod asset { #[ffi_type(unsafe {robust})] pub struct FindAssetsByAssetDefinitionId { /// [`AssetDefinitionId`] with type of [`Asset`]s should be found. - pub asset_definition_id: EvaluatesTo, + pub asset_definition_id: AssetDefinitionId, } /// [`FindAssetsByDomainId`] Iroha Query gets [`Domain`]s id as input and @@ -686,7 +621,7 @@ pub mod asset { #[ffi_type(unsafe {robust})] pub struct FindAssetsByDomainId { /// `Id` of the domain under which assets should be found. - pub domain_id: EvaluatesTo, + pub domain_id: DomainId, } /// [`FindAssetsByDomainIdAndAssetDefinitionId`] Iroha Query gets [`DomainId`] and @@ -697,9 +632,9 @@ pub mod asset { #[ffi_type] pub struct FindAssetsByDomainIdAndAssetDefinitionId { /// `Id` of the domain under which assets should be found. - pub domain_id: EvaluatesTo, + pub domain_id: DomainId, /// [`AssetDefinitionId`] assets of which type should be found. - pub asset_definition_id: EvaluatesTo, + pub asset_definition_id: AssetDefinitionId, } /// [`FindAssetQuantityById`] Iroha Query gets [`AssetId`] as input and finds [`Asset::quantity`] @@ -711,7 +646,7 @@ pub mod asset { #[ffi_type(unsafe {robust})] pub struct FindAssetQuantityById { /// `Id` of an [`Asset`] to find quantity of. - pub id: EvaluatesTo, + pub id: AssetId, } /// [`FindTotalAssetQuantityByAssetDefinitionId`] Iroha Query gets [`AssetDefinitionId`] as input and finds total [`Asset::quantity`] @@ -724,7 +659,7 @@ pub mod asset { #[ffi_type(unsafe {robust})] pub struct FindTotalAssetQuantityByAssetDefinitionId { /// `Id` of an [`Asset`] to find quantity of. - pub id: EvaluatesTo, + pub id: AssetDefinitionId, } /// [`FindAssetKeyValueByIdAndKey`] Iroha Query gets [`AssetId`] and key as input and finds [`Value`] @@ -734,9 +669,9 @@ pub mod asset { #[ffi_type] pub struct FindAssetKeyValueByIdAndKey { /// `Id` of an [`Asset`] acting as [`Store`](crate::asset::AssetValue::Store). - pub id: EvaluatesTo, + pub id: AssetId, /// The key of the key-value pair stored in the asset. - pub key: EvaluatesTo, + pub key: Name, } /// [`FindAssetDefinitionKeyValueByIdAndKey`] Iroha Query gets [`AssetDefinitionId`] and key as input and finds [`Value`] @@ -746,9 +681,9 @@ pub mod asset { #[ffi_type] pub struct FindAssetDefinitionKeyValueByIdAndKey { /// `Id` of an [`Asset`] acting as [`Store`](crate::asset::AssetValue::Store).. - pub id: EvaluatesTo, + pub id: AssetDefinitionId, /// The key of the key-value pair stored in the asset. - pub key: EvaluatesTo, + pub key: Name, } } @@ -804,104 +739,6 @@ pub mod asset { type Output = MetadataValue; } - impl FindAssetById { - /// Construct [`FindAssetById`]. - pub fn new(id: impl Into>) -> Self { - Self { id: id.into() } - } - } - - impl FindAssetDefinitionById { - /// Construct [`FindAssetDefinitionById`]. 
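// For illustration, the same pattern for the asset queries (hypothetical asset id):

    let query = FindAssetQuantityById::new("rose##alice@wonderland".parse().unwrap());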
- pub fn new(id: impl Into>) -> Self { - Self { id: id.into() } - } - } - - impl FindAssetsByName { - /// Construct [`FindAssetsByName`]. - pub fn new(name: impl Into>) -> Self { - Self { name: name.into() } - } - } - - impl FindAssetsByAccountId { - /// Construct [`FindAssetsByAccountId`]. - pub fn new(account_id: impl Into>) -> Self { - Self { - account_id: account_id.into(), - } - } - } - - impl FindAssetsByAssetDefinitionId { - /// Construct [`FindAssetsByAssetDefinitionId`]. - pub fn new(asset_definition_id: impl Into>) -> Self { - Self { - asset_definition_id: asset_definition_id.into(), - } - } - } - - impl FindAssetsByDomainId { - /// Construct [`FindAssetsByDomainId`]. - pub fn new(domain_id: impl Into>) -> Self { - Self { - domain_id: domain_id.into(), - } - } - } - - impl FindAssetsByDomainIdAndAssetDefinitionId { - /// Construct [`FindAssetsByDomainIdAndAssetDefinitionId`]. - pub fn new( - domain_id: impl Into>, - asset_definition_id: impl Into>, - ) -> Self { - Self { - domain_id: domain_id.into(), - asset_definition_id: asset_definition_id.into(), - } - } - } - - impl FindAssetQuantityById { - /// Construct [`FindAssetQuantityById`]. - pub fn new(id: impl Into>) -> Self { - Self { id: id.into() } - } - } - - impl FindTotalAssetQuantityByAssetDefinitionId { - /// Construct [`FindTotalAssetQuantityByAssetDefinitionId`] - pub fn new(id: impl Into>) -> Self { - Self { id: id.into() } - } - } - - impl FindAssetKeyValueByIdAndKey { - /// Construct [`FindAssetKeyValueByIdAndKey`]. - pub fn new(id: impl Into>, key: impl Into>) -> Self { - Self { - id: id.into(), - key: key.into(), - } - } - } - - impl FindAssetDefinitionKeyValueByIdAndKey { - /// Construct [`FindAssetDefinitionKeyValueByIdAndKey`]. - pub fn new( - id: impl Into>, - key: impl Into>, - ) -> Self { - Self { - id: id.into(), - key: key.into(), - } - } - } - /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { pub use super::{ @@ -942,10 +779,9 @@ pub mod domain { #[ffi_type(unsafe {robust})] pub struct FindDomainById { /// `Id` of the domain to find. - pub id: EvaluatesTo, + pub id: DomainId, } - /// [`FindDomainKeyValueByIdAndKey`] Iroha Query finds a [`Value`] of the key-value metadata pair /// in the specified domain. #[derive(Display)] @@ -953,9 +789,9 @@ pub mod domain { #[ffi_type] pub struct FindDomainKeyValueByIdAndKey { /// `Id` of an domain to find. - pub id: EvaluatesTo, + pub id: DomainId, /// Key of the specific key-value in the domain's metadata. - pub key: EvaluatesTo, + pub key: Name, } } @@ -971,26 +807,6 @@ pub mod domain { type Output = MetadataValue; } - impl FindDomainById { - /// Construct [`FindDomainById`]. - pub fn new(id: impl Into>) -> Self { - Self { id: id.into() } - } - } - - impl FindDomainKeyValueByIdAndKey { - /// Construct [`FindDomainKeyValueByIdAndKey`]. - pub fn new( - id: impl Into>, - key: impl Into>, - ) -> Self { - Self { - id: id.into(), - key: key.into(), - } - } - } - /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { pub use super::{FindAllDomains, FindDomainById, FindDomainKeyValueByIdAndKey}; @@ -1015,7 +831,6 @@ pub mod peer { #[ffi_type] pub struct FindAllPeers; - /// [`FindAllParameters`] Iroha Query finds all [`Peer`]s parameters. 
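The same pattern applies to metadata lookups: the id and the metadata key are now a concrete `DomainId` and `Name` rather than two expressions. A small sketch, assuming both types parse with the crate-root `ParseError` re-export used in the tests further below:

```rust
use iroha_data_model::{prelude::*, ParseError};

// Sketch only: assumes `DomainId` and `Name` share `ParseError` as their
// `FromStr` error type.
fn domain_metadata_query() -> Result<FindDomainKeyValueByIdAndKey, ParseError> {
    Ok(FindDomainKeyValueByIdAndKey {
        id: "wonderland".parse()?,  // concrete `DomainId`
        key: "currency".parse()?,   // metadata key as a concrete `Name`
    })
}
```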
#[derive(Copy, Display)] #[display(fmt = "Find all peers parameters")] @@ -1049,8 +864,7 @@ pub mod trigger { use crate::{ domain::prelude::*, events::TriggeringFilterBox, - expression::EvaluatesTo, - prelude::InstructionExpr, + prelude::InstructionBox, trigger::{Trigger, TriggerId}, Executable, Identifiable, Name, Value, }; @@ -1063,7 +877,6 @@ pub mod trigger { #[ffi_type] pub struct FindAllActiveTriggerIds; - /// Find Trigger given its ID. #[derive(Display)] #[display(fmt = "Find `{id}` trigger")] @@ -1072,7 +885,7 @@ pub mod trigger { #[ffi_type(unsafe {robust})] pub struct FindTriggerById { /// The Identification of the trigger to be found. - pub id: EvaluatesTo, + pub id: TriggerId, } @@ -1082,9 +895,9 @@ pub mod trigger { #[ffi_type] pub struct FindTriggerKeyValueByIdAndKey { /// The Identification of the trigger to be found. - pub id: EvaluatesTo, + pub id: TriggerId, /// The key inside the metadata dictionary to be returned. - pub key: EvaluatesTo, + pub key: Name, } @@ -1096,7 +909,7 @@ pub mod trigger { #[ffi_type(unsafe {robust})] pub struct FindTriggersByDomainId { /// [`DomainId`] specifies the domain in which to search for triggers. - pub domain_id: EvaluatesTo, + pub domain_id: DomainId, } } @@ -1116,35 +929,6 @@ pub mod trigger { type Output = Vec>; } - impl FindTriggerById { - /// Construct [`FindTriggerById`]. - pub fn new(id: impl Into>) -> Self { - Self { id: id.into() } - } - } - - impl FindTriggerKeyValueByIdAndKey { - /// Construct [`FindTriggerKeyValueByIdAndKey`]. - pub fn new( - id: impl Into>, - key: impl Into>, - ) -> Self { - Self { - id: id.into(), - key: key.into(), - } - } - } - - impl FindTriggersByDomainId { - /// Construct [`FindTriggersByDomainId`]. - pub fn new(domain_id: impl Into>) -> Self { - Self { - domain_id: domain_id.into(), - } - } - } - pub mod prelude { //! Prelude Re-exports most commonly used traits, structs and macros from this crate. pub use super::{ @@ -1166,10 +950,7 @@ pub mod transaction { use iroha_crypto::HashOf; use super::{Query, TransactionQueryOutput}; - use crate::{ - account::AccountId, expression::EvaluatesTo, prelude::Account, - transaction::SignedTransaction, - }; + use crate::{account::AccountId, prelude::Account, transaction::SignedTransaction}; queries! { /// [`FindAllTransactions`] Iroha Query lists all transactions included in a blockchain @@ -1187,19 +968,19 @@ pub mod transaction { #[ffi_type(unsafe {robust})] pub struct FindTransactionsByAccountId { /// Signer's [`AccountId`] under which transactions should be found. - pub account_id: EvaluatesTo, + pub account_id: AccountId, } /// [`FindTransactionByHash`] Iroha Query finds a transaction (if any) /// with corresponding hash value - #[derive(Display)] + #[derive(Copy, Display)] #[display(fmt = "Find transaction with `{hash}` hash")] #[repr(transparent)] // SAFETY: `FindTransactionByHash` has no trap representation in `EvaluatesTo>` #[ffi_type(unsafe {robust})] pub struct FindTransactionByHash { /// Transaction hash. - pub hash: EvaluatesTo>, + pub hash: HashOf, } } @@ -1215,22 +996,6 @@ pub mod transaction { type Output = TransactionQueryOutput; } - impl FindTransactionsByAccountId { - /// Construct [`FindTransactionsByAccountId`]. - pub fn new(account_id: impl Into>) -> Self { - Self { - account_id: account_id.into(), - } - } - } - - impl FindTransactionByHash { - /// Construct [`FindTransactionByHash`]. 
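Dropping the `EvaluatesTo<Hash>` wrapper also lets the hash lookup derive `Copy`. A sketch of the new shape, assuming the payload is `HashOf<SignedTransaction>` as the imports in this hunk suggest; producing a hash is out of scope here:

```rust
use iroha_crypto::HashOf;
use iroha_data_model::{prelude::*, transaction::SignedTransaction};

// Sketch only: the query is now just a wrapped hash.
fn transaction_lookup(hash: HashOf<SignedTransaction>) {
    let query = FindTransactionByHash { hash };
    let _first = query;
    let _second = query; // both bindings compile because the query is now `Copy`
}
```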
- pub fn new(hash: impl Into>>) -> Self { - Self { hash: hash.into() } - } - } - /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { pub use super::{FindAllTransactions, FindTransactionByHash, FindTransactionsByAccountId}; @@ -1249,10 +1014,7 @@ pub mod block { use iroha_crypto::HashOf; use super::Query; - use crate::{ - block::{BlockHeader, SignedBlock}, - prelude::EvaluatesTo, - }; + use crate::block::{BlockHeader, SignedBlock}; queries! { /// [`FindAllBlocks`] Iroha Query lists all blocks sorted by @@ -1270,14 +1032,14 @@ pub mod block { pub struct FindAllBlockHeaders; /// [`FindBlockHeaderByHash`] Iroha Query finds block header by block hash - #[derive(Display)] + #[derive(Copy, Display)] #[display(fmt = "Find block header with `{hash}` hash")] #[repr(transparent)] // SAFETY: `FindBlockHeaderByHash` has no trap representation in `EvaluatesTo>` #[ffi_type(unsafe {robust})] pub struct FindBlockHeaderByHash { /// Block hash. - pub hash: EvaluatesTo>, + pub hash: HashOf, } } @@ -1293,13 +1055,6 @@ pub mod block { type Output = BlockHeader; } - impl FindBlockHeaderByHash { - /// Construct [`FindBlockHeaderByHash`]. - pub fn new(hash: impl Into>>) -> Self { - Self { hash: hash.into() } - } - } - /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { pub use super::{FindAllBlockHeaders, FindAllBlocks, FindBlockHeaderByHash}; @@ -1538,12 +1293,6 @@ pub mod error { #[skip_try_from] String, ), - /// Query has a malformed expression: {0} - Evaluate( - #[skip_from] - #[skip_try_from] - String, - ), /// {0} #[cfg_attr(feature = "std", error(transparent))] Find(FindError), diff --git a/data_model/src/transaction.rs b/data_model/src/transaction.rs index a7742873c7a..9d288239636 100644 --- a/data_model/src/transaction.rs +++ b/data_model/src/transaction.rs @@ -22,7 +22,7 @@ use serde::{Deserialize, Serialize}; pub use self::model::*; use crate::{ account::AccountId, - isi::{Instruction, InstructionExpr}, + isi::{Instruction, InstructionBox}, metadata::UnlimitedMetadata, name::Name, Value, @@ -51,7 +51,7 @@ pub mod model { pub enum Executable { /// Ordered set of instructions. 
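The `Executable` change that begins here swaps expression-based instructions for plain `InstructionBox` values. A sketch of assembling one, relying on the `From<Vec<InstructionBox>>` conversion this same file uses below (`Vec::<InstructionBox>::new().into()`); whether `Executable` is prelude-visible is an assumption:

```rust
use iroha_data_model::{isi::InstructionBox, prelude::*};

// Sketch only: an empty instruction list converts straight into
// `Executable::Instructions`; no expression evaluation is involved.
fn noop_executable() -> Executable {
    Vec::<InstructionBox>::new().into()
}
```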
#[debug(fmt = "{_0:?}")] - Instructions(Vec), + Instructions(Vec), /// WebAssembly smartcontract Wasm(WasmSmartContract), } @@ -533,7 +533,7 @@ pub mod error { pub struct InstructionExecutionFail { /// Instruction for which execution failed #[getset(get = "pub")] - pub instruction: InstructionExpr, + pub instruction: InstructionBox, /// Error which happened during execution pub reason: String, } @@ -611,15 +611,12 @@ pub mod error { impl Display for InstructionExecutionFail { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { - use InstructionExpr::*; + use InstructionBox::*; let kind = match self.instruction { Burn(_) => "burn", Fail(_) => "fail", - If(_) => "if", Mint(_) => "mint", - Pair(_) => "pair", Register(_) => "register", - Sequence(_) => "sequence", Transfer(_) => "transfer", Unregister(_) => "un-register", SetKeyValue(_) => "set key-value pair", @@ -691,7 +688,7 @@ mod http { creation_time_ms, nonce: None, time_to_live_ms: None, - instructions: Vec::::new().into(), + instructions: Vec::::new().into(), metadata: UnlimitedMetadata::new(), }, } @@ -707,7 +704,7 @@ mod http { self.payload.instructions = instructions .into_iter() .map(Into::into) - .collect::>() + .collect::>() .into(); self } diff --git a/data_model/src/trigger.rs b/data_model/src/trigger.rs index 0127ce3bf77..4270d2d34ac 100644 --- a/data_model/src/trigger.rs +++ b/data_model/src/trigger.rs @@ -203,7 +203,7 @@ pub mod action { /// Enumeration of possible repetitions schemes. #[derive( - Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, + Debug, Copy, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, )] #[ffi_type] pub enum Repeats { diff --git a/data_model/src/visit.rs b/data_model/src/visit.rs index 62a33f328c8..9f82b19baae 100644 --- a/data_model/src/visit.rs +++ b/data_model/src/visit.rs @@ -6,7 +6,7 @@ use alloc::format; use iroha_crypto::PublicKey; -use crate::{evaluate::ExpressionEvaluator, isi::Log, prelude::*, NumericValue}; +use crate::{isi::Log, prelude::*}; macro_rules! delegate { ( $($visitor:ident $(<$param:ident $(: $bound:path)?>)?($operation:ty)),+ $(,)? ) => { $( @@ -16,54 +16,36 @@ macro_rules! delegate { }; } -macro_rules! evaluate_expr { - ($visitor:ident, $authority:ident, <$isi:ident as $isi_type:ty>::$field:ident()) => {{ - $visitor.visit_expression($authority, $isi.$field()); - - $visitor.evaluate($isi.$field()).expect(&format!( - "Failed to evaluate field '{}::{}'", - stringify!($isi_type), - stringify!($field), - )) - }}; -} - /// Trait to validate Iroha entities. /// Default implementation of non-leaf visitors runs `visit_` functions for leafs. /// Default implementation for leaf visitors is blank. /// /// This trait is based on the visitor pattern. -pub trait Visit: ExpressionEvaluator { +pub trait Visit { delegate! 
{ - visit_unsupported(T), - // Visit SignedTransaction visit_transaction(&SignedTransaction), - visit_instruction(&InstructionExpr), - visit_expression(&EvaluatesTo), + visit_instruction(&InstructionBox), visit_wasm(&WasmSmartContract), visit_query(&QueryBox), - // Visit InstructionExpr - visit_burn(&BurnExpr), + // Visit InstructionBox + visit_burn(&BurnBox), visit_fail(&Fail), - visit_grant(&GrantExpr), - visit_if(&ConditionalExpr), - visit_mint(&MintExpr), - visit_pair(&PairExpr), - visit_register(&RegisterExpr), - visit_remove_key_value(&RemoveKeyValueExpr), - visit_revoke(&RevokeExpr), - visit_sequence(&SequenceExpr), - visit_set_key_value(&SetKeyValueExpr), - visit_transfer(&TransferExpr), - visit_unregister(&UnregisterExpr), - visit_upgrade(&UpgradeExpr), - - visit_execute_trigger(ExecuteTrigger), - visit_new_parameter(NewParameter), - visit_set_parameter(SetParameter), - visit_log(Log), + visit_grant(&GrantBox), + visit_mint(&MintBox), + visit_register(&RegisterBox), + visit_remove_key_value(&RemoveKeyValueBox), + visit_revoke(&RevokeBox), + visit_set_key_value(&SetKeyValueBox), + visit_transfer(&TransferBox), + visit_unregister(&UnregisterBox), + visit_upgrade(&Upgrade), + + visit_execute_trigger(&ExecuteTrigger), + visit_new_parameter(&NewParameter), + visit_set_parameter(&SetParameter), + visit_log(&Log), // Visit QueryBox visit_find_account_by_id(&FindAccountById), @@ -78,7 +60,7 @@ pub trait Visit: ExpressionEvaluator { visit_find_all_block_headers(&FindAllBlockHeaders), visit_find_all_blocks(&FindAllBlocks), visit_find_all_domains(&FindAllDomains), - visit_find_all_parammeters(&FindAllParameters), + visit_find_all_parameters(&FindAllParameters), visit_find_all_peers(&FindAllPeers), visit_find_permission_token_schema(&FindPermissionTokenSchema), visit_find_all_role_ids(&FindAllRoleIds), @@ -107,74 +89,69 @@ pub trait Visit: ExpressionEvaluator { visit_find_trigger_key_value_by_id_and_key(&FindTriggerKeyValueByIdAndKey), visit_find_triggers_by_domain_id(&FindTriggersByDomainId), - // Visit RegisterExpr - visit_register_peer(Register), - visit_register_domain(Register), - visit_register_account(Register), - visit_register_asset_definition(Register), - visit_register_asset(Register), - visit_register_role(Register), - visit_register_trigger(Register>), - - // Visit UnregisterExpr - visit_unregister_peer(Unregister), - visit_unregister_domain(Unregister), - visit_unregister_account(Unregister), - visit_unregister_asset_definition(Unregister), - visit_unregister_asset(Unregister), + // Visit RegisterBox + visit_register_peer(&Register), + visit_register_domain(&Register), + visit_register_account(&Register), + visit_register_asset_definition(&Register), + visit_register_asset(&Register), + visit_register_role(&Register), + visit_register_trigger(&Register>), + + // Visit UnregisterBox + visit_unregister_peer(&Unregister), + visit_unregister_domain(&Unregister), + visit_unregister_account(&Unregister), + visit_unregister_asset_definition(&Unregister), + visit_unregister_asset(&Unregister), // TODO: Need to allow role creator to unregister it somehow - visit_unregister_role(Unregister), - visit_unregister_trigger(Unregister>), - - // Visit MintExpr - visit_mint_asset(Mint), - visit_mint_account_public_key(Mint), - visit_mint_account_signature_check_condition(Mint), - visit_mint_trigger_repetitions(Mint>), - - // Visit BurnExpr - visit_burn_account_public_key(Burn), - visit_burn_asset(Burn), - visit_burn_trigger_repetitions(Burn>), - - // Visit TransferExpr - 
visit_transfer_asset_definition(Transfer), - visit_transfer_asset(Transfer), - visit_transfer_domain(Transfer), - - // Visit SetKeyValueExpr - visit_set_domain_key_value(SetKeyValue), - visit_set_account_key_value(SetKeyValue), - visit_set_asset_definition_key_value(SetKeyValue), - visit_set_asset_key_value(SetKeyValue), - - // Visit RemoveKeyValueExpr - visit_remove_domain_key_value(RemoveKeyValue), - visit_remove_account_key_value(RemoveKeyValue), - visit_remove_asset_definition_key_value(RemoveKeyValue), - visit_remove_asset_key_value(RemoveKeyValue), - - // Visit GrantExpr - visit_grant_account_permission(Grant), - visit_grant_account_role(Grant), - - // Visit RevokeExpr - visit_revoke_account_permission(Revoke), - visit_revoke_account_role(Revoke), - - // Visit UpgradeExpr - visit_upgrade_executor(Upgrade), + visit_unregister_role(&Unregister), + visit_unregister_trigger(&Unregister>), + + // Visit MintBox + visit_mint_asset_quantity(&Mint), + visit_mint_asset_big_quantity(&Mint), + visit_mint_asset_fixed(&Mint), + visit_mint_account_public_key(&Mint), + visit_mint_account_signature_check_condition(&Mint), + visit_mint_trigger_repetitions(&Mint>), + + // Visit BurnBox + visit_burn_account_public_key(&Burn), + visit_burn_asset_quantity(&Burn), + visit_burn_asset_big_quantity(&Burn), + visit_burn_asset_fixed(&Burn), + visit_burn_trigger_repetitions(&Burn>), + + // Visit TransferBox + visit_transfer_asset_definition(&Transfer), + visit_transfer_asset_quantity(&Transfer), + visit_transfer_asset_big_quantity(&Transfer), + visit_transfer_asset_fixed(&Transfer), + visit_transfer_domain(&Transfer), + + // Visit SetKeyValueBox + visit_set_domain_key_value(&SetKeyValue), + visit_set_account_key_value(&SetKeyValue), + visit_set_asset_definition_key_value(&SetKeyValue), + visit_set_asset_key_value(&SetKeyValue), + + // Visit RemoveKeyValueBox + visit_remove_domain_key_value(&RemoveKeyValue), + visit_remove_account_key_value(&RemoveKeyValue), + visit_remove_asset_definition_key_value(&RemoveKeyValue), + visit_remove_asset_key_value(&RemoveKeyValue), + + // Visit GrantBox + visit_grant_account_permission(&Grant), + visit_grant_account_role(&Grant), + + // Visit RevokeBox + visit_revoke_account_permission(&Revoke), + visit_revoke_account_role(&Revoke), } } -/// Called when visiting any unsupported syntax tree node -fn visit_unsupported( - _visitor: &mut V, - _authority: &AccountId, - _isi: T, -) { -} - pub fn visit_transaction( visitor: &mut V, authority: &AccountId, @@ -213,7 +190,7 @@ pub fn visit_query(visitor: &mut V, authority: &AccountId, qu visit_find_all_block_headers(FindAllBlockHeaders), visit_find_all_blocks(FindAllBlocks), visit_find_all_domains(FindAllDomains), - visit_find_all_parammeters(FindAllParameters), + visit_find_all_parameters(FindAllParameters), visit_find_all_peers(FindAllPeers), visit_find_permission_token_schema(FindPermissionTokenSchema), visit_find_all_role_ids(FindAllRoleIds), @@ -251,7 +228,7 @@ pub fn visit_wasm( ) { } -/// Default validation for [`InstructionExpr`]. +/// Default validation for [`InstructionBox`]. /// /// # Warning /// @@ -259,430 +236,167 @@ pub fn visit_wasm( pub fn visit_instruction( visitor: &mut V, authority: &AccountId, - isi: &InstructionExpr, -) { - macro_rules! isi_visitors { - ( $($visitor:ident($isi:ident)),+ $(,)? 
) => { - match isi { - InstructionExpr::NewParameter(isi) => { - let parameter = evaluate_expr!(visitor, authority, ::parameter()); - visitor.visit_new_parameter(authority, NewParameter{parameter}); - } - InstructionExpr::SetParameter(isi) => { - let parameter = evaluate_expr!(visitor, authority, ::parameter()); - visitor.visit_set_parameter(authority, SetParameter{parameter}); - } - InstructionExpr::ExecuteTrigger(isi) => { - let trigger_id = evaluate_expr!(visitor, authority, ::trigger_id()); - visitor.visit_execute_trigger(authority, ExecuteTrigger{trigger_id}); - } - InstructionExpr::Log(isi) => { - let msg = evaluate_expr!(visitor, authority, ::msg()); - let level = evaluate_expr!(visitor, authority, ::level()); - visitor.visit_log(authority, Log { msg, level }); - } $( - InstructionExpr::$isi(isi) => { - visitor.$visitor(authority, isi); - } )+ - } - }; - } - - isi_visitors! { - visit_burn(Burn), - visit_fail(Fail), - visit_grant(Grant), - visit_mint(Mint), - visit_register(Register), - visit_remove_key_value(RemoveKeyValue), - visit_revoke(Revoke), - visit_set_key_value(SetKeyValue), - visit_transfer(Transfer), - visit_unregister(Unregister), - visit_upgrade(Upgrade), - visit_sequence(Sequence), - visit_pair(Pair), - visit_if(If), - } -} - -pub fn visit_expression( - visitor: &mut V, - authority: &AccountId, - expression: &EvaluatesTo, + isi: &InstructionBox, ) { - macro_rules! visit_binary_math_expression { - ($e:ident) => {{ - visitor.visit_expression(authority, $e.left()); - visitor.visit_expression(authority, $e.right()) - }}; - } - - macro_rules! visit_binary_bool_expression { - ($e:ident) => {{ - visitor.visit_expression(authority, $e.left()); - visitor.visit_expression(authority, $e.right()) - }}; - } - - match expression.expression() { - Expression::Add(expr) => visit_binary_math_expression!(expr), - Expression::Subtract(expr) => visit_binary_math_expression!(expr), - Expression::Multiply(expr) => visit_binary_math_expression!(expr), - Expression::Divide(expr) => visit_binary_math_expression!(expr), - Expression::Mod(expr) => visit_binary_math_expression!(expr), - Expression::RaiseTo(expr) => visit_binary_math_expression!(expr), - Expression::Greater(expr) => visit_binary_math_expression!(expr), - Expression::Less(expr) => visit_binary_math_expression!(expr), - Expression::Equal(expr) => visit_binary_bool_expression!(expr), - Expression::Not(expr) => visitor.visit_expression(authority, expr.expression()), - Expression::And(expr) => visit_binary_bool_expression!(expr), - Expression::Or(expr) => visit_binary_bool_expression!(expr), - Expression::If(expr) => { - visitor.visit_expression(authority, expr.condition()); - visitor.visit_expression(authority, expr.then()); - visitor.visit_expression(authority, expr.otherwise()); + match isi { + InstructionBox::NewParameter(variant_value) => { + visitor.visit_new_parameter(authority, variant_value) } - Expression::Contains(expr) => { - visitor.visit_expression(authority, expr.collection()); - visitor.visit_expression(authority, expr.element()); + InstructionBox::SetParameter(variant_value) => { + visitor.visit_set_parameter(authority, variant_value) } - Expression::ContainsAll(expr) => { - visitor.visit_expression(authority, expr.collection()); - visitor.visit_expression(authority, expr.elements()); + InstructionBox::ExecuteTrigger(variant_value) => { + visitor.visit_execute_trigger(authority, variant_value) } - Expression::ContainsAny(expr) => { - visitor.visit_expression(authority, expr.collection()); - 
visitor.visit_expression(authority, expr.elements()); + InstructionBox::Log(variant_value) => visitor.visit_log(authority, variant_value), + InstructionBox::Burn(variant_value) => visitor.visit_burn(authority, variant_value), + InstructionBox::Fail(variant_value) => visitor.visit_fail(authority, variant_value), + InstructionBox::Grant(variant_value) => visitor.visit_grant(authority, variant_value), + InstructionBox::Mint(variant_value) => visitor.visit_mint(authority, variant_value), + InstructionBox::Register(variant_value) => visitor.visit_register(authority, variant_value), + InstructionBox::RemoveKeyValue(variant_value) => { + visitor.visit_remove_key_value(authority, variant_value) } - Expression::Where(expr) => visitor.visit_expression(authority, expr.expression()), - Expression::Query(query) => visitor.visit_query(authority, query), - Expression::ContextValue(_) | Expression::Raw(_) => {} + InstructionBox::Revoke(variant_value) => visitor.visit_revoke(authority, variant_value), + InstructionBox::SetKeyValue(variant_value) => { + visitor.visit_set_key_value(authority, variant_value) + } + InstructionBox::Transfer(variant_value) => visitor.visit_transfer(authority, variant_value), + InstructionBox::Unregister(variant_value) => { + visitor.visit_unregister(authority, variant_value) + } + InstructionBox::Upgrade(variant_value) => visitor.visit_upgrade(authority, variant_value), } } pub fn visit_register( visitor: &mut V, authority: &AccountId, - isi: &RegisterExpr, + isi: &RegisterBox, ) { - macro_rules! match_all { - ( $( $visitor:ident($object:ident) ),+ $(,)? ) => { - let object = evaluate_expr!(visitor, authority, ::object()); - - match object { $( - RegistrableBox::$object(object) => visitor.$visitor(authority, Register{object}), )+ - } - }; - } - - match_all! { - visit_register_peer(Peer), - visit_register_domain(Domain), - visit_register_account(Account), - visit_register_asset_definition(AssetDefinition), - visit_register_asset(Asset), - visit_register_role(Role), - visit_register_trigger(Trigger), + match isi { + RegisterBox::Peer(obj) => visitor.visit_register_peer(authority, obj), + RegisterBox::Domain(obj) => visitor.visit_register_domain(authority, obj), + RegisterBox::Account(obj) => visitor.visit_register_account(authority, obj), + RegisterBox::AssetDefinition(obj) => { + visitor.visit_register_asset_definition(authority, obj) + } + RegisterBox::Asset(obj) => visitor.visit_register_asset(authority, obj), + RegisterBox::Role(obj) => visitor.visit_register_role(authority, obj), + RegisterBox::Trigger(obj) => visitor.visit_register_trigger(authority, obj), } } pub fn visit_unregister( visitor: &mut V, authority: &AccountId, - isi: &UnregisterExpr, + isi: &UnregisterBox, ) { - macro_rules! match_all { - ( $( $visitor:ident($id:ident) ),+ $(,)? ) => { - let object_id = evaluate_expr!(visitor, authority, ::object_id()); - match object_id { $( - IdBox::$id(object_id) => visitor.$visitor(authority, Unregister{object_id}), )+ - _ => visitor.visit_unsupported(authority, isi), - } - }; - } - - match_all! 
{ - visit_unregister_peer(PeerId), - visit_unregister_domain(DomainId), - visit_unregister_account(AccountId), - visit_unregister_asset_definition(AssetDefinitionId), - visit_unregister_asset(AssetId), - visit_unregister_role(RoleId), - visit_unregister_trigger(TriggerId), + match isi { + UnregisterBox::Peer(obj) => visitor.visit_unregister_peer(authority, obj), + UnregisterBox::Domain(obj) => visitor.visit_unregister_domain(authority, obj), + UnregisterBox::Account(obj) => visitor.visit_unregister_account(authority, obj), + UnregisterBox::AssetDefinition(obj) => { + visitor.visit_unregister_asset_definition(authority, obj) + } + UnregisterBox::Asset(obj) => visitor.visit_unregister_asset(authority, obj), + UnregisterBox::Role(obj) => visitor.visit_unregister_role(authority, obj), + UnregisterBox::Trigger(obj) => visitor.visit_unregister_trigger(authority, obj), } } -pub fn visit_mint(visitor: &mut V, authority: &AccountId, isi: &MintExpr) { - let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); - let object = evaluate_expr!(visitor, authority, ::object()); - - match (destination_id, object) { - (IdBox::AssetId(destination_id), Value::Numeric(object)) => visitor.visit_mint_asset( - authority, - Mint { - object, - destination_id, - }, - ), - (IdBox::AccountId(destination_id), Value::PublicKey(object)) => visitor - .visit_mint_account_public_key( - authority, - Mint { - object, - destination_id, - }, - ), - (IdBox::AccountId(destination_id), Value::SignatureCheckCondition(object)) => visitor - .visit_mint_account_signature_check_condition( - authority, - Mint { - object, - destination_id, - }, - ), - (IdBox::TriggerId(destination_id), Value::Numeric(NumericValue::U32(object))) => visitor - .visit_mint_trigger_repetitions( - authority, - Mint { - object, - destination_id, - }, - ), - _ => visitor.visit_unsupported(authority, isi), +pub fn visit_mint(visitor: &mut V, authority: &AccountId, isi: &MintBox) { + match isi { + MintBox::Account(mint_account) => match mint_account { + AccountMintBox::PublicKey(obj) => visitor.visit_mint_account_public_key(authority, obj), + AccountMintBox::SignatureCheckCondition(obj) => { + visitor.visit_mint_account_signature_check_condition(authority, obj) + } + }, + MintBox::Asset(mint_asset) => match mint_asset { + AssetMintBox::Quantity(obj) => visitor.visit_mint_asset_quantity(authority, obj), + AssetMintBox::BigQuantity(obj) => visitor.visit_mint_asset_big_quantity(authority, obj), + AssetMintBox::Fixed(obj) => visitor.visit_mint_asset_fixed(authority, obj), + }, + MintBox::TriggerRepetitions(obj) => visitor.visit_mint_trigger_repetitions(authority, obj), } } -pub fn visit_burn(visitor: &mut V, authority: &AccountId, isi: &BurnExpr) { - let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); - let object = evaluate_expr!(visitor, authority, ::object()); - - match (destination_id, object) { - (IdBox::AssetId(destination_id), Value::Numeric(object)) => visitor.visit_burn_asset( - authority, - Burn { - object, - destination_id, - }, - ), - (IdBox::AccountId(destination_id), Value::PublicKey(object)) => visitor - .visit_burn_account_public_key( - authority, - Burn { - object, - destination_id, - }, - ), - (IdBox::TriggerId(destination_id), Value::Numeric(NumericValue::U32(object))) => visitor - .visit_burn_trigger_repetitions( - authority, - Burn { - object, - destination_id, - }, - ), - _ => visitor.visit_unsupported(authority, isi), +pub fn visit_burn(visitor: &mut V, authority: &AccountId, isi: &BurnBox) { + match 
isi { + BurnBox::AccountPublicKey(obj) => visitor.visit_burn_account_public_key(authority, obj), + BurnBox::Asset(burn_asset) => match burn_asset { + AssetBurnBox::Quantity(obj) => visitor.visit_burn_asset_quantity(authority, obj), + AssetBurnBox::BigQuantity(obj) => visitor.visit_burn_asset_big_quantity(authority, obj), + AssetBurnBox::Fixed(obj) => visitor.visit_burn_asset_fixed(authority, obj), + }, + BurnBox::TriggerRepetitions(obj) => visitor.visit_burn_trigger_repetitions(authority, obj), } } pub fn visit_transfer( visitor: &mut V, authority: &AccountId, - isi: &TransferExpr, + isi: &TransferBox, ) { - let object = evaluate_expr!(visitor, authority, ::object()); - let source_id = evaluate_expr!(visitor, authority, ::source_id()); - let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); - - match (source_id, object, destination_id) { - (IdBox::AssetId(source_id), Value::Numeric(object), IdBox::AccountId(destination_id)) => { - visitor.visit_transfer_asset( - authority, - Transfer { - source_id, - object, - destination_id, - }, - ); + match isi { + TransferBox::Domain(obj) => visitor.visit_transfer_domain(authority, obj), + TransferBox::AssetDefinition(obj) => { + visitor.visit_transfer_asset_definition(authority, obj) } - ( - IdBox::AccountId(source_id), - Value::Id(IdBox::AssetDefinitionId(object)), - IdBox::AccountId(destination_id), - ) => visitor.visit_transfer_asset_definition( - authority, - Transfer { - source_id, - object, - destination_id, - }, - ), - ( - IdBox::AccountId(source_id), - Value::Id(IdBox::DomainId(object)), - IdBox::AccountId(destination_id), - ) => visitor.visit_transfer_domain( - authority, - Transfer { - source_id, - object, - destination_id, - }, - ), - _ => visitor.visit_unsupported(authority, isi), + TransferBox::Asset(transfer_asset) => match transfer_asset { + AssetTransferBox::Quantity(obj) => { + visitor.visit_transfer_asset_quantity(authority, obj) + } + AssetTransferBox::BigQuantity(obj) => { + visitor.visit_transfer_asset_big_quantity(authority, obj) + } + AssetTransferBox::Fixed(obj) => visitor.visit_transfer_asset_fixed(authority, obj), + }, } } pub fn visit_set_key_value( visitor: &mut V, authority: &AccountId, - isi: &SetKeyValueExpr, + isi: &SetKeyValueBox, ) { - let object_id = evaluate_expr!(visitor, authority, ::object_id()); - let key = evaluate_expr!(visitor, authority, ::key()); - let value = evaluate_expr!(visitor, authority, ::value()); - - match object_id { - IdBox::AssetId(object_id) => visitor.visit_set_asset_key_value( - authority, - SetKeyValue { - object_id, - key, - value, - }, - ), - IdBox::AssetDefinitionId(object_id) => visitor.visit_set_asset_definition_key_value( - authority, - SetKeyValue { - object_id, - key, - value, - }, - ), - IdBox::AccountId(object_id) => visitor.visit_set_account_key_value( - authority, - SetKeyValue { - object_id, - key, - value, - }, - ), - IdBox::DomainId(object_id) => visitor.visit_set_domain_key_value( - authority, - SetKeyValue { - object_id, - key, - value, - }, - ), - _ => visitor.visit_unsupported(authority, isi), + match isi { + SetKeyValueBox::Domain(obj) => visitor.visit_set_domain_key_value(authority, obj), + SetKeyValueBox::Account(obj) => visitor.visit_set_account_key_value(authority, obj), + SetKeyValueBox::AssetDefinition(obj) => { + visitor.visit_set_asset_definition_key_value(authority, obj) + } + SetKeyValueBox::Asset(obj) => visitor.visit_set_asset_key_value(authority, obj), } } pub fn visit_remove_key_value( visitor: &mut V, authority: &AccountId, - isi: 
&RemoveKeyValueExpr, + isi: &RemoveKeyValueBox, ) { - let object_id = evaluate_expr!(visitor, authority, ::object_id()); - let key = evaluate_expr!(visitor, authority, ::key()); - - match object_id { - IdBox::AssetId(object_id) => { - visitor.visit_remove_asset_key_value(authority, RemoveKeyValue { object_id, key }); - } - IdBox::AssetDefinitionId(object_id) => visitor - .visit_remove_asset_definition_key_value(authority, RemoveKeyValue { object_id, key }), - IdBox::AccountId(object_id) => { - visitor.visit_remove_account_key_value(authority, RemoveKeyValue { object_id, key }); - } - IdBox::DomainId(object_id) => { - visitor.visit_remove_domain_key_value(authority, RemoveKeyValue { object_id, key }); - } - _ => visitor.visit_unsupported(authority, isi), - } -} - -pub fn visit_grant(visitor: &mut V, authority: &AccountId, isi: &GrantExpr) { - let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); - let object = evaluate_expr!(visitor, authority, ::object()); - - match object { - Value::PermissionToken(object) => visitor.visit_grant_account_permission( - authority, - Grant { - object, - destination_id, - }, - ), - Value::Id(IdBox::RoleId(object)) => visitor.visit_grant_account_role( - authority, - Grant { - object, - destination_id, - }, - ), - _ => visitor.visit_unsupported(authority, isi), - } -} - -pub fn visit_revoke(visitor: &mut V, authority: &AccountId, isi: &RevokeExpr) { - let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); - let object = evaluate_expr!(visitor, authority, ::object()); - - match object { - Value::PermissionToken(object) => visitor.visit_revoke_account_permission( - authority, - Revoke { - object, - destination_id, - }, - ), - Value::Id(IdBox::RoleId(object)) => visitor.visit_revoke_account_role( - authority, - Revoke { - object, - destination_id, - }, - ), - _ => visitor.visit_unsupported(authority, isi), - } -} - -pub fn visit_upgrade(visitor: &mut V, authority: &AccountId, isi: &UpgradeExpr) { - let object = evaluate_expr!(visitor, authority, ::object()); - - match object { - UpgradableBox::Executor(object) => { - visitor.visit_upgrade_executor(authority, Upgrade { object }); + match isi { + RemoveKeyValueBox::Domain(obj) => visitor.visit_remove_domain_key_value(authority, obj), + RemoveKeyValueBox::Account(obj) => visitor.visit_remove_account_key_value(authority, obj), + RemoveKeyValueBox::AssetDefinition(obj) => { + visitor.visit_remove_asset_definition_key_value(authority, obj) } + RemoveKeyValueBox::Asset(obj) => visitor.visit_remove_asset_key_value(authority, obj), } } -pub fn visit_if(visitor: &mut V, authority: &AccountId, isi: &ConditionalExpr) { - let condition = evaluate_expr!(visitor, authority, ::condition()); - - // TODO: Should visit both by default or not? It will affect Executor behavior - // because only one branch needs to be executed. 
IMO both should be validated - if condition { - visitor.visit_instruction(authority, isi.then()); - } else if let Some(otherwise) = isi.otherwise() { - visitor.visit_instruction(authority, otherwise); +pub fn visit_grant(visitor: &mut V, authority: &AccountId, isi: &GrantBox) { + match isi { + GrantBox::PermissionToken(obj) => visitor.visit_grant_account_permission(authority, obj), + GrantBox::Role(obj) => visitor.visit_grant_account_role(authority, obj), } } -pub fn visit_pair(visitor: &mut V, authority: &AccountId, isi: &PairExpr) { - visitor.visit_instruction(authority, isi.left_instruction()); - visitor.visit_instruction(authority, isi.right_instruction()); -} - -pub fn visit_sequence( - visitor: &mut V, - authority: &AccountId, - isi: &SequenceExpr, -) { - for instruction in isi.instructions() { - visitor.visit_instruction(authority, instruction); +pub fn visit_revoke(visitor: &mut V, authority: &AccountId, isi: &RevokeBox) { + match isi { + RevokeBox::PermissionToken(obj) => visitor.visit_revoke_account_permission(authority, obj), + RevokeBox::Role(obj) => visitor.visit_revoke_account_role(authority, obj), } } @@ -696,48 +410,54 @@ macro_rules! leaf_visitors { leaf_visitors! { // Instruction visitors - visit_register_account(Register), - visit_unregister_account(Unregister), - visit_mint_account_public_key(Mint), - visit_burn_account_public_key(Burn), - visit_mint_account_signature_check_condition(Mint), - visit_set_account_key_value(SetKeyValue), - visit_remove_account_key_value(RemoveKeyValue), - visit_register_asset(Register), - visit_unregister_asset(Unregister), - visit_mint_asset(Mint), - visit_burn_asset(Burn), - visit_transfer_asset(Transfer), - visit_set_asset_key_value(SetKeyValue), - visit_remove_asset_key_value(RemoveKeyValue), - visit_register_asset_definition(Register), - visit_unregister_asset_definition(Unregister), - visit_transfer_asset_definition(Transfer), - visit_set_asset_definition_key_value(SetKeyValue), - visit_remove_asset_definition_key_value(RemoveKeyValue), - visit_register_domain(Register), - visit_unregister_domain(Unregister), - visit_transfer_domain(Transfer), - visit_set_domain_key_value(SetKeyValue), - visit_remove_domain_key_value(RemoveKeyValue), - visit_register_peer(Register), - visit_unregister_peer(Unregister), - visit_grant_account_permission(Grant), - visit_revoke_account_permission(Revoke), - visit_register_role(Register), - visit_unregister_role(Unregister), - visit_grant_account_role(Grant), - visit_revoke_account_role(Revoke), - visit_register_trigger(Register>), - visit_unregister_trigger(Unregister>), - visit_mint_trigger_repetitions(Mint>), - visit_burn_trigger_repetitions(Burn>), - visit_upgrade_executor(Upgrade), - visit_new_parameter(NewParameter), - visit_set_parameter(SetParameter), - visit_execute_trigger(ExecuteTrigger), + visit_register_account(&Register), + visit_unregister_account(&Unregister), + visit_mint_account_public_key(&Mint), + visit_burn_account_public_key(&Burn), + visit_mint_account_signature_check_condition(&Mint), + visit_set_account_key_value(&SetKeyValue), + visit_remove_account_key_value(&RemoveKeyValue), + visit_register_asset(&Register), + visit_unregister_asset(&Unregister), + visit_mint_asset_quantity(&Mint), + visit_burn_asset_quantity(&Burn), + visit_mint_asset_big_quantity(&Mint), + visit_burn_asset_big_quantity(&Burn), + visit_mint_asset_fixed(&Mint), + visit_burn_asset_fixed(&Burn), + visit_transfer_asset_quantity(&Transfer), + visit_transfer_asset_big_quantity(&Transfer), + 
visit_transfer_asset_fixed(&Transfer), + visit_set_asset_key_value(&SetKeyValue), + visit_remove_asset_key_value(&RemoveKeyValue), + visit_register_asset_definition(&Register), + visit_unregister_asset_definition(&Unregister), + visit_transfer_asset_definition(&Transfer), + visit_set_asset_definition_key_value(&SetKeyValue), + visit_remove_asset_definition_key_value(&RemoveKeyValue), + visit_register_domain(&Register), + visit_unregister_domain(&Unregister), + visit_transfer_domain(&Transfer), + visit_set_domain_key_value(&SetKeyValue), + visit_remove_domain_key_value(&RemoveKeyValue), + visit_register_peer(&Register), + visit_unregister_peer(&Unregister), + visit_grant_account_permission(&Grant), + visit_revoke_account_permission(&Revoke), + visit_register_role(&Register), + visit_unregister_role(&Unregister), + visit_grant_account_role(&Grant), + visit_revoke_account_role(&Revoke), + visit_register_trigger(&Register>), + visit_unregister_trigger(&Unregister>), + visit_mint_trigger_repetitions(&Mint>), + visit_burn_trigger_repetitions(&Burn>), + visit_upgrade(&Upgrade), + visit_new_parameter(&NewParameter), + visit_set_parameter(&SetParameter), + visit_execute_trigger(&ExecuteTrigger), visit_fail(&Fail), - visit_log(Log), + visit_log(&Log), // Query visitors visit_find_account_by_id(&FindAccountById), @@ -752,7 +472,7 @@ leaf_visitors! { visit_find_all_block_headers(&FindAllBlockHeaders), visit_find_all_blocks(&FindAllBlocks), visit_find_all_domains(&FindAllDomains), - visit_find_all_parammeters(&FindAllParameters), + visit_find_all_parameters(&FindAllParameters), visit_find_all_peers(&FindAllPeers), visit_find_permission_token_schema(&FindPermissionTokenSchema), visit_find_all_role_ids(&FindAllRoleIds), diff --git a/data_model/tests/data_model.rs b/data_model/tests/data_model.rs index 09cf1d602ff..c795f7590a4 100644 --- a/data_model/tests/data_model.rs +++ b/data_model/tests/data_model.rs @@ -1,65 +1,14 @@ -use std::str::FromStr as _; - use iroha_data_model::{prelude::*, ParseError}; #[test] fn transfer_isi_should_be_valid() { - let _instruction = TransferExpr::new( - IdBox::AssetId("btc##seller@crypto".parse().expect("Valid")), + let _instruction = Transfer::asset_quantity( + "btc##seller@crypto".parse().expect("Valid"), 12_u32, - IdBox::AccountId("buyer@crypto".parse().expect("Valid")), + "buyer@crypto".parse().expect("Valid"), ); } -#[test] -fn find_quantity_and_check_it_greater_than_value_isi_should_be_valid() { - let asset_id: AssetId = "rose##alice@wonderland".parse().expect("Valid"); - let find_asset = QueryBox::from(FindAssetQuantityById::new(asset_id)); - - let _instruction = ConditionalExpr::new( - Not::new(Greater::new(EvaluatesTo::new_unchecked(find_asset), 10_u32)), - Fail::new("rate is less or equal to value"), - ); -} - -struct FindRateAndCheckItGreaterThanValue { - from_currency: String, - to_currency: String, - value: u32, -} - -impl FindRateAndCheckItGreaterThanValue { - pub fn new(from_currency: &str, to_currency: &str, value: u32) -> Self { - Self { - from_currency: from_currency.to_string(), - to_currency: to_currency.to_string(), - value, - } - } - - pub fn into_isi(self) -> ConditionalExpr { - ConditionalExpr::new( - Not::new(Greater::new( - EvaluatesTo::new_unchecked(QueryBox::from(FindAssetQuantityById::new( - AssetId::new( - format!("{}2{}_rate#exchange", self.from_currency, self.to_currency) - .parse() - .expect("Valid"), - AccountId::from_str("dex@exchange").expect("Valid"), - ), - ))), - self.value, - )), - Fail::new("rate is less or equal to value"), - ) - } 
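Taken together, the visitor changes above turn dispatch into plain enum matches. A sketch of a custom visitor that overrides one leaf and leaves the rest to the defaults, under two assumptions: the trait's methods default to the free functions of the same name (as `delegate!` suggests), and `TransferBox` is re-exported via the prelude:

```rust
use iroha_data_model::{prelude::*, visit::Visit};

// Hypothetical visitor that only counts transfers.
struct TransferTally {
    seen: usize,
}

impl Visit for TransferTally {
    fn visit_transfer(&mut self, authority: &AccountId, isi: &TransferBox) {
        self.seen += 1;
        // Fall back to the default traversal so the per-variant leaf
        // visitors (quantity, big quantity, fixed, domain) still run.
        iroha_data_model::visit::visit_transfer(self, authority, isi);
    }
}
```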
-} - -#[test] -fn find_rate_and_check_it_greater_than_value_predefined_isi_should_be_valid() { - let _instruction = FindRateAndCheckItGreaterThanValue::new("btc", "eth", 10).into_isi(); -} - #[test] fn account_id_parsing() -> Result<(), ParseError> { // `AccountId` should have format `name@domain_name` diff --git a/data_model/tests/ui.rs b/data_model/tests/ui.rs deleted file mode 100644 index 48e1f1bbea3..00000000000 --- a/data_model/tests/ui.rs +++ /dev/null @@ -1,7 +0,0 @@ -#![cfg(not(coverage))] -use trybuild::TestCases; - -#[test] -fn ui() { - TestCases::new().compile_fail("tests/ui_fail/*.rs"); -} diff --git a/data_model/tests/ui_fail/evaluates_to.rs b/data_model/tests/ui_fail/evaluates_to.rs deleted file mode 100644 index 6694b013653..00000000000 --- a/data_model/tests/ui_fail/evaluates_to.rs +++ /dev/null @@ -1,12 +0,0 @@ -//! This test ensures that [`EvaluatesTo`] provides compile-time strong typing - -use iroha_data_model::prelude::*; - -fn get_assets_by_account_id(_account_id: impl Into>) -> Vec { - Vec::new() -} - -fn main() { - let asset_definition_id: AssetDefinitionId = "rose#wonderland".parse().unwrap(); - get_assets_by_account_id(asset_definition_id); -} diff --git a/data_model/tests/ui_fail/evaluates_to.stderr b/data_model/tests/ui_fail/evaluates_to.stderr deleted file mode 100644 index ff7da36be16..00000000000 --- a/data_model/tests/ui_fail/evaluates_to.stderr +++ /dev/null @@ -1,17 +0,0 @@ -error[E0277]: the trait bound `iroha_data_model::account::AccountId: From` is not satisfied - --> tests/ui_fail/evaluates_to.rs:11:30 - | -11 | get_assets_by_account_id(asset_definition_id); - | ------------------------ ^^^^^^^^^^^^^^^^^^^ the trait `From` is not implemented for `iroha_data_model::account::AccountId` - | | - | required by a bound introduced by this call - | - = note: required for `iroha_data_model::asset::AssetDefinitionId` to implement `Into` - = note: required for `iroha_data_model::expression::EvaluatesTo` to implement `From` - = note: 1 redundant requirement hidden - = note: required for `iroha_data_model::asset::AssetDefinitionId` to implement `Into>` -note: required by a bound in `get_assets_by_account_id` - --> tests/ui_fail/evaluates_to.rs:5:47 - | -5 | fn get_assets_by_account_id(_account_id: impl Into>) -> Vec { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `get_assets_by_account_id` diff --git a/default_executor/src/lib.rs b/default_executor/src/lib.rs index 4834bcc9228..6214fdaf03b 100644 --- a/default_executor/src/lib.rs +++ b/default_executor/src/lib.rs @@ -8,7 +8,7 @@ extern crate panic_halt; use alloc::borrow::ToOwned as _; -use iroha_executor::{default::default_permission_token_schema, prelude::*, smart_contract}; +use iroha_executor::{default::default_permission_token_schema, prelude::*}; use lol_alloc::{FreeListAllocator, LockedAllocator}; #[global_allocator] @@ -19,11 +19,10 @@ static ALLOC: LockedAllocator = LockedAllocator::new(FreeList /// # Warning /// /// The defaults are not guaranteed to be stable. 
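The deleted `ui_fail` test above guarded the `Into<EvaluatesTo<AccountId>>` bound at compile time; with concrete field types, the same guarantee falls out of ordinary type checking. A sketch of the post-change signature (hypothetical helper, not part of the API):

```rust
use iroha_data_model::prelude::*;

// Hypothetical helper mirroring the deleted UI test: the parameter is a
// plain `AccountId`, so passing an `AssetDefinitionId` is rejected as an
// ordinary type mismatch, with no `EvaluatesTo` bound involved.
fn get_assets_by_account_id(_account_id: AccountId) -> Vec<Asset> {
    Vec::new()
}
```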
-#[derive(Clone, Constructor, Debug, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)] +#[derive(Debug, Clone, Constructor, Visit, Validate, ValidateEntrypoints)] pub struct Executor { verdict: Result, block_height: u64, - host: smart_contract::Host, } impl Executor { diff --git a/docker-compose.dev.local.yml b/docker-compose.dev.local.yml index 162572d7dd7..a1d0855dc04 100644 --- a/docker-compose.dev.local.yml +++ b/docker-compose.dev.local.yml @@ -13,7 +13,7 @@ services: TORII_API_URL: iroha0:8080 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"82b3bde54aebeca4146257da0de8d59d8e46d5fe34887dcd8072866792fcb3ad4164bf554923ece1fd412d241036d863a6ae430476c898248b8237d77534cfc4"}' - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1337:1337 - 8080:8080 @@ -30,7 +30,7 @@ services: TORII_P2P_ADDR: iroha1:1338 TORII_API_URL: iroha1:8081 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1338:1338 - 8081:8081 @@ -46,7 +46,7 @@ services: TORII_P2P_ADDR: iroha2:1339 TORII_API_URL: iroha2:8082 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' + SUMERAGI_TRUSTED_PEERS: 
'[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"}]' ports: - 1339:1339 - 8082:8082 @@ -62,7 +62,7 @@ services: TORII_P2P_ADDR: iroha3:1340 TORII_API_URL: iroha3:8083 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1340:1340 - 8083:8083 diff --git a/docker-compose.dev.single.yml b/docker-compose.dev.single.yml index 9a4891ff227..b817eece0c8 100644 --- a/docker-compose.dev.single.yml +++ b/docker-compose.dev.single.yml @@ -13,7 +13,6 @@ services: TORII_API_URL: iroha0:8080 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"82b3bde54aebeca4146257da0de8d59d8e46d5fe34887dcd8072866792fcb3ad4164bf554923ece1fd412d241036d863a6ae430476c898248b8237d77534cfc4"}' - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"}]' ports: - 1337:1337 - 8080:8080 diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 9c45d746a06..54404425d99 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -13,7 +13,7 @@ services: TORII_API_URL: iroha0:8080 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"82b3bde54aebeca4146257da0de8d59d8e46d5fe34887dcd8072866792fcb3ad4164bf554923ece1fd412d241036d863a6ae430476c898248b8237d77534cfc4"}' - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1337:1337 - 
8080:8080 @@ -30,7 +30,7 @@ services: TORII_P2P_ADDR: iroha1:1338 TORII_API_URL: iroha1:8081 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1338:1338 - 8081:8081 @@ -46,7 +46,7 @@ services: TORII_P2P_ADDR: iroha2:1339 TORII_API_URL: iroha2:8082 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"}]' ports: - 1339:1339 - 8082:8082 @@ -62,7 +62,7 @@ services: TORII_P2P_ADDR: iroha3:1340 TORII_API_URL: iroha3:8083 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1340:1340 - 8083:8083 diff --git a/docker-compose.single.yml b/docker-compose.single.yml index d46667110c2..454f92ff312 100644 --- a/docker-compose.single.yml +++ b/docker-compose.single.yml @@ -3,6 +3,7 @@ services: iroha0: build: . 
image: iroha2:lts + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha0:1337 TORII_API_URL: iroha0:8080 diff --git a/docker-compose.stable.single.yml b/docker-compose.stable.single.yml index e2c07b8e2fc..dfb250f6d9f 100644 --- a/docker-compose.stable.single.yml +++ b/docker-compose.stable.single.yml @@ -3,13 +3,13 @@ services: iroha0: build: . image: iroha2:stable + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha0:1337 TORII_API_URL: iroha0:8080 TORII_TELEMETRY_URL: iroha0:8180 IROHA_PUBLIC_KEY: "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B" IROHA_PRIVATE_KEY: '{"digest_function": "ed25519", "payload": "282ED9F3CF92811C3818DBC4AE594ED59DC1A2F78E4241E31924E101D6B1FB831C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}' - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337", "public_key": "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}]' IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: 'ed01203F4E3E98571B55514EDC5CCF7E53CA7509D89B2868E62921180A6F57C2F4E255' IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function": "ed25519", "payload": "038AE16B219DA35AA036335ED0A43C28A2CC737150112C78A7B8034B9D99C9023F4E3E98571B55514EDC5CCF7E53CA7509D89B2868E62921180A6F57C2F4E255"}' IROHA_GENESIS_WAIT_FOR_PEERS_RETRY_COUNT_LIMIT: 100 @@ -18,7 +18,6 @@ services: ports: - "1337:1337" - "8080:8080" - - "8180:8180" init: true command: iroha --submit-genesis volumes: diff --git a/docker-compose.stable.yml b/docker-compose.stable.yml index e57463504fc..95cdf9e04d8 100644 --- a/docker-compose.stable.yml +++ b/docker-compose.stable.yml @@ -2,13 +2,14 @@ version: "3.8" services: iroha0: image: hyperledger/iroha2:stable + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha0:1337 TORII_API_URL: iroha0:8080 TORII_TELEMETRY_URL: iroha0:8180 IROHA_PUBLIC_KEY: "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B" IROHA_PRIVATE_KEY: '{"digest_function": "ed25519", "payload": "282ED9F3CF92811C3818DBC4AE594ED59DC1A2F78E4241E31924E101D6B1FB831C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}' - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337", "public_key": "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}, {"address":"iroha1:1338", "public_key": "ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1"}, {"address": "iroha2:1339", "public_key": "ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020"}, {"address": "iroha3:1340", "public_key": "ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338", "public_key": "ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1"}, {"address": "iroha2:1339", "public_key": "ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020"}, {"address": "iroha3:1340", "public_key": "ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F"}]' IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: 'ed01203f4e3e98571b55514edc5ccf7e53ca7509d89b2868e62921180a6f57c2f4e255' IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{ "digest_function": "ed25519", "payload": "038AE16B219DA35AA036335ED0A43C28A2CC737150112C78A7B8034B9D99C9023F4E3E98571B55514EDC5CCF7E53CA7509D89B2868E62921180A6F57C2F4E255" }' IROHA_GENESIS_WAIT_FOR_PEERS_RETRY_COUNT_LIMIT: 100 @@ -17,7 +18,6 @@ services: ports: - "1337:1337" - "8080:8080" - - "8180:8180" volumes: - './configs/peer/stable:/config' init: true @@ -25,13 +25,14 @@ services: iroha1: image: hyperledger/iroha2:stable + platform: 
linux/amd64 environment: TORII_P2P_ADDR: iroha1:1338 TORII_API_URL: iroha1:8081 TORII_TELEMETRY_URL: iroha1:8181 IROHA_PUBLIC_KEY: "ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1" IROHA_PRIVATE_KEY: '{"digest_function": "ed25519", "payload": "3BAC34CDA9E3763FA069C1198312D1EC73B53023B8180C822AC355435EDC4A24CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1"}' - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337", "public_key": "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}, {"address":"iroha1:1338", "public_key": "ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1"}, {"address": "iroha2:1339", "public_key": "ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020"}, {"address": "iroha3:1340", "public_key": "ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337", "public_key": "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}, {"address": "iroha2:1339", "public_key": "ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020"}, {"address": "iroha3:1340", "public_key": "ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F"}]' IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: 'ed01203F4E3E98571B55514EDC5CCF7E53CA7509D89B2868E62921180A6F57C2F4E255' IROHA_GENESIS_WAIT_FOR_PEERS_RETRY_COUNT_LIMIT: 100 IROHA_GENESIS_WAIT_FOR_PEERS_RETRY_PERIOD_MS: 500 @@ -39,20 +40,20 @@ services: ports: - "1338:1338" - "8081:8081" - - "8181:8181" volumes: - './configs/peer/stable:/config' init: true iroha2: image: hyperledger/iroha2:stable + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha2:1339 TORII_API_URL: iroha2:8082 TORII_TELEMETRY_URL: iroha2:8182 IROHA_PUBLIC_KEY: "ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020" IROHA_PRIVATE_KEY: '{"digest_function": "ed25519", "payload": "1261A436D36779223D7D6CF20E8B644510E488E6A50BAFD77A7485264D27197DFACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020"}' - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337", "public_key": "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}, {"address":"iroha1:1338", "public_key": "ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1"}, {"address": "iroha2:1339", "public_key": "ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020"}, {"address": "iroha3:1340", "public_key": "ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337", "public_key": "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}, {"address":"iroha1:1338", "public_key": "ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1"}, {"address": "iroha3:1340", "public_key": "ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F"}]' IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: 'ed01203F4E3E98571B55514EDC5CCF7E53CA7509D89B2868E62921180A6F57C2F4E255' IROHA_GENESIS_WAIT_FOR_PEERS_RETRY_COUNT_LIMIT: 100 IROHA_GENESIS_WAIT_FOR_PEERS_RETRY_PERIOD_MS: 500 @@ -60,20 +61,20 @@ services: ports: - "1339:1339" - "8082:8082" - - "8182:8182" volumes: - './configs/peer/stable:/config' init: true iroha3: image: hyperledger/iroha2:stable + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha3:1340 TORII_API_URL: iroha3:8083 TORII_TELEMETRY_URL: iroha3:8183 IROHA_PUBLIC_KEY: "ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F" IROHA_PRIVATE_KEY: 
'{"digest_function": "ed25519", "payload": "A70DAB95C7482EB9F159111B65947E482108CFE67DF877BD8D3B9441A781C7C98E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F"}' - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337", "public_key": "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}, {"address":"iroha1:1338", "public_key": "ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1"}, {"address": "iroha2:1339", "public_key": "ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020"}, {"address": "iroha3:1340", "public_key": "ed01208E351A70B6A603ED285D666B8D689B680865913BA03CE29FB7D13A166C4E7F1F"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337", "public_key": "ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}, {"address":"iroha1:1338", "public_key": "ed0120CC25624D62896D3A0BFD8940F928DC2ABF27CC57CEFEB442AA96D9081AAE58A1"}, {"address": "iroha2:1339", "public_key": "ed0120FACA9E8AA83225CB4D16D67F27DD4F93FC30FFA11ADC1F5C88FD5495ECC91020"}]' IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: 'ed01203F4E3E98571B55514EDC5CCF7E53CA7509D89B2868E62921180A6F57C2F4E255' IROHA_GENESIS_WAIT_FOR_PEERS_RETRY_COUNT_LIMIT: 100 IROHA_GENESIS_WAIT_FOR_PEERS_RETRY_PERIOD_MS: 500 @@ -81,7 +82,6 @@ services: ports: - "1340:1340" - "8083:8083" - - "8183:8183" volumes: - './configs/peer/stable:/config' init: true diff --git a/docker-compose.yml b/docker-compose.yml index e781607ee9a..d24025c2fb3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,6 +2,7 @@ version: "3.8" services: iroha0: image: hyperledger/iroha2:lts + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha0:1337 TORII_API_URL: iroha0:8080 @@ -25,6 +26,7 @@ services: iroha1: image: hyperledger/iroha2:lts + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha1:1338 TORII_API_URL: iroha1:8081 @@ -46,6 +48,7 @@ services: iroha2: image: hyperledger/iroha2:lts + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha2:1339 TORII_API_URL: iroha2:8082 @@ -67,6 +70,7 @@ services: iroha3: image: hyperledger/iroha2:lts + platform: linux/amd64 environment: TORII_P2P_ADDR: iroha3:1340 TORII_API_URL: iroha3:8083 diff --git a/docs/README.md b/docs/README.md index 11e941c5e8f..b8aa69d85a4 100644 --- a/docs/README.md +++ b/docs/README.md @@ -4,7 +4,7 @@ This is the main Iroha 2 documentation that you will find useful: - [Tutorial](https://hyperledger.github.io/iroha-2-docs/) - [API Reference](https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints) -- [Configuration Reference](./source/references/config.md) + - [Iroha 2 Whitepaper](./source/iroha_2_whitepaper.md) ## Tools @@ -18,5 +18,6 @@ Documentation for Iroha 2 tools: ## Development The following is useful for development: + - [Hot reload Iroha in a Docker container](./source/guides/hot-reload.md) - [Benchmark your code](../client/benches/tps/README.md) diff --git a/docs/source/references/config.md b/docs/source/references/config.md deleted file mode 100644 index 4a8288df05c..00000000000 --- a/docs/source/references/config.md +++ /dev/null @@ -1,895 +0,0 @@ -# Iroha Configuration reference - -In this document we provide a reference and detailed descriptions of Iroha's configuration options. The options have different underlying types and default values, which are denoted in code as types wrapped in a single `Option<..>` or in a double `Option>`. For the detailed explanation, please refer to this [section](#configuration-types). 
- -## Configuration types - -### `Option<..>` - -A type wrapped in a single `Option<..>` signifies that in the corresponding `json` block there is a fallback value for this type, and that it only serves as a reference. If a default for such a type has a `null` value, it means that there is no meaningful fallback available for this particular value. - -All the default values can be freely obtained from a provided [sample configuration file](../../../configs/peer/config.json), but it should only serve as a starting point. If left unchanged, the sample configuration file would still fail to build due to it having `null` in place of [public](#public_key) and [private](#private_key) keys as well as [API endpoint URL](#torii.api_url). These should be provided either by modifying the sample config file or as environment variables. No other overloading of configuration values happens besides reading them from a file and capturing the environment variables. - -For both types of configuration options wrapped in a single `Option<..>` (i.e. both those that have meaningful defaults and those that have `null`), failure to provide them in any of the above two ways results in an error. - -### `Option>` - -`Option>` types should be distinguished from types wrapped in a single `Option<..>`. Only the double option ones are allowed to stay `null`, meaning that **not** providing them in an environment variable or a file will **not** result in an error. - -Thus, only these types are truly optional in the mundane sense of the word. An example of this distinction is genesis [public](#genesis.account_public_key) and [private](#genesis.account_private_key) key. While the first one is a single `Option<..>` wrapped type, the latter is wrapped in `Option>`. This means that the genesis *public* key should always be provided by the user, be it via a file config or an environment variable, whereas the *private* key is only needed for the peer that submits the genesis block, and can be omitted for all others. The same logic goes for other double option fields such as logger file path. - -### Sumeragi: default `null` values - -A special note about sumeragi fields with `null` as default: only the [`trusted_peers`](#sumeragi.trusted_peers) field out of the three can be initialized via a provided file or an environment variable. - -The other two fields, namely [`key_pair`](#sumeragi.key_pair) and [`peer_id`](#sumeragi.peer_id), go through a process of finalization where their values are derived from the corresponding ones in the uppermost Iroha config (using its [`public_key`](#public_key) and [`private_key`](#private_key) fields) or the Torii config (via its [`p2p_addr`](#torii.p2p_addr)). This ensures that these linked fields stay in sync, and prevents the programmer error when different values are provided to these field pairs. Providing either `sumeragi.key_pair` or `sumeragi.peer_id` by hand will result in an error, as it should never be done directly. - -## Default configuration - -The following is the default configuration used by Iroha. 
- -```json -{ - "PUBLIC_KEY": null, - "PRIVATE_KEY": null, - "DISABLE_PANIC_TERMINAL_COLORS": false, - "KURA": { - "INIT_MODE": "strict", - "BLOCK_STORE_PATH": "./storage", - "BLOCKS_PER_STORAGE_FILE": 1000, - "ACTOR_CHANNEL_CAPACITY": 100, - "DEBUG_OUTPUT_NEW_BLOCKS": false - }, - "SUMERAGI": { - "KEY_PAIR": null, - "PEER_ID": null, - "BLOCK_TIME_MS": 2000, - "TRUSTED_PEERS": null, - "COMMIT_TIME_LIMIT_MS": 4000, - "MAX_TRANSACTIONS_IN_BLOCK": 512, - "ACTOR_CHANNEL_CAPACITY": 100, - "GOSSIP_BATCH_SIZE": 500, - "GOSSIP_PERIOD_MS": 1000 - }, - "TORII": { - "P2P_ADDR": null, - "API_URL": null, - "MAX_TRANSACTION_SIZE": 32768, - "MAX_CONTENT_LEN": 16384000 - }, - "BLOCK_SYNC": { - "GOSSIP_PERIOD_MS": 10000, - "BLOCK_BATCH_SIZE": 4, - "ACTOR_CHANNEL_CAPACITY": 100 - }, - "QUEUE": { - "MAX_TRANSACTIONS_IN_QUEUE": 65536, - "MAX_TRANSACTIONS_IN_QUEUE_PER_USER": 65536, - "TRANSACTION_TIME_TO_LIVE_MS": 86400000, - "FUTURE_THRESHOLD_MS": 1000 - }, - "LOGGER": { - "MAX_LOG_LEVEL": "INFO", - "TELEMETRY_CAPACITY": 1000, - "COMPACT_MODE": false, - "LOG_FILE_PATH": null, - "TERMINAL_COLORS": true - }, - "GENESIS": { - "ACCOUNT_PUBLIC_KEY": null, - "ACCOUNT_PRIVATE_KEY": null - }, - "WSV": { - "ASSET_METADATA_LIMITS": { - "max_len": 1048576, - "max_entry_byte_size": 4096 - }, - "ASSET_DEFINITION_METADATA_LIMITS": { - "max_len": 1048576, - "max_entry_byte_size": 4096 - }, - "ACCOUNT_METADATA_LIMITS": { - "max_len": 1048576, - "max_entry_byte_size": 4096 - }, - "DOMAIN_METADATA_LIMITS": { - "max_len": 1048576, - "max_entry_byte_size": 4096 - }, - "IDENT_LENGTH_LIMITS": { - "min": 1, - "max": 128 - }, - "TRANSACTION_LIMITS": { - "max_instruction_number": 4096, - "max_wasm_size_bytes": 4194304 - }, - "WASM_RUNTIME_CONFIG": { - "FUEL_LIMIT": 23000000, - "MAX_MEMORY": 524288000 - } - }, - "NETWORK": { - "ACTOR_CHANNEL_CAPACITY": 100 - }, - "TELEMETRY": { - "NAME": null, - "URL": null, - "MIN_RETRY_PERIOD": 1, - "MAX_RETRY_DELAY_EXPONENT": 4, - "FILE": null - }, - "SNAPSHOT": { - "CREATE_EVERY_MS": 60000, - "DIR_PATH": "./storage", - "CREATION_ENABLED": true - }, - "LIVE_QUERY_STORE": { - "QUERY_IDLE_TIME_MS": 30000 - } -} -``` - -## `block_sync` - -`BlockSynchronizer` configuration - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_BLOCK_SYNC` - -```json -{ - "ACTOR_CHANNEL_CAPACITY": 100, - "BLOCK_BATCH_SIZE": 4, - "GOSSIP_PERIOD_MS": 10000 -} -``` - -### `block_sync.actor_channel_capacity` - -Buffer capacity of actor's MPSC channel - -Has type `Option`[^1]. Can be configured via environment variable `BLOCK_SYNC_ACTOR_CHANNEL_CAPACITY` - -```json -100 -``` - -### `block_sync.block_batch_size` - -The number of blocks that can be sent in one message. - -Has type `Option`[^1]. Can be configured via environment variable `BLOCK_SYNC_BLOCK_BATCH_SIZE` - -```json -4 -``` - -### `block_sync.gossip_period_ms` - -The period of time to wait between sending requests for the latest block. - -Has type `Option`[^1]. Can be configured via environment variable `BLOCK_SYNC_GOSSIP_PERIOD_MS` - -```json -10000 -``` - -## `disable_panic_terminal_colors` - -Disable coloring of the backtrace and error report on panic - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_DISABLE_PANIC_TERMINAL_COLORS` - -```json -false -``` - -## `genesis` - -`GenesisBlock` configuration - -Has type `Option>`[^1]. 
Can be configured via environment variable `IROHA_GENESIS` - -```json -{ - "ACCOUNT_PRIVATE_KEY": null, - "ACCOUNT_PUBLIC_KEY": null -} -``` - -### `genesis.account_private_key` - -The private key of the genesis account, only needed for the peer that submits the genesis block. - -Has type `Option>`[^1]. Can be configured via environment variable `IROHA_GENESIS_ACCOUNT_PRIVATE_KEY` - -```json -null -``` - -### `genesis.account_public_key` - -The public key of the genesis account, should be supplied to all peers. - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_GENESIS_ACCOUNT_PUBLIC_KEY` - -```json -null -``` - -## `kura` - -`Kura` configuration - -Has type `Option>`[^1]. Can be configured via environment variable `IROHA_KURA` - -```json -{ - "ACTOR_CHANNEL_CAPACITY": 100, - "BLOCKS_PER_STORAGE_FILE": 1000, - "BLOCK_STORE_PATH": "./storage", - "DEBUG_OUTPUT_NEW_BLOCKS": false, - "INIT_MODE": "strict" -} -``` - -### `kura.actor_channel_capacity` - -Default buffer capacity of actor's MPSC channel. - -Has type `Option`[^1]. Can be configured via environment variable `KURA_ACTOR_CHANNEL_CAPACITY` - -```json -100 -``` - -### `kura.block_store_path` - -Path to the existing block store folder or path to create new folder. - -Has type `Option`[^1]. Can be configured via environment variable `KURA_BLOCK_STORE_PATH` - -```json -"./storage" -``` - -### `kura.blocks_per_storage_file` - -Maximum number of blocks to write into a single storage file. - -Has type `Option`[^1]. Can be configured via environment variable `KURA_BLOCKS_PER_STORAGE_FILE` - -```json -1000 -``` - -### `kura.debug_output_new_blocks` - -Whether or not new blocks be outputted to a file called blocks.json. - -Has type `Option`[^1]. Can be configured via environment variable `KURA_DEBUG_OUTPUT_NEW_BLOCKS` - -```json -false -``` - -### `kura.init_mode` - -Initialization mode: `strict` or `fast`. - -Has type `Option`[^1]. Can be configured via environment variable `KURA_INIT_MODE` - -```json -"strict" -``` - -## `live_query_store` - -LiveQueryStore configuration - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_LIVE_QUERY_STORE` - -```json -{ - "QUERY_IDLE_TIME_MS": 30000 -} -``` - -### `live_query_store.query_idle_time_ms` - -Time query can remain in the store if unaccessed - -Has type `Option`[^1]. Can be configured via environment variable `LIVE_QUERY_STORE_QUERY_IDLE_TIME_MS` - -```json -30000 -``` - -## `logger` - -`Logger` configuration - -Has type `Option>`[^1]. Can be configured via environment variable `IROHA_LOGGER` - -```json -{ - "COMPACT_MODE": false, - "LOG_FILE_PATH": null, - "MAX_LOG_LEVEL": "INFO", - "TELEMETRY_CAPACITY": 1000, - "TERMINAL_COLORS": true -} -``` - -### `logger.compact_mode` - -Compact mode (no spans from telemetry) - -Has type `Option`[^1]. Can be configured via environment variable `COMPACT_MODE` - -```json -false -``` - -### `logger.log_file_path` - -If provided, logs will be copied to said file in the - -Has type `Option>`[^1]. Can be configured via environment variable `LOG_FILE_PATH` - -```json -null -``` - -### `logger.max_log_level` - -Maximum log level - -Has type `Option`[^1]. Can be configured via environment variable `MAX_LOG_LEVEL` - -```json -"INFO" -``` - -### `logger.telemetry_capacity` - -Capacity (or batch size) for telemetry channel - -Has type `Option`[^1]. Can be configured via environment variable `TELEMETRY_CAPACITY` - -```json -1000 -``` - -### `logger.terminal_colors` - -Enable ANSI terminal colors for formatted output. 
- -Has type `Option`[^1]. Can be configured via environment variable `TERMINAL_COLORS` - -```json -true -``` - -## `network` - -Network configuration - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_NETWORK` - -```json -{ - "ACTOR_CHANNEL_CAPACITY": 100 -} -``` - -### `network.actor_channel_capacity` - -Buffer capacity of actor's MPSC channel - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_NETWORK_ACTOR_CHANNEL_CAPACITY` - -```json -100 -``` - -## `private_key` - -Private key of this peer - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_PRIVATE_KEY` - -```json -null -``` - -## `public_key` - -Public key of this peer - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_PUBLIC_KEY` - -```json -null -``` - -## `queue` - -`Queue` configuration - -Has type `Option`[^1]. Can be configured via environment variable `IROHA_QUEUE` - -```json -{ - "FUTURE_THRESHOLD_MS": 1000, - "MAX_TRANSACTIONS_IN_QUEUE": 65536, - "MAX_TRANSACTIONS_IN_QUEUE_PER_USER": 65536, - "TRANSACTION_TIME_TO_LIVE_MS": 86400000 -} -``` - -### `queue.future_threshold_ms` - -The threshold to determine if a transaction has been tampered to have a future timestamp. - -Has type `Option`[^1]. Can be configured via environment variable `QUEUE_FUTURE_THRESHOLD_MS` - -```json -1000 -``` - -### `queue.max_transactions_in_queue` - -The upper limit of the number of transactions waiting in the queue. - -Has type `Option`[^1]. Can be configured via environment variable `QUEUE_MAX_TRANSACTIONS_IN_QUEUE` - -```json -65536 -``` - -### `queue.max_transactions_in_queue_per_user` - -The upper limit of the number of transactions waiting in the queue for single user. - -Has type `Option`[^1]. Can be configured via environment variable `QUEUE_MAX_TRANSACTIONS_IN_QUEUE_PER_USER` - -```json -65536 -``` - -### `queue.transaction_time_to_live_ms` - -The transaction will be dropped after this time if it is still in the queue. - -Has type `Option`[^1]. Can be configured via environment variable `QUEUE_TRANSACTION_TIME_TO_LIVE_MS` - -```json -86400000 -``` - -## `snapshot` - -SnapshotMaker configuration - -Has type `Option>`[^1]. Can be configured via environment variable `IROHA_SNAPSHOT` - -```json -{ - "CREATE_EVERY_MS": 60000, - "CREATION_ENABLED": true, - "DIR_PATH": "./storage" -} -``` - -### `snapshot.create_every_ms` - -The period of time to wait between attempts to create new snapshot. - -Has type `Option`[^1]. Can be configured via environment variable `SNAPSHOT_CREATE_EVERY_MS` - -```json -60000 -``` - -### `snapshot.creation_enabled` - -Flag to enable or disable snapshot creation - -Has type `Option`[^1]. Can be configured via environment variable `SNAPSHOT_CREATION_ENABLED` - -```json -true -``` - -### `snapshot.dir_path` - -Path to the directory where snapshots should be stored - -Has type `Option`[^1]. Can be configured via environment variable `SNAPSHOT_DIR_PATH` - -```json -"./storage" -``` - -## `sumeragi` - -`Sumeragi` configuration - -Has type `Option>`[^1]. Can be configured via environment variable `IROHA_SUMERAGI` - -```json -{ - "ACTOR_CHANNEL_CAPACITY": 100, - "BLOCK_TIME_MS": 2000, - "COMMIT_TIME_LIMIT_MS": 4000, - "GOSSIP_BATCH_SIZE": 500, - "GOSSIP_PERIOD_MS": 1000, - "KEY_PAIR": null, - "MAX_TRANSACTIONS_IN_BLOCK": 512, - "PEER_ID": null, - "TRUSTED_PEERS": null -} -``` - -### `sumeragi.actor_channel_capacity` - -Buffer capacity of actor's MPSC channel - -Has type `Option`[^1]. 
Can be configured via environment variable `SUMERAGI_ACTOR_CHANNEL_CAPACITY` - -```json -100 -``` - -### `sumeragi.block_time_ms` - -The period of time a peer waits for the `CreatedBlock` message after getting a `TransactionReceipt` - -Has type `Option`[^1]. Can be configured via environment variable `SUMERAGI_BLOCK_TIME_MS` - -```json -2000 -``` - -### `sumeragi.commit_time_limit_ms` - -The period of time a peer waits for `CommitMessage` from the proxy tail. - -Has type `Option`[^1]. Can be configured via environment variable `SUMERAGI_COMMIT_TIME_LIMIT_MS` - -```json -4000 -``` - -### `sumeragi.gossip_batch_size` - -max number of transactions in tx gossip batch message. While configuring this, pay attention to `p2p` max message size. - -Has type `Option`[^1]. Can be configured via environment variable `SUMERAGI_GOSSIP_BATCH_SIZE` - -```json -500 -``` - -### `sumeragi.gossip_period_ms` - -Period in milliseconds for pending transaction gossiping between peers. - -Has type `Option`[^1]. Can be configured via environment variable `SUMERAGI_GOSSIP_PERIOD_MS` - -```json -1000 -``` - -### `sumeragi.key_pair` - -The key pair consisting of a private and a public key. - -Has type `Option`[^1]. Can be configured via environment variable `SUMERAGI_KEY_PAIR` - -```json -null -``` - -### `sumeragi.max_transactions_in_block` - -The upper limit of the number of transactions per block. - -Has type `Option`[^1]. Can be configured via environment variable `SUMERAGI_MAX_TRANSACTIONS_IN_BLOCK` - -```json -512 -``` - -### `sumeragi.peer_id` - -Current Peer Identification. - -Has type `Option`[^1]. Can be configured via environment variable `SUMERAGI_PEER_ID` - -```json -null -``` - -### `sumeragi.trusted_peers` - -Optional list of predefined trusted peers. - -Has type `Option`[^1]. Can be configured via environment variable `SUMERAGI_TRUSTED_PEERS` - -```json -null -``` - -## `telemetry` - -Telemetry configuration - -Has type `Option>`[^1]. Can be configured via environment variable `IROHA_TELEMETRY` - -```json -{ - "FILE": null, - "MAX_RETRY_DELAY_EXPONENT": 4, - "MIN_RETRY_PERIOD": 1, - "NAME": null, - "URL": null -} -``` - -### `telemetry.file` - -The filepath that to write dev-telemetry to - -Has type `Option>`[^1]. Can be configured via environment variable `TELEMETRY_FILE` - -```json -null -``` - -### `telemetry.max_retry_delay_exponent` - -The maximum exponent of 2 that is used for increasing delay between reconnections - -Has type `Option`[^1]. Can be configured via environment variable `TELEMETRY_MAX_RETRY_DELAY_EXPONENT` - -```json -4 -``` - -### `telemetry.min_retry_period` - -The minimum period of time in seconds to wait before reconnecting - -Has type `Option`[^1]. Can be configured via environment variable `TELEMETRY_MIN_RETRY_PERIOD` - -```json -1 -``` - -### `telemetry.name` - -The node's name to be seen on the telemetry - -Has type `Option>`[^1]. Can be configured via environment variable `TELEMETRY_NAME` - -```json -null -``` - -### `telemetry.url` - -The url of the telemetry, e.g., ws://127.0.0.1:8001/submit - -Has type `Option>`[^1]. Can be configured via environment variable `TELEMETRY_URL` - -```json -null -``` - -## `torii` - -`Torii` configuration - -Has type `Option>`[^1]. Can be configured via environment variable `IROHA_TORII` - -```json -{ - "API_URL": null, - "MAX_CONTENT_LEN": 16384000, - "MAX_TRANSACTION_SIZE": 32768, - "P2P_ADDR": null -} -``` - -### `torii.api_url` - -Torii address for client API. - -Has type `Option`[^1]. 
Can be configured via environment variable `TORII_API_URL` - -```json -null -``` - -### `torii.max_content_len` - -Maximum number of bytes in raw message. Used to prevent from DOS attacks. - -Has type `Option`[^1]. Can be configured via environment variable `TORII_MAX_CONTENT_LEN` - -```json -16384000 -``` - -### `torii.max_transaction_size` - -Maximum number of bytes in raw transaction. Used to prevent from DOS attacks. - -Has type `Option`[^1]. Can be configured via environment variable `TORII_MAX_TRANSACTION_SIZE` - -```json -32768 -``` - -### `torii.p2p_addr` - -Torii address for p2p communication for consensus and block synchronization purposes. - -Has type `Option`[^1]. Can be configured via environment variable `TORII_P2P_ADDR` - -```json -null -``` - -## `wsv` - -`WorldStateView` configuration - -Has type `Option>`[^1]. Can be configured via environment variable `IROHA_WSV` - -```json -{ - "ACCOUNT_METADATA_LIMITS": { - "max_entry_byte_size": 4096, - "max_len": 1048576 - }, - "ASSET_DEFINITION_METADATA_LIMITS": { - "max_entry_byte_size": 4096, - "max_len": 1048576 - }, - "ASSET_METADATA_LIMITS": { - "max_entry_byte_size": 4096, - "max_len": 1048576 - }, - "DOMAIN_METADATA_LIMITS": { - "max_entry_byte_size": 4096, - "max_len": 1048576 - }, - "IDENT_LENGTH_LIMITS": { - "max": 128, - "min": 1 - }, - "TRANSACTION_LIMITS": { - "max_instruction_number": 4096, - "max_wasm_size_bytes": 4194304 - }, - "WASM_RUNTIME_CONFIG": { - "FUEL_LIMIT": 23000000, - "MAX_MEMORY": 524288000 - } -} -``` - -### `wsv.account_metadata_limits` - -[`MetadataLimits`] of any account metadata. - -Has type `Option`[^1]. Can be configured via environment variable `WSV_ACCOUNT_METADATA_LIMITS` - -```json -{ - "max_entry_byte_size": 4096, - "max_len": 1048576 -} -``` - -### `wsv.asset_definition_metadata_limits` - -[`MetadataLimits`] of any asset definition metadata. - -Has type `Option`[^1]. Can be configured via environment variable `WSV_ASSET_DEFINITION_METADATA_LIMITS` - -```json -{ - "max_entry_byte_size": 4096, - "max_len": 1048576 -} -``` - -### `wsv.asset_metadata_limits` - -[`MetadataLimits`] for every asset with store. - -Has type `Option`[^1]. Can be configured via environment variable `WSV_ASSET_METADATA_LIMITS` - -```json -{ - "max_entry_byte_size": 4096, - "max_len": 1048576 -} -``` - -### `wsv.domain_metadata_limits` - -[`MetadataLimits`] of any domain metadata. - -Has type `Option`[^1]. Can be configured via environment variable `WSV_DOMAIN_METADATA_LIMITS` - -```json -{ - "max_entry_byte_size": 4096, - "max_len": 1048576 -} -``` - -### `wsv.ident_length_limits` - -[`LengthLimits`] for the number of chars in identifiers that can be stored in the WSV. - -Has type `Option`[^1]. Can be configured via environment variable `WSV_IDENT_LENGTH_LIMITS` - -```json -{ - "max": 128, - "min": 1 -} -``` - -### `wsv.transaction_limits` - -Limits that all transactions need to obey, in terms of size - -Has type `Option`[^1]. Can be configured via environment variable `WSV_TRANSACTION_LIMITS` - -```json -{ - "max_instruction_number": 4096, - "max_wasm_size_bytes": 4194304 -} -``` - -### `wsv.wasm_runtime_config` - -WASM runtime configuration - -Has type `Option`[^1]. Can be configured via environment variable `WSV_WASM_RUNTIME_CONFIG` - -```json -{ - "FUEL_LIMIT": 23000000, - "MAX_MEMORY": 524288000 -} -``` - -#### `wsv.wasm_runtime_config.fuel_limit` - -The fuel limit determines the maximum number of instructions that can be executed within a smart contract. - -Has type `Option`[^1]. 
Can be configured via environment variable `WASM_FUEL_LIMIT` - -```json -23000000 -``` - -#### `wsv.wasm_runtime_config.max_memory` - -Maximum amount of linear memory a given smart contract can allocate. - -Has type `Option`[^1]. Can be configured via environment variable `WASM_MAX_MEMORY` - -```json -524288000 -``` - diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index 756d22082be..a4640d1122e 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -155,6 +155,20 @@ } ] }, + "AccountMintBox": { + "Enum": [ + { + "tag": "PublicKey", + "discriminant": 0, + "type": "Mint" + }, + { + "tag": "SignatureCheckCondition", + "discriminant": 1, + "type": "Mint" + } + ] + }, "AccountPermissionChanged": { "Struct": [ { @@ -203,18 +217,6 @@ } ] }, - "Add": { - "Struct": [ - { - "name": "left", - "type": "EvaluatesTo" - }, - { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, "Algorithm": { "Enum": [ { @@ -235,18 +237,6 @@ } ] }, - "And": { - "Struct": [ - { - "name": "left", - "type": "EvaluatesTo" - }, - { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, "Array, 8>": { "Array": { "type": "Interval", @@ -289,6 +279,25 @@ } ] }, + "AssetBurnBox": { + "Enum": [ + { + "tag": "Quantity", + "discriminant": 0, + "type": "Burn" + }, + { + "tag": "BigQuantity", + "discriminant": 1, + "type": "Burn" + }, + { + "tag": "Fixed", + "discriminant": 2, + "type": "Burn" + } + ] + }, "AssetChanged": { "Struct": [ { @@ -534,6 +543,44 @@ } ] }, + "AssetMintBox": { + "Enum": [ + { + "tag": "Quantity", + "discriminant": 0, + "type": "Mint" + }, + { + "tag": "BigQuantity", + "discriminant": 1, + "type": "Mint" + }, + { + "tag": "Fixed", + "discriminant": 2, + "type": "Mint" + } + ] + }, + "AssetTransferBox": { + "Enum": [ + { + "tag": "Quantity", + "discriminant": 0, + "type": "Transfer" + }, + { + "tag": "BigQuantity", + "discriminant": 1, + "type": "Transfer" + }, + { + "tag": "Fixed", + "discriminant": 2, + "type": "Transfer" + } + ] + }, "AssetValue": { "Enum": [ { @@ -632,18 +679,6 @@ } ] }, - "BinaryOpIncompatibleNumericValueTypesError": { - "Struct": [ - { - "name": "left", - "type": "NumericValue" - }, - { - "name": "right", - "type": "NumericValue" - } - ] - }, "BlockHeader": { "Struct": [ { @@ -702,31 +737,82 @@ ] }, "BlockSubscriptionRequest": "NonZero", - "BurnExpr": { + "Burn": { + "Struct": [ + { + "name": "object", + "type": "Fixed" + }, + { + "name": "destination_id", + "type": "AssetId" + } + ] + }, + "Burn": { + "Struct": [ + { + "name": "object", + "type": "PublicKey" + }, + { + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "Burn": { + "Struct": [ + { + "name": "object", + "type": "u128" + }, + { + "name": "destination_id", + "type": "AssetId" + } + ] + }, + "Burn": { "Struct": [ { "name": "object", - "type": "EvaluatesTo" + "type": "u32" }, { "name": "destination_id", - "type": "EvaluatesTo" + "type": "AssetId" } ] }, - "ConditionalExpr": { + "Burn>": { "Struct": [ { - "name": "condition", - "type": "EvaluatesTo" + "name": "object", + "type": "u32" + }, + { + "name": "destination_id", + "type": "TriggerId" + } + ] + }, + "BurnBox": { + "Enum": [ + { + "tag": "AccountPublicKey", + "discriminant": 0, + "type": "Burn" }, { - "name": "then", - "type": "InstructionExpr" + "tag": "Asset", + "discriminant": 1, + "type": "AssetBurnBox" }, { - "name": "otherwise", - "type": "Option" + "tag": "TriggerRepetitions", + "discriminant": 2, + "type": "Burn>" } ] }, @@ -778,50 +864,6 @@ } ] }, - "Contains": { - 
"Struct": [ - { - "name": "collection", - "type": "EvaluatesTo>" - }, - { - "name": "element", - "type": "EvaluatesTo" - } - ] - }, - "ContainsAll": { - "Struct": [ - { - "name": "collection", - "type": "EvaluatesTo>" - }, - { - "name": "elements", - "type": "EvaluatesTo>" - } - ] - }, - "ContainsAny": { - "Struct": [ - { - "name": "collection", - "type": "EvaluatesTo>" - }, - { - "name": "elements", - "type": "EvaluatesTo>" - } - ] - }, - "ContextValue": { - "Struct": [ - { - "name": "value_name", - "type": "Name" - } - ] - }, "DataEntityFilter": { "Enum": [ { @@ -915,18 +957,6 @@ } ] }, - "Divide": { - "Struct": [ - { - "name": "left", - "type": "EvaluatesTo" - }, - { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, "Domain": { "Struct": [ { @@ -1070,423 +1100,131 @@ "u32" ] }, - "Equal": { - "Struct": [ + "Event": { + "Enum": [ { - "name": "left", - "type": "EvaluatesTo" + "tag": "Pipeline", + "discriminant": 0, + "type": "PipelineEvent" }, { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, - "EvaluatesTo": { - "Struct": [ + "tag": "Data", + "discriminant": 1, + "type": "DataEvent" + }, { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo": { - "Struct": [ + "tag": "Time", + "discriminant": 2, + "type": "TimeEvent" + }, { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo": { - "Struct": [ + "tag": "ExecuteTrigger", + "discriminant": 3, + "type": "ExecuteTriggerEvent" + }, { - "name": "expression", - "type": "Expression" + "tag": "Notification", + "discriminant": 4, + "type": "NotificationEvent" } ] }, - "EvaluatesTo": { - "Struct": [ + "EventMessage": "Event", + "EventSubscriptionRequest": "FilterBox", + "Executable": { + "Enum": [ + { + "tag": "Instructions", + "discriminant": 0, + "type": "Vec" + }, { - "name": "expression", - "type": "Expression" + "tag": "Wasm", + "discriminant": 1, + "type": "WasmSmartContract" } ] }, - "EvaluatesTo>": { + "ExecuteTrigger": { "Struct": [ { - "name": "expression", - "type": "Expression" + "name": "trigger_id", + "type": "TriggerId" } ] }, - "EvaluatesTo>": { + "ExecuteTriggerEvent": { "Struct": [ { - "name": "expression", - "type": "Expression" + "name": "trigger_id", + "type": "TriggerId" + }, + { + "name": "authority", + "type": "AccountId" } ] }, - "EvaluatesTo": { + "ExecuteTriggerEventFilter": { "Struct": [ { - "name": "expression", - "type": "Expression" + "name": "trigger_id", + "type": "TriggerId" + }, + { + "name": "authority", + "type": "AccountId" } ] }, - "EvaluatesTo": { - "Struct": [ + "ExecutionTime": { + "Enum": [ + { + "tag": "PreCommit", + "discriminant": 0 + }, { - "name": "expression", - "type": "Expression" + "tag": "Schedule", + "discriminant": 1, + "type": "Schedule" } ] }, - "EvaluatesTo": { + "Executor": { "Struct": [ { - "name": "expression", - "type": "Expression" + "name": "wasm", + "type": "WasmSmartContract" } ] }, - "EvaluatesTo": { - "Struct": [ + "ExecutorEvent": { + "Enum": [ { - "name": "expression", - "type": "Expression" + "tag": "Upgraded", + "discriminant": 0 } ] }, - "EvaluatesTo": { - "Struct": [ + "ExecutorMode": { + "Enum": [ + { + "tag": "Path", + "discriminant": 0, + "type": "String" + }, { - "name": "expression", - "type": "Expression" + "tag": "Inline", + "discriminant": 1, + "type": "Executor" } ] }, - "EvaluatesTo": { + "Fail": { "Struct": [ { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo": { - "Struct": [ - { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo": { - "Struct": [ - { - 
"name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo": { - "Struct": [ - { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo": { - "Struct": [ - { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo": { - "Struct": [ - { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo>": { - "Struct": [ - { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluatesTo": { - "Struct": [ - { - "name": "expression", - "type": "Expression" - } - ] - }, - "EvaluationError": { - "Enum": [ - { - "tag": "Math", - "discriminant": 0, - "type": "MathError" - }, - { - "tag": "Validation", - "discriminant": 1, - "type": "ValidationFail" - }, - { - "tag": "Find", - "discriminant": 2, - "type": "String" - }, - { - "tag": "Conversion", - "discriminant": 3, - "type": "String" - } - ] - }, - "Event": { - "Enum": [ - { - "tag": "Pipeline", - "discriminant": 0, - "type": "PipelineEvent" - }, - { - "tag": "Data", - "discriminant": 1, - "type": "DataEvent" - }, - { - "tag": "Time", - "discriminant": 2, - "type": "TimeEvent" - }, - { - "tag": "ExecuteTrigger", - "discriminant": 3, - "type": "ExecuteTriggerEvent" - }, - { - "tag": "Notification", - "discriminant": 4, - "type": "NotificationEvent" - } - ] - }, - "EventMessage": "Event", - "EventSubscriptionRequest": "FilterBox", - "Executable": { - "Enum": [ - { - "tag": "Instructions", - "discriminant": 0, - "type": "Vec" - }, - { - "tag": "Wasm", - "discriminant": 1, - "type": "WasmSmartContract" - } - ] - }, - "ExecuteTriggerEvent": { - "Struct": [ - { - "name": "trigger_id", - "type": "TriggerId" - }, - { - "name": "authority", - "type": "AccountId" - } - ] - }, - "ExecuteTriggerEventFilter": { - "Struct": [ - { - "name": "trigger_id", - "type": "TriggerId" - }, - { - "name": "authority", - "type": "AccountId" - } - ] - }, - "ExecuteTriggerExpr": { - "Struct": [ - { - "name": "trigger_id", - "type": "EvaluatesTo" - } - ] - }, - "ExecutionTime": { - "Enum": [ - { - "tag": "PreCommit", - "discriminant": 0 - }, - { - "tag": "Schedule", - "discriminant": 1, - "type": "Schedule" - } - ] - }, - "Executor": { - "Struct": [ - { - "name": "wasm", - "type": "WasmSmartContract" - } - ] - }, - "ExecutorEvent": { - "Enum": [ - { - "tag": "Upgraded", - "discriminant": 0 - } - ] - }, - "ExecutorMode": { - "Enum": [ - { - "tag": "Path", - "discriminant": 0, - "type": "String" - }, - { - "tag": "Inline", - "discriminant": 1, - "type": "Executor" - } - ] - }, - "Expression": { - "Enum": [ - { - "tag": "Add", - "discriminant": 0, - "type": "Add" - }, - { - "tag": "Subtract", - "discriminant": 1, - "type": "Subtract" - }, - { - "tag": "Multiply", - "discriminant": 2, - "type": "Multiply" - }, - { - "tag": "Divide", - "discriminant": 3, - "type": "Divide" - }, - { - "tag": "Mod", - "discriminant": 4, - "type": "Mod" - }, - { - "tag": "RaiseTo", - "discriminant": 5, - "type": "RaiseTo" - }, - { - "tag": "Greater", - "discriminant": 6, - "type": "Greater" - }, - { - "tag": "Less", - "discriminant": 7, - "type": "Less" - }, - { - "tag": "Equal", - "discriminant": 8, - "type": "Equal" - }, - { - "tag": "Not", - "discriminant": 9, - "type": "Not" - }, - { - "tag": "And", - "discriminant": 10, - "type": "And" - }, - { - "tag": "Or", - "discriminant": 11, - "type": "Or" - }, - { - "tag": "If", - "discriminant": 12, - "type": "If" - }, - { - "tag": "Raw", - "discriminant": 13, - "type": "Value" - }, - { - "tag": "Query", - "discriminant": 14, - "type": "QueryBox" - }, - { - "tag": "Contains", - 
"discriminant": 15, - "type": "Contains" - }, - { - "tag": "ContainsAll", - "discriminant": 16, - "type": "ContainsAll" - }, - { - "tag": "ContainsAny", - "discriminant": 17, - "type": "ContainsAny" - }, - { - "tag": "Where", - "discriminant": 18, - "type": "Where" - }, - { - "tag": "ContextValue", - "discriminant": 19, - "type": "ContextValue" - } - ] - }, - "Fail": { - "Struct": [ - { - "name": "message", - "type": "String" + "name": "message", + "type": "String" } ] }, @@ -1809,7 +1547,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AccountId" } ] }, @@ -1817,11 +1555,11 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AccountId" }, { "name": "key", - "type": "EvaluatesTo" + "type": "Name" } ] }, @@ -1829,7 +1567,7 @@ "Struct": [ { "name": "domain_id", - "type": "EvaluatesTo" + "type": "DomainId" } ] }, @@ -1837,7 +1575,7 @@ "Struct": [ { "name": "name", - "type": "EvaluatesTo" + "type": "Name" } ] }, @@ -1845,7 +1583,7 @@ "Struct": [ { "name": "asset_definition_id", - "type": "EvaluatesTo" + "type": "AssetDefinitionId" } ] }, @@ -1865,7 +1603,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AssetId" } ] }, @@ -1873,7 +1611,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AssetDefinitionId" } ] }, @@ -1881,11 +1619,11 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AssetDefinitionId" }, { "name": "key", - "type": "EvaluatesTo" + "type": "Name" } ] }, @@ -1893,11 +1631,11 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AssetId" }, { "name": "key", - "type": "EvaluatesTo" + "type": "Name" } ] }, @@ -1905,7 +1643,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AssetId" } ] }, @@ -1913,7 +1651,7 @@ "Struct": [ { "name": "account_id", - "type": "EvaluatesTo" + "type": "AccountId" } ] }, @@ -1921,7 +1659,7 @@ "Struct": [ { "name": "asset_definition_id", - "type": "EvaluatesTo" + "type": "AssetDefinitionId" } ] }, @@ -1929,7 +1667,7 @@ "Struct": [ { "name": "domain_id", - "type": "EvaluatesTo" + "type": "DomainId" } ] }, @@ -1937,11 +1675,11 @@ "Struct": [ { "name": "domain_id", - "type": "EvaluatesTo" + "type": "DomainId" }, { "name": "asset_definition_id", - "type": "EvaluatesTo" + "type": "AssetDefinitionId" } ] }, @@ -1949,7 +1687,7 @@ "Struct": [ { "name": "name", - "type": "EvaluatesTo" + "type": "Name" } ] }, @@ -1957,7 +1695,7 @@ "Struct": [ { "name": "hash", - "type": "EvaluatesTo>" + "type": "HashOf" } ] }, @@ -1965,7 +1703,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "DomainId" } ] }, @@ -1973,11 +1711,11 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "DomainId" }, { "name": "key", - "type": "EvaluatesTo" + "type": "Name" } ] }, @@ -2055,7 +1793,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AccountId" } ] }, @@ -2063,7 +1801,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "RoleId" } ] }, @@ -2071,7 +1809,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AccountId" } ] }, @@ -2079,7 +1817,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "AssetDefinitionId" } ] }, @@ -2087,7 +1825,7 @@ "Struct": [ { "name": "hash", - "type": "EvaluatesTo>" + "type": "HashOf" } ] }, @@ -2095,7 +1833,7 @@ "Struct": [ { "name": "account_id", - "type": "EvaluatesTo" + "type": "AccountId" } ] }, @@ -2103,7 +1841,7 @@ "Struct": [ { "name": "id", - "type": "EvaluatesTo" + "type": "TriggerId" } ] }, @@ -2111,11 +1849,11 @@ "Struct": [ { "name": "id", - "type": 
"EvaluatesTo" + "type": "TriggerId" }, { "name": "key", - "type": "EvaluatesTo" + "type": "Name" } ] }, @@ -2123,7 +1861,7 @@ "Struct": [ { "name": "domain_id", - "type": "EvaluatesTo" + "type": "DomainId" } ] }, @@ -2170,27 +1908,41 @@ } ] }, - "GrantExpr": { + "Grant": { "Struct": [ { "name": "object", - "type": "EvaluatesTo" + "type": "PermissionToken" }, { "name": "destination_id", - "type": "EvaluatesTo" + "type": "AccountId" } ] }, - "Greater": { + "Grant": { "Struct": [ { - "name": "left", - "type": "EvaluatesTo" + "name": "object", + "type": "RoleId" }, { - "name": "right", - "type": "EvaluatesTo" + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "GrantBox": { + "Enum": [ + { + "tag": "PermissionToken", + "discriminant": 0, + "type": "Grant" + }, + { + "tag": "Role", + "discriminant": 1, + "type": "Grant" } ] }, @@ -2291,76 +2043,134 @@ { "tag": "Domain", "discriminant": 5, - "type": "Domain" + "type": "Domain" + }, + { + "tag": "Account", + "discriminant": 6, + "type": "Account" + }, + { + "tag": "AssetDefinition", + "discriminant": 7, + "type": "AssetDefinition" + }, + { + "tag": "Asset", + "discriminant": 8, + "type": "Asset" + }, + { + "tag": "Trigger", + "discriminant": 9, + "type": "Trigger" + }, + { + "tag": "Role", + "discriminant": 10, + "type": "Role" + }, + { + "tag": "Parameter", + "discriminant": 11, + "type": "Parameter" + } + ] + }, + "InstructionBox": { + "Enum": [ + { + "tag": "Register", + "discriminant": 0, + "type": "RegisterBox" + }, + { + "tag": "Unregister", + "discriminant": 1, + "type": "UnregisterBox" + }, + { + "tag": "Mint", + "discriminant": 2, + "type": "MintBox" + }, + { + "tag": "Burn", + "discriminant": 3, + "type": "BurnBox" + }, + { + "tag": "Transfer", + "discriminant": 4, + "type": "TransferBox" + }, + { + "tag": "SetKeyValue", + "discriminant": 5, + "type": "SetKeyValueBox" }, { - "tag": "Account", + "tag": "RemoveKeyValue", "discriminant": 6, - "type": "Account" + "type": "RemoveKeyValueBox" }, { - "tag": "AssetDefinition", + "tag": "Grant", "discriminant": 7, - "type": "AssetDefinition" + "type": "GrantBox" }, { - "tag": "Asset", + "tag": "Revoke", "discriminant": 8, - "type": "Asset" + "type": "RevokeBox" }, { - "tag": "Trigger", + "tag": "ExecuteTrigger", "discriminant": 9, - "type": "Trigger" + "type": "ExecuteTrigger" }, { - "tag": "Role", + "tag": "SetParameter", "discriminant": 10, - "type": "Role" + "type": "SetParameter" }, { - "tag": "Parameter", + "tag": "NewParameter", "discriminant": 11, - "type": "Parameter" - } - ] - }, - "If": { - "Struct": [ + "type": "NewParameter" + }, { - "name": "condition", - "type": "EvaluatesTo" + "tag": "Upgrade", + "discriminant": 12, + "type": "Upgrade" }, { - "name": "then", - "type": "EvaluatesTo" + "tag": "Log", + "discriminant": 13, + "type": "Log" }, { - "name": "otherwise", - "type": "EvaluatesTo" + "tag": "Fail", + "discriminant": 14, + "type": "Fail" } ] }, "InstructionEvaluationError": { "Enum": [ - { - "tag": "Expression", - "discriminant": 0, - "type": "EvaluationError" - }, { "tag": "Unsupported", - "discriminant": 1, + "discriminant": 0, "type": "InstructionType" }, { "tag": "PermissionParameter", - "discriminant": 2, + "discriminant": 1, "type": "String" }, { "tag": "Type", - "discriminant": 3, + "discriminant": 2, "type": "TypeError" } ] @@ -2428,7 +2238,7 @@ "Struct": [ { "name": "instruction", - "type": "InstructionExpr" + "type": "InstructionBox" }, { "name": "reason", @@ -2436,100 +2246,6 @@ } ] }, - "InstructionExpr": { - "Enum": [ - { - "tag": "Register", - 
"discriminant": 0, - "type": "RegisterExpr" - }, - { - "tag": "Unregister", - "discriminant": 1, - "type": "UnregisterExpr" - }, - { - "tag": "Mint", - "discriminant": 2, - "type": "MintExpr" - }, - { - "tag": "Burn", - "discriminant": 3, - "type": "BurnExpr" - }, - { - "tag": "Transfer", - "discriminant": 4, - "type": "TransferExpr" - }, - { - "tag": "If", - "discriminant": 5, - "type": "ConditionalExpr" - }, - { - "tag": "Pair", - "discriminant": 6, - "type": "PairExpr" - }, - { - "tag": "Sequence", - "discriminant": 7, - "type": "SequenceExpr" - }, - { - "tag": "SetKeyValue", - "discriminant": 8, - "type": "SetKeyValueExpr" - }, - { - "tag": "RemoveKeyValue", - "discriminant": 9, - "type": "RemoveKeyValueExpr" - }, - { - "tag": "Grant", - "discriminant": 10, - "type": "GrantExpr" - }, - { - "tag": "Revoke", - "discriminant": 11, - "type": "RevokeExpr" - }, - { - "tag": "ExecuteTrigger", - "discriminant": 12, - "type": "ExecuteTriggerExpr" - }, - { - "tag": "SetParameter", - "discriminant": 13, - "type": "SetParameterExpr" - }, - { - "tag": "NewParameter", - "discriminant": 14, - "type": "NewParameterExpr" - }, - { - "tag": "Upgrade", - "discriminant": 15, - "type": "UpgradeExpr" - }, - { - "tag": "Log", - "discriminant": 16, - "type": "LogExpr" - }, - { - "tag": "Fail", - "discriminant": 17, - "type": "Fail" - } - ] - }, "InstructionType": { "Enum": [ { @@ -2552,57 +2268,45 @@ "tag": "Transfer", "discriminant": 4 }, - { - "tag": "If", - "discriminant": 5 - }, - { - "tag": "Pair", - "discriminant": 6 - }, - { - "tag": "Sequence", - "discriminant": 7 - }, { "tag": "SetKeyValue", - "discriminant": 8 + "discriminant": 5 }, { "tag": "RemoveKeyValue", - "discriminant": 9 + "discriminant": 6 }, { "tag": "Grant", - "discriminant": 10 + "discriminant": 7 }, { "tag": "Revoke", - "discriminant": 11 + "discriminant": 8 }, { "tag": "ExecuteTrigger", - "discriminant": 12 + "discriminant": 9 }, { "tag": "SetParameter", - "discriminant": 13 + "discriminant": 10 }, { "tag": "NewParameter", - "discriminant": 14 + "discriminant": 11 }, { "tag": "Upgrade", - "discriminant": 15 + "discriminant": 12 }, { "tag": "Log", - "discriminant": 16 + "discriminant": 13 }, { "tag": "Fail", - "discriminant": 17 + "discriminant": 14 } ] }, @@ -2660,18 +2364,6 @@ } ] }, - "Less": { - "Struct": [ - { - "name": "left", - "type": "EvaluatesTo" - }, - { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, "Level": { "Enum": [ { @@ -2708,15 +2400,15 @@ } ] }, - "LogExpr": { + "Log": { "Struct": [ { "name": "level", - "type": "EvaluatesTo" + "type": "Level" }, { "name": "msg", - "type": "EvaluatesTo" + "type": "String" } ] }, @@ -2746,14 +2438,9 @@ "tag": "Unknown", "discriminant": 5 }, - { - "tag": "BinaryOpIncompatibleNumericValueTypes", - "discriminant": 6, - "type": "BinaryOpIncompatibleNumericValueTypesError" - }, { "tag": "FixedPointConversion", - "discriminant": 7, + "discriminant": 6, "type": "String" } ] @@ -2801,75 +2488,154 @@ } ] }, - "MetadataChanged": { + "MetadataChanged": { + "Struct": [ + { + "name": "target_id", + "type": "AssetId" + }, + { + "name": "key", + "type": "Name" + }, + { + "name": "value", + "type": "Value" + } + ] + }, + "MetadataChanged": { + "Struct": [ + { + "name": "target_id", + "type": "DomainId" + }, + { + "name": "key", + "type": "Name" + }, + { + "name": "value", + "type": "Value" + } + ] + }, + "MetadataError": { + "Enum": [ + { + "tag": "EntryTooBig", + "discriminant": 0, + "type": "SizeError" + }, + { + "tag": "OverallSize", + "discriminant": 1, + "type": "SizeError" + }, + { + 
"tag": "EmptyPath", + "discriminant": 2 + }, + { + "tag": "MissingSegment", + "discriminant": 3, + "type": "Name" + }, + { + "tag": "InvalidSegment", + "discriminant": 4, + "type": "Name" + } + ] + }, + "Mint": { + "Struct": [ + { + "name": "object", + "type": "Fixed" + }, + { + "name": "destination_id", + "type": "AssetId" + } + ] + }, + "Mint": { + "Struct": [ + { + "name": "object", + "type": "PublicKey" + }, + { + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "Mint": { + "Struct": [ + { + "name": "object", + "type": "SignatureCheckCondition" + }, + { + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "Mint": { + "Struct": [ + { + "name": "object", + "type": "u128" + }, + { + "name": "destination_id", + "type": "AssetId" + } + ] + }, + "Mint": { "Struct": [ { - "name": "target_id", - "type": "AssetId" - }, - { - "name": "key", - "type": "Name" + "name": "object", + "type": "u32" }, { - "name": "value", - "type": "Value" + "name": "destination_id", + "type": "AssetId" } ] }, - "MetadataChanged": { + "Mint>": { "Struct": [ { - "name": "target_id", - "type": "DomainId" - }, - { - "name": "key", - "type": "Name" + "name": "object", + "type": "u32" }, { - "name": "value", - "type": "Value" + "name": "destination_id", + "type": "TriggerId" } ] }, - "MetadataError": { + "MintBox": { "Enum": [ { - "tag": "EntryTooBig", + "tag": "Account", "discriminant": 0, - "type": "SizeError" + "type": "AccountMintBox" }, { - "tag": "OverallSize", + "tag": "Asset", "discriminant": 1, - "type": "SizeError" - }, - { - "tag": "EmptyPath", - "discriminant": 2 - }, - { - "tag": "MissingSegment", - "discriminant": 3, - "type": "Name" - }, - { - "tag": "InvalidSegment", - "discriminant": 4, - "type": "Name" - } - ] - }, - "MintExpr": { - "Struct": [ - { - "name": "object", - "type": "EvaluatesTo" + "type": "AssetMintBox" }, { - "name": "destination_id", - "type": "EvaluatesTo" + "tag": "TriggerRepetitions", + "discriminant": 2, + "type": "Mint>" } ] }, @@ -2937,30 +2703,6 @@ } ] }, - "Mod": { - "Struct": [ - { - "name": "left", - "type": "EvaluatesTo" - }, - { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, - "Multiply": { - "Struct": [ - { - "name": "left", - "type": "EvaluatesTo" - }, - { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, "Name": "String", "NewAccount": { "Struct": [ @@ -3018,11 +2760,11 @@ } ] }, - "NewParameterExpr": { + "NewParameter": { "Struct": [ { "name": "parameter", - "type": "EvaluatesTo" + "type": "Parameter" } ] }, @@ -3037,14 +2779,6 @@ "NonTrivial>": "Vec>", "NonZero": "u32", "NonZero": "u64", - "Not": { - "Struct": [ - { - "name": "expression", - "type": "EvaluatesTo" - } - ] - }, "NotificationEvent": { "Enum": [ { @@ -3106,9 +2840,6 @@ "Option>": { "Option": "HashOf" }, - "Option": { - "Option": "InstructionExpr" - }, "Option": { "Option": "IpfsPath" }, @@ -3139,18 +2870,6 @@ "Option": { "Option": "TriggerId" }, - "Or": { - "Struct": [ - { - "name": "left", - "type": "EvaluatesTo" - }, - { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, "OriginFilter": "AccountId", "OriginFilter": "AssetDefinitionId", "OriginFilter": "AssetId", @@ -3158,18 +2877,6 @@ "OriginFilter": "PeerId", "OriginFilter": "RoleId", "OriginFilter": "TriggerId", - "PairExpr": { - "Struct": [ - { - "name": "left_instruction", - "type": "InstructionExpr" - }, - { - "name": "right_instruction", - "type": "InstructionExpr" - } - ] - }, "Parameter": { "Struct": [ { @@ -3610,28 +3317,23 @@ "discriminant": 0, "type": "String" }, - { - "tag": "Evaluate", - 
"discriminant": 1, - "type": "String" - }, { "tag": "Find", - "discriminant": 2, + "discriminant": 1, "type": "FindError" }, { "tag": "Conversion", - "discriminant": 3, + "discriminant": 2, "type": "String" }, { "tag": "UnknownCursor", - "discriminant": 4 + "discriminant": 3 }, { "tag": "FetchSizeTooBig", - "discriminant": 5 + "discriminant": 4 } ] }, @@ -3651,35 +3353,110 @@ } ] }, - "RaiseTo": { + "RawGenesisBlock": { "Struct": [ { - "name": "left", - "type": "EvaluatesTo" + "name": "transactions", + "type": "Vec>" }, { - "name": "right", - "type": "EvaluatesTo" + "name": "executor", + "type": "ExecutorMode" } ] }, - "RawGenesisBlock": { + "Register": { "Struct": [ { - "name": "transactions", - "type": "Vec>" - }, + "name": "object", + "type": "NewAccount" + } + ] + }, + "Register": { + "Struct": [ { - "name": "executor", - "type": "ExecutorMode" + "name": "object", + "type": "Asset" + } + ] + }, + "Register": { + "Struct": [ + { + "name": "object", + "type": "NewAssetDefinition" } ] }, - "RegisterExpr": { + "Register": { "Struct": [ { "name": "object", - "type": "EvaluatesTo" + "type": "NewDomain" + } + ] + }, + "Register": { + "Struct": [ + { + "name": "object", + "type": "Peer" + } + ] + }, + "Register": { + "Struct": [ + { + "name": "object", + "type": "NewRole" + } + ] + }, + "Register>": { + "Struct": [ + { + "name": "object", + "type": "Trigger" + } + ] + }, + "RegisterBox": { + "Enum": [ + { + "tag": "Peer", + "discriminant": 0, + "type": "Register" + }, + { + "tag": "Domain", + "discriminant": 1, + "type": "Register" + }, + { + "tag": "Account", + "discriminant": 2, + "type": "Register" + }, + { + "tag": "AssetDefinition", + "discriminant": 3, + "type": "Register" + }, + { + "tag": "Asset", + "discriminant": 4, + "type": "Register" + }, + { + "tag": "Role", + "discriminant": 5, + "type": "Register" + }, + { + "tag": "Trigger", + "discriminant": 6, + "type": "Register>" } ] }, @@ -3722,15 +3499,75 @@ } ] }, - "RemoveKeyValueExpr": { + "RemoveKeyValue": { + "Struct": [ + { + "name": "object_id", + "type": "AccountId" + }, + { + "name": "key", + "type": "Name" + } + ] + }, + "RemoveKeyValue": { "Struct": [ { "name": "object_id", - "type": "EvaluatesTo" + "type": "AssetId" + }, + { + "name": "key", + "type": "Name" + } + ] + }, + "RemoveKeyValue": { + "Struct": [ + { + "name": "object_id", + "type": "AssetDefinitionId" + }, + { + "name": "key", + "type": "Name" + } + ] + }, + "RemoveKeyValue": { + "Struct": [ + { + "name": "object_id", + "type": "DomainId" }, { "name": "key", - "type": "EvaluatesTo" + "type": "Name" + } + ] + }, + "RemoveKeyValueBox": { + "Enum": [ + { + "tag": "Domain", + "discriminant": 0, + "type": "RemoveKeyValue" + }, + { + "tag": "Account", + "discriminant": 1, + "type": "RemoveKeyValue" + }, + { + "tag": "AssetDefinition", + "discriminant": 2, + "type": "RemoveKeyValue" + }, + { + "tag": "Asset", + "discriminant": 3, + "type": "RemoveKeyValue" } ] }, @@ -3747,27 +3584,53 @@ } ] }, - "RepetitionError": { + "RepetitionError": { + "Struct": [ + { + "name": "instruction_type", + "type": "InstructionType" + }, + { + "name": "id", + "type": "IdBox" + } + ] + }, + "Revoke": { + "Struct": [ + { + "name": "object", + "type": "PermissionToken" + }, + { + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "Revoke": { "Struct": [ { - "name": "instruction_type", - "type": "InstructionType" + "name": "object", + "type": "RoleId" }, { - "name": "id", - "type": "IdBox" + "name": "destination_id", + "type": "AccountId" } ] }, - "RevokeExpr": { - "Struct": [ + 
"RevokeBox": { + "Enum": [ { - "name": "object", - "type": "EvaluatesTo" + "tag": "PermissionToken", + "discriminant": 0, + "type": "Revoke" }, { - "name": "destination_id", - "type": "EvaluatesTo" + "tag": "Role", + "discriminant": 1, + "type": "Revoke" } ] }, @@ -3905,35 +3768,99 @@ } ] }, - "SequenceExpr": { + "SetKeyValue": { "Struct": [ { - "name": "instructions", - "type": "Vec" + "name": "object_id", + "type": "AccountId" + }, + { + "name": "key", + "type": "Name" + }, + { + "name": "value", + "type": "Value" + } + ] + }, + "SetKeyValue": { + "Struct": [ + { + "name": "object_id", + "type": "AssetId" + }, + { + "name": "key", + "type": "Name" + }, + { + "name": "value", + "type": "Value" + } + ] + }, + "SetKeyValue": { + "Struct": [ + { + "name": "object_id", + "type": "AssetDefinitionId" + }, + { + "name": "key", + "type": "Name" + }, + { + "name": "value", + "type": "Value" } ] }, - "SetKeyValueExpr": { + "SetKeyValue": { "Struct": [ { "name": "object_id", - "type": "EvaluatesTo" + "type": "DomainId" }, { "name": "key", - "type": "EvaluatesTo" + "type": "Name" }, { "name": "value", - "type": "EvaluatesTo" + "type": "Value" + } + ] + }, + "SetKeyValueBox": { + "Enum": [ + { + "tag": "Domain", + "discriminant": 0, + "type": "SetKeyValue" + }, + { + "tag": "Account", + "discriminant": 1, + "type": "SetKeyValue" + }, + { + "tag": "AssetDefinition", + "discriminant": 2, + "type": "SetKeyValue" + }, + { + "tag": "Asset", + "discriminant": 3, + "type": "SetKeyValue" } ] }, - "SetParameterExpr": { + "SetParameter": { "Struct": [ { "name": "parameter", - "type": "EvaluatesTo" + "type": "Parameter" } ] }, @@ -4136,12 +4063,6 @@ "value": "Asset" } }, - "SortedMap>": { - "Map": { - "key": "Name", - "value": "EvaluatesTo" - } - }, "SortedMap": { "Map": { "key": "Name", @@ -4186,18 +4107,6 @@ ] }, "StringWithJson": "String", - "Subtract": { - "Struct": [ - { - "name": "left", - "type": "EvaluatesTo" - }, - { - "name": "right", - "type": "EvaluatesTo" - } - ] - }, "TimeEvent": { "Struct": [ { @@ -4328,19 +4237,102 @@ } ] }, - "TransferExpr": { + "Transfer": { + "Struct": [ + { + "name": "source_id", + "type": "AccountId" + }, + { + "name": "object", + "type": "AssetDefinitionId" + }, + { + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "Transfer": { + "Struct": [ + { + "name": "source_id", + "type": "AccountId" + }, + { + "name": "object", + "type": "DomainId" + }, + { + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "Transfer": { + "Struct": [ + { + "name": "source_id", + "type": "AssetId" + }, + { + "name": "object", + "type": "Fixed" + }, + { + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "Transfer": { + "Struct": [ + { + "name": "source_id", + "type": "AssetId" + }, + { + "name": "object", + "type": "u128" + }, + { + "name": "destination_id", + "type": "AccountId" + } + ] + }, + "Transfer": { "Struct": [ { "name": "source_id", - "type": "EvaluatesTo" + "type": "AssetId" }, { "name": "object", - "type": "EvaluatesTo" + "type": "u32" }, { "name": "destination_id", - "type": "EvaluatesTo" + "type": "AccountId" + } + ] + }, + "TransferBox": { + "Enum": [ + { + "tag": "Domain", + "discriminant": 0, + "type": "Transfer" + }, + { + "tag": "AssetDefinition", + "discriminant": 1, + "type": "Transfer" + }, + { + "tag": "Asset", + "discriminant": 2, + "type": "AssetTransferBox" } ] }, @@ -4529,11 +4521,98 @@ ] }, "UniqueVec": "Vec", - "UnregisterExpr": { + "Unregister": { + "Struct": [ + { + "name": "object_id", + "type": "AccountId" + } + ] + }, + 
"Unregister": { + "Struct": [ + { + "name": "object_id", + "type": "AssetId" + } + ] + }, + "Unregister": { + "Struct": [ + { + "name": "object_id", + "type": "AssetDefinitionId" + } + ] + }, + "Unregister": { + "Struct": [ + { + "name": "object_id", + "type": "DomainId" + } + ] + }, + "Unregister": { + "Struct": [ + { + "name": "object_id", + "type": "PeerId" + } + ] + }, + "Unregister": { + "Struct": [ + { + "name": "object_id", + "type": "RoleId" + } + ] + }, + "Unregister>": { "Struct": [ { "name": "object_id", - "type": "EvaluatesTo" + "type": "TriggerId" + } + ] + }, + "UnregisterBox": { + "Enum": [ + { + "tag": "Peer", + "discriminant": 0, + "type": "Unregister" + }, + { + "tag": "Domain", + "discriminant": 1, + "type": "Unregister" + }, + { + "tag": "Account", + "discriminant": 2, + "type": "Unregister" + }, + { + "tag": "AssetDefinition", + "discriminant": 3, + "type": "Unregister" + }, + { + "tag": "Asset", + "discriminant": 4, + "type": "Unregister" + }, + { + "tag": "Role", + "discriminant": 5, + "type": "Unregister" + }, + { + "tag": "Trigger", + "discriminant": 6, + "type": "Unregister>" } ] }, @@ -4546,11 +4625,11 @@ } ] }, - "UpgradeExpr": { + "Upgrade": { "Struct": [ { - "name": "object", - "type": "EvaluatesTo" + "name": "executor", + "type": "Executor" } ] }, @@ -4761,8 +4840,8 @@ "Vec>": { "Vec": "GenericPredicateBox" }, - "Vec": { - "Vec": "InstructionExpr" + "Vec": { + "Vec": "InstructionBox" }, "Vec": { "Vec": "Name" @@ -4782,8 +4861,8 @@ "Vec": { "Vec": "Value" }, - "Vec>": { - "Vec": "Vec" + "Vec>": { + "Vec": "Vec" }, "Vec": { "Vec": "u8" @@ -4797,18 +4876,6 @@ ] }, "WasmSmartContract": "Vec", - "Where": { - "Struct": [ - { - "name": "expression", - "type": "EvaluatesTo" - }, - { - "name": "values", - "type": "SortedMap>" - } - ] - }, "bool": "bool", "i64": { "Int": "FixedWidth" diff --git a/dsl/Cargo.toml b/dsl/Cargo.toml deleted file mode 100755 index 7cdbd40ee01..00000000000 --- a/dsl/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "iroha_dsl" - -edition.workspace = true -version.workspace = true -authors.workspace = true - -license.workspace = true - -[lints] -workspace = true - -[lib] -proc-macro = true - -[dependencies] -quote = "1.0" -proc-macro2 = "1.0" -litrs = "0.4.0" - -[dev-dependencies] -iroha_data_model = { workspace = true } -iroha_crypto = { workspace = true } -iroha_config = { workspace = true } -iroha_client = { workspace = true } -serde_json = "1.0" - - diff --git a/dsl/src/lib.rs b/dsl/src/lib.rs deleted file mode 100755 index c1378c24b1d..00000000000 --- a/dsl/src/lib.rs +++ /dev/null @@ -1,208 +0,0 @@ -// TODO: add docs -#![allow(missing_docs)] - -use std::{convert::TryFrom, iter::Peekable, str::FromStr}; - -use litrs::Literal; -use proc_macro2::{ - token_stream::IntoIter, - TokenStream, TokenTree, - TokenTree::{Ident, Punct}, -}; -use quote::quote; - -#[derive(PartialEq)] -enum ExprType { - Nil, - Int, - Bool, -} - -// within the compiler, these generate tokens for -// Value -struct ParsedExpr { - t: ExprType, - tokens: TokenStream, -} - -struct Operator(i32, ExprType, &'static str); - -fn parse_literal(it: &mut Peekable) -> ParsedExpr { - let token = it - .next() - .expect("failed to parse literal, hit end of string"); - - match token { - Punct(ref x) => { - if x.as_char() == '(' { - let v = parse_expr(it); - - it.next() - .expect("expected closing paren, hit end of string"); - return v; - } - } - - // boolean literals - Ident(ref x) => match x.to_string().as_str() { - "true" => { - return ParsedExpr { - t: ExprType::Bool, - 
tokens: quote! { true }, - } - } - "false" => { - return ParsedExpr { - t: ExprType::Bool, - tokens: quote! { false }, - } - } - - // unary not - "not" => { - let v = parse_literal(it).tokens; - return ParsedExpr { - t: ExprType::Bool, - tokens: quote! { Not::new(#v) }, - }; - } - _ => panic!("error: unknown identifier"), - }, - - // kinda ugly but we fallthrough basically - _ => {} - } - - // integer literals - if let Ok(Literal::Integer(i)) = Literal::try_from(token) { - let v = i.value::().expect("i don't think this integer fits?"); - return ParsedExpr { - t: ExprType::Int, - tokens: quote! { #v }, - }; - } - - ParsedExpr { - t: ExprType::Nil, - tokens: TokenStream::new(), - } -} - -fn precedence(it: &mut Peekable) -> Option { - match it.peek() { - Some(Punct(x)) => match x.as_char() { - // arithmatic - '+' => Some(Operator(4, ExprType::Int, "Add")), - '-' => Some(Operator(4, ExprType::Int, "Subtract")), - '*' => Some(Operator(3, ExprType::Int, "Multiply")), - - // compares - '=' => Some(Operator(2, ExprType::Int, "Equal")), - '>' => Some(Operator(2, ExprType::Int, "Greater")), - '<' => Some(Operator(2, ExprType::Int, "Less")), - _ => None, - }, - Some(Ident(ref x)) => match x.to_string().as_str() { - "and" => Some(Operator(1, ExprType::Bool, "And")), - "or" => Some(Operator(1, ExprType::Bool, "Or")), - _ => None, - }, - _ => None, - } -} - -/// precedence walking -fn parse_binop(it: &mut Peekable, min_prec: i32) -> ParsedExpr { - let mut lhs = parse_literal(it); - while let Some(prec) = precedence(it) { - it.next(); - if prec.0 < min_prec { - break; - } - - let op = proc_macro2::TokenStream::from_str(prec.2).unwrap(); - let rhs = parse_literal(it); - - assert!( - lhs.t == prec.1 && rhs.t == prec.1, - "cannot perform binary operator on these!" - ); - - let lhs_tokens = lhs.tokens; - let rhs_tokens = rhs.tokens; - lhs = ParsedExpr { - t: prec.1, - tokens: quote! { #op::new(#lhs_tokens, #rhs_tokens) }, - }; - } - - lhs -} - -fn is_ident(a: &TokenTree, b: &'static str) -> bool { - if let Ident(ref x) = a { - return x.to_string().as_str() == b; - } - - false -} - -fn parse_expr(it: &mut Peekable) -> ParsedExpr { - if is_ident(it.peek().expect("hit end of string"), "if") { - it.next(); - - let cond_tokens = parse_binop(it, 0).tokens; - assert!( - is_ident(&it.next().expect("hit end of string"), "then"), - "expected 'then'" - ); - - let true_case = parse_binop(it, 0); - assert!( - is_ident(&it.next().expect("hit end of string"), "else"), - "expected 'else'" - ); - - let false_case = parse_binop(it, 0); - assert!( - true_case.t == false_case.t, - "both types in a conditional must match" - ); - - let true_tokens = true_case.tokens; - let false_tokens = false_case.tokens; - return ParsedExpr { - t: ExprType::Int, - tokens: quote! { If::new(#cond_tokens, #true_tokens, #false_tokens) }, - }; - } - - // normal expression - parse_binop(it, 0) -} - -/// Convert arithmetic expression into bare expression in the iroha data model. 
-/// -/// Basic arithmetic and boolean expressions are supported, namely: `> < = + - * and or if not` -/// -/// # Examples -/// -/// ``` -/// extern crate iroha_dsl; -/// extern crate iroha_data_model; -/// use iroha_dsl::expr; -/// use iroha_data_model::{prelude::*, ParseError}; -/// -/// fn main() { -/// assert_eq!(expr!(54654*5 + 1), Add::new(Multiply::new(54654_u64, 5_u64), 1_u64)); -/// println!("{}", expr!(not true and false)); -/// println!("{}", expr!(if 4 = 4 then 64 else 32)); -/// } -/// ``` -#[proc_macro] -pub fn expr(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = proc_macro2::TokenStream::from(input); - let mut it = input.into_iter().peekable(); - - proc_macro::TokenStream::from(parse_expr(&mut it).tokens) -} diff --git a/ffi/src/lib.rs b/ffi/src/lib.rs index dcdd52f0d9a..4590f55b1c7 100644 --- a/ffi/src/lib.rs +++ b/ffi/src/lib.rs @@ -415,6 +415,7 @@ pub struct Extern { /// ``` pub trait WrapperTypeOf { /// Correct return type of `T` in a function generated via [`ffi_import`] + // TODO: Is associated type necessary if we already have a generic? type Type; } diff --git a/futures/src/lib.rs b/futures/src/lib.rs index f45fa002b71..3865cc4e6fe 100644 --- a/futures/src/lib.rs +++ b/futures/src/lib.rs @@ -7,7 +7,7 @@ use std::{ }; pub use iroha_futures_derive::*; -use iroha_logger::telemetry::{Telemetry, TelemetryFields}; +use iroha_logger::telemetry::{Event as Telemetry, Fields as TelemetryFields}; use serde::{Deserialize, Serialize}; use serde_json::Value; diff --git a/futures/tests/basic.rs b/futures/tests/basic.rs index a1514e01ab1..190d6201000 100644 --- a/futures/tests/basic.rs +++ b/futures/tests/basic.rs @@ -1,10 +1,9 @@ use std::{thread, time::Duration}; -use iroha_config::base::proxy::Builder; use iroha_futures::FuturePollTelemetry; -use iroha_logger::ConfigurationProxy; +use iroha_logger::telemetry::Channel; use tokio::task; -use tokio_stream::{wrappers::ReceiverStream, StreamExt}; +use tokio_stream::{wrappers::BroadcastStream, StreamExt}; #[iroha_futures::telemetry_future] async fn sleep(times: Vec) -> i32 { @@ -32,15 +31,13 @@ async fn test_sleep() { Duration::from_nanos(80_000_000), ]; - let (_, telemetry_future) = iroha_logger::init( - &ConfigurationProxy::default() - .build() - .expect("Default logger config always builds"), - ) - .unwrap() - .unwrap(); + let future_telemetry = iroha_logger::test_logger() + .subscribe_on_telemetry(Channel::Future) + .await + .unwrap(); assert_eq!(sleep(sleep_times.clone()).await, 10_i32); - let telemetry = ReceiverStream::new(telemetry_future) + let telemetry = BroadcastStream::new(future_telemetry) + .filter_map(Result::ok) .map(FuturePollTelemetry::try_from) .filter_map(Result::ok) .take(3) diff --git a/genesis/src/lib.rs b/genesis/src/lib.rs index 0428a2240ac..84cd9964e49 100644 --- a/genesis/src/lib.rs +++ b/genesis/src/lib.rs @@ -61,7 +61,7 @@ impl GenesisNetwork { // First instruction should be Executor upgrade. // This makes possible to grant permissions to users in genesis. let transactions_iter = std::iter::once(GenesisTransactionBuilder { - isi: vec![UpgradeExpr::new(Executor::try_from(raw_block.executor)?).into()], + isi: vec![Upgrade::new(Executor::try_from(raw_block.executor)?).into()], }) .chain(raw_block.transactions); @@ -190,7 +190,7 @@ impl ExecutorPath { #[repr(transparent)] pub struct GenesisTransactionBuilder { /// Instructions - isi: Vec, + isi: Vec, } impl GenesisTransactionBuilder { @@ -208,7 +208,7 @@ impl GenesisTransactionBuilder { } /// Add new instruction to the transaction. 
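// Editor's sketch (not part of the patch): how genesis instructions are built
// once the expression system is removed. Values such as `Register::domain(..)`
// convert directly into `InstructionBox`, replacing the old
// `RegisterExpr`/`EvaluatesTo` wrappers seen on the `-` lines above. The
// domain name and the `demo` wrapper are hypothetical.
fn demo() -> Result<(), iroha_data_model::ParseError> {
    use iroha_data_model::prelude::*;

    let mut isi: Vec<InstructionBox> = Vec::new();
    let domain = Domain::new("wonderland".parse()?);
    // `Register::domain(..)` is the typed constructor; `.into()` boxes it.
    isi.push(Register::domain(domain).into());
    Ok(())
}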
- pub fn append_instruction(&mut self, instruction: InstructionExpr) { + pub fn append_instruction(&mut self, instruction: InstructionBox) { self.isi.push(instruction); } } @@ -287,7 +287,7 @@ impl RawGenesisBlockBuilder { let new_domain = Domain::new(domain_id.clone()).with_metadata(metadata); self.transaction .isi - .push(RegisterExpr::new(new_domain).into()); + .push(Register::domain(new_domain).into()); RawGenesisDomainBuilder { transaction: self.transaction, domain_id, @@ -322,7 +322,7 @@ impl RawGenesisDomainBuilder { let account_id = AccountId::new(account_name, self.domain_id.clone()); self.transaction .isi - .push(RegisterExpr::new(Account::new(account_id, [])).into()); + .push(Register::account(Account::new(account_id, [])).into()); self } @@ -340,7 +340,7 @@ impl RawGenesisDomainBuilder { ) -> Self { let account_id = AccountId::new(account_name, self.domain_id.clone()); let register = - RegisterExpr::new(Account::new(account_id, [public_key]).with_metadata(metadata)); + Register::account(Account::new(account_id, [public_key]).with_metadata(metadata)); self.transaction.isi.push(register.into()); self } @@ -356,7 +356,7 @@ impl RawGenesisDomainBuilder { }; self.transaction .isi - .push(RegisterExpr::new(asset_definition).into()); + .push(Register::asset_definition(asset_definition).into()); self } } @@ -418,11 +418,11 @@ mod tests { let domain_id: DomainId = "wonderland".parse().unwrap(); assert_eq!( finished_genesis_block.transactions[0].isi[0], - RegisterExpr::new(Domain::new(domain_id.clone())).into() + Register::domain(Domain::new(domain_id.clone())).into() ); assert_eq!( finished_genesis_block.transactions[0].isi[1], - RegisterExpr::new(Account::new( + Register::account(Account::new( AccountId::new("alice".parse().unwrap(), domain_id.clone()), [] )) @@ -430,7 +430,7 @@ mod tests { ); assert_eq!( finished_genesis_block.transactions[0].isi[2], - RegisterExpr::new(Account::new( + Register::account(Account::new( AccountId::new("bob".parse().unwrap(), domain_id), [] )) @@ -441,11 +441,11 @@ mod tests { let domain_id: DomainId = "tulgey_wood".parse().unwrap(); assert_eq!( finished_genesis_block.transactions[0].isi[3], - RegisterExpr::new(Domain::new(domain_id.clone())).into() + Register::domain(Domain::new(domain_id.clone())).into() ); assert_eq!( finished_genesis_block.transactions[0].isi[4], - RegisterExpr::new(Account::new( + Register::account(Account::new( AccountId::new("Cheshire_Cat".parse().unwrap(), domain_id), [] )) @@ -456,11 +456,11 @@ mod tests { let domain_id: DomainId = "meadow".parse().unwrap(); assert_eq!( finished_genesis_block.transactions[0].isi[5], - RegisterExpr::new(Domain::new(domain_id.clone())).into() + Register::domain(Domain::new(domain_id.clone())).into() ); assert_eq!( finished_genesis_block.transactions[0].isi[6], - RegisterExpr::new(Account::new( + Register::account(Account::new( AccountId::new("Mad_Hatter".parse().unwrap(), domain_id), [public_key.parse().unwrap()], )) @@ -468,7 +468,7 @@ mod tests { ); assert_eq!( finished_genesis_block.transactions[0].isi[7], - RegisterExpr::new(AssetDefinition::big_quantity( + Register::asset_definition(AssetDefinition::big_quantity( "hats#meadow".parse().unwrap() )) .into() diff --git a/hooks/pre-commit.sample b/hooks/pre-commit.sample index fdf09848d91..72addb9c6b5 100755 --- a/hooks/pre-commit.sample +++ b/hooks/pre-commit.sample @@ -2,7 +2,6 @@ set -e cargo +nightly fmt --all -- --check cargo +nightly lints clippy --workspace --benches --tests --examples --all-features -cargo run --bin kagami -- docs 
>docs/source/references/config.md cargo run --bin kagami -- genesis >configs/peer/genesis.json cargo run --bin kagami -- schema >docs/source/references/schema.json -git add docs/source/references/config.md configs/peer/genesis.json docs/source/references/schema.json +git add configs/peer/genesis.json docs/source/references/schema.json diff --git a/logger/Cargo.toml b/logger/Cargo.toml index 9d3ce2ac20c..83aba591aea 100644 --- a/logger/Cargo.toml +++ b/logger/Cargo.toml @@ -19,13 +19,13 @@ serde_json = { workspace = true } tracing = { workspace = true } tracing-core = "0.1.31" tracing-futures = { version = "0.2.5", default-features = false, features = ["std-future", "std"] } -tracing-subscriber = { workspace = true, features = ["fmt", "ansi"] } -tracing-bunyan-formatter = { version = "0.3.9", default-features = false } -tokio = { workspace = true, features = ["sync"] } +tracing-subscriber = { workspace = true, features = ["fmt", "ansi", "json"] } +tokio = { workspace = true, features = ["sync", "rt", "macros"] } console-subscriber = { version = "0.2.0", optional = true } once_cell = { workspace = true } derive_more = { workspace = true } tracing-error = "0.2.0" +thiserror = { workspace = true } [dev-dependencies] tokio = { workspace = true, features = ["macros", "time", "rt"] } diff --git a/logger/src/actor.rs b/logger/src/actor.rs new file mode 100644 index 00000000000..e9e2d91280e --- /dev/null +++ b/logger/src/actor.rs @@ -0,0 +1,145 @@ +//! Actor encapsulating interaction with logger & telemetry subsystems. + +use iroha_config::logger::into_tracing_level; +use iroha_data_model::Level; +use tokio::sync::{broadcast, mpsc, oneshot}; +use tracing_core::Subscriber; +use tracing_subscriber::{reload, reload::Error as ReloadError}; + +use crate::telemetry; + +/// TODO +#[derive(Clone)] +pub struct LoggerHandle { + sender: mpsc::Sender, +} + +impl LoggerHandle { + pub(crate) fn new( + handle: reload::Handle, + telemetry_receiver: mpsc::Receiver, + ) -> Self { + let (tx, rx) = mpsc::channel(32); + let (regular, _) = broadcast::channel(32); + let (future_forward, _) = broadcast::channel(32); + let mut actor = LoggerActor { + message_receiver: rx, + level_handle: handle, + telemetry_receiver, + telemetry_forwarder_regular: regular, + telemetry_forwarder_future: future_forward, + }; + tokio::spawn(async move { actor.run().await }); + + Self { sender: tx } + } + + /// Reload the log level filter. + /// + /// # Errors + /// - If reloading on the side of [`reload::Handle`] fails + /// - If actor communication fails + pub async fn reload_level(&self, new_value: Level) -> color_eyre::Result<(), Error> { + let (tx, rx) = oneshot::channel(); + let _ = self + .sender + .send(Message::ReloadLevel { + value: new_value, + respond_to: tx, + }) + .await; + Ok(rx.await??) + } + + /// Subscribe to the telemetry events broadcasting. + /// + /// # Errors + /// If actor communication fails + pub async fn subscribe_on_telemetry( + &self, + channel: telemetry::Channel, + ) -> color_eyre::Result, Error> { + let (tx, rx) = oneshot::channel(); + let _ = self + .sender + .send(Message::SubscribeOnTelemetry { + channel, + respond_to: tx, + }) + .await; + Ok(rx.await?) + } +} + +enum Message { + ReloadLevel { + value: Level, + respond_to: oneshot::Sender>, + }, + SubscribeOnTelemetry { + channel: telemetry::Channel, + respond_to: oneshot::Sender>, + }, +} + +/// Possible errors that might occur while interacting with the actor. 
+#[derive(thiserror::Error, Debug)] +pub enum Error { + /// If dynamic log level reloading failed + #[error("cannot dynamically reload the log level")] + LevelReload(#[from] ReloadError), + /// If actor communication is broken + #[error("failed to communicate with the actor")] + Communication(#[from] oneshot::error::RecvError), +} + +struct LoggerActor { + message_receiver: mpsc::Receiver, + telemetry_receiver: mpsc::Receiver, + telemetry_forwarder_regular: broadcast::Sender, + telemetry_forwarder_future: broadcast::Sender, + level_handle: reload::Handle, +} + +impl LoggerActor { + async fn run(&mut self) { + loop { + tokio::select! { + Some(msg) = self.message_receiver.recv() => { + self.handle_message(msg); + }, + Some(telemetry::ChannelEvent(channel, event)) = self.telemetry_receiver.recv() => { + let forward_to = match channel { + telemetry::Channel::Regular => &self.telemetry_forwarder_regular, + telemetry::Channel::Future => &self.telemetry_forwarder_future, + }; + + let _ = forward_to.send(event); + }, + else => break + } + tokio::task::yield_now().await; + } + } + + fn handle_message(&mut self, msg: Message) { + match msg { + Message::ReloadLevel { value, respond_to } => { + let level = into_tracing_level(value); + let filter = tracing_subscriber::filter::LevelFilter::from_level(level); + let result = self.level_handle.reload(filter); + let _ = respond_to.send(result); + } + Message::SubscribeOnTelemetry { + channel: kind, + respond_to, + } => { + let receiver = match kind { + telemetry::Channel::Regular => self.telemetry_forwarder_regular.subscribe(), + telemetry::Channel::Future => self.telemetry_forwarder_future.subscribe(), + }; + let _ = respond_to.send(receiver); + } + } + } +} diff --git a/logger/src/layer.rs b/logger/src/layer.rs index 5b17d93cfe4..6da758b35e9 100644 --- a/logger/src/layer.rs +++ b/logger/src/layer.rs @@ -111,7 +111,7 @@ pub struct LevelFilter { static CURRENT_LEVEL: AtomicU8 = AtomicU8::new(0); /// Return max log level -pub fn max_log_level() -> u8 { +pub fn current_level() -> u8 { CURRENT_LEVEL.load(Ordering::Relaxed) } @@ -129,12 +129,12 @@ impl LevelFilter { /// Constructor of level filter #[allow(clippy::new_ret_no_self)] pub fn new(level: Level, subscriber: S) -> impl Subscriber { - Self::update_max_log_level(level); + Self::update_log_level(level); EventSubscriber(Self { subscriber }) } /// Updater of max level - fn update_max_log_level(level: Level) { + fn update_log_level(level: Level) { CURRENT_LEVEL.store(Self::level_as_u8(level), Ordering::SeqCst); } } @@ -148,7 +148,7 @@ impl EventInspectorTrait for LevelFilter { fn event(&self, event: &Event<'_>) { let level = Self::level_as_u8(*event.metadata().level()); - if level >= max_log_level() { + if level >= current_level() { self.subscriber.event(event) } } diff --git a/logger/src/lib.rs b/logger/src/lib.rs index fceecc1d26c..f84ddc6a7d8 100644 --- a/logger/src/lib.rs +++ b/logger/src/lib.rs @@ -1,159 +1,139 @@ //! Iroha's logging utilities. 
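// Editor's sketch (not part of the patch): intended use of the new
// `LoggerHandle` actor API introduced in logger/src/actor.rs above. The
// `demo_logger` wrapper is hypothetical; the calls mirror the signatures in
// this patch (`init_global`, `reload_level`, `subscribe_on_telemetry`).
async fn demo_logger() -> color_eyre::Result<()> {
    use iroha_config::base::proxy::Builder;
    use iroha_logger::{telemetry::Channel, ConfigurationProxy, Level};

    // `init_global` replaces `init`: it returns a handle to the logger actor
    // instead of raw telemetry receivers, with `terminal_colors` passed
    // separately.
    let config = ConfigurationProxy::default()
        .build()
        .expect("Default logger config always builds");
    let handle = iroha_logger::init_global(&config, true)?;

    // Requests are forwarded to the actor: reload the level filter at runtime
    // and subscribe to a broadcast of telemetry events.
    handle.reload_level(Level::DEBUG).await?;
    let mut telemetry = handle.subscribe_on_telemetry(Channel::Regular).await?;
    let _event = telemetry.recv().await;
    Ok(())
}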
+pub mod actor; pub mod layer; pub mod telemetry; use std::{ fmt::Debug, - fs::OpenOptions, - path::PathBuf, sync::{ atomic::{AtomicBool, Ordering}, - Arc, + OnceLock, }, }; -use color_eyre::{eyre::WrapErr, Report, Result}; -use iroha_config::logger::into_tracing_level; -pub use iroha_config::logger::{Configuration, ConfigurationProxy}; -pub use telemetry::{Telemetry, TelemetryFields, TelemetryLayer}; -use tokio::sync::mpsc::Receiver; +use actor::LoggerHandle; +use color_eyre::{eyre::eyre, Report, Result}; +pub use iroha_config::logger::{Configuration, ConfigurationProxy, Format, Level}; +use iroha_config::{base::proxy::Builder, logger::into_tracing_level}; +use tracing::subscriber::set_global_default; pub use tracing::{ debug, debug_span, error, error_span, info, info_span, instrument as log, trace, trace_span, warn, warn_span, Instrument, }; -use tracing::{subscriber::set_global_default, Subscriber}; -use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer}; pub use tracing_futures::Instrument as InstrumentFutures; +pub use tracing_subscriber::reload::Error as ReloadError; use tracing_subscriber::{layer::SubscriberExt, registry::Registry, reload}; -/// Substrate telemetry -pub type SubstrateTelemetry = Receiver; - -/// Future telemetry -pub type FutureTelemetry = Receiver; - -/// Convenience wrapper for Telemetry types. -pub type Telemetries = (SubstrateTelemetry, FutureTelemetry); +const TELEMETRY_CAPACITY: usize = 1000; static LOGGER_SET: AtomicBool = AtomicBool::new(false); -/// Initializes `Logger` with given [`Configuration`]. -/// After the initialization `log` macros will print with the use of this `Logger`. -/// Returns the receiving side of telemetry channels (regular telemetry, future telemetry) -/// -/// # Errors -/// If the logger is already set, raises a generic error. -pub fn init(configuration: &Configuration) -> Result> { +fn try_set_logger() -> Result<()> { if LOGGER_SET .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst) .is_err() { - return Ok(None); + return Err(eyre!("Logger is already set.")); } - Ok(Some(setup_logger(configuration)?)) + Ok(()) } -/// Disables the logger by setting `LOGGER_SET` to true. Will fail -/// if the logger has already been initialized. This function is -/// required in order to generate flamegraphs and flamecharts. +/// Initializes the logger globally with given [`Configuration`]. /// -/// Returns true on success. -pub fn disable_logger() -> bool { - LOGGER_SET - .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst) - .is_ok() -} +/// Returns [`LoggerHandle`] to interact with the logger instance +/// +/// Works only once per process, all subsequent invocations will fail. +/// +/// For usage in tests consider [`test_logger`]. +/// +/// # Errors +/// If the logger is already set, raises a generic error. 
+// TODO: refactor configuration in a way that `terminal_colors` is part of it +// https://github.com/hyperledger/iroha/issues/3500 +pub fn init_global(configuration: &Configuration, terminal_colors: bool) -> Result { + try_set_logger()?; -fn setup_logger(configuration: &Configuration) -> Result { let layer = tracing_subscriber::fmt::layer() - .with_ansi(configuration.terminal_colors) + .with_ansi(terminal_colors) .with_test_writer(); - if configuration.compact_mode { - add_bunyan(configuration, layer.compact()) - } else { - add_bunyan(configuration, layer) + match configuration.format { + Format::Full => step2(configuration, layer), + Format::Compact => step2(configuration, layer.compact()), + Format::Pretty => step2(configuration, layer.pretty()), + Format::Json => step2(configuration, layer.json()), } } -fn bunyan_writer_create(destination: PathBuf) -> Result> { - OpenOptions::new() - .create(true) - .append(true) - .open(destination) - .wrap_err("Failed to create or open bunyan logs file") - .map(Arc::new) +/// Returns once lazily initialised global logger for testing purposes. +/// +/// # Panics +/// If [`init_global`] or [`disable_global`] were called first. +#[allow(clippy::needless_update)] // `tokio-console` feature adds additional fields to Configuration +pub fn test_logger() -> LoggerHandle { + static LOGGER: OnceLock = OnceLock::new(); + + LOGGER + .get_or_init(|| { + // NOTE: if this config should be changed for some specific tests, consider + // isolating those tests into a separate process and controlling default logger config + // with ENV vars rather than by extending `test_logger` signature. This will both remain + // `test_logger` simple and also will emphasise isolation which is necessary anyway in + // case of singleton mocking (where the logger is the singleton). + let config = Configuration { + level: Level::DEBUG, + format: Format::Pretty, + ..ConfigurationProxy::default().build().unwrap() + }; + + init_global(&config, true).expect( + "`init_global()` or `disable_global()` should not be called before `test_logger()`", + ) + }) + .clone() +} + +/// Disables the logger globally, so that subsequent calls to [`init_global`] will fail. +/// +/// Disabling logger is required in order to generate flamegraphs and flamecharts. +/// +/// # Errors +/// If global logger was already initialised/disabled. 
+pub fn disable_global() -> Result<()> { + try_set_logger() } -fn add_bunyan(configuration: &Configuration, layer: L) -> Result +fn step2(configuration: &Configuration, layer: L) -> Result where L: tracing_subscriber::Layer + Debug + Send + Sync + 'static, { - let level: tracing::Level = into_tracing_level(configuration.max_log_level.value()); + let level: tracing::Level = into_tracing_level(configuration.level); let level_filter = tracing_subscriber::filter::LevelFilter::from_level(level); - let (filter, handle) = reload::Layer::new(level_filter); - configuration - .max_log_level - .set_handle(iroha_config::logger::ReloadHandle(handle)); - let (bunyan_layer, storage_layer) = match configuration.log_file_path.clone() { - Some(path) => ( - Some(BunyanFormattingLayer::new( - "bunyan_layer".into(), - bunyan_writer_create(path)?, - )), - Some(JsonStorageLayer), - ), - None => (None, None), - }; + let (level_filter, level_filter_handle) = reload::Layer::new(level_filter); let subscriber = Registry::default() .with(layer) - .with(filter) - .with(storage_layer) - .with(tracing_error::ErrorLayer::default()) - .with(bunyan_layer); - - add_tokio_console_subscriber(configuration, subscriber) -} + .with(level_filter) + .with(tracing_error::ErrorLayer::default()); -fn add_tokio_console_subscriber< - S: Subscriber + Send + Sync + 'static + for<'a> tracing_subscriber::registry::LookupSpan<'a>, ->( - configuration: &Configuration, - subscriber: S, -) -> Result { #[cfg(all(feature = "tokio-console", not(feature = "no-tokio-console")))] - { + let subscriber = { let console_subscriber = console_subscriber::ConsoleLayer::builder() .server_addr( configuration .tokio_console_addr - .parse::() + .into() .expect("Invalid address for tokio console"), ) .spawn(); - add_telemetry_and_set_default(configuration, subscriber.with(console_subscriber)) - } - #[cfg(any(not(feature = "tokio-console"), feature = "no-tokio-console"))] - { - add_telemetry_and_set_default(configuration, subscriber) - } -} - -fn add_telemetry_and_set_default( - configuration: &Configuration, - subscriber: S, -) -> Result { - // static global_subscriber: dyn Subscriber = once_cell::new; - let (subscriber, receiver, receiver_future) = TelemetryLayer::from_capacity( - subscriber, - configuration - .telemetry_capacity - .try_into() - .expect("u32 should always fit in usize"), - ); + subscriber.with(console_subscriber) + }; + let (subscriber, receiver) = telemetry::Layer::with_capacity(subscriber, TELEMETRY_CAPACITY); set_global_default(subscriber)?; - Ok((receiver, receiver_future)) + + let handle = LoggerHandle::new(level_filter_handle, receiver); + + Ok(handle) } /// Macro for sending telemetry info diff --git a/logger/src/telemetry.rs b/logger/src/telemetry.rs index 526209daa60..3e65e5914bb 100644 --- a/logger/src/telemetry.rs +++ b/logger/src/telemetry.rs @@ -4,25 +4,25 @@ use std::{error::Error, fmt::Debug}; use derive_more::{Deref, DerefMut}; use serde_json::Value; -use tokio::sync::mpsc::{self, Receiver, Sender}; +use tokio::sync::mpsc; use tracing::{ field::{Field, Visit}, - Event, Subscriber, + Event as TracingEvent, Subscriber, }; use crate::layer::{EventInspectorTrait, EventSubscriber}; /// Target for telemetry in `tracing` -pub const TELEMETRY_TARGET_PREFIX: &str = "telemetry::"; +pub const TARGET_PREFIX: &str = "telemetry::"; /// Target for telemetry future in `tracing` -pub const TELEMETRY_FUTURE_TARGET_PREFIX: &str = "telemetry_future::"; +pub const FUTURE_TARGET_PREFIX: &str = "telemetry_future::"; /// Fields for telemetry (type for 
efficient saving) #[derive(Clone, Debug, PartialEq, Eq, Default, Deref, DerefMut)] -pub struct TelemetryFields(pub Vec<(&'static str, Value)>); +pub struct Fields(pub Vec<(&'static str, Value)>); -impl From for Value { - fn from(TelemetryFields(fields): TelemetryFields) -> Self { +impl From for Value { + fn from(Fields(fields): Fields) -> Self { fields .into_iter() .map(|(key, value)| (key.to_owned(), value)) @@ -32,14 +32,14 @@ impl From for Value { /// Telemetry which can be received from telemetry layer #[derive(Clone, Debug, PartialEq, Eq)] -pub struct Telemetry { +pub struct Event { /// Subsystem from which telemetry was received pub target: &'static str, /// Fields which was recorded - pub fields: TelemetryFields, + pub fields: Fields, } -impl Visit for Telemetry { +impl Visit for Event { fn record_debug(&mut self, field: &Field, value: &dyn Debug) { self.fields .push((field.name(), format!("{:?}", &value).into())) @@ -71,9 +71,9 @@ impl Visit for Telemetry { } } -impl Telemetry { - fn from_event(target: &'static str, event: &Event<'_>) -> Self { - let fields = TelemetryFields::default(); +impl Event { + fn from_event(target: &'static str, event: &TracingEvent<'_>) -> Self { + let fields = Fields::default(); let mut telemetry = Self { target, fields }; event.record(&mut telemetry); telemetry @@ -82,70 +82,58 @@ impl Telemetry { /// Telemetry layer #[derive(Debug, Clone)] -pub struct TelemetryLayer { - telemetry_sender: Sender, - telemetry_future_sender: Sender, +pub struct Layer { + sender: mpsc::Sender, subscriber: S, } -impl TelemetryLayer { - /// Create telemetry from channel sender - pub fn from_senders( - subscriber: S, - telemetry_sender: Sender, - telemetry_future_sender: Sender, - ) -> impl Subscriber { - EventSubscriber(Self { - telemetry_sender, - telemetry_future_sender, - subscriber, - }) - } - - /// Create new telemetry layer with specific channel size (via const generic) - #[allow(clippy::new_ret_no_self)] - pub fn new( - subscriber: S, - ) -> (impl Subscriber, Receiver, Receiver) { - let (sender, receiver) = mpsc::channel(CHANNEL_SIZE); - let (sender_future, receiver_future) = mpsc::channel(CHANNEL_SIZE); - let telemetry = Self::from_senders(subscriber, sender, sender_future); - (telemetry, receiver, receiver_future) - } - +impl Layer { /// Create new telemetry layer with specific channel size #[allow(clippy::new_ret_no_self)] - pub fn from_capacity( + pub fn with_capacity( subscriber: S, channel_size: usize, - ) -> (impl Subscriber, Receiver, Receiver) { + ) -> (impl Subscriber, mpsc::Receiver) { let (sender, receiver) = mpsc::channel(channel_size); - let (sender_future, receiver_future) = mpsc::channel(channel_size); - let telemetry = Self::from_senders(subscriber, sender, sender_future); - (telemetry, receiver, receiver_future) + let telemetry = EventSubscriber(Self { sender, subscriber }); + (telemetry, receiver) + } + + fn send_event(&self, channel: Channel, target: &'static str, event: &TracingEvent<'_>) { + let _ = self + .sender + .try_send(ChannelEvent(channel, Event::from_event(target, event))); } } -impl EventInspectorTrait for TelemetryLayer { +impl EventInspectorTrait for Layer { type Subscriber = S; fn inner_subscriber(&self) -> &Self::Subscriber { &self.subscriber } - fn event(&self, event: &Event<'_>) { + fn event(&self, event: &TracingEvent<'_>) { let target = event.metadata().target(); #[allow(clippy::option_if_let_else)] // This is actually more readable. 
- if let Some(telemetry_target) = target.strip_prefix(TELEMETRY_TARGET_PREFIX) { - let _result = self - .telemetry_sender - .try_send(Telemetry::from_event(telemetry_target, event)); - } else if let Some(future_target) = target.strip_prefix(TELEMETRY_FUTURE_TARGET_PREFIX) { - let _result = self - .telemetry_future_sender - .try_send(Telemetry::from_event(future_target, event)); + if let Some(target) = target.strip_prefix(TARGET_PREFIX) { + self.send_event(Channel::Regular, target, event); + } else if let Some(target) = target.strip_prefix(FUTURE_TARGET_PREFIX) { + self.send_event(Channel::Future, target, event); } else { self.subscriber.event(event) } } } + +/// A pair of [`Channel`] associated with [`Event`] +pub struct ChannelEvent(pub Channel, pub Event); + +/// Supported telemetry channels +#[derive(Copy, Clone)] +pub enum Channel { + /// Regular telemetry + Regular, + /// Telemetry collected from futures instrumented with `iroha_futures::TelemetryFuture`. + Future, +} diff --git a/logger/tests/configuration.rs b/logger/tests/configuration.rs index 661443ed256..d63d837200c 100644 --- a/logger/tests/configuration.rs +++ b/logger/tests/configuration.rs @@ -1,26 +1,23 @@ use std::time::Duration; -use iroha_data_model::Level; -use iroha_logger::{info, init, Configuration, Telemetry, TelemetryFields}; +use iroha_logger::{ + info, + telemetry::{Channel, Event, Fields}, + test_logger, +}; use tokio::time; #[tokio::test] async fn telemetry_separation_custom() { - let config = Configuration { - max_log_level: Level::TRACE.into(), - telemetry_capacity: 100, - compact_mode: true, - log_file_path: Some("/dev/stdout".into()), - terminal_colors: true, - #[cfg(feature = "tokio-console")] - tokio_console_addr: "127.0.0.1:5555".into(), - }; - let (mut receiver, _) = init(&config).unwrap().unwrap(); + let mut receiver = test_logger() + .subscribe_on_telemetry(Channel::Regular) + .await + .unwrap(); info!(target: "telemetry::test", a = 2, c = true, d = "this won't be logged"); info!("This will be logged in bunyan-readable format"); - let telemetry = Telemetry { + let telemetry = Event { target: "test", - fields: TelemetryFields(vec![ + fields: Fields(vec![ ("a", serde_json::json!(2)), ("c", serde_json::json!(true)), ("d", serde_json::json!("this won't be logged")), diff --git a/logger/tests/setting_logger.rs b/logger/tests/setting_logger.rs index 6d204f7abca..209a6b45928 100644 --- a/logger/tests/setting_logger.rs +++ b/logger/tests/setting_logger.rs @@ -1,21 +1,17 @@ use iroha_config::base::proxy::Builder; -use iroha_logger::{init, ConfigurationProxy}; +use iroha_logger::{init_global, ConfigurationProxy}; #[tokio::test] async fn setting_logger_twice_fails() { - assert!(init( - &ConfigurationProxy::default() - .build() - .expect("Default logger config always builds") - ) - .is_ok()); - let second_init = init( - &ConfigurationProxy::default() - .build() - .expect("Default logger config always builds"), - ); - assert!(second_init.is_ok()); - assert!(second_init.unwrap().is_none()); + let cfg = ConfigurationProxy::default() + .build() + .expect("Default logger config always builds"); + + let first = init_global(&cfg, false); + assert!(first.is_ok()); + + let second = init_global(&cfg, false); + assert!(second.is_err()); } #[test] diff --git a/logger/tests/telemetry.rs b/logger/tests/telemetry.rs index bfab41332eb..64b8985ca0d 100644 --- a/logger/tests/telemetry.rs +++ b/logger/tests/telemetry.rs @@ -1,19 +1,23 @@ use std::time::Duration; -use iroha_config::base::proxy::Builder; -use iroha_logger::{info, 
init, ConfigurationProxy, Telemetry, TelemetryFields}; +use iroha_logger::{ + info, + telemetry::{Channel, Event, Fields}, + test_logger, +}; use tokio::time; #[tokio::test] async fn telemetry_separation_default() { - let (mut receiver, _) = init(&ConfigurationProxy::default().build().unwrap()) - .unwrap() + let mut receiver = test_logger() + .subscribe_on_telemetry(Channel::Regular) + .await .unwrap(); info!(target: "telemetry::test", a = 2, c = true, d = "this won't be logged"); info!("This will be logged"); - let telemetry = Telemetry { + let telemetry = Event { target: "test", - fields: TelemetryFields(vec![ + fields: Fields(vec![ ("a", serde_json::json!(2)), ("c", serde_json::json!(true)), ("d", serde_json::json!("this won't be logged")), diff --git a/p2p/src/network.rs b/p2p/src/network.rs index 751eb779d3d..51b97d661e2 100644 --- a/p2p/src/network.rs +++ b/p2p/src/network.rs @@ -366,7 +366,7 @@ impl NetworkBase { }: Connected, ) { if !self.current_topology.contains_key(&peer_id) { - iroha_logger::warn!(topology=?self.current_topology, "Peer not present in topology is trying to connect"); + iroha_logger::warn!(%peer_id, topology=?self.current_topology, "Peer not present in topology is trying to connect"); return; } diff --git a/p2p/src/peer.rs b/p2p/src/peer.rs index e08a93f6aca..182b72e9e7d 100644 --- a/p2p/src/peer.rs +++ b/p2p/src/peer.rs @@ -409,6 +409,7 @@ mod state { } impl ConnectedTo { + #[allow(clippy::similar_names)] pub(super) async fn send_client_hello( Self { peer_addr, @@ -452,6 +453,7 @@ mod state { } impl ConnectedFrom { + #[allow(clippy::similar_names)] pub(super) async fn read_client_hello( Self { peer_addr, diff --git a/p2p/tests/integration/p2p.rs b/p2p/tests/integration/p2p.rs index 93f9a391765..b23faff5036 100644 --- a/p2p/tests/integration/p2p.rs +++ b/p2p/tests/integration/p2p.rs @@ -10,8 +10,8 @@ use std::{ use futures::{prelude::*, stream::FuturesUnordered, task::AtomicWaker}; use iroha_config_base::proxy::Builder; use iroha_crypto::KeyPair; -use iroha_data_model::{prelude::PeerId, Level}; -use iroha_logger::{prelude::*, Configuration, ConfigurationProxy}; +use iroha_data_model::prelude::PeerId; +use iroha_logger::{prelude::*, ConfigurationProxy}; use iroha_p2p::{network::message::*, NetworkHandle}; use iroha_primitives::addr::socket_addr; use parity_scale_codec::{Decode, Encode}; @@ -23,18 +23,16 @@ use tokio::{ #[derive(Clone, Debug, Decode, Encode)] struct TestMessage(String); -static INIT: Once = Once::new(); - fn setup_logger() { + static INIT: Once = Once::new(); + INIT.call_once(|| { - let log_config = Configuration { - max_log_level: Level::TRACE.into(), - compact_mode: false, - ..ConfigurationProxy::default() - .build() - .expect("Default logger config failed to build. This is a programmer error") - }; - iroha_logger::init(&log_config).expect("Failed to start logger"); + let mut config = ConfigurationProxy::default() + .build() + .expect("Default logger config failed to build. This is a programmer error"); + config.level = iroha_logger::Level::TRACE; + config.format = iroha_logger::Format::Pretty; + iroha_logger::init_global(&config, true).unwrap(); }) } @@ -230,17 +228,7 @@ async fn two_networks() { #[tokio::test(flavor = "multi_thread", worker_threads = 8)] async fn multiple_networks() { - let log_config = Configuration { - max_log_level: Level::TRACE.into(), - compact_mode: false, - ..ConfigurationProxy::default() - .build() - .expect("Default logger config should always build") - }; - // Can't use logger because it's failed to initialize. 
- if let Err(err) = iroha_logger::init(&log_config) { - eprintln!("Failed to initialize logger: {err}"); - } + setup_logger(); info!("Starting..."); let mut peers = Vec::new(); diff --git a/schema/derive/src/lib.rs b/schema/derive/src/lib.rs index 233a901e5cb..3f801b67459 100644 --- a/schema/derive/src/lib.rs +++ b/schema/derive/src/lib.rs @@ -65,6 +65,7 @@ impl FromMeta for Transparent { #[darling(attributes(schema))] struct SchemaAttributes { transparent: Transparent, + bounds: Option, } // NOTE: this will fail on unknown attributes.. This is not ideal @@ -184,12 +185,16 @@ pub fn schema_derive(input: TokenStream) -> Result { let impl_type_id = impl_type_id(&mut syn2::parse2(original_input).unwrap()); let impl_schema = match &input.schema_attrs.transparent { - Transparent::NotTransparent => impl_into_schema(&input), + Transparent::NotTransparent => impl_into_schema(&input, input.schema_attrs.bounds.as_ref()), Transparent::Transparent(transparent_type) => { let transparent_type = transparent_type .clone() .unwrap_or_else(|| infer_transparent_type(&input.data, &mut emitter)); - Ok(impl_transparent_into_schema(&input, &transparent_type)) + impl_transparent_into_schema( + &input, + &transparent_type, + input.schema_attrs.bounds.as_ref(), + ) } }; let impl_schema = match impl_schema { @@ -211,11 +216,16 @@ pub fn schema_derive(input: TokenStream) -> Result { fn impl_transparent_into_schema( input: &IntoSchemaInput, transparent_type: &syn2::Type, -) -> TokenStream { + bounds: Option<&String>, +) -> Result { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let name = &input.ident; + let where_clause: Option = match bounds { + Some(bounds) => Some(syn2::parse_str(&format!("where {bounds}"))?), + None => where_clause.cloned(), + }; - quote! { + Ok(quote! { impl #impl_generics iroha_schema::IntoSchema for #name #ty_generics #where_clause { fn update_schema_map(map: &mut iroha_schema::MetaMap) { if !map.contains_key::() { @@ -233,14 +243,18 @@ fn impl_transparent_into_schema( <#transparent_type as iroha_schema::IntoSchema>::type_name() } } - } + }) } -fn impl_into_schema(input: &IntoSchemaInput) -> Result { +fn impl_into_schema(input: &IntoSchemaInput, bounds: Option<&String>) -> Result { let name = &input.ident; let type_name_body = trait_body(name, &input.generics, false); let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let metadata = metadata(&input.data)?; + let where_clause: Option = match bounds { + Some(bounds) => Some(syn2::parse_str(&format!("where {bounds}"))?), + None => where_clause.cloned(), + }; Ok(quote! 
{ impl #impl_generics iroha_schema::IntoSchema for #name #ty_generics #where_clause { diff --git a/schema/derive/tests/ui_pass/derive_into_schema.rs b/schema/derive/tests/ui_pass/derive_into_schema.rs index 576fb77f5ce..f75295a3f82 100644 --- a/schema/derive/tests/ui_pass/derive_into_schema.rs +++ b/schema/derive/tests/ui_pass/derive_into_schema.rs @@ -42,4 +42,14 @@ pub enum Enum { FortyTwo, } +pub trait Trait { + type Assoc; +} + +#[derive(IntoSchema)] +#[schema(bounds = "T: Trait, T::Assoc: IntoSchema")] +pub struct WithComplexGeneric { + _value: T::Assoc, +} + pub fn main() {} diff --git a/schema/gen/src/lib.rs b/schema/gen/src/lib.rs index ddac0fad272..ef3c1cab1f8 100644 --- a/schema/gen/src/lib.rs +++ b/schema/gen/src/lib.rs @@ -66,13 +66,13 @@ types!( AccountEventFilter, AccountFilter, AccountId, + AccountMintBox, AccountPermissionChanged, AccountRoleChanged, Action, - Add, Algorithm, - And, Asset, + AssetBurnBox, AssetChanged, AssetDefinition, AssetDefinitionEvent, @@ -85,6 +85,8 @@ types!( AssetEventFilter, AssetFilter, AssetId, + AssetMintBox, + AssetTransferBox, AssetValue, AssetValueType, AtIndex, @@ -92,7 +94,6 @@ types!( BTreeMap, BTreeMap, BTreeMap, - BTreeMap>, BTreeMap, BTreeSet, BTreeSet, @@ -106,53 +107,31 @@ types!( BlockSubscriptionRequest, Box, Box>, - Box, Box, Box, - BurnExpr, - ConditionalExpr, + BurnBox, ConfigurationEvent, ConstString, Container, - Contains, - ContainsAll, - ContainsAny, - ContextValue, DataEntityFilter, DataEvent, DataEventFilter, - Divide, Domain, DomainEvent, DomainEventFilter, DomainFilter, DomainId, Duration, - Equal, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo, - EvaluatesTo>, - EvaluatesTo, Event, EventMessage, EventSubscriptionRequest, Executable, - ExecuteTriggerExpr, + ExecuteTrigger, ExecuteTriggerEvent, ExecuteTriggerEventFilter, ExecutionTime, - Expression, + Executor, + ExecutorEvent, Fail, FilterBox, FilterOpt, @@ -220,16 +199,14 @@ types!( FixNum, Fixed, ForwardCursor, - GrantExpr, - Greater, + GrantBox, Hash, HashOf>, HashOf, HashOf, IdBox, IdentifiableBox, - If, - InstructionExpr, + InstructionBox, InstructionExecutionFail, Interval, Interval, @@ -239,7 +216,6 @@ types!( Ipv6Addr, Ipv6Predicate, LengthLimits, - Less, MerkleTree, Metadata, MetadataChanged, @@ -247,19 +223,16 @@ types!( MetadataChanged, MetadataChanged, MetadataLimits, - MintExpr, + MintBox, Mintable, - Mod, - Multiply, Name, NewAccount, NewAssetDefinition, NewDomain, - NewParameterExpr, + NewParameter, NewRole, NonTrivial, NonZeroU64, - Not, NotificationEventFilter, NumericValue, Option, @@ -267,7 +240,6 @@ types!( Option, Option>>, Option>, - Option, Option, Option, Option, @@ -275,7 +247,6 @@ types!( Option, Option, Option, - Or, OriginFilter, OriginFilter, OriginFilter, @@ -283,7 +254,6 @@ types!( OriginFilter, OriginFilter, OriginFilter, - PairExpr, Parameter, ParameterId, Peer, @@ -306,12 +276,11 @@ types!( QueryBox, QueryExecutionFail, QueryPayload, - RaiseTo, - RegisterExpr, + RegisterBox, RegistrableBox, - RemoveKeyValueExpr, + RemoveKeyValueBox, Repeats, - RevokeExpr, + RevokeBox, Role, RoleEvent, RoleEventFilter, @@ -321,9 +290,8 @@ types!( SemiInterval, SemiInterval, SemiRange, - SequenceExpr, - SetKeyValueExpr, - SetParameterExpr, + SetKeyValueBox, + SetParameter, Signature, SignatureCheckCondition, SignatureOf, @@ -339,7 +307,6 @@ types!( SignedTransactionV1, String, StringPredicate, - Subtract, 
TimeEvent, TimeEventFilter, TimeInterval, @@ -350,7 +317,7 @@ types!( TransactionQueryOutput, TransactionRejectionReason, TransactionValue, - TransferExpr, + TransferBox, Trigger, TriggerCompletedEventFilter, TriggerCompletedOutcomeType, @@ -360,16 +327,14 @@ types!( TriggerId, TriggerNumberOfExecutionsChanged, TriggeringFilterBox, - UnregisterExpr, + UnregisterBox, UpgradableBox, ValidationFail, - Executor, - ExecutorEvent, Value, ValueOfKey, ValuePredicate, Vec, - Vec, + Vec, Vec, Vec, Vec, @@ -377,7 +342,6 @@ types!( Vec, WasmExecutionFail, WasmSmartContract, - Where, [Interval; 8], [Interval; 4], [u16; 8], @@ -465,40 +429,18 @@ mod tests { let mut missing_schemas = HashMap::<&str, _>::new(); for type_name in type_names { - if let (Some(mut start), Some(end)) = (type_name.find('<'), type_name.rfind('>')) { - start += 1; - - let mut angle_bracket_diff = 0_u8; - for (i, c) in type_name[start..end].chars().enumerate() { - if c == '<' { - angle_bracket_diff += 1_u8; - } - if c == '>' { - angle_bracket_diff -= 1_u8; - } - - if c == ',' && angle_bracket_diff == 0_u8 { - let generic = type_name[start..(start + i)].trim(); + let (Some(start), Some(end)) = (type_name.find('<'), type_name.rfind('>')) else { + continue; + }; - start += i + 1; - if !is_const_generic(generic) { - continue; - } + assert!(start < end, "Invalid type name: {type_name}"); - if !type_names.contains(generic) { - missing_schemas - .entry(type_name) - .or_insert_with(Vec::new) - .push(generic); - } - } + for generic in type_name.split(", ") { + if !is_const_generic(generic) { + continue; } - let generic = type_name[start..end].trim(); - if !generic.is_empty() - && !is_const_generic(generic) - && !type_names.contains(generic) - { + if !type_names.contains(generic) { missing_schemas .entry(type_name) .or_insert_with(Vec::new) diff --git a/scripts/test_env.py b/scripts/test_env.py index 65b2266c3ca..458fc0ae6c1 100755 --- a/scripts/test_env.py +++ b/scripts/test_env.py @@ -118,14 +118,14 @@ def run(self, is_genesis: bool = False): os.environ["KURA_BLOCK_STORE_PATH"] = str(peer_dir.joinpath("storage")) os.environ["SNAPSHOT_DIR_PATH"] = str(peer_dir.joinpath("storage")) - os.environ["LOG_FILE_PATH"] = str(peer_dir.joinpath("log.json")) - os.environ["MAX_LOG_LEVEL"] = "TRACE" + os.environ["LOG_LEVEL"] = "TRACE" + os.environ["LOG_FORMAT"] = "\"pretty\"" + os.environ["LOG_TOKIO_CONSOLE_ADDR"] = f"{self.host_ip}:{self.tokio_console_port}" os.environ["IROHA_PUBLIC_KEY"] = self.public_key os.environ["IROHA_PRIVATE_KEY"] = self.private_key os.environ["SUMERAGI_DEBUG_FORCE_SOFT_FORK"] = "false" os.environ["TORII_P2P_ADDR"] = f"{self.host_ip}:{self.p2p_port}" os.environ["TORII_API_URL"] = f"{self.host_ip}:{self.api_port}" - os.environ["TOKIO_CONSOLE_ADDR"] = f"{self.host_ip}:{self.tokio_console_port}" genesis_arg = "--submit-genesis" if is_genesis else "" # FD never gets closed diff --git a/scripts/tests/consistency.sh b/scripts/tests/consistency.sh index dd5a5291a5c..bf5873f2f81 100755 --- a/scripts/tests/consistency.sh +++ b/scripts/tests/consistency.sh @@ -2,11 +2,6 @@ set -e case $1 in - "docs") - cargo run --release --bin kagami -- docs | diff - docs/source/references/config.md || { - echo 'Please re-generate docs using `cargo run --release --bin kagami -- docs > docs/source/references/config.md`' - exit 1 - };; "genesis") cargo run --release --bin kagami -- genesis --executor-path-in-genesis ./executor.wasm | diff - configs/peer/genesis.json || { echo 'Please re-generate the genesis with `cargo run --release --bin kagami -- genesis 
--executor-path-in-genesis ./executor.wasm > configs/peer/genesis.json`' diff --git a/smart_contract/derive/Cargo.toml b/smart_contract/derive/Cargo.toml index 72658aa9aa8..8ecc878acec 100644 --- a/smart_contract/derive/Cargo.toml +++ b/smart_contract/derive/Cargo.toml @@ -14,6 +14,9 @@ workspace = true proc-macro = true [dependencies] -syn.workspace = true -quote.workspace = true -proc-macro2.workspace = true +iroha_macro_utils = { workspace = true } + +syn2 = { workspace = true } +manyhow = { workspace = true } +quote = { workspace = true } +proc-macro2 = { workspace = true } diff --git a/smart_contract/derive/src/entrypoint.rs b/smart_contract/derive/src/entrypoint.rs index 4970b406ea3..426c2ab091f 100644 --- a/smart_contract/derive/src/entrypoint.rs +++ b/smart_contract/derive/src/entrypoint.rs @@ -1,26 +1,32 @@ //! Macro for writing smart contract entrypoint -use proc_macro::TokenStream; +#![allow(clippy::str_to_string)] + +use iroha_macro_utils::Emitter; +use manyhow::emit; +use proc_macro2::TokenStream; use quote::quote; -use syn::{parse_macro_input, parse_quote}; +use syn2::parse_quote; mod export { pub const SMART_CONTRACT_MAIN: &str = "_iroha_smart_contract_main"; } #[allow(clippy::needless_pass_by_value)] -pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { - let syn::ItemFn { +pub fn impl_entrypoint(emitter: &mut Emitter, item: syn2::ItemFn) -> TokenStream { + let syn2::ItemFn { attrs, vis, sig, mut block, - } = parse_macro_input!(item); + } = item; - assert!( - syn::ReturnType::Default == sig.output, - "Smart contract `main()` function must not have a return type" - ); + if sig.output != syn2::ReturnType::Default { + emit!( + emitter, + "Smart contract entrypoint must not have a return type" + ); + } let fn_name = &sig.ident; @@ -33,7 +39,8 @@ pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { ), ); - let main_fn_name = syn::Ident::new(export::SMART_CONTRACT_MAIN, proc_macro2::Span::call_site()); + let main_fn_name = + syn2::Ident::new(export::SMART_CONTRACT_MAIN, proc_macro2::Span::call_site()); quote! { /// Smart contract entrypoint @@ -51,5 +58,4 @@ pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { #vis #sig #block } - .into() } diff --git a/smart_contract/derive/src/lib.rs b/smart_contract/derive/src/lib.rs index af82cd24fbe..da3faa41190 100644 --- a/smart_contract/derive/src/lib.rs +++ b/smart_contract/derive/src/lib.rs @@ -1,6 +1,8 @@ //! Macros for writing smart contracts. 
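// Editor's sketch (not part of the patch): the `manyhow`/`Emitter` diagnostics
// pattern these derive crates migrate to. Checks emit errors instead of
// panicking with `assert!`, so the macro can still return a best-effort token
// stream. `reject_return_type` is a hypothetical name; the check mirrors
// `impl_entrypoint` above.
use iroha_macro_utils::Emitter;
use manyhow::emit;

fn reject_return_type(emitter: &mut Emitter, sig: &syn2::Signature) {
    if sig.output != syn2::ReturnType::Default {
        emit!(
            emitter,
            "Smart contract entrypoint must not have a return type"
        );
    }
}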
-use proc_macro::TokenStream; +use iroha_macro_utils::Emitter; +use manyhow::{emit, manyhow}; +use proc_macro2::TokenStream; mod entrypoint; @@ -23,7 +25,23 @@ mod entrypoint; /// todo!() /// } /// ``` +#[manyhow] #[proc_macro_attribute] pub fn main(attr: TokenStream, item: TokenStream) -> TokenStream { - entrypoint::impl_entrypoint(attr, item) + let mut emitter = Emitter::new(); + + if !attr.is_empty() { + emit!( + emitter, + "Smart contract entrypoint does not accept attributes" + ); + } + + let Some(item) = emitter.handle(syn2::parse2(item)) else { + return emitter.finish_token_stream(); + }; + + let result = entrypoint::impl_entrypoint(&mut emitter, item); + + emitter.finish_token_stream_with(result) } diff --git a/smart_contract/executor/derive/Cargo.toml b/smart_contract/executor/derive/Cargo.toml index 652714e3625..43e093218fd 100644 --- a/smart_contract/executor/derive/Cargo.toml +++ b/smart_contract/executor/derive/Cargo.toml @@ -15,10 +15,10 @@ workspace = true proc-macro = true [dependencies] -iroha_macro_utils.workspace = true -syn = { workspace = true, features = ["full", "derive"] } +iroha_macro_utils = { workspace = true } + syn2 = { workspace = true, features = ["full", "derive"] } -quote.workspace = true -proc-macro2.workspace = true -manyhow.workspace = true -darling.workspace = true +quote = { workspace = true } +proc-macro2 = { workspace = true } +manyhow = { workspace = true } +darling = { workspace = true } diff --git a/smart_contract/executor/derive/src/conversion.rs b/smart_contract/executor/derive/src/conversion.rs index 87b27becbb5..009ace5a426 100644 --- a/smart_contract/executor/derive/src/conversion.rs +++ b/smart_contract/executor/derive/src/conversion.rs @@ -1,76 +1,66 @@ //! Module with conversion derive macros implementation -use super::*; +use proc_macro2::TokenStream; +use quote::quote; +use syn2::DeriveInput; /// [`derive_ref_into_asset_owner`](crate::derive_ref_into_asset_owner) macro implementation -pub fn impl_derive_ref_into_asset_owner(input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); - +pub fn impl_derive_ref_into_asset_owner(input: &DeriveInput) -> TokenStream { impl_from( &input.ident, &input.generics, - &syn::parse_quote!(::iroha_executor::permission::asset::Owner), - &syn::parse_quote!(asset_id), + &syn2::parse_quote!(::iroha_executor::permission::asset::Owner), + &syn2::parse_quote!(asset_id), ) - .into() } /// [`derive_ref_into_asset_definition_creator`](crate::derive_ref_into_asset_definition_creator) /// macro implementation -pub fn impl_derive_ref_into_asset_definition_owner(input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); - +pub fn impl_derive_ref_into_asset_definition_owner(input: &DeriveInput) -> TokenStream { impl_from( &input.ident, &input.generics, - &syn::parse_quote!(::iroha_executor::permission::asset_definition::Owner), - &syn::parse_quote!(asset_definition_id), + &syn2::parse_quote!(::iroha_executor::permission::asset_definition::Owner), + &syn2::parse_quote!(asset_definition_id), ) - .into() } /// [`derive_ref_into_account_owner`](crate::derive_ref_into_account_owner) macro implementation -pub fn impl_derive_ref_into_account_owner(input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); - +pub fn impl_derive_ref_into_account_owner(input: &DeriveInput) -> TokenStream { impl_from( &input.ident, &input.generics, - &syn::parse_quote!(::iroha_executor::permission::account::Owner), - 
&syn::parse_quote!(account_id), + &syn2::parse_quote!(::iroha_executor::permission::account::Owner), + &syn2::parse_quote!(account_id), ) - .into() } /// [`derive_ref_into_domain_owner`](crate::derive_ref_into_domain_owner) macro implementation -pub fn impl_derive_ref_into_domain_owner(input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); - +pub fn impl_derive_ref_into_domain_owner(input: &DeriveInput) -> TokenStream { impl_from( &input.ident, &input.generics, - &syn::parse_quote!(::iroha_executor::permission::domain::Owner), - &syn::parse_quote!(domain_id), + &syn2::parse_quote!(::iroha_executor::permission::domain::Owner), + &syn2::parse_quote!(domain_id), ) - .into() } fn impl_from( - ident: &syn::Ident, - generics: &syn::Generics, - pass_condition_type: &syn::Type, - field: &syn::Ident, -) -> proc_macro2::TokenStream { + ident: &syn2::Ident, + generics: &syn2::Generics, + pass_condition_type: &syn2::Type, + field: &syn2::Ident, +) -> TokenStream { use quote::ToTokens; let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - let mut generics: proc_macro2::TokenStream = syn::parse_str("<'token, ").unwrap(); + let mut generics: TokenStream = syn2::parse_str("<'token, ").unwrap(); let impl_generics_tokens = impl_generics.into_token_stream(); if impl_generics_tokens.is_empty() { generics.extend(core::iter::once(proc_macro2::TokenTree::Punct( - syn::parse_str(">").unwrap(), + syn2::parse_str(">").unwrap(), ))); } else { generics.extend(impl_generics_tokens.into_iter().skip(1)); diff --git a/smart_contract/executor/derive/src/default.rs b/smart_contract/executor/derive/src/default.rs index 6045bd40b82..fa12601e750 100644 --- a/smart_contract/executor/derive/src/default.rs +++ b/smart_contract/executor/derive/src/default.rs @@ -54,7 +54,7 @@ pub fn impl_derive_entrypoints(emitter: &mut Emitter, input: &syn2::DeriveInput) #[::iroha_executor::prelude::entrypoint] pub fn validate_instruction( authority: ::iroha_executor::prelude::AccountId, - instruction: ::iroha_executor::prelude::InstructionExpr, + instruction: ::iroha_executor::prelude::InstructionBox, block_height: u64, #(#custom_args),* ) -> ::iroha_executor::prelude::Result { @@ -107,54 +107,63 @@ pub fn impl_derive_entrypoints(emitter: &mut Emitter, input: &syn2::DeriveInput) } } +#[allow(clippy::too_many_lines)] pub fn impl_derive_visit(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream2 { let Some(input) = emitter.handle(ExecutorDeriveInput::from_derive_input(input)) else { return quote!(); }; let ExecutorDeriveInput { ident, custom, .. 
} = &input; let default_visit_sigs: Vec = [ - "fn visit_unsupported(operation: T)", "fn visit_transaction(operation: &SignedTransaction)", - "fn visit_instruction(operation: &InstructionExpr)", - "fn visit_expression(operation: &EvaluatesTo)", - "fn visit_sequence(operation: &SequenceExpr)", - "fn visit_if(operation: &ConditionalExpr)", - "fn visit_pair(operation: &PairExpr)", - "fn visit_unregister_peer(operation: Unregister)", - "fn visit_unregister_domain(operation: Unregister)", - "fn visit_transfer_domain(operation: Transfer)", - "fn visit_set_domain_key_value(operation: SetKeyValue)", - "fn visit_remove_domain_key_value(operation: RemoveKeyValue)", - "fn visit_unregister_account(operation: Unregister)", - "fn visit_mint_account_public_key(operation: Mint)", - "fn visit_burn_account_public_key(operation: Burn)", - "fn visit_mint_account_signature_check_condition(operation: Mint)", - "fn visit_set_account_key_value(operation: SetKeyValue)", - "fn visit_remove_account_key_value(operation: RemoveKeyValue)", - "fn visit_register_asset(operation: Register)", - "fn visit_unregister_asset(operation: Unregister)", - "fn visit_mint_asset(operation: Mint)", - "fn visit_burn_asset(operation: Burn)", - "fn visit_transfer_asset(operation: Transfer)", - "fn visit_set_asset_key_value(operation: SetKeyValue)", - "fn visit_remove_asset_key_value(operation: RemoveKeyValue)", - "fn visit_unregister_asset_definition(operation: Unregister)", - "fn visit_transfer_asset_definition(operation: Transfer)", - "fn visit_set_asset_definition_key_value(operation: SetKeyValue)", - "fn visit_remove_asset_definition_key_value(operation: RemoveKeyValue)", - "fn visit_grant_account_permission(operation: Grant)", - "fn visit_revoke_account_permission(operation: Revoke)", - "fn visit_register_role(operation: Register)", - "fn visit_unregister_role(operation: Unregister)", - "fn visit_grant_account_role(operation: Grant)", - "fn visit_revoke_account_role(operation: Revoke)", - "fn visit_unregister_trigger(operation: Unregister>)", - "fn visit_mint_trigger_repetitions(operation: Mint>)", - "fn visit_burn_trigger_repetitions(operation: Burn>)", - "fn visit_execute_trigger(operation: ExecuteTrigger)", - "fn visit_set_parameter(operation: SetParameter)", - "fn visit_new_parameter(operation: NewParameter)", - "fn visit_upgrade_executor(operation: Upgrade)", + "fn visit_instruction(operation: &InstructionBox)", + "fn visit_register_peer(operation: &Register)", + "fn visit_unregister_peer(operation: &Unregister)", + "fn visit_register_domain(operation: &Register)", + "fn visit_unregister_domain(operation: &Unregister)", + "fn visit_transfer_domain(operation: &Transfer)", + "fn visit_set_domain_key_value(operation: &SetKeyValue)", + "fn visit_remove_domain_key_value(operation: &RemoveKeyValue)", + "fn visit_register_account(operation: &Register)", + "fn visit_unregister_account(operation: &Unregister)", + "fn visit_mint_account_public_key(operation: &Mint)", + "fn visit_burn_account_public_key(operation: &Burn)", + "fn visit_mint_account_signature_check_condition(operation: &Mint)", + "fn visit_set_account_key_value(operation: &SetKeyValue)", + "fn visit_remove_account_key_value(operation: &RemoveKeyValue)", + "fn visit_register_asset(operation: &Register)", + "fn visit_unregister_asset(operation: &Unregister)", + "fn visit_mint_asset_quantity(operation: &Mint)", + "fn visit_burn_asset_quantity(operation: &Burn)", + "fn visit_mint_asset_big_quantity(operation: &Mint)", + "fn visit_burn_asset_big_quantity(operation: &Burn)", + "fn 
visit_mint_asset_fixed(operation: &Mint)", + "fn visit_burn_asset_fixed(operation: &Burn)", + "fn visit_transfer_asset_quantity(operation: &Transfer)", + "fn visit_transfer_asset_big_quantity(operation: &Transfer)", + "fn visit_transfer_asset_fixed(operation: &Transfer)", + "fn visit_set_asset_key_value(operation: &SetKeyValue)", + "fn visit_remove_asset_key_value(operation: &RemoveKeyValue)", + "fn visit_register_asset_definition(operation: &Register)", + "fn visit_unregister_asset_definition(operation: &Unregister)", + "fn visit_transfer_asset_definition(operation: &Transfer)", + "fn visit_set_asset_definition_key_value(operation: &SetKeyValue)", + "fn visit_remove_asset_definition_key_value(operation: &RemoveKeyValue)", + "fn visit_grant_account_permission(operation: &Grant)", + "fn visit_revoke_account_permission(operation: &Revoke)", + "fn visit_register_role(operation: &Register)", + "fn visit_unregister_role(operation: &Unregister)", + "fn visit_grant_account_role(operation: &Grant)", + "fn visit_revoke_account_role(operation: &Revoke)", + "fn visit_register_trigger(operation: &Register>)", + "fn visit_unregister_trigger(operation: &Unregister>)", + "fn visit_mint_trigger_repetitions(operation: &Mint>)", + "fn visit_burn_trigger_repetitions(operation: &Burn>)", + "fn visit_execute_trigger(operation: &ExecuteTrigger)", + "fn visit_set_parameter(operation: &SetParameter)", + "fn visit_new_parameter(operation: &NewParameter)", + "fn visit_upgrade(operation: &Upgrade)", + "fn visit_log(operation: &Log)", + "fn visit_fail(operation: &Fail)", ] .into_iter() .map(|item| { @@ -228,29 +237,6 @@ pub fn impl_derive_validate(emitter: &mut Emitter, input: &syn2::DeriveInput) -> } } -pub fn impl_derive_expression_evaluator( - emitter: &mut Emitter, - input: &syn2::DeriveInput, -) -> TokenStream2 { - let Some(input) = emitter.handle(ExecutorDeriveInput::from_derive_input(input)) else { - return quote!(); - }; - let ExecutorDeriveInput { ident, data, .. } = &input; - check_required_fields(data, emitter); - quote! 
{ - impl ::iroha_executor::data_model::evaluate::ExpressionEvaluator for #ident { - fn evaluate( - &self, - expression: &E, - ) -> ::core::result::Result - { - self.host.evaluate(expression) - } - } - - } -} - pub fn impl_derive_constructor(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream2 { let Some(input) = emitter.handle(ExecutorDeriveInput::from_derive_input(input)) else { return quote!(); @@ -268,7 +254,6 @@ pub fn impl_derive_constructor(emitter: &mut Emitter, input: &syn2::DeriveInput) Self { verdict: Ok(()), block_height, - host: ::iroha_executor::smart_contract::Host, #(#custom_idents),* } } @@ -278,7 +263,8 @@ pub fn impl_derive_constructor(emitter: &mut Emitter, input: &syn2::DeriveInput) } fn check_required_fields(ast: &ExecutorData, emitter: &mut Emitter) { - let required_fields: syn2::FieldsNamed = parse_quote!({ verdict: ::iroha_executor::prelude::Result, block_height: u64, host: ::iroha_executor::smart_contract::Host }); + let required_fields: syn2::FieldsNamed = + parse_quote!({ verdict: ::iroha_executor::prelude::Result, block_height: u64 }); let struct_fields = ast .as_ref() .take_struct() @@ -328,7 +314,7 @@ fn check_type_equivalence(full_ty: &syn2::Type, given_ty: &syn2::Type) -> bool { /// Processes an `Executor` by draining it of default fields and returning the idents of the /// custom fields and the corresponding function arguments for use in the constructor fn custom_field_idents_and_fn_args(ast: &ExecutorData) -> (Vec<&Ident>, Vec) { - let required_idents: Vec = ["verdict", "block_height", "host"] + let required_idents: Vec = ["verdict", "block_height"] .iter() .map(|s| Ident::new(s, Span::call_site())) .collect(); diff --git a/smart_contract/executor/derive/src/entrypoint.rs b/smart_contract/executor/derive/src/entrypoint.rs index 60b6cd9ad65..01c69e99c03 100644 --- a/smart_contract/executor/derive/src/entrypoint.rs +++ b/smart_contract/executor/derive/src/entrypoint.rs @@ -1,6 +1,10 @@ //! Module [`executor_entrypoint`](crate::executor_entrypoint) macro implementation -use super::*; +use iroha_macro_utils::Emitter; +use manyhow::emit; +use proc_macro2::TokenStream; +use quote::quote; +use syn2::parse_quote; mod export { pub const EXECUTOR_VALIDATE_TRANSACTION: &str = "_iroha_executor_validate_transaction"; @@ -17,14 +21,7 @@ mod import { /// [`executor_entrypoint`](crate::executor_entrypoint()) macro implementation #[allow(clippy::needless_pass_by_value)] -pub fn impl_entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { - let fn_item = parse_macro_input!(item as syn::ItemFn); - - assert!( - attr.is_empty(), - "`#[entrypoint]` macro for Executor entrypoints accepts no attributes" - ); - +pub fn impl_entrypoint(emitter: &mut Emitter, item: syn2::ItemFn) -> TokenStream { macro_rules! match_entrypoints { (validate: { $($user_entrypoint_name:ident => @@ -33,23 +30,27 @@ pub fn impl_entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { other: { $($other_user_entrypoint_name:ident => $branch:block),* $(,)? 
}) => { - match &fn_item.sig.ident { + match &item.sig.ident { $(fn_name if fn_name == stringify!($user_entrypoint_name) => { impl_validate_entrypoint( - fn_item, + item, stringify!($user_entrypoint_name), export::$generated_entrypoint_name, import::$query_validating_object_fn_name, ) })* $(fn_name if fn_name == stringify!($other_user_entrypoint_name) => $branch),* - _ => panic!( - "Executor entrypoint name must be one of: {:?}", - [ - $(stringify!($user_entrypoint_name),)* - $(stringify!($other_user_entrypoint_name),)* - ] - ), + _ => { + emit!( + emitter, + "Executor entrypoint name must be one of: {:?}", + [ + $(stringify!($user_entrypoint_name),)* + $(stringify!($other_user_entrypoint_name),)* + ] + ); + return quote!(); + }, } }; } @@ -61,18 +62,18 @@ pub fn impl_entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { validate_query => EXECUTOR_VALIDATE_QUERY(GET_VALIDATE_QUERY_PAYLOAD), } other: { - migrate => { impl_migrate_entrypoint(fn_item) } + migrate => { impl_migrate_entrypoint(item) } } } } fn impl_validate_entrypoint( - fn_item: syn::ItemFn, + fn_item: syn2::ItemFn, user_entrypoint_name: &'static str, generated_entrypoint_name: &'static str, get_validation_payload_fn_name: &'static str, ) -> TokenStream { - let syn::ItemFn { + let syn2::ItemFn { attrs, vis, sig, @@ -81,7 +82,7 @@ fn impl_validate_entrypoint( let fn_name = &sig.ident; assert!( - matches!(sig.output, syn::ReturnType::Type(_, _)), + matches!(sig.output, syn2::ReturnType::Type(_, _)), "Executor `{user_entrypoint_name}` entrypoint must have `Result` return type" ); @@ -92,11 +93,11 @@ fn impl_validate_entrypoint( ), ); - let generated_entrypoint_ident: syn::Ident = syn::parse_str(generated_entrypoint_name) + let generated_entrypoint_ident: syn2::Ident = syn2::parse_str(generated_entrypoint_name) .expect("Provided entrypoint name to generate is not a valid Ident, this is a bug"); - let get_validation_payload_fn_ident: syn::Ident = - syn::parse_str(get_validation_payload_fn_name).expect( + let get_validation_payload_fn_ident: syn2::Ident = + syn2::parse_str(get_validation_payload_fn_name).expect( "Provided function name to query validating object is not a valid Ident, this is a bug", ); @@ -125,11 +126,10 @@ fn impl_validate_entrypoint( #vis #sig #block } - .into() } -fn impl_migrate_entrypoint(fn_item: syn::ItemFn) -> TokenStream { - let syn::ItemFn { +fn impl_migrate_entrypoint(fn_item: syn2::ItemFn) -> TokenStream { + let syn2::ItemFn { attrs, vis, sig, @@ -138,11 +138,12 @@ fn impl_migrate_entrypoint(fn_item: syn::ItemFn) -> TokenStream { let fn_name = &sig.ident; assert!( - matches!(sig.output, syn::ReturnType::Type(_, _)), + matches!(sig.output, syn2::ReturnType::Type(_, _)), "Executor `migrate()` entrypoint must have `MigrationResult` return type" ); - let migrate_fn_name = syn::Ident::new(export::EXECUTOR_MIGRATE, proc_macro2::Span::call_site()); + let migrate_fn_name = + syn2::Ident::new(export::EXECUTOR_MIGRATE, proc_macro2::Span::call_site()); quote! { /// Executor `permission_token_schema` entrypoint @@ -167,5 +168,4 @@ fn impl_migrate_entrypoint(fn_item: syn::ItemFn) -> TokenStream { #vis #sig #block } - .into() } diff --git a/smart_contract/executor/derive/src/lib.rs b/smart_contract/executor/derive/src/lib.rs index b5795582d44..71d682c974c 100644 --- a/smart_contract/executor/derive/src/lib.rs +++ b/smart_contract/executor/derive/src/lib.rs @@ -1,11 +1,8 @@ //! Crate with executor-related derive macros. 
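Both entrypoint macros in this diff (the smart contract `main` above and the executor `entrypoint` below) now share one diagnostics pattern: collect problems with `emit!`/`handle` instead of panicking, then flush everything as `compile_error!` output. A minimal sketch of that pattern, assuming the `Emitter` API from `iroha_macro_utils` behaves as its call sites here suggest (`syn2` being this workspace's alias for syn 2.x):

```rust
use iroha_macro_utils::Emitter;
use manyhow::{emit, manyhow};
use proc_macro2::TokenStream;
use quote::quote;

// `#[manyhow]` adapts the signature so the entrypoint can work with
// `proc_macro2::TokenStream` instead of `proc_macro::TokenStream`.
#[manyhow]
#[proc_macro_attribute]
pub fn example(attr: TokenStream, item: TokenStream) -> TokenStream {
    let mut emitter = Emitter::new();

    // Record an error but keep going, so later problems are reported too.
    if !attr.is_empty() {
        emit!(emitter, "`#[example]` accepts no attributes");
    }

    // `handle` turns a `syn2::Result<T>` into an `Option<T>`, stashing the error.
    let Some(item) = emitter.handle(syn2::parse2::<syn2::ItemFn>(item)) else {
        // Nothing usable was parsed: emit only the collected diagnostics.
        return emitter.finish_token_stream();
    };

    // Generated code plus any accumulated diagnostics.
    emitter.finish_token_stream_with(quote!(#item))
}
```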
use iroha_macro_utils::Emitter; -use manyhow::manyhow; -use proc_macro::TokenStream; -use proc_macro2::TokenStream as TokenStream2; -use quote::quote; -use syn::{parse_macro_input, parse_quote, DeriveInput}; +use manyhow::{emit, manyhow, Result}; +use proc_macro2::TokenStream; mod conversion; mod default; @@ -46,9 +43,25 @@ mod validate; /// todo!() /// } /// ``` +#[manyhow] #[proc_macro_attribute] pub fn entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { - entrypoint::impl_entrypoint(attr, item) + let mut emitter = Emitter::new(); + + if !attr.is_empty() { + emit!( + emitter, + "`#[entrypoint]` macro for Executor entrypoints accepts no attributes" + ); + } + + let Some(item) = emitter.handle(syn2::parse2(item)) else { + return emitter.finish_token_stream(); + }; + + let result = entrypoint::impl_entrypoint(&mut emitter, item); + + emitter.finish_token_stream_with(result) } /// Derive macro for `Token` trait. @@ -79,9 +92,12 @@ pub fn entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { /// }.is_owned_by(&authority) /// } /// ``` +#[manyhow] #[proc_macro_derive(Token)] -pub fn derive_token(input: TokenStream) -> TokenStream { - token::impl_derive_token(input) +pub fn derive_token(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(token::impl_derive_token(&input)) } /// Derive macro for `ValidateGrantRevoke` trait. @@ -144,12 +160,14 @@ pub fn derive_token(input: TokenStream) -> TokenStream { // ... // } // ``` +#[manyhow] #[proc_macro_derive( ValidateGrantRevoke, attributes(validate, validate_grant, validate_revoke) )] -pub fn derive_validate_grant_revoke(input: TokenStream) -> TokenStream { - validate::impl_derive_validate_grant_revoke(input) +pub fn derive_validate_grant_revoke(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + validate::impl_derive_validate_grant_revoke(&input) } /// Should be used together with [`ValidateGrantRevoke`] derive macro to derive a conversion @@ -159,9 +177,14 @@ pub fn derive_validate_grant_revoke(input: TokenStream) -> TokenStream { /// /// Implements [`From`] for `permission::asset_definition::Owner` /// and not [`Into`] for your type. [`Into`] will be implemented automatically. +#[manyhow] #[proc_macro_derive(RefIntoAssetDefinitionOwner)] -pub fn derive_ref_into_asset_definition_owner(input: TokenStream) -> TokenStream { - conversion::impl_derive_ref_into_asset_definition_owner(input) +pub fn derive_ref_into_asset_definition_owner(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(conversion::impl_derive_ref_into_asset_definition_owner( + &input, + )) } /// Should be used together with [`ValidateGrantRevoke`] derive macro to derive a conversion @@ -171,9 +194,12 @@ pub fn derive_ref_into_asset_definition_owner(input: TokenStream) -> TokenStream /// /// Implements [`From`] for `permission::asset::Owner` /// and not [`Into`] for your type. [`Into`] will be implemented automatically. 
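For context, the conversion derives below are meant to be stacked on permission-token types. A hypothetical token combining them could look like this (the name and field mirror `tokens::asset::CanBurnUserAsset` used later in this diff; the `#[validate(...)]` argument is parsed as a pass-condition type by the `validate.rs` code below):

```rust
use iroha_executor::prelude::*;

// `RefIntoAssetOwner` derives `From<&CanBurnUserAsset>` for
// `permission::asset::Owner`, which the `ValidateGrantRevoke` impl then uses
// to check Grant/Revoke instructions against asset ownership.
#[derive(Token, ValidateGrantRevoke, RefIntoAssetOwner)]
#[validate(permission::asset::Owner)]
pub struct CanBurnUserAsset {
    pub asset_id: AssetId,
}
```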
+#[manyhow] #[proc_macro_derive(RefIntoAssetOwner)] -pub fn derive_ref_into_asset_owner(input: TokenStream) -> TokenStream { - conversion::impl_derive_ref_into_asset_owner(input) +pub fn derive_ref_into_asset_owner(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(conversion::impl_derive_ref_into_asset_owner(&input)) } /// Should be used together with [`ValidateGrantRevoke`] derive macro to derive a conversion @@ -183,9 +209,12 @@ pub fn derive_ref_into_asset_owner(input: TokenStream) -> TokenStream { /// /// Implements [`From`] for `permission::account::Owner` /// and not [`Into`] for your type. [`Into`] will be implemented automatically. +#[manyhow] #[proc_macro_derive(RefIntoAccountOwner)] -pub fn derive_ref_into_account_owner(input: TokenStream) -> TokenStream { - conversion::impl_derive_ref_into_account_owner(input) +pub fn derive_ref_into_account_owner(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(conversion::impl_derive_ref_into_account_owner(&input)) } /// Should be used together with [`ValidateGrantRevoke`] derive macro to derive a conversion @@ -195,15 +224,16 @@ pub fn derive_ref_into_account_owner(input: TokenStream) -> TokenStream { /// /// Implements [`From`] for `permission::domain::Owner` /// and not [`Into`] for your type. [`Into`] will be implemented automatically. +#[manyhow] #[proc_macro_derive(RefIntoDomainOwner)] -pub fn derive_ref_into_domain_owner(input: TokenStream) -> TokenStream { - conversion::impl_derive_ref_into_domain_owner(input) +pub fn derive_ref_into_domain_owner(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(conversion::impl_derive_ref_into_domain_owner(&input)) } /// Implements the `iroha_executor::Validate` trait for the given `Executor` struct. As -/// this trait has a `iroha_executor::prelude::Visit`, and the latter has an -/// `iroha_executor::data_model::evaluate::ExpressionEvaluator` -/// bound, at least these two should be implemented as well. +/// this trait has a `iroha_executor::prelude::Visit` bound, at least this one should be implemented as well. /// /// Emits a compile error if the struct didn't have all the expected fields with corresponding /// types, i.e. `verdict`: `iroha_executor::prelude::Result`, `block_height`: `u64` and @@ -211,7 +241,7 @@ pub fn derive_ref_into_domain_owner(input: TokenStream) -> TokenStream { /// `block_height` are needed. The types can be unqualified, but not aliased.
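With the expression machinery gone, the field contract that `Validate` (and the other derives below) check against shrinks to `verdict` and `block_height`. A minimal conforming executor, following the doc examples in this file (which abbreviate the `ValidateEntrypoints` derive as `Entrypoints`):

```rust
use iroha_executor::prelude::*;

// Minimal executor satisfying the required fields; the former
// `host: smart_contract::Host` field is no longer needed.
#[derive(Constructor, ValidateEntrypoints, Validate, Visit)]
pub struct Executor {
    verdict: Result,
    block_height: u64,
}
```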
#[manyhow] #[proc_macro_derive(Validate)] -pub fn derive_validate(input: TokenStream2) -> TokenStream2 { +pub fn derive_validate(input: TokenStream) -> TokenStream { let mut emitter = Emitter::new(); let Some(input) = emitter.handle(syn2::parse2(input)) else { @@ -239,7 +269,7 @@ pub fn derive_validate(input: TokenStream2) -> TokenStream2 { /// ```ignore /// use iroha_executor::{smart_contract, prelude::*}; /// -/// #[derive(Constructor, Entrypoints, ExpressionEvaluator, Validate, Visit)] +/// #[derive(Constructor, Entrypoints, Validate, Visit)] /// #[visit(custom(visit_query))] /// pub struct Executor { /// verdict: Result, @@ -256,7 +286,7 @@ pub fn derive_validate(input: TokenStream2) -> TokenStream2 { /// ``` #[manyhow] #[proc_macro_derive(Visit, attributes(visit))] -pub fn derive_visit(input: TokenStream2) -> TokenStream2 { +pub fn derive_visit(input: TokenStream) -> TokenStream { let mut emitter = Emitter::new(); let Some(input) = emitter.handle(syn2::parse2(input)) else { @@ -286,7 +316,7 @@ pub fn derive_visit(input: TokenStream2) -> TokenStream2 { /// ```ignore /// use iroha_executor::{smart_contract, prelude::*}; /// -/// #[derive(Constructor, Entrypoints, ExpressionEvaluator, Validate, Visit)] +/// #[derive(Constructor, Entrypoints, Validate, Visit)] /// #[entrypoints(custom(validate_query))] /// pub struct Executor { /// verdict: Result, @@ -297,7 +327,7 @@ pub fn derive_visit(input: TokenStream2) -> TokenStream2 { /// ``` #[manyhow] #[proc_macro_derive(ValidateEntrypoints, attributes(entrypoints))] -pub fn derive_entrypoints(input: TokenStream2) -> TokenStream2 { +pub fn derive_entrypoints(input: TokenStream) -> TokenStream { let mut emitter = Emitter::new(); let Some(input) = emitter.handle(syn2::parse2(input)) else { @@ -309,27 +339,6 @@ pub fn derive_entrypoints(input: TokenStream2) -> TokenStream2 { emitter.finish_token_stream_with(result) } -/// Implements `iroha_executor::data_model::evaluate::ExpressionEvaluator` trait -/// for the given `Executor` struct. -/// -/// Emits a compile error if the struct didn't have all the expected fields with corresponding -/// types, i.e. `verdict`: `iroha_executor::prelude::Result`, `block_height`: `u64` and -/// `host`: `iroha_executor::smart_contract::Host`, though technically only `host` is needed. -/// The types can be unqualified, but not aliased. -#[manyhow] -#[proc_macro_derive(ExpressionEvaluator)] -pub fn derive_expression_evaluator(input: TokenStream2) -> TokenStream2 { - let mut emitter = Emitter::new(); - - let Some(input) = emitter.handle(syn2::parse2(input)) else { - return emitter.finish_token_stream(); - }; - - let result = default::impl_derive_expression_evaluator(&mut emitter, &input); - - emitter.finish_token_stream_with(result) -} - /// Implements a constructor for the given `Executor` struct. If the `Executor` has any custom fields /// (i.e. different from the expected fields listed below), they will be included into the constructor /// automatically and will need to be passed into `new()` function explicitly. In the default case, @@ -340,7 +349,7 @@ pub fn derive_expression_evaluator(input: TokenStream2) -> TokenStream2 { /// `host`: `iroha_executor::smart_contract::Host`. The types can be unqualified, but not aliased.
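Given the `impl_derive_constructor` change earlier in this diff, `#[derive(Constructor)]` on the minimal executor above should now expand to roughly the following; any custom fields would surface as extra `new()` parameters (a sketch, with the signature inferred from the generated body shown above):

```rust
impl Executor {
    // No `host` initialization anymore; `verdict` always starts as `Ok(())`.
    pub fn new(block_height: u64) -> Self {
        Self {
            verdict: Ok(()),
            block_height,
        }
    }
}
```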
#[manyhow] #[proc_macro_derive(Constructor)] -pub fn derive_constructor(input: TokenStream2) -> TokenStream2 { +pub fn derive_constructor(input: TokenStream) -> TokenStream { let mut emitter = Emitter::new(); let Some(input) = emitter.handle(syn2::parse2(input)) else { diff --git a/smart_contract/executor/derive/src/token.rs b/smart_contract/executor/derive/src/token.rs index 69f7915d65e..6d961c1c3ad 100644 --- a/smart_contract/executor/derive/src/token.rs +++ b/smart_contract/executor/derive/src/token.rs @@ -1,10 +1,10 @@ //! Module with [`derive_token`](crate::derive_token) macro implementation -use super::*; +use proc_macro2::TokenStream; +use quote::quote; /// [`derive_token`](crate::derive_token()) macro implementation -pub fn impl_derive_token(input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); +pub fn impl_derive_token(input: &syn2::DeriveInput) -> TokenStream { let generics = &input.generics; let ident = &input.ident; @@ -15,10 +15,9 @@ pub fn impl_derive_token(input: TokenStream) -> TokenStream { #impl_token #impl_try_from_permission_token } - .into() } -fn impl_token(ident: &syn::Ident, generics: &syn::Generics) -> proc_macro2::TokenStream { +fn impl_token(ident: &syn2::Ident, generics: &syn2::Generics) -> proc_macro2::TokenStream { let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); quote! { @@ -46,10 +45,7 @@ fn impl_token(ident: &syn::Ident, generics: &syn::Generics) -> proc_macro2::Toke } } -fn impl_try_from_permission_token( - ident: &syn::Ident, - generics: &syn::Generics, -) -> proc_macro2::TokenStream { +fn impl_try_from_permission_token(ident: &syn2::Ident, generics: &syn2::Generics) -> TokenStream { let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let token_id = quote! { ::name() }; diff --git a/smart_contract/executor/derive/src/validate.rs b/smart_contract/executor/derive/src/validate.rs index de6cc982cf1..3b7df471f0f 100644 --- a/smart_contract/executor/derive/src/validate.rs +++ b/smart_contract/executor/derive/src/validate.rs @@ -1,28 +1,27 @@ //! Module with [`derive_validate`](crate::derive_validate) macro implementation -use proc_macro2::Span; -use syn::{Attribute, Ident, Path, Type}; - -use super::*; +use darling::FromAttributes; +use manyhow::Result; +use proc_macro2::{Span, TokenStream}; +use quote::quote; +use syn2::{Attribute, Ident, Type}; /// [`derive_validate`](crate::derive_validate()) macro implementation -pub fn impl_derive_validate_grant_revoke(input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); - let ident = input.ident; +pub fn impl_derive_validate_grant_revoke(input: &syn2::DeriveInput) -> Result { + let ident = &input.ident; - let (validate_grant_impl, validate_revoke_impl) = gen_validate_impls(&input.attrs); + let (validate_grant_impl, validate_revoke_impl) = gen_validate_impls(&input.attrs)?; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - quote! { + Ok(quote! { impl #impl_generics ::iroha_executor::permission::ValidateGrantRevoke for #ident #ty_generics #where_clause { #validate_grant_impl #validate_revoke_impl } - } - .into() + }) } /// Enum representing possible attributes. 
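Before the rewritten attribute parsing, it helps to see the two shapes it accepts: a single shared `validate` condition, or a `validate_grant`/`validate_revoke` pair that must appear together. Illustrative tokens follow; the pass-condition paths are assumptions, not definitions from this diff:

```rust
use iroha_executor::prelude::*;

// One shared pass condition applied to both Grant and Revoke.
#[derive(Token, ValidateGrantRevoke)]
#[validate(permission::OnlyGenesis)]
pub struct CanSetParameters;

// Separate conditions; supplying only one of the pair is a compile error.
#[derive(Token, ValidateGrantRevoke)]
#[validate_grant(permission::OnlyGenesis)]
#[validate_revoke(permission::AlwaysPass)]
pub struct CanTweakSomething;
```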
@@ -37,119 +36,144 @@ enum ValidateAttribute { }, } -impl ValidateAttribute { - fn from_attributes<'attr, A>(attributes: A) -> Self - where - A: IntoIterator<Item = &'attr Attribute>, - { +impl FromAttributes for ValidateAttribute { + // we use `Option::or` to select the first specified condition in case of duplicates + // but we still _want_ to validate that each attribute parses successfully + // this is to ensure that we provide the user with as much validation as possible, instead of bailing out early + // `Option::or_else` would NOT work here, as it would not validate conditions after the first valid one + #[allow(clippy::or_fun_call)] + fn from_attributes(attrs: &[Attribute]) -> darling::Result<Self> { + let mut accumulator = darling::error::Accumulator::default(); + let mut general_condition: Option<Type> = None; let mut grant_condition: Option<Type> = None; let mut revoke_condition: Option<Type> = None; - let general_path: Path = syn::parse_str("validate").unwrap(); - let grant_path: Path = syn::parse_str("validate_grant").unwrap(); - let revoke_path: Path = syn::parse_str("validate_revoke").unwrap(); - - for attribute in attributes { - let path = &attribute.path; - - // Skip if it's not our attribute - if path != &general_path && path != &grant_path && path != &revoke_path { + for attr in attrs { + let path = attr.path(); + if !path.is_ident("validate") + && !path.is_ident("validate_grant") + && !path.is_ident("validate_revoke") + { continue; } - let Some(proc_macro2::TokenTree::Group(group)) = - attribute.tokens.clone().into_iter().next() + let Some(list) = + accumulator.handle(attr.meta.require_list().map_err(darling::Error::from)) else { - panic!("Expected parentheses group"); + continue; }; - assert!( - group.delimiter() == proc_macro2::Delimiter::Parenthesis, - "Expected parentheses" - ); - let tokens = group.stream().into(); - - match path { - _general if path == &general_path => { - assert!(grant_condition.is_none() && revoke_condition.is_none(), - "`validate` attribute can't be used with `validate_grant` or `validate_revoke` attributes"); - assert!( - general_condition.is_none(), - "`validate` attribute duplication is not allowed" - ); - - general_condition.replace(syn::parse(tokens).unwrap()); + let tokens = &list.tokens; + + if path.is_ident("validate") { + if grant_condition.is_some() || revoke_condition.is_some() { + accumulator.push(darling::Error::custom( + "`validate` attribute can't be used with `validate_grant` or `validate_revoke` attributes" + ).with_span(&attr)) + } + if general_condition.is_some() { + accumulator.push( + darling::Error::custom("`validate` attribute duplication is not allowed") + .with_span(&attr), + ) + } + + general_condition = general_condition + .or(accumulator + .handle(syn2::parse2(tokens.clone()).map_err(darling::Error::from))); + } else if path.is_ident("validate_grant") { + if general_condition.is_some() { + accumulator.push( + darling::Error::custom( + "`validate_grant` attribute can't be used with `validate` attribute", + ) + .with_span(&attr), + ) } - _grant if path == &grant_path => { - assert!( - general_condition.is_none(), - "`validate_grant` attribute can't be used with `validate` attribute" - ); - assert!( - grant_condition.is_none(), - "`validate_grant` attribute duplication is not allowed" - ); - - grant_condition.replace(syn::parse(tokens).unwrap()); + if grant_condition.is_some() { + accumulator.push( + darling::Error::custom( + "`validate_grant` attribute duplication is not allowed", + ) + .with_span(&attr), + ) } - _revoke if path == &revoke_path => { - assert!( -
general_condition.is_none(), - "`validate_revoke` attribute can't be used with `validate` attribute" - ); - assert!( - revoke_condition.is_none(), - "`validate_revoke` attribute duplication is not allowed" - ); - - revoke_condition.replace(syn::parse(tokens).unwrap()); + + grant_condition = grant_condition + .or(accumulator + .handle(syn2::parse2(tokens.clone()).map_err(darling::Error::from))); + } else if path.is_ident("validate_revoke") { + if general_condition.is_some() { + accumulator.push( + darling::Error::custom( + "`validate_revoke` attribute can't be used with `validate` attribute", + ) + .with_span(&attr), + ) } - path => { - panic!( - "Unexpected attribute: `{}`. Expected `validate`, `validate_grant` or `validate_revoke`", - path.get_ident().map_or_else(|| "".to_owned(), ToString::to_string) + if revoke_condition.is_some() { + accumulator.push( + darling::Error::custom( + "`validate_revoke` attribute duplication is not allowed", + ) + .with_span(&attr), ) } + + revoke_condition = revoke_condition + .or(accumulator + .handle(syn2::parse2(tokens.clone()).map_err(darling::Error::from))); + } else { + unreachable!() } } - match (general_condition, grant_condition, revoke_condition) { - (Some(condition), None, None) => ValidateAttribute::General(condition), + let result = match (general_condition, grant_condition, revoke_condition) { + (Some(condition), None, None) => Ok(ValidateAttribute::General(condition)), (None, Some(grant_condition), Some(revoke_condition)) => { - ValidateAttribute::Separate { + Ok(ValidateAttribute::Separate { grant_condition, revoke_condition, - } + }) } (None, Some(_grant_condition), None) => { - panic!("`validate_grant` attribute should be used together with `validate_revoke` attribute") + Err(darling::Error::custom( + "`validate_grant` attribute should be used together with `validate_revoke` attribute" + )) } (None, None, Some(_revoke_condition)) => { - panic!("`validate_revoke` attribute should be used together with `validate_grant` attribute") + Err(darling::Error::custom( + "`validate_revoke` attribute should be used together with `validate_grant` attribute" + )) } - (None, None, None) => panic!("`validate` attribute or combination of `validate_grant` and `validate_revoke` attributes is required"), - _ => unreachable!(), - } + (None, None, None) => Err(darling::Error::custom( + "`validate` attribute or combination of `validate_grant` and `validate_revoke` attributes is required", + )), + _ => Err(darling::Error::custom("Invalid combination of attributes")), + }; + + let res = accumulator.handle(result); + + accumulator.finish().map(|()| res.unwrap()) } } fn gen_validate_impls( attributes: &[Attribute], -) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) { - let validate_attribute = ValidateAttribute::from_attributes(attributes); - +) -> Result<(proc_macro2::TokenStream, proc_macro2::TokenStream)> { + let validate_attribute = ValidateAttribute::from_attributes(attributes)?; match validate_attribute { - ValidateAttribute::General(pass_condition) => ( + ValidateAttribute::General(pass_condition) => Ok(( gen_validate_impl(IsiName::Grant, &pass_condition), gen_validate_impl(IsiName::Revoke, &pass_condition), - ), + )), ValidateAttribute::Separate { grant_condition, revoke_condition, - } => ( + } => Ok(( gen_validate_impl(IsiName::Grant, &grant_condition), gen_validate_impl(IsiName::Revoke, &revoke_condition), - ), + )), } } diff --git a/smart_contract/executor/src/default.rs b/smart_contract/executor/src/default.rs index 771439e9742..5b70d96a884 100644 ---
a/smart_contract/executor/src/default.rs +++ b/smart_contract/executor/src/default.rs @@ -7,48 +7,42 @@ use alloc::format; pub use account::{ visit_burn_account_public_key, visit_mint_account_public_key, - visit_mint_account_signature_check_condition, visit_remove_account_key_value, - visit_set_account_key_value, visit_unregister_account, + visit_mint_account_signature_check_condition, visit_register_account, + visit_remove_account_key_value, visit_set_account_key_value, visit_unregister_account, }; pub use asset::{ - visit_burn_asset, visit_mint_asset, visit_register_asset, visit_remove_asset_key_value, - visit_set_asset_key_value, visit_transfer_asset, visit_unregister_asset, + visit_burn_asset_big_quantity, visit_burn_asset_fixed, visit_burn_asset_quantity, + visit_mint_asset_big_quantity, visit_mint_asset_fixed, visit_mint_asset_quantity, + visit_register_asset, visit_remove_asset_key_value, visit_set_asset_key_value, + visit_transfer_asset_big_quantity, visit_transfer_asset_fixed, visit_transfer_asset_quantity, + visit_unregister_asset, }; pub use asset_definition::{ - visit_remove_asset_definition_key_value, visit_set_asset_definition_key_value, - visit_transfer_asset_definition, visit_unregister_asset_definition, + visit_register_asset_definition, visit_remove_asset_definition_key_value, + visit_set_asset_definition_key_value, visit_transfer_asset_definition, + visit_unregister_asset_definition, }; pub use domain::{ - visit_remove_domain_key_value, visit_set_domain_key_value, visit_transfer_domain, - visit_unregister_domain, + visit_register_domain, visit_remove_domain_key_value, visit_set_domain_key_value, + visit_transfer_domain, visit_unregister_domain, }; -pub use executor::visit_upgrade_executor; -use iroha_smart_contract::debug::DebugExpectExt as _; +pub use executor::visit_upgrade; +pub use fail::visit_fail; +use iroha_smart_contract::data_model::isi::InstructionBox; +pub use log::visit_log; pub use parameter::{visit_new_parameter, visit_set_parameter}; -pub use peer::visit_unregister_peer; +pub use peer::{visit_register_peer, visit_unregister_peer}; pub use permission_token::{visit_grant_account_permission, visit_revoke_account_permission}; pub use role::{ visit_grant_account_role, visit_register_role, visit_revoke_account_role, visit_unregister_role, }; pub use trigger::{ visit_burn_trigger_repetitions, visit_execute_trigger, visit_mint_trigger_repetitions, - visit_unregister_trigger, + visit_register_trigger, visit_unregister_trigger, }; use crate::{permission, permission::Token as _, prelude::*}; -macro_rules! evaluate_expr { - ($visitor:ident, $authority:ident, <$isi:ident as $isi_type:ty>::$field:ident()) => {{ - $visitor.visit_expression($authority, $isi.$field()); - - $visitor.evaluate($isi.$field()).dbg_expect(&alloc::format!( - "Failed to evaluate field '{}::{}'", - stringify!($isi_type), - stringify!($field), - )) - }}; -} - pub fn default_permission_token_schema() -> PermissionTokenSchema { let mut schema = iroha_executor::PermissionTokenSchema::default(); @@ -86,7 +80,7 @@ pub fn visit_transaction( } } -/// Default validation for [`InstructionExpr`]. +/// Default validation for [`InstructionBox`]. /// /// # Warning /// @@ -94,198 +88,53 @@ pub fn visit_transaction( pub fn visit_instruction( executor: &mut V, authority: &AccountId, - isi: &InstructionExpr, + isi: &InstructionBox, ) { - macro_rules! 
isi_executors { - ( - single {$( - $executor:ident($isi:ident) - ),+ $(,)?} - composite {$( - $composite_executor:ident($composite_isi:ident) - ),+ $(,)?} - ) => { - match isi { - InstructionExpr::NewParameter(isi) => { - let parameter = evaluate_expr!(executor, authority, ::parameter()); - executor.visit_new_parameter(authority, NewParameter{parameter}); - - if executor.verdict().is_ok() { - isi_executors!(@execute isi); - } - } - InstructionExpr::SetParameter(isi) => { - let parameter = evaluate_expr!(executor, authority, ::parameter()); - executor.visit_set_parameter(authority, SetParameter{parameter}); - - if executor.verdict().is_ok() { - isi_executors!(@execute isi); - } - } - InstructionExpr::ExecuteTrigger(isi) => { - let trigger_id = evaluate_expr!(executor, authority, ::trigger_id()); - executor.visit_execute_trigger(authority, ExecuteTrigger{trigger_id}); - - if executor.verdict().is_ok() { - isi_executors!(@execute isi); - } - } - InstructionExpr::Log(isi) => { - let msg = evaluate_expr!(executor, authority, ::msg()); - let level = evaluate_expr!(executor, authority, ::level()); - executor.visit_log(authority, Log{level, msg}); - - if executor.verdict().is_ok() { - isi_executors!(@execute isi); - } - } $( - InstructionExpr::$isi(isi) => { - executor.$executor(authority, isi); - - if executor.verdict().is_ok() { - isi_executors!(@execute isi); - } - } )+ $( - // NOTE: `visit_and_execute_instructions` is reentrant, so don't execute composite instructions - InstructionExpr::$composite_isi(isi) => executor.$composite_executor(authority, isi), )+ - } - }; - (@execute $isi:ident) => { - // TODO: Execution should be infallible after successful validation - if let Err(err) = isi.execute() { - executor.deny(err); - } + match isi { + InstructionBox::NewParameter(isi) => { + executor.visit_new_parameter(authority, isi); } - } - - isi_executors! { - single { - visit_burn(Burn), - visit_fail(Fail), - visit_grant(Grant), - visit_mint(Mint), - visit_register(Register), - visit_remove_key_value(RemoveKeyValue), - visit_revoke(Revoke), - visit_set_key_value(SetKeyValue), - visit_transfer(Transfer), - visit_unregister(Unregister), - visit_upgrade(Upgrade), - } - - composite { - visit_sequence(Sequence), - visit_pair(Pair), - visit_if(If), + InstructionBox::SetParameter(isi) => { + executor.visit_set_parameter(authority, isi); } - } -} - -pub fn visit_unsupported( - executor: &mut V, - _authority: &AccountId, - isi: T, -) { - deny!(executor, "{isi:?}: Unsupported operation"); -} - -pub fn visit_expression( - executor: &mut V, - authority: &AccountId, - expression: &EvaluatesTo, -) { - macro_rules! 
visit_binary_expression { - ($e:ident) => {{ - executor.visit_expression(authority, $e.left()); - - if executor.verdict().is_ok() { - executor.visit_expression(authority, $e.right()); - } - }}; - } - - match expression.expression() { - Expression::Add(expr) => visit_binary_expression!(expr), - Expression::Subtract(expr) => visit_binary_expression!(expr), - Expression::Multiply(expr) => visit_binary_expression!(expr), - Expression::Divide(expr) => visit_binary_expression!(expr), - Expression::Mod(expr) => visit_binary_expression!(expr), - Expression::RaiseTo(expr) => visit_binary_expression!(expr), - Expression::Greater(expr) => visit_binary_expression!(expr), - Expression::Less(expr) => visit_binary_expression!(expr), - Expression::Equal(expr) => visit_binary_expression!(expr), - Expression::Not(expr) => executor.visit_expression(authority, expr.expression()), - Expression::And(expr) => visit_binary_expression!(expr), - Expression::Or(expr) => visit_binary_expression!(expr), - Expression::If(expr) => { - executor.visit_expression(authority, expr.condition()); - - if executor.verdict().is_ok() { - executor.visit_expression(authority, expr.then()); - } - - if executor.verdict().is_ok() { - executor.visit_expression(authority, expr.otherwise()); - } + InstructionBox::Log(isi) => { + executor.visit_log(authority, isi); } - Expression::Contains(expr) => { - executor.visit_expression(authority, expr.collection()); - - if executor.verdict().is_ok() { - executor.visit_expression(authority, expr.element()); - } + InstructionBox::ExecuteTrigger(isi) => { + executor.visit_execute_trigger(authority, isi); } - Expression::ContainsAll(expr) => { - executor.visit_expression(authority, expr.collection()); - - if executor.verdict().is_ok() { - executor.visit_expression(authority, expr.elements()); - } + InstructionBox::Burn(isi) => { + executor.visit_burn(authority, isi); } - Expression::ContainsAny(expr) => { - executor.visit_expression(authority, expr.collection()); - - if executor.verdict().is_ok() { - executor.visit_expression(authority, expr.elements()); - } + InstructionBox::Fail(isi) => { + executor.visit_fail(authority, isi); } - Expression::Where(expr) => executor.visit_expression(authority, expr.expression()), - Expression::Query(query) => executor.visit_query(authority, query), - Expression::ContextValue(_) | Expression::Raw(_) => (), - } -} - -pub fn visit_if( - executor: &mut V, - authority: &AccountId, - isi: &ConditionalExpr, -) { - let condition = evaluate_expr!(executor, authority, ::condition()); - - // TODO: Do we have to make sure both branches are syntactically valid? 
- if condition { - executor.visit_instruction(authority, isi.then()); - } else if let Some(otherwise) = isi.otherwise() { - executor.visit_instruction(authority, otherwise); - } -} - -pub fn visit_pair(executor: &mut V, authority: &AccountId, isi: &PairExpr) { - executor.visit_instruction(authority, isi.left_instruction()); - - if executor.verdict().is_ok() { - executor.visit_instruction(authority, isi.right_instruction()) - } -} - -pub fn visit_sequence( - executor: &mut V, - authority: &AccountId, - sequence: &SequenceExpr, -) { - for isi in sequence.instructions() { - if executor.verdict().is_ok() { - executor.visit_instruction(authority, isi); + InstructionBox::Grant(isi) => { + executor.visit_grant(authority, isi); + } + InstructionBox::Mint(isi) => { + executor.visit_mint(authority, isi); + } + InstructionBox::Register(isi) => { + executor.visit_register(authority, isi); + } + InstructionBox::RemoveKeyValue(isi) => { + executor.visit_remove_key_value(authority, isi); + } + InstructionBox::Revoke(isi) => { + executor.visit_revoke(authority, isi); + } + InstructionBox::SetKeyValue(isi) => { + executor.visit_set_key_value(authority, isi); + } + InstructionBox::Transfer(isi) => { + executor.visit_transfer(authority, isi); + } + InstructionBox::Unregister(isi) => { + executor.visit_unregister(authority, isi); + } + InstructionBox::Upgrade(isi) => { + executor.visit_upgrade(authority, isi); } } } @@ -293,17 +142,25 @@ pub fn visit_sequence( pub mod peer { use super::*; + pub fn visit_register_peer( + executor: &mut V, + _authority: &AccountId, + isi: &Register, + ) { + execute!(executor, isi) + } + #[allow(clippy::needless_pass_by_value)] pub fn visit_unregister_peer( executor: &mut V, authority: &AccountId, - _isi: Unregister, + isi: &Unregister, ) { if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } if tokens::peer::CanUnregisterAnyPeer.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't unregister peer"); @@ -315,24 +172,34 @@ pub mod domain { use super::*; + pub fn visit_register_domain( + executor: &mut V, + _authority: &AccountId, + isi: &Register, + ) { + execute!(executor, isi) + } + pub fn visit_unregister_domain( executor: &mut V, authority: &AccountId, - isi: Unregister, + isi: &Unregister, ) { - let domain_id = isi.object_id; + let domain_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_domain_owner(&domain_id, authority) { + match is_domain_owner(domain_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_unregister_domain_token = tokens::domain::CanUnregisterDomain { domain_id }; + let can_unregister_domain_token = tokens::domain::CanUnregisterDomain { + domain_id: domain_id.clone(), + }; if can_unregister_domain_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't unregister domain"); @@ -341,16 +208,16 @@ pub mod domain { pub fn visit_transfer_domain( executor: &mut V, authority: &AccountId, - isi: Transfer, + isi: &Transfer, ) { - let destination_id = isi.object; + let destination_id = isi.object(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_domain_owner(&destination_id, authority) { + match is_domain_owner(destination_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } @@ -360,22 
+227,23 @@ pub mod domain { pub fn visit_set_domain_key_value( executor: &mut V, authority: &AccountId, - isi: SetKeyValue, + isi: &SetKeyValue, ) { - let domain_id = isi.object_id; + let domain_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_domain_owner(&domain_id, authority) { + match is_domain_owner(domain_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_set_key_value_in_domain_token = - tokens::domain::CanSetKeyValueInDomain { domain_id }; + let can_set_key_value_in_domain_token = tokens::domain::CanSetKeyValueInDomain { + domain_id: domain_id.clone(), + }; if can_set_key_value_in_domain_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't set key value in domain metadata"); @@ -384,22 +252,23 @@ pub mod domain { pub fn visit_remove_domain_key_value( executor: &mut V, authority: &AccountId, - isi: RemoveKeyValue, + isi: &RemoveKeyValue, ) { - let domain_id = isi.object_id; + let domain_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_domain_owner(&domain_id, authority) { + match is_domain_owner(domain_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_remove_key_value_in_domain_token = - tokens::domain::CanRemoveKeyValueInDomain { domain_id }; + let can_remove_key_value_in_domain_token = tokens::domain::CanRemoveKeyValueInDomain { + domain_id: domain_id.clone(), + }; if can_remove_key_value_in_domain_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't remove key value in domain metadata"); @@ -411,24 +280,34 @@ pub mod account { use super::*; + pub fn visit_register_account( + executor: &mut V, + _authority: &AccountId, + isi: &Register, + ) { + execute!(executor, isi) + } + pub fn visit_unregister_account( executor: &mut V, authority: &AccountId, - isi: Unregister, + isi: &Unregister, ) { - let account_id = isi.object_id; + let account_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_account_owner(&account_id, authority) { + match is_account_owner(account_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_unregister_user_account = tokens::account::CanUnregisterAccount { account_id }; + let can_unregister_user_account = tokens::account::CanUnregisterAccount { + account_id: account_id.clone(), + }; if can_unregister_user_account.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't unregister another account"); @@ -437,21 +316,23 @@ pub mod account { pub fn visit_mint_account_public_key( executor: &mut V, authority: &AccountId, - isi: Mint, + isi: &Mint, ) { - let account_id = isi.destination_id; + let account_id = isi.destination_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_account_owner(&account_id, authority) { + match is_account_owner(account_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_mint_user_public_keys = tokens::account::CanMintUserPublicKeys { account_id }; + let can_mint_user_public_keys = 
tokens::account::CanMintUserPublicKeys { + account_id: account_id.clone(), + }; if can_mint_user_public_keys.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't mint public keys of another account"); @@ -460,21 +341,23 @@ pub mod account { pub fn visit_burn_account_public_key( executor: &mut V, authority: &AccountId, - isi: Burn, + isi: &Burn, ) { - let account_id = isi.destination_id; + let account_id = isi.destination_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_account_owner(&account_id, authority) { + match is_account_owner(account_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_burn_user_public_keys = tokens::account::CanBurnUserPublicKeys { account_id }; + let can_burn_user_public_keys = tokens::account::CanBurnUserPublicKeys { + account_id: account_id.clone(), + }; if can_burn_user_public_keys.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't burn public keys of another account"); @@ -483,22 +366,24 @@ pub mod account { pub fn visit_mint_account_signature_check_condition( executor: &mut V, authority: &AccountId, - isi: Mint, + isi: &Mint, ) { - let account_id = isi.destination_id; + let account_id = isi.destination_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_account_owner(&account_id, authority) { + match is_account_owner(account_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_mint_user_signature_check_conditions_token = - tokens::account::CanMintUserSignatureCheckConditions { account_id }; + tokens::account::CanMintUserSignatureCheckConditions { + account_id: account_id.clone(), + }; if can_mint_user_signature_check_conditions_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -510,22 +395,24 @@ pub mod account { pub fn visit_set_account_key_value( executor: &mut V, authority: &AccountId, - isi: SetKeyValue, + isi: &SetKeyValue, ) { - let account_id = isi.object_id; + let account_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_account_owner(&account_id, authority) { + match is_account_owner(account_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_set_key_value_in_user_account_token = - tokens::account::CanSetKeyValueInUserAccount { account_id }; + tokens::account::CanSetKeyValueInUserAccount { + account_id: account_id.clone(), + }; if can_set_key_value_in_user_account_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -537,22 +424,24 @@ pub mod account { pub fn visit_remove_account_key_value( executor: &mut V, authority: &AccountId, - isi: RemoveKeyValue, + isi: &RemoveKeyValue, ) { - let account_id = isi.object_id; + let account_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_account_owner(&account_id, authority) { + match is_account_owner(account_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_remove_key_value_in_user_account_token = - tokens::account::CanRemoveKeyValueInUserAccount { account_id }; + 
tokens::account::CanRemoveKeyValueInUserAccount { + account_id: account_id.clone(), + }; if can_remove_key_value_in_user_account_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -567,27 +456,35 @@ pub mod asset_definition { use super::*; + pub fn visit_register_asset_definition( + executor: &mut V, + _authority: &AccountId, + isi: &Register, + ) { + execute!(executor, isi); + } + pub fn visit_unregister_asset_definition( executor: &mut V, authority: &AccountId, - isi: Unregister, + isi: &Unregister, ) { - let asset_definition_id = isi.object_id; + let asset_definition_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_asset_definition_owner(&asset_definition_id, authority) { + match is_asset_definition_owner(asset_definition_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_unregister_asset_definition_token = tokens::asset_definition::CanUnregisterAssetDefinition { - asset_definition_id, + asset_definition_id: asset_definition_id.clone(), }; if can_unregister_asset_definition_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -599,22 +496,22 @@ pub mod asset_definition { pub fn visit_transfer_asset_definition( executor: &mut V, authority: &AccountId, - isi: Transfer, + isi: &Transfer, ) { - let source_id = isi.source_id; - let destination_id = isi.object; + let source_id = isi.source_id(); + let destination_id = isi.object(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_account_owner(&source_id, authority) { + match is_account_owner(source_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - match is_asset_definition_owner(&destination_id, authority) { + match is_asset_definition_owner(destination_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } @@ -627,24 +524,24 @@ pub mod asset_definition { pub fn visit_set_asset_definition_key_value( executor: &mut V, authority: &AccountId, - isi: SetKeyValue, + isi: &SetKeyValue, ) { - let asset_definition_id = isi.object_id; + let asset_definition_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_asset_definition_owner(&asset_definition_id, authority) { + match is_asset_definition_owner(asset_definition_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_set_key_value_in_asset_definition_token = tokens::asset_definition::CanSetKeyValueInAssetDefinition { - asset_definition_id, + asset_definition_id: asset_definition_id.clone(), }; if can_set_key_value_in_asset_definition_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -656,24 +553,24 @@ pub mod asset_definition { pub fn visit_remove_asset_definition_key_value( executor: &mut V, authority: &AccountId, - isi: RemoveKeyValue, + isi: &RemoveKeyValue, ) { - let asset_definition_id = isi.object_id; + let asset_definition_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_asset_definition_owner(&asset_definition_id, authority) { + match is_asset_definition_owner(asset_definition_id, authority) { Err(err) => deny!(executor, 
err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_remove_key_value_in_asset_definition_token = tokens::asset_definition::CanRemoveKeyValueInAssetDefinition { - asset_definition_id, + asset_definition_id: asset_definition_id.clone(), }; if can_remove_key_value_in_asset_definition_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -684,6 +581,8 @@ pub mod asset_definition { } pub mod asset { + use iroha_smart_contract::data_model::isi::Instruction; + use iroha_smart_contract_utils::Encode; use permission::{asset::is_asset_owner, asset_definition::is_asset_definition_owner}; use super::*; @@ -691,16 +590,16 @@ pub mod asset { pub fn visit_register_asset( executor: &mut V, authority: &AccountId, - isi: Register, + isi: &Register, ) { - let asset = isi.object; + let asset = isi.object(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } match is_asset_definition_owner(asset.id().definition_id(), authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_register_assets_with_definition_token = @@ -708,7 +607,7 @@ pub mod asset { asset_definition_id: asset.id().definition_id().clone(), }; if can_register_assets_with_definition_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -720,21 +619,21 @@ pub mod asset { pub fn visit_unregister_asset( executor: &mut V, authority: &AccountId, - isi: Unregister, + isi: &Unregister, ) { - let asset_id = isi.object_id; + let asset_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_asset_owner(&asset_id, authority) { + match is_asset_owner(asset_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } match is_asset_definition_owner(asset_id.definition_id(), authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_unregister_assets_with_definition_token = @@ -742,36 +641,38 @@ pub mod asset { asset_definition_id: asset_id.definition_id().clone(), }; if can_unregister_assets_with_definition_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } - let can_unregister_user_asset_token = tokens::asset::CanUnregisterUserAsset { asset_id }; + let can_unregister_user_asset_token = tokens::asset::CanUnregisterUserAsset { + asset_id: asset_id.clone(), + }; if can_unregister_user_asset_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't unregister asset from another account"); } - pub fn visit_mint_asset( - executor: &mut V, - authority: &AccountId, - isi: Mint, - ) { - let asset_id = isi.destination_id; - + fn validate_mint_asset(executor: &mut V, authority: &AccountId, isi: &Mint) + where + V: Validate + ?Sized, + Q: Into, + Mint: Instruction + Encode + Clone, + { + let asset_id = isi.destination_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } match is_asset_definition_owner(asset_id.definition_id(), authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_mint_assets_with_definition_token = tokens::asset::CanMintAssetsWithDefinition { asset_definition_id: asset_id.definition_id().clone(), }; if 
can_mint_assets_with_definition_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -780,58 +681,111 @@ pub mod asset { ); } - pub fn visit_burn_asset( + pub fn visit_mint_asset_quantity( executor: &mut V, authority: &AccountId, - isi: Burn, + isi: &Mint, ) { - let asset_id = isi.destination_id; + validate_mint_asset(executor, authority, isi); + } + pub fn visit_mint_asset_big_quantity( + executor: &mut V, + authority: &AccountId, + isi: &Mint, + ) { + validate_mint_asset(executor, authority, isi); + } + + pub fn visit_mint_asset_fixed( + executor: &mut V, + authority: &AccountId, + isi: &Mint, + ) { + validate_mint_asset(executor, authority, isi); + } + + fn validate_burn_asset(executor: &mut V, authority: &AccountId, isi: &Burn) + where + V: Validate + ?Sized, + Q: Into, + Burn: Instruction + Encode + Clone, + { + let asset_id = isi.destination_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_asset_owner(&asset_id, authority) { + match is_asset_owner(asset_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } match is_asset_definition_owner(asset_id.definition_id(), authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_burn_assets_with_definition_token = tokens::asset::CanBurnAssetsWithDefinition { asset_definition_id: asset_id.definition_id().clone(), }; if can_burn_assets_with_definition_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } - let can_burn_user_asset_token = tokens::asset::CanBurnUserAsset { asset_id }; + let can_burn_user_asset_token = tokens::asset::CanBurnUserAsset { + asset_id: asset_id.clone(), + }; if can_burn_user_asset_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't burn assets from another account"); } - pub fn visit_transfer_asset( + pub fn visit_burn_asset_quantity( + executor: &mut V, + authority: &AccountId, + isi: &Burn, + ) { + validate_burn_asset(executor, authority, isi); + } + + pub fn visit_burn_asset_big_quantity( + executor: &mut V, + authority: &AccountId, + isi: &Burn, + ) { + validate_burn_asset(executor, authority, isi); + } + + pub fn visit_burn_asset_fixed( executor: &mut V, authority: &AccountId, - isi: Transfer, + isi: &Burn, ) { - let asset_id = isi.source_id; + validate_burn_asset(executor, authority, isi); + } + fn validate_transfer_asset( + executor: &mut V, + authority: &AccountId, + isi: &Transfer, + ) where + V: Validate + ?Sized, + Q: Into, + Transfer: Instruction + Encode + Clone, + { + let asset_id = isi.source_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_asset_owner(&asset_id, authority) { + match is_asset_owner(asset_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } match is_asset_definition_owner(asset_id.definition_id(), authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_transfer_assets_with_definition_token = @@ -839,36 +793,63 @@ pub mod asset { asset_definition_id: asset_id.definition_id().clone(), }; if can_transfer_assets_with_definition_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } - let can_transfer_user_asset_token = 
tokens::asset::CanTransferUserAsset { asset_id }; + let can_transfer_user_asset_token = tokens::asset::CanTransferUserAsset { + asset_id: asset_id.clone(), + }; if can_transfer_user_asset_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't transfer assets of another account"); } + pub fn visit_transfer_asset_quantity( + executor: &mut V, + authority: &AccountId, + isi: &Transfer, + ) { + validate_transfer_asset(executor, authority, isi); + } + + pub fn visit_transfer_asset_big_quantity( + executor: &mut V, + authority: &AccountId, + isi: &Transfer, + ) { + validate_transfer_asset(executor, authority, isi); + } + + pub fn visit_transfer_asset_fixed( + executor: &mut V, + authority: &AccountId, + isi: &Transfer, + ) { + validate_transfer_asset(executor, authority, isi); + } + pub fn visit_set_asset_key_value( executor: &mut V, authority: &AccountId, - isi: SetKeyValue, + isi: &SetKeyValue, ) { - let asset_id = isi.object_id; + let asset_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_asset_owner(&asset_id, authority) { + match is_asset_owner(asset_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_set_key_value_in_user_asset_token = - tokens::asset::CanSetKeyValueInUserAsset { asset_id }; + let can_set_key_value_in_user_asset_token = tokens::asset::CanSetKeyValueInUserAsset { + asset_id: asset_id.clone(), + }; if can_set_key_value_in_user_asset_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -880,22 +861,24 @@ pub mod asset { pub fn visit_remove_asset_key_value( executor: &mut V, authority: &AccountId, - isi: RemoveKeyValue, + isi: &RemoveKeyValue, ) { - let asset_id = isi.object_id; + let asset_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_asset_owner(&asset_id, authority) { + match is_asset_owner(asset_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } let can_remove_key_value_in_user_asset_token = - tokens::asset::CanRemoveKeyValueInUserAsset { asset_id }; + tokens::asset::CanRemoveKeyValueInUserAsset { + asset_id: asset_id.clone(), + }; if can_remove_key_value_in_user_asset_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -912,13 +895,13 @@ pub mod parameter { pub fn visit_new_parameter( executor: &mut V, authority: &AccountId, - _isi: NewParameter, + isi: &NewParameter, ) { if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } if tokens::parameter::CanCreateParameters.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -931,13 +914,13 @@ pub mod parameter { pub fn visit_set_parameter( executor: &mut V, authority: &AccountId, - _isi: SetParameter, + isi: &SetParameter, ) { if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } if tokens::parameter::CanSetParameters.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -952,9 +935,9 @@ pub mod role { macro_rules! 
impl_validate { ($executor:ident, $isi:ident, $authority:ident, $method:ident) => { - let role_id = $isi.object; + let role_id = $isi.object(); - let find_role_query_res = match FindRoleByRoleId::new(role_id).execute() { + let find_role_query_res = match FindRoleByRoleId::new(role_id.clone()).execute() { Ok(res) => res.into_raw_parts().0, Err(error) => { deny!($executor, error); @@ -986,6 +969,7 @@ pub mod role { } assert!(unknown_tokens.is_empty(), "Role contains unknown permission tokens: {unknown_tokens:?}"); + execute!($executor, $isi) }; } @@ -993,9 +977,9 @@ pub mod role { pub fn visit_register_role( executor: &mut V, _authority: &AccountId, - isi: Register, + isi: &Register, ) { - let role = isi.object.inner(); + let role = isi.object().inner(); let mut unknown_tokens = Vec::new(); for token in role.permissions() { @@ -1021,20 +1005,20 @@ pub mod role { ); } - pass!(executor); + execute!(executor, isi); } #[allow(clippy::needless_pass_by_value)] pub fn visit_unregister_role( executor: &mut V, authority: &AccountId, - _isi: Unregister, + isi: &Unregister, ) { if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } if tokens::role::CanUnregisterAnyRole.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't unregister role"); @@ -1043,7 +1027,7 @@ pub mod role { pub fn visit_grant_account_role( executor: &mut V, authority: &AccountId, - isi: Grant, + isi: &Grant, ) { impl_validate!(executor, isi, authority, validate_grant); } @@ -1051,7 +1035,7 @@ pub mod role { pub fn visit_revoke_account_role( executor: &mut V, authority: &AccountId, - isi: Revoke, + isi: &Revoke, ) { impl_validate!(executor, isi, authority, validate_revoke); } @@ -1062,25 +1046,34 @@ pub mod trigger { use super::*; + pub fn visit_register_trigger( + executor: &mut V, + _authority: &AccountId, + isi: &Register>, + ) { + execute!(executor, isi) + } + pub fn visit_unregister_trigger( executor: &mut V, authority: &AccountId, - isi: Unregister>, + isi: &Unregister>, ) { - let trigger_id = isi.object_id; + let trigger_id = isi.object_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_trigger_owner(&trigger_id, authority) { + match is_trigger_owner(trigger_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_unregister_user_trigger_token = - tokens::trigger::CanUnregisterUserTrigger { trigger_id }; + let can_unregister_user_trigger_token = tokens::trigger::CanUnregisterUserTrigger { + trigger_id: trigger_id.clone(), + }; if can_unregister_user_trigger_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -1092,21 +1085,23 @@ pub mod trigger { pub fn visit_mint_trigger_repetitions( executor: &mut V, authority: &AccountId, - isi: Mint>, + isi: &Mint>, ) { - let trigger_id = isi.destination_id; + let trigger_id = isi.destination_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_trigger_owner(&trigger_id, authority) { + match is_trigger_owner(trigger_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_mint_user_trigger_token = tokens::trigger::CanMintUserTrigger { trigger_id }; + let can_mint_user_trigger_token = tokens::trigger::CanMintUserTrigger { + trigger_id: trigger_id.clone(), + }; if can_mint_user_trigger_token.is_owned_by(authority) { - pass!(executor); + 
execute!(executor, isi); } deny!( @@ -1118,21 +1113,23 @@ pub mod trigger { pub fn visit_burn_trigger_repetitions( executor: &mut V, authority: &AccountId, - isi: Burn>, + isi: &Burn>, ) { - let trigger_id = isi.destination_id; + let trigger_id = isi.destination_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_trigger_owner(&trigger_id, authority) { + match is_trigger_owner(trigger_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_mint_user_trigger_token = tokens::trigger::CanBurnUserTrigger { trigger_id }; + let can_mint_user_trigger_token = tokens::trigger::CanBurnUserTrigger { + trigger_id: trigger_id.clone(), + }; if can_mint_user_trigger_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!( @@ -1144,21 +1141,23 @@ pub mod trigger { pub fn visit_execute_trigger( executor: &mut V, authority: &AccountId, - isi: ExecuteTrigger, + isi: &ExecuteTrigger, ) { - let trigger_id = isi.trigger_id; + let trigger_id = isi.trigger_id(); if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } - match is_trigger_owner(&trigger_id, authority) { + match is_trigger_owner(trigger_id, authority) { Err(err) => deny!(executor, err), - Ok(true) => pass!(executor), + Ok(true) => execute!(executor, isi), Ok(false) => {} } - let can_execute_trigger_token = tokens::trigger::CanExecuteUserTrigger { trigger_id }; + let can_execute_trigger_token = tokens::trigger::CanExecuteUserTrigger { + trigger_id: trigger_id.clone(), + }; if can_execute_trigger_token.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't execute trigger owned by another account"); @@ -1169,13 +1168,14 @@ pub mod permission_token { use super::*; macro_rules! impl_validate { - ($executor:ident, $authority:ident, $self:ident, $method:ident) => { - let token = $self.object; + ($executor:ident, $authority:ident, $isi:ident, $method:ident) => { + // TODO: https://github.com/hyperledger/iroha/issues/4082 + let token = $isi.object().clone(); macro_rules! 
visit_internal { ($token:ident) => { if is_genesis($executor) { - pass!($executor); + execute!($executor, $isi); } if let Err(error) = permission::ValidateGrantRevoke::$method( &$token, @@ -1185,7 +1185,7 @@ pub mod permission_token { deny!($executor, error); } - pass!($executor); + execute!($executor, $isi); }; } @@ -1201,7 +1201,7 @@ pub mod permission_token { pub fn visit_grant_account_permission( executor: &mut V, authority: &AccountId, - isi: Grant, + isi: &Grant, ) { impl_validate!(executor, authority, isi, validate_grant); } @@ -1209,7 +1209,7 @@ pub mod permission_token { pub fn visit_revoke_account_permission( executor: &mut V, authority: &AccountId, - isi: Revoke, + isi: &Revoke, ) { impl_validate!(executor, authority, isi, validate_revoke); } @@ -1219,22 +1219,38 @@ pub mod executor { use super::*; #[allow(clippy::needless_pass_by_value)] - pub fn visit_upgrade_executor( + pub fn visit_upgrade( executor: &mut V, authority: &AccountId, - _isi: Upgrade, + isi: &Upgrade, ) { if is_genesis(executor) { - pass!(executor); + execute!(executor, isi); } if tokens::executor::CanUpgradeExecutor.is_owned_by(authority) { - pass!(executor); + execute!(executor, isi); } deny!(executor, "Can't upgrade executor"); } } +pub mod log { + use super::*; + + pub fn visit_log(executor: &mut V, _authority: &AccountId, isi: &Log) { + execute!(executor, isi) + } +} + +pub mod fail { + use super::*; + + pub fn visit_fail(executor: &mut V, _authority: &AccountId, isi: &Fail) { + execute!(executor, isi) + } +} + fn is_genesis(executor: &V) -> bool { executor.block_height() == 0 } diff --git a/smart_contract/executor/src/lib.rs b/smart_contract/executor/src/lib.rs index 8b9a7403a52..ef953f78f14 100644 --- a/smart_contract/executor/src/lib.rs +++ b/smart_contract/executor/src/lib.rs @@ -49,7 +49,7 @@ pub fn get_validate_transaction_payload() -> payloads::Validate payloads::Validate { +pub fn get_validate_instruction_payload() -> payloads::Validate { // Safety: ownership of the returned result is transferred into `_decode_from_raw` unsafe { decode_with_length_prefix_from_raw(host::get_validate_instruction_payload()) } } @@ -131,20 +131,23 @@ mod host { } } -/// Shortcut for `return Ok(())`. +/// Execute instruction if verdict is [`Ok`], deny if execution failed and return. +/// +/// Convention is that you have no checks left if you decided to execute instruction. #[macro_export] -macro_rules! pass { - ($executor:ident) => {{ - #[cfg(debug_assertions)] - if let Err(_error) = $executor.verdict() { - unreachable!("Executor already denied"); +macro_rules! execute { + ($executor:ident, $isi:ident) => {{ + if $executor.verdict().is_ok() { + if let Err(err) = $isi.execute() { + $executor.deny(err); + } } return; }}; } -/// Shortcut for `return Err(ValidationFail)`. +/// Shortcut for setting verdict to [`Err`] and return. /// /// Supports [`format!`](alloc::fmt::format) syntax as well as any expression returning [`String`](alloc::string::String). 
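Taken together, the executor changes above settle on one shape for every `visit_*` handler: each check that succeeds invokes `execute!`, which runs the instruction on the host and returns, while control that falls off the end of the function reaches `deny!`. The following is a minimal sketch of a custom visitor written against that convention; the `Unregister<Domain>` instruction and the `is_domain_owner` helper are illustrative assumptions, not part of this diff.

```rust
use iroha_executor::prelude::*;

// Sketch only: the check order mirrors the visitors above -- genesis first,
// then ownership, then permission tokens. The first branch that succeeds
// executes the instruction and returns; otherwise we fall through to `deny!`.
pub fn visit_unregister_domain<V: Validate + ?Sized>(
    executor: &mut V,
    authority: &AccountId,
    isi: &Unregister<Domain>,
) {
    if is_genesis(executor) {
        execute!(executor, isi);
    }
    // Assumed helper, analogous to `is_asset_owner`/`is_trigger_owner` above.
    match is_domain_owner(isi.object_id(), authority) {
        Err(err) => deny!(executor, err),
        Ok(true) => execute!(executor, isi),
        Ok(false) => {}
    }
    deny!(executor, "Can't unregister domain owned by another account");
}
```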
#[macro_export] @@ -219,10 +222,9 @@ pub mod prelude { pub use alloc::vec::Vec; pub use iroha_executor_derive::{ - entrypoint, Constructor, ExpressionEvaluator, Token, Validate, ValidateEntrypoints, - ValidateGrantRevoke, Visit, + entrypoint, Constructor, Token, Validate, ValidateEntrypoints, ValidateGrantRevoke, Visit, }; - pub use iroha_smart_contract::{prelude::*, Context}; + pub use iroha_smart_contract::prelude::*; pub use super::{ data_model::{ @@ -230,6 +232,6 @@ pub mod prelude { visit::Visit, ValidationFail, }, - deny, pass, PermissionTokenSchema, Validate, + deny, execute, PermissionTokenSchema, Validate, }; } diff --git a/smart_contract/src/lib.rs b/smart_contract/src/lib.rs index 45232f9cdfb..db86863330d 100644 --- a/smart_contract/src/lib.rs +++ b/smart_contract/src/lib.rs @@ -4,14 +4,14 @@ extern crate alloc; -use alloc::{boxed::Box, collections::BTreeMap, vec::Vec}; +use alloc::{boxed::Box, vec::Vec}; #[cfg(not(test))] use data_model::smart_contract::payloads; use data_model::{ isi::Instruction, prelude::*, - query::{cursor::ForwardCursor, sorting::Sorting, Pagination, Query, QueryBox}, + query::{cursor::ForwardCursor, sorting::Sorting, Pagination, Query}, smart_contract::SmartContractQueryRequest, BatchedResponse, }; @@ -88,7 +88,7 @@ impl ExecuteOnHost for I { use tests::_iroha_smart_contract_execute_instruction_mock as host_execute_instruction; // TODO: Redundant conversion into `InstructionExpr` - let isi_box: InstructionExpr = self.clone().into(); + let isi_box: InstructionBox = self.clone().into(); // Safety: `host_execute_instruction` doesn't take ownership of its pointer parameter unsafe { decode_with_length_prefix_from_raw(encode_and_execute( @@ -351,7 +351,7 @@ impl> Iterator for QueryOutputCursorIterator { let mut next_iter = match self.next_batch() { Ok(next_iter) => next_iter, Err(QueryOutputCursorError::Validation(ValidationFail::QueryFailed( - iroha_data_model::query::error::QueryExecutionFail::UnknownCursor, + data_model::query::error::QueryExecutionFail::UnknownCursor, ))) => return None, Err(err) => return Some(Err(err)), }; @@ -374,52 +374,6 @@ pub enum QueryOutputCursorError { #[derive(Debug, Clone, Copy)] pub struct Host; -impl iroha_data_model::evaluate::ExpressionEvaluator for Host { - fn evaluate( - &self, - expression: &E, - ) -> Result { - expression.evaluate(&Context::new()) - } -} - -/// Context of expression evaluation -#[derive(Clone, Default)] -#[repr(transparent)] -pub struct Context { - values: BTreeMap, -} - -impl Context { - /// Create new [`Self`] - pub fn new() -> Self { - Self { - values: BTreeMap::new(), - } - } -} - -impl iroha_data_model::evaluate::Context for Context { - fn query(&self, query: &QueryBox) -> Result { - let value_cursor = query.clone().execute()?; - match value_cursor.collect() { - Ok(value) => Ok(value), - Err(QueryOutputCursorError::Validation(err)) => Err(err), - Err(QueryOutputCursorError::Conversion(err)) => { - panic!("Conversion error during collecting query result: {err:?}") - } - } - } - - fn get(&self, name: &Name) -> Option<&Value> { - self.values.get(name) - } - - fn update(&mut self, other: impl IntoIterator) { - self.values.extend(other) - } -} -
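With the expression machinery gone from `smart_contract/src/lib.rs`, an instruction is now a plain value that a contract executes directly on the host through `ExecuteOnHost`. A rough sketch of the resulting call site, reusing the `mad_hatter@wonderland` fixture from the tests below (the function name and error handling are illustrative, and `ExecuteOnHost` is assumed to be re-exported by the prelude):

```rust
use iroha_smart_contract::prelude::*;

// No more `RegisterExpr`/`EvaluatesTo` wrappers: build the instruction
// directly and execute it; the executor's verdict comes back as a Result.
fn register_mad_hatter() {
    let account_id: AccountId = "mad_hatter@wonderland".parse().expect("valid account id");
    let register = Register::account(Account::new(account_id, []));
    register.execute().expect("executor denied the registration");
}
```

/// Get payload for smart contract `main()` entrypoint.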
#[cfg(not(test))] pub fn get_smart_contract_payload() -> payloads::SmartContract { @@ -481,11 +435,10 @@ mod tests { cursor: ForwardCursor::new(None, None), }); const ISI_RESULT: Result<(), ValidationFail> = Ok(()); - const EXPRESSION_RESULT: NumericValue = NumericValue::U32(5_u32); - fn get_test_instruction() -> InstructionExpr { + fn get_test_instruction() -> InstructionBox { let new_account_id = "mad_hatter@wonderland".parse().expect("Valid"); - let register_isi = RegisterExpr::new(Account::new(new_account_id, [])); + let register_isi = Register::account(Account::new(new_account_id, [])); register_isi.into() } @@ -495,17 +448,13 @@ mod tests { FindAssetQuantityById::new(asset_id).into() } - fn get_test_expression() -> EvaluatesTo { - Add::new(2_u32, 3_u32).into() - } - #[no_mangle] pub unsafe extern "C" fn _iroha_smart_contract_execute_instruction_mock( ptr: *const u8, len: usize, ) -> *const u8 { let bytes = slice::from_raw_parts(ptr, len); - let instruction = InstructionExpr::decode_all(&mut &*bytes); + let instruction = InstructionBox::decode_all(&mut &*bytes); assert_eq!(get_test_instruction(), instruction.unwrap()); ManuallyDrop::new(encode_with_length_prefix(&ISI_RESULT)).as_ptr() @@ -538,12 +487,4 @@ mod tests { fn execute_query() { assert_eq!(get_test_query().execute(), QUERY_RESULT); } - - #[webassembly_test] - fn evaluate_expression() { - assert_eq!( - get_test_expression().evaluate(&Context::new()), - Ok(EXPRESSION_RESULT) - ); - } } diff --git a/smart_contract/trigger/derive/Cargo.toml b/smart_contract/trigger/derive/Cargo.toml index 486eaa75ad7..b2c4d84a6b4 100644 --- a/smart_contract/trigger/derive/Cargo.toml +++ b/smart_contract/trigger/derive/Cargo.toml @@ -15,6 +15,10 @@ workspace = true proc-macro = true [dependencies] -syn.workspace = true -quote.workspace = true -proc-macro2.workspace = true +iroha_macro_utils = { workspace = true } + +syn2 = { workspace = true } +manyhow = { workspace = true } +quote = { workspace = true } +proc-macro2 = { workspace = true } +darling = { workspace = true } diff --git a/smart_contract/trigger/derive/src/entrypoint.rs b/smart_contract/trigger/derive/src/entrypoint.rs index e045d262b5a..57a5b467e93 100644 --- a/smart_contract/trigger/derive/src/entrypoint.rs +++ b/smart_contract/trigger/derive/src/entrypoint.rs @@ -1,6 +1,10 @@ //! 
Module with [`main`](super::main) macro implementation -use super::*; +use iroha_macro_utils::Emitter; +use manyhow::emit; +use proc_macro2::TokenStream; +use quote::quote; +use syn2::parse_quote; mod export { pub const TRIGGER_MAIN: &str = "_iroha_trigger_main"; } /// [`main`](super::main()) macro implementation #[allow(clippy::needless_pass_by_value)] -pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { - let syn::ItemFn { +pub fn impl_entrypoint(emitter: &mut Emitter, item: syn2::ItemFn) -> TokenStream { + let syn2::ItemFn { attrs, vis, sig, mut block, - } = parse_macro_input!(item); - - assert!( - syn::ReturnType::Default == sig.output, - "Trigger `main()` function must not have a return type" - ); + } = item; + + if sig.output != syn2::ReturnType::Default { + emit!( + emitter, + sig.output, + "Trigger `main()` function must not have a return type" + ) + } let fn_name = &sig.ident; @@ -32,7 +39,7 @@ pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { ), ); - let main_fn_name = syn::Ident::new(export::TRIGGER_MAIN, proc_macro2::Span::call_site()); + let main_fn_name = syn2::Ident::new(export::TRIGGER_MAIN, proc_macro2::Span::call_site()); quote! { /// Smart contract entrypoint @@ -50,5 +57,4 @@ pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { #vis #sig #block } - .into() } diff --git a/smart_contract/trigger/derive/src/lib.rs b/smart_contract/trigger/derive/src/lib.rs index 71a0fb417c6..01701a708e9 100644 --- a/smart_contract/trigger/derive/src/lib.rs +++ b/smart_contract/trigger/derive/src/lib.rs @@ -1,8 +1,8 @@ //! Crate with trigger procedural macros. -use proc_macro::TokenStream; -use quote::quote; -use syn::{parse_macro_input, parse_quote}; +use iroha_macro_utils::Emitter; +use manyhow::{emit, manyhow}; +use proc_macro2::TokenStream; mod entrypoint; @@ -22,7 +22,20 @@ mod entrypoint; /// todo!() /// } /// ``` +#[manyhow] #[proc_macro_attribute] pub fn main(attr: TokenStream, item: TokenStream) -> TokenStream { - entrypoint::impl_entrypoint(attr, item) + let mut emitter = Emitter::new(); + + if !attr.is_empty() { + emit!(emitter, "#[main] attribute does not accept arguments"); + } + + let Some(item) = emitter.handle(syn2::parse2(item)) else { + return emitter.finish_token_stream(); + }; + + let result = entrypoint::impl_entrypoint(&mut emitter, item); + + emitter.finish_token_stream_with(result) } diff --git a/smart_contract/utils/src/lib.rs b/smart_contract/utils/src/lib.rs index ec9f70a242e..5e0919b095a 100644 --- a/smart_contract/utils/src/lib.rs +++ b/smart_contract/utils/src/lib.rs @@ -7,7 +7,7 @@ extern crate alloc; use alloc::{boxed::Box, format, vec::Vec}; use core::ops::RangeFrom; -use parity_scale_codec::{DecodeAll, Encode}; +pub use parity_scale_codec::{DecodeAll, Encode}; pub mod debug; pub mod log; diff --git a/telemetry/Cargo.toml b/telemetry/Cargo.toml index 4289c80e87d..e1fb573541b 100644 --- a/telemetry/Cargo.toml +++ b/telemetry/Cargo.toml @@ -31,7 +31,7 @@ serde_json = { workspace = true } streaming-stats = "0.2.3" serde = { workspace = true, features = ["derive"] } tokio = { workspace = true, features = ["rt", "rt-multi-thread", "macros"] } -tokio-stream = { workspace = true, features = ["fs"] } +tokio-stream = { workspace = true, features = ["fs", "sync"] } tokio-tungstenite = { workspace = true } url = { workspace = true, features = ["serde"] } prometheus = { workspace = true } diff --git a/telemetry/src/dev.rs b/telemetry/src/dev.rs index
d4970f7c653..674b7bca748 100644 --- a/telemetry/src/dev.rs +++ b/telemetry/src/dev.rs @@ -1,31 +1,26 @@ //! Module with development telemetry use eyre::{Result, WrapErr}; -use iroha_logger::telemetry::Telemetry; +use iroha_config::telemetry::DevTelemetryConfig; +use iroha_logger::telemetry::Event as Telemetry; use tokio::{ fs::OpenOptions, io::AsyncWriteExt, - sync::mpsc::Receiver, + sync::broadcast::Receiver, task::{self, JoinHandle}, }; -use tokio_stream::{wrappers::ReceiverStream, StreamExt}; - -use crate::Configuration; +use tokio_stream::{wrappers::BroadcastStream, StreamExt}; /// Starts telemetry writing to a file /// # Errors /// Fails if unable to open the file pub async fn start( - config: &Configuration, + DevTelemetryConfig { + file: telemetry_file, + }: DevTelemetryConfig, telemetry: Receiver, ) -> Result> { - let mut telemetry = crate::futures::get_stream(ReceiverStream::new(telemetry)); - - let Some(telemetry_file) = &config.file else { - return Ok(task::spawn(async move { - while telemetry.next().await.is_some() {} - })); - }; + let mut stream = crate::futures::get_stream(BroadcastStream::new(telemetry).fuse()); let mut file = OpenOptions::new() .write(true) @@ -40,11 +35,11 @@ pub async fn start( .wrap_err("Failed to create and open file for telemetry")?; // Serde doesn't support async Read Write traits. - // So let synchonous synchronous code be here. + // So let synchronous code be here. // // TODO: After migration to tokio move to https://docs.rs/tokio-serde let join_handle = task::spawn(async move { - while let Some(item) = telemetry.next().await { + while let Some(item) = stream.next().await { let telemetry_json = match serde_json::to_string(&item) { Ok(json) => json, Err(error) => { diff --git a/telemetry/src/futures.rs b/telemetry/src/futures.rs index df723f38406..5f4cb834d22 100644 --- a/telemetry/src/futures.rs +++ b/telemetry/src/futures.rs @@ -2,10 +2,10 @@ use std::{collections::HashMap, marker::Unpin, time::Duration}; use iroha_futures::FuturePollTelemetry; -use iroha_logger::telemetry::Telemetry; +use iroha_logger::telemetry::Event as Telemetry; use serde::{Deserialize, Serialize}; use tokio::time; -use tokio_stream::{Stream, StreamExt}; +use tokio_stream::{wrappers::errors::BroadcastStreamRecvError, Stream, StreamExt}; pub mod post_process { //! 
Module with telemetry post processing @@ -80,9 +80,10 @@ pub mod post_process { /// Gets stream of future poll telemetry out of general telemetry stream pub fn get_stream( - receiver: impl Stream + Unpin, + receiver: impl Stream> + Unpin, ) -> impl Stream + Unpin { receiver + .filter_map(Result::ok) .map(FuturePollTelemetry::try_from) .filter_map(Result::ok) .map( diff --git a/telemetry/src/metrics.rs b/telemetry/src/metrics.rs index 1043cbb9954..ad4f7744750 100644 --- a/telemetry/src/metrics.rs +++ b/telemetry/src/metrics.rs @@ -35,10 +35,10 @@ impl Encode for Uptime { /// Response body for GET status request #[derive(Clone, Copy, Debug, Default, Deserialize, Serialize, Encode)] pub struct Status { - /// Number of connected peers, except for the reporting peer itself + /// Number of currently connected peers excluding the reporting peer #[codec(compact)] pub peers: u64, - /// Number of committed blocks + /// Number of committed blocks (blockchain height) #[codec(compact)] pub blocks: u64, /// Number of accepted transactions @@ -77,9 +77,9 @@ impl> From<&T> for Status { pub struct Metrics { /// Total number of transactions pub txs: IntCounterVec, - /// Current block height + /// Number of committed blocks (blockchain height) pub block_height: IntCounter, - /// Total number of currently connected peers + /// Number of currently connected peers excluding the reporting peer pub connected_peers: GenericGauge, /// Uptime of the network, starting from commit of the genesis block pub uptime_since_genesis_ms: GenericGauge, diff --git a/telemetry/src/ws.rs b/telemetry/src/ws.rs index 700860f2de8..c8f1486e76c 100644 --- a/telemetry/src/ws.rs +++ b/telemetry/src/ws.rs @@ -4,13 +4,15 @@ use std::time::Duration; use chrono::Local; use eyre::{eyre, Result}; use futures::{stream::SplitSink, Sink, SinkExt, StreamExt}; -use iroha_logger::Telemetry; +use iroha_config::telemetry::RegularTelemetryConfig; +use iroha_logger::telemetry::Event as Telemetry; use serde_json::Map; use tokio::{ net::TcpStream, - sync::mpsc::{self, Receiver, Sender}, + sync::{broadcast, mpsc}, + task::JoinHandle, }; -use tokio_stream::wrappers::ReceiverStream; +use tokio_stream::wrappers::{BroadcastStream, ReceiverStream}; use tokio_tungstenite::{ tungstenite::{Error, Message}, MaybeTlsStream, WebSocketStream, @@ -21,36 +23,43 @@ use crate::retry_period::RetryPeriod; type WebSocketSplitSink = SplitSink>, Message>; +const INTERNAL_CHANNEL_CAPACITY: usize = 10; + /// Starts telemetry sending data to a server /// # Errors /// Fails if unable to connect to the server -pub async fn start(config: &crate::Configuration, telemetry: Receiver) -> Result { - if let (Some(name), Some(url)) = (&config.name, &config.url) { - iroha_logger::info!(%url, "Starting telemetry"); - let (ws, _) = tokio_tungstenite::connect_async(url).await?; - let (write, _read) = ws.split(); - let (internal_sender, internal_receiver) = mpsc::channel(10); - let client = Client::new( - name.clone(), - write, - WebsocketSinkFactory::new(url.clone()), - RetryPeriod::new(config.min_retry_period, config.max_retry_delay_exponent), - internal_sender, - ); - tokio::task::spawn(async move { - client.run(telemetry, internal_receiver).await; - }); - Ok(true) - } else { - Ok(false) - } +pub async fn start( + RegularTelemetryConfig { + name, + url, + max_retry_delay_exponent, + min_retry_period, + }: RegularTelemetryConfig, + telemetry: broadcast::Receiver, +) -> Result> { + iroha_logger::info!(%url, "Starting telemetry"); + let (ws, _) = tokio_tungstenite::connect_async(&url).await?; 
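// Sketch: with the move from `mpsc` to `broadcast`, one logger-side sender
// can fan out to several telemetry consumers at once, e.g.
//
//     let (tx, _) = tokio::sync::broadcast::channel(100);
//     let ws_handle = ws::start(regular_config, tx.subscribe()).await?;
//     let dev_handle = dev::start(dev_config, tx.subscribe()).await?;
//
// (`regular_config` and `dev_config` are assumed to be built elsewhere.)
// A subscriber that lags yields `Err` items from `BroadcastStream`; the
// `filter_map(Result::ok)` in `futures::get_stream` above simply drops them.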
+ let (write, _read) = ws.split(); + let (internal_sender, internal_receiver) = mpsc::channel(INTERNAL_CHANNEL_CAPACITY); + let client = Client::new( + name, + write, + WebsocketSinkFactory::new(url), + RetryPeriod::new(min_retry_period, max_retry_delay_exponent), + internal_sender, + ); + let handle = tokio::task::spawn(async move { + client.run(telemetry, internal_receiver).await; + }); + + Ok(handle) } struct Client { name: String, sink_factory: F, retry_period: RetryPeriod, - internal_sender: Sender, + internal_sender: mpsc::Sender, sink: Option, init_msg: Option, } @@ -65,7 +74,7 @@ where sink: S, sink_factory: F, retry_period: RetryPeriod, - internal_sender: Sender, + internal_sender: mpsc::Sender, ) -> Self { Self { name, @@ -79,15 +88,15 @@ where pub async fn run( mut self, - receiver: Receiver, - internal_receiver: Receiver, + receiver: broadcast::Receiver, + internal_receiver: mpsc::Receiver, ) { - let mut stream = ReceiverStream::new(receiver).fuse(); + let mut stream = BroadcastStream::new(receiver).fuse(); let mut internal_stream = ReceiverStream::new(internal_receiver).fuse(); loop { tokio::select! { msg = stream.next() => { - if let Some(msg) = msg { + if let Some(Ok(msg)) = msg { self.on_telemetry(msg).await; } else { break; @@ -272,8 +281,7 @@ mod tests { use eyre::{eyre, Result}; use futures::{Sink, StreamExt}; - use iroha_config::base::proxy::Builder; - use iroha_logger::telemetry::{Telemetry, TelemetryFields}; + use iroha_logger::telemetry::{Event, Fields}; use serde_json::{Map, Value}; use tokio::task::JoinHandle; use tokio_tungstenite::tungstenite::{Error, Message}; @@ -356,13 +364,13 @@ mod tests { struct Suite { fail_send: Arc, fail_factory_create: Arc, - telemetry_sender: tokio::sync::mpsc::Sender, + telemetry_sender: tokio::sync::broadcast::Sender, message_receiver: futures::channel::mpsc::Receiver, } impl Suite { pub fn new() -> (Self, JoinHandle<()>) { - let (telemetry_sender, telemetry_receiver) = tokio::sync::mpsc::channel(100); + let (telemetry_sender, telemetry_receiver) = tokio::sync::broadcast::channel(100); let (message_sender, message_receiver) = futures::channel::mpsc::channel(100); let fail_send = Arc::new(AtomicBool::new(false)); let message_sender = { @@ -402,10 +410,10 @@ mod tests { } } - fn system_connected_telemetry() -> Telemetry { - Telemetry { + fn system_connected_telemetry() -> Event { + Event { target: "telemetry::test", - fields: TelemetryFields(vec![ + fields: Fields(vec![ ("msg", Value::String("system.connected".to_owned())), ( "genesis_hash", @@ -415,10 +423,10 @@ mod tests { } } - fn system_interval_telemetry(peers: u64) -> Telemetry { - Telemetry { + fn system_interval_telemetry(peers: u64) -> Event { + Event { target: "telemetry::test", - fields: TelemetryFields(vec![ + fields: Fields(vec![ ("msg", Value::String("system.interval".to_owned())), ("peers", Value::Number(peers.into())), ]), @@ -433,10 +441,7 @@ mod tests { } = suite; // The first message is `initialization` - telemetry_sender - .send(system_connected_telemetry()) - .await - .unwrap(); + telemetry_sender.send(system_connected_telemetry()).unwrap(); tokio::time::sleep(Duration::from_millis(100)).await; { let msg = message_receiver.next().await.unwrap(); @@ -467,10 +472,7 @@ mod tests { } // The second message is `update` - telemetry_sender - .send(system_interval_telemetry(2)) - .await - .unwrap(); + telemetry_sender.send(system_interval_telemetry(2)).unwrap(); tokio::time::sleep(Duration::from_millis(100)).await; { let msg = message_receiver.next().await.unwrap(); @@ 
-500,19 +502,13 @@ mod tests { // Fail sending the first message fail_send.store(true, Ordering::SeqCst); - telemetry_sender - .send(system_connected_telemetry()) - .await - .unwrap(); + telemetry_sender.send(system_connected_telemetry()).unwrap(); message_receiver.try_next().unwrap_err(); tokio::time::sleep(Duration::from_millis(100)).await; // The second message is not sent because the sink is reset fail_send.store(false, Ordering::SeqCst); - telemetry_sender - .send(system_interval_telemetry(1)) - .await - .unwrap(); + telemetry_sender.send(system_interval_telemetry(1)).unwrap(); message_receiver.try_next().unwrap_err(); tokio::time::sleep(Duration::from_millis(100)).await; @@ -521,10 +517,7 @@ mod tests { tokio::time::sleep(Duration::from_secs(1)).await; // The third message is not sent because the sink is not created yet - telemetry_sender - .send(system_interval_telemetry(1)) - .await - .unwrap(); + telemetry_sender.send(system_interval_telemetry(1)).unwrap(); message_receiver.try_next().unwrap_err(); } @@ -538,19 +531,13 @@ mod tests { // Fail sending the first message fail_send.store(true, Ordering::SeqCst); - telemetry_sender - .send(system_connected_telemetry()) - .await - .unwrap(); + telemetry_sender.send(system_connected_telemetry()).unwrap(); message_receiver.try_next().unwrap_err(); tokio::time::sleep(Duration::from_millis(100)).await; // The second message is not sent because the sink is reset fail_send.store(false, Ordering::SeqCst); - telemetry_sender - .send(system_interval_telemetry(1)) - .await - .unwrap(); + telemetry_sender.send(system_interval_telemetry(1)).unwrap(); message_receiver.try_next().unwrap_err(); tokio::time::sleep(Duration::from_millis(100)).await; @@ -569,12 +556,6 @@ mod tests { ($ident:ident, $future:ident) => { #[tokio::test] async fn $ident() { - iroha_logger::init( - &iroha_logger::ConfigurationProxy::default() - .build() - .expect("Default logger config should always build"), - ) - .unwrap(); let (suite, run_handle) = Suite::new(); $future(suite).await; run_handle.await.unwrap(); diff --git a/tools/kagami/src/docs.rs b/tools/kagami/src/docs.rs deleted file mode 100644 index 737959c5aef..00000000000 --- a/tools/kagami/src/docs.rs +++ /dev/null @@ -1,130 +0,0 @@ -use std::{fmt::Debug, io::Write}; - -use color_eyre::eyre::WrapErr as _; -use iroha_config::{base::proxy::Documented, iroha::ConfigurationProxy}; -use serde_json::Value; - -use super::*; - -impl + Send + Sync + Default> PrintDocs for C {} - -#[derive(ClapArgs, Debug, Clone, Copy)] -pub struct Args; - -impl RunArgs for Args { - fn run(self, writer: &mut BufWriter) -> crate::Outcome { - ConfigurationProxy::get_markdown(writer).wrap_err("Failed to generate documentation") - } -} - -pub trait PrintDocs: Documented + Send + Sync + Default -where - Self::Error: Debug, -{ - fn get_markdown(writer: &mut W) -> color_eyre::Result<()> { - let Value::Object(docs) = Self::get_docs() else { - unreachable!("Top level structure is always an object") - }; - let mut vec = Vec::new(); - let defaults = serde_json::to_string_pretty(&Self::default())?; - - writeln!(writer, "# Iroha Configuration reference\n")?; - writeln!(writer, "In this document we provide a reference and detailed descriptions of Iroha's configuration options. \ - The options have different underlying types and default values, which are denoted in code as types wrapped in a single \ - `Option<..>` or in a double `Option>`. 
For the detailed explanation, please refer to \ - this [section](#configuration-types).\n")?; - writeln!( - writer, - "## Configuration types\n\n\ - ### `Option<..>`\n\n\ - A type wrapped in a single `Option<..>` signifies that in the corresponding `json` block there is a fallback value for this type, \ - and that it only serves as a reference. If a default for such a type has a `null` value, it means that there is no meaningful fallback \ - available for this particular value.\n\nAll the default values can be freely obtained from a provided [sample configuration file](../../../configs/peer/config.json), \ - but it should only serve as a starting point. If left unchanged, the sample configuration file would still fail to build due to it having `null` in place of \ - [public](#public_key) and [private](#private_key) keys as well as [API endpoint URL](#torii.api_url). \ - These should be provided either by modifying the sample config file or as environment variables. \ - No other overloading of configuration values happens besides reading them from a file and capturing the environment variables.\n\n\ - For both types of configuration options wrapped in a single `Option<..>` (i.e. both those that have meaningful defaults and those that have `null`), \ - failure to provide them in any of the above two ways results in an error.\n\n\ - ### `Option>`\n\n\ - `Option>` types should be distinguished from types wrapped in a single `Option<..>`. Only the double option ones are allowed to stay `null`, \ - meaning that **not** providing them in an environment variable or a file will **not** result in an error.\n\n\ - Thus, only these types are truly optional in the mundane sense of the word. \ - An example of this distinction is genesis [public](#genesis.account_public_key) and [private](#genesis.account_private_key) key. \ - While the first one is a single `Option<..>` wrapped type, the latter is wrapped in `Option>`. This means that the genesis *public* key should always be \ - provided by the user, be it via a file config or an environment variable, whereas the *private* key is only needed for the peer that submits the genesis block, \ - and can be omitted for all others. The same logic goes for other double option fields such as logger file path.\n\n\ - ### Sumeragi: default `null` values\n\n\ - A special note about sumeragi fields with `null` as default: only the [`trusted_peers`](#sumeragi.trusted_peers) field out of the three can be initialized via a \ - provided file or an environment variable.\n\n\ - The other two fields, namely [`key_pair`](#sumeragi.key_pair) and [`peer_id`](#sumeragi.peer_id), go through a process of finalization where their values \ - are derived from the corresponding ones in the uppermost Iroha config (using its [`public_key`](#public_key) and [`private_key`](#private_key) fields) \ - or the Torii config (via its [`p2p_addr`](#torii.p2p_addr)). \ - This ensures that these linked fields stay in sync, and prevents the programmer error when different values are provided to these field pairs. 
\ - Providing either `sumeragi.key_pair` or `sumeragi.peer_id` by hand will result in an error, as it should never be done directly.\n" - )?; - writeln!(writer, "## Default configuration\n")?; - writeln!( - writer, - "The following is the default configuration used by Iroha.\n" - )?; - writeln!(writer, "```json\n{defaults}\n```\n")?; - Self::get_markdown_with_depth(writer, &docs, &mut vec, 2)?; - Ok(()) - } - - fn get_markdown_with_depth( - writer: &mut W, - docs: &serde_json::Map, - field: &mut Vec, - depth: usize, - ) -> color_eyre::Result<()> { - let current_field = { - let mut docs = docs; - for f in &*field { - docs = match &docs[f] { - Value::Object(obj) => obj, - _ => unreachable!(), - }; - } - docs - }; - - for (f, value) in current_field { - field.push(f.clone()); - let get_field = field.iter().map(AsRef::as_ref).collect::>(); - let (doc, inner) = match value { - Value::Object(_) => { - let doc = Self::get_doc_recursive(&get_field) - .expect("Should be there, as already in docs"); - (doc.unwrap_or_default(), true) - } - Value::String(s) => (s.clone(), false), - _ => unreachable!("Only strings and objects in docs"), - }; - // Hacky workaround to avoid duplicating inner fields docs in the reference - let doc = doc.lines().take(3).collect::>().join("\n"); - let doc = doc.strip_prefix(' ').unwrap_or(&doc); - let defaults = Self::default() - .get_recursive(get_field) - .expect("Failed to get defaults."); - let defaults = serde_json::to_string_pretty(&defaults)?; - let field_str = field - .join(".") - .chars() - .filter(|&chr| chr != ' ') - .collect::(); - - write!(writer, "{} `{}`\n\n", "#".repeat(depth), field_str)?; - write!(writer, "{doc}\n\n")?; - write!(writer, "```json\n{defaults}\n```\n\n")?; - - if inner { - Self::get_markdown_with_depth(writer, docs, field, depth + 1)?; - } - - field.pop(); - } - Ok(()) - } -} diff --git a/tools/kagami/src/genesis.rs b/tools/kagami/src/genesis.rs index 48caeb74337..6036e4723ab 100644 --- a/tools/kagami/src/genesis.rs +++ b/tools/kagami/src/genesis.rs @@ -4,11 +4,9 @@ use clap::{ArgGroup, Parser, Subcommand}; use iroha_config::{sumeragi::default::*, wasm::default::*, wsv::default::*}; use iroha_data_model::{ asset::AssetValueType, - isi::{MintExpr, RegisterExpr}, metadata::Limits, parameter::{default::*, ParametersBuilder}, prelude::AssetId, - IdBox, }; use iroha_genesis::{ExecutorMode, ExecutorPath, RawGenesisBlock, RawGenesisBlockBuilder}; use serde_json::json; @@ -146,22 +144,19 @@ pub fn generate_default(executor: ExecutorMode) -> color_eyre::Result color_eyre::Result), } impl RunArgs for Args { @@ -67,7 +64,6 @@ impl RunArgs for Args { Schema(args) => args.run(writer), Genesis(args) => args.run(writer), Config(args) => args.run(writer), - Docs(args) => args.run(writer), } } } diff --git a/tools/parity_scale_decoder/samples/trigger.bin b/tools/parity_scale_decoder/samples/trigger.bin index aa17a69bcd0..9d084408877 100644 Binary files a/tools/parity_scale_decoder/samples/trigger.bin and b/tools/parity_scale_decoder/samples/trigger.bin differ diff --git a/tools/parity_scale_decoder/samples/trigger.json b/tools/parity_scale_decoder/samples/trigger.json index 28dff83acfc..b37816c0352 100644 --- a/tools/parity_scale_decoder/samples/trigger.json +++ b/tools/parity_scale_decoder/samples/trigger.json @@ -5,9 +5,11 @@ "Instructions": [ { "Mint": { - "object": "1_u32", - "destination_id": { - "AssetId": "rose##alice@wonderland" + "Asset": { + "Quantity": { + "object": 1, + "destination_id": "rose##alice@wonderland" + } } } } diff --git 
a/tools/parity_scale_decoder/src/main.rs b/tools/parity_scale_decoder/src/main.rs index dcbb4069df3..47dc7801e7a 100644 --- a/tools/parity_scale_decoder/src/main.rs +++ b/tools/parity_scale_decoder/src/main.rs @@ -261,7 +261,7 @@ mod tests { let rose_id = AssetId::new(rose_definition_id, account_id.clone()); let trigger_id = "mint_rose".parse().expect("Valid"); let action = Action::::new( - vec![MintExpr::new(1_u32, rose_id)], + vec![Mint::asset_quantity(1_u32, rose_id)], Repeats::Indefinitely, account_id, FilterBox::Data(DataEventFilter::BySome(DataEntityFilter::ByAccount( diff --git a/tools/swarm/src/compose.rs b/tools/swarm/src/compose.rs index e08ab109e91..36c43317750 100644 --- a/tools/swarm/src/compose.rs +++ b/tools/swarm/src/compose.rs @@ -215,7 +215,8 @@ struct FullPeerEnv { iroha_genesis_account_public_key: Option, #[serde(skip_serializing_if = "Option::is_none")] iroha_genesis_account_private_key: Option>, - sumeragi_trusted_peers: SerializeAsJsonStr>, + #[serde(skip_serializing_if = "Option::is_none")] + sumeragi_trusted_peers: Option>>, } struct CompactPeerEnv { @@ -237,7 +238,11 @@ impl From for FullPeerEnv { iroha_genesis_account_private_key: value.genesis_private_key.map(SerializeAsJsonStr), torii_p2p_addr: value.p2p_addr, torii_api_url: value.api_addr, - sumeragi_trusted_peers: SerializeAsJsonStr(value.trusted_peers), + sumeragi_trusted_peers: if value.trusted_peers.is_empty() { + None + } else { + Some(SerializeAsJsonStr(value.trusted_peers)) + }, } } } @@ -311,7 +316,11 @@ impl DockerComposeBuilder<'_> { peer, service_source.clone(), volumes.clone(), - trusted_peers.clone(), + trusted_peers + .iter() + .filter(|trusted_peer| trusted_peer.public_key() != peer.key_pair.public_key()) + .cloned() + .collect(), Some(genesis_key_pair.public_key().clone()), Some(genesis_key_pair.private_key().clone()), ); @@ -325,7 +334,13 @@ impl DockerComposeBuilder<'_> { peer, service_source.clone(), volumes.clone(), - trusted_peers.clone(), + trusted_peers + .iter() + .filter(|trusted_peer| { + trusted_peer.public_key() != peer.key_pair.public_key() + }) + .cloned() + .collect(), Some(genesis_key_pair.public_key().clone()), None, ); @@ -615,7 +630,6 @@ mod tests { TORII_API_URL: iroha1:1338 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed012039E5BF092186FACC358770792A493CA98A83740643A3D41389483CF334F748C8 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"db9d90d20f969177bd5882f9fe211d14d1399d5440d04e3468783d169bbc4a8e39e5bf092186facc358770792a493ca98a83740643a3d41389483cf334f748c8"}' - SUMERAGI_TRUSTED_PEERS: '[]' ports: - 1337:1337 - 8080:8080 @@ -651,7 +665,6 @@ mod tests { TORII_P2P_ADDR: iroha0:1337 TORII_API_URL: iroha0:1337 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed0120415388A90FA238196737746A70565D041CFB32EAA0C89FF8CB244C7F832A6EBD - SUMERAGI_TRUSTED_PEERS: '[]' "#]]; expected.assert_eq(&actual); } @@ -691,7 +704,7 @@ mod tests { TORII_API_URL: iroha0:8080 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01203420F48A9EEB12513B8EB7DAF71979CE80A1013F5F341C10DCDA4F6AA19F97A9 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"5a6d5f06a90d29ad906e2f6ea8b41b4ef187849d0d397081a4a15ffcbe71e7c73420f48a9eeb12513b8eb7daf71979ce80a1013f5f341c10dcda4f6aa19f97a9"}' - SUMERAGI_TRUSTED_PEERS: 
'[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"}]' ports: - 1337:1337 - 8080:8080 @@ -708,7 +721,7 @@ mod tests { TORII_P2P_ADDR: iroha1:1338 TORII_API_URL: iroha1:8081 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01203420F48A9EEB12513B8EB7DAF71979CE80A1013F5F341C10DCDA4F6AA19F97A9 - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' ports: - 1338:1338 - 8081:8081 @@ -724,7 +737,7 @@ mod tests { TORII_P2P_ADDR: iroha2:1339 TORII_API_URL: iroha2:8082 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01203420F48A9EEB12513B8EB7DAF71979CE80A1013F5F341C10DCDA4F6AA19F97A9 - SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' ports: - 1339:1339 - 8082:8082 @@ -740,7 +753,7 @@ mod tests { TORII_P2P_ADDR: iroha3:1340 TORII_API_URL: iroha3:8083 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01203420F48A9EEB12513B8EB7DAF71979CE80A1013F5F341C10DCDA4F6AA19F97A9 - SUMERAGI_TRUSTED_PEERS: 
'[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' + SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' ports: - 1340:1340 - 8083:8083
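Two behaviors of the updated compose output are worth noting: an empty `SUMERAGI_TRUSTED_PEERS` set is now omitted from the environment entirely (via `skip_serializing_if`), and each peer's set excludes its own key, which is why the four-peer fixtures above list three entries per container. A self-contained sketch of that rule, with plain strings standing in for `PeerId`s:

```rust
use std::collections::BTreeSet;

// Each peer's SUMERAGI_TRUSTED_PEERS is the full trusted set minus itself,
// so an N-peer network yields N-1 entries per container.
fn trusted_peers_for(own: &str, all: &BTreeSet<String>) -> BTreeSet<String> {
    all.iter().filter(|peer| peer.as_str() != own).cloned().collect()
}

fn main() {
    let all: BTreeSet<String> =
        ["iroha0", "iroha1", "iroha2", "iroha3"].map(String::from).into();
    assert_eq!(trusted_peers_for("iroha0", &all).len(), 3);
}
```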