From 9b8b52c4e374a2a9018b3a028dc9575cdd56630e Mon Sep 17 00:00:00 2001 From: Lohachov Mykhailo Date: Wed, 25 Dec 2024 23:03:39 +0900 Subject: [PATCH 1/2] merge client, wasm and codec Signed-off-by: Lohachov Mykhailo --- CONTRIBUTING.md | 2 +- Cargo.lock | 35 +- crates/iroha_cli/Cargo.toml | 14 + crates/iroha_cli/README.md | 174 +- crates/iroha_cli/build.rs | 38 +- crates/iroha_cli/examples/client.md | 92 + crates/iroha_cli/examples/codec.md | 101 + crates/iroha_cli/examples/wasm.md | 27 + crates/iroha_cli/src/client.rs | 1615 ++++++++++++++++ crates/iroha_cli/src/codec.rs | 435 +++++ crates/iroha_cli/src/main.rs | 1619 +---------------- crates/iroha_cli/src/options.rs | 10 + crates/iroha_cli/src/wasm.rs | 228 +++ crates/iroha_codec/Cargo.toml | 35 - crates/iroha_codec/README.md | 119 -- crates/iroha_codec/build.rs | 39 - crates/iroha_codec/src/main.rs | 433 ----- crates/iroha_wasm_builder/README.md | 23 - crates/iroha_wasm_builder/src/main.rs | 106 -- crates/iroha_wasm_test_runner/Cargo.toml | 16 - crates/iroha_wasm_test_runner/src/main.rs | 82 - flake.nix | 1 - .../samples => samples/codec}/account.bin | Bin .../samples => samples/codec}/account.json | 0 .../samples => samples/codec}/domain.bin | 0 .../samples => samples/codec}/domain.json | 0 .../samples => samples/codec}/trigger.bin | Bin .../samples => samples/codec}/trigger.json | 0 scripts/build_wasm.sh | 2 +- wasm/libs/default_executor/README.md | 5 +- 30 files changed, 2661 insertions(+), 2590 deletions(-) create mode 100644 crates/iroha_cli/examples/client.md create mode 100644 crates/iroha_cli/examples/codec.md create mode 100644 crates/iroha_cli/examples/wasm.md create mode 100644 crates/iroha_cli/src/client.rs create mode 100644 crates/iroha_cli/src/codec.rs create mode 100644 crates/iroha_cli/src/options.rs create mode 100644 crates/iroha_cli/src/wasm.rs delete mode 100644 crates/iroha_codec/Cargo.toml delete mode 100644 crates/iroha_codec/README.md delete mode 100644 crates/iroha_codec/build.rs delete mode 100644 crates/iroha_codec/src/main.rs delete mode 100644 crates/iroha_wasm_builder/README.md delete mode 100644 crates/iroha_wasm_builder/src/main.rs delete mode 100644 crates/iroha_wasm_test_runner/Cargo.toml delete mode 100644 crates/iroha_wasm_test_runner/src/main.rs rename {crates/iroha_codec/samples => samples/codec}/account.bin (100%) rename {crates/iroha_codec/samples => samples/codec}/account.json (100%) rename {crates/iroha_codec/samples => samples/codec}/domain.bin (100%) rename {crates/iroha_codec/samples => samples/codec}/domain.json (100%) rename {crates/iroha_codec/samples => samples/codec}/trigger.bin (100%) rename {crates/iroha_codec/samples => samples/codec}/trigger.json (100%) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a79912111f3..2f886538fa5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -282,7 +282,7 @@ It can be done by running: ```bash # compile executor without optimizations -cargo run --bin iroha_wasm_builder -- build ./path/to/executor --out-file executor.wasm +cargo run --bin iroha wasm build ./path/to/executor --out-file executor.wasm ``` With profiling feature enabled Iroha exposes endpoint to scrap pprof profiles: diff --git a/Cargo.lock b/Cargo.lock index 65c495e40e1..4a535a8686c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2960,6 +2960,7 @@ version = "2.0.0-rc.1.0" dependencies = [ "clap", "color-eyre", + "colored", "derive_more", "erased-serde", "error-stack", @@ -2967,32 +2968,24 @@ dependencies = [ "futures", "humantime", "iroha", + "iroha_data_model", + "iroha_genesis", 
"iroha_primitives", + "iroha_schema", + "iroha_schema_gen", + "iroha_wasm_builder", "json5", + "owo-colors 4.1.0", + "parity-scale-codec", "serde", "serde_json", "serde_with", + "spinoff", "supports-color 2.1.0", "thiserror", "tokio", "vergen", -] - -[[package]] -name = "iroha_codec" -version = "2.0.0-rc.1.0" -dependencies = [ - "clap", - "colored", - "eyre", - "iroha_data_model", - "iroha_genesis", - "iroha_schema", - "iroha_schema_gen", - "parity-scale-codec", - "serde", - "serde_json", - "supports-color 2.1.0", + "wasmtime", ] [[package]] @@ -3759,14 +3752,6 @@ dependencies = [ "syn 2.0.75", ] -[[package]] -name = "iroha_wasm_test_runner" -version = "2.0.0-rc.1.0" -dependencies = [ - "anyhow", - "wasmtime", -] - [[package]] name = "irohad" version = "2.0.0-rc.1.0" diff --git a/crates/iroha_cli/Cargo.toml b/crates/iroha_cli/Cargo.toml index f6ba6beeae0..8f39796b50e 100644 --- a/crates/iroha_cli/Cargo.toml +++ b/crates/iroha_cli/Cargo.toml @@ -29,6 +29,14 @@ path = "src/main.rs" [dependencies] iroha = { workspace = true } iroha_primitives = { workspace = true } +iroha_schema = { workspace = true } +iroha_schema_gen = { workspace = true } +iroha_genesis = { workspace = true } +iroha_data_model = { workspace = true } +iroha_wasm_builder = { workspace = true } +spinoff = { workspace = true, features = ["binary"] } +owo-colors = { workspace = true, features = ["supports-colors"] } +wasmtime = { workspace = true } thiserror = { workspace = true } error-stack = { workspace = true, features = ["eyre"] } @@ -44,7 +52,13 @@ supports-color = { workspace = true } derive_more = { workspace = true } tokio = { workspace = true, features = ["rt"] } futures = { workspace = true } +parity-scale-codec = { workspace = true } +colored = "2.1.0" [build-dependencies] vergen = { version = "8.3.1", default-features = false } color-eyre = "0.6.3" +parity-scale-codec = { workspace = true } +iroha_data_model = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } diff --git a/crates/iroha_cli/README.md b/crates/iroha_cli/README.md index 57a50db8136..b8431e003b2 100644 --- a/crates/iroha_cli/README.md +++ b/crates/iroha_cli/README.md @@ -1,6 +1,11 @@ -# Iroha CLI Client +# Iroha CLI -Iroha Client CLI is a "thin" wrapper around functionality exposed in the `iroha` crate. Specifically, it should be used as a reference for using `iroha`'s features, and not as a production-ready client. As such, the CLI client is not guaranteed to support all features supported by the client library. Check [Iroha 2 documentation](https://docs.iroha.tech/get-started/operate-iroha-2-via-cli.html) for a detailed tutorial on working with Iroha Client CLI. +Iroha cli is a multi-purpose tool for interactions with iroha components. + +# Table of Contents +1. [Installation](#installation) +2. [Usage](#usage) +3. [Examples](#examples) ## Installation @@ -14,137 +19,68 @@ cargo build The above command will produce the `iroha` ELF executable file for Linux/BSD, the `iroha` executable for MacOS, and the `iroha.exe` executable for Windows, depending on your platform and configuration. -Alternatively, check out the [documentation](https://docs.iroha.tech/get-started/install-iroha-2.html) for system-wide installation instructions. + +Alternatively, check out the [documentation](https://docs.iroha.tech/get-started/install-iroha-2.html) (**TBU**) for system-wide installation instructions. 
 ## Usage
 
-Run Iroha Client CLI:
+Run Iroha CLI:
 
 ```
-iroha [OPTIONS]
+iroha [OPTIONS] <SUBCOMMAND>
 ```
 
-### Options
-
-| Option | Description |
-| --------------------- | -------------------------------------------------- |
-| -c, --config | Set a config file path (`config.json` by default). |
-
 ### Subcommands
 
+| Command  | Description |
+|----------|-------------|
+| `codec`  | Execute commands related to the [Parity SCALE Codec](https://github.com/paritytech/parity-scale-codec): list available types, decode SCALE to Iroha types, decode SCALE to JSON, encode JSON to SCALE. |
+| `wasm`   | Execute commands related to smart contracts: build and check source files, run WASM tests. |
+| `client` | Execute commands related to interactions with Iroha peers' Web API. |
+
+<details> <summary>Codec subcommands</summary>
+
+| Command | Description |
+|---------|-------------|
+| `list-types`    | List all available data types. |
+| `scale-to-json` | Decode the data type from SCALE to JSON. |
+| `json-to-scale` | Encode the data type from JSON to SCALE. |
+| `scale-to-rust` | Decode the data type from a SCALE binary file to the Rust debug format. Can be used to analyze binary input if the data type is not known. |
+| `help`          | Print the help message for the tool or a subcommand. |
+
+</details>
+
+<details> <summary>Wasm subcommands</summary>
+
+| Command | Description |
+|---------|-------------|
+| `check` | Check if smart contract sources are valid (`cargo check`). |
+| `build` | Build smart contracts from the given sources (`cargo build`). |
+| `test`  | Run WebAssembly tests. |
+| `help`  | Print the help message for the tool or a subcommand. |
+
+</details>
+
+<details> <summary>Client subcommands</summary>
+
+`client` commands require a valid configuration file; see `defaults/client.toml` for an example.
 
 | Command   | Description |
 | --------- | ----------- |
-| `account` | Execute commands related to accounts: register a new one, list all accounts, grant a permission to an account, list all account permissions |
-| `asset`   | Execute commands related to assets: register a new one, mint or transfer assets, get info about an asset, list all assets |
-| `blocks`  | Get block stream from Iroha peer |
-| `domain`  | Execute commands related to domains: register a new one, list all domains |
-| `events`  | Get event stream from Iroha peer |
-| `json`    | Submit multi-instructions or request query as JSON |
-| `peer`    | Execute commands related to peer administration and networking |
-| `wasm`    | Execute commands related to WASM |
-| `help`    | Print the help message for `iroha` and/or the current subcommand other than `help` subcommand |
+| `account` | Execute commands related to accounts: register a new one, list all accounts, grant a permission to an account, list all account permissions. |
+| `asset`   | Execute commands related to assets: register a new one, mint or transfer assets, get info about an asset, list all assets. |
+| `blocks`  | Get block stream from Iroha peer. |
+| `domain`  | Execute commands related to domains: register a new one, list all domains. |
+| `events`  | Get event stream from Iroha peer. |
+| `json`    | Submit multi-instructions or request query as JSON. |
+| `peer`    | Execute commands related to peer administration and networking. |
+| `wasm`    | Execute commands related to WASM. |
+| `help`    | Print the help message for `iroha` and/or the current subcommand other than `help` subcommand. |
 
 Refer to [Iroha Special Instructions](https://docs.iroha.tech/blockchain/instructions.html) for more information about Iroha instructions such as register, mint, grant, and so on.
+
+</details>
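+
+As a quick start, the following invocations exercise each of the three modes; they mirror the per-tool examples linked in the Examples section below:
+
+```bash
+# List the data types known to the codec
+./iroha codec list-types
+# Build a smart contract from its sources
+./iroha wasm build path/to/project --out-file ./smartcontract.wasm
+# Register a domain (requires a valid client configuration)
+./iroha client domain register --id="Soramitsu"
+```
+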
## Examples - :grey_exclamation: All examples below are Unix-oriented. If you're working on Windows, we would highly encourage you to consider using WSL, as most documentation assumes a POSIX-like shell running on your system. Please be advised that the differences in the syntax may go beyond executing `iroha.exe` instead of `iroha`. -```bash -./iroha domain register --id="Soramitsu" -./iroha account register --id="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" -./iroha asset register --id="XOR#Soramitsu" --type=Numeric -./iroha asset mint --account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu" --quantity=1010 -./iroha asset get --account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu" -``` - -In this section we will show you how to use Iroha CLI Client to do the following: - - - [Create new Domain](#create-new-domain) - - [Create new Account](#create-new-account) - - [Mint Asset to Account](#mint-asset-to-account) - - [Query Account Assets Quantity](#query-account-assets-quantity) - - [Execute WASM transaction](#execute-wasm-transaction) - - [Execute Multi-instruction Transactions](#execute-multi-instruction-transactions) - -### Create new Domain - -To create a domain, you need to specify the entity type first (`domain` in our case) and then the command (`register`) with a list of required parameters. For the `domain` entity, you only need to provide the `id` argument as a string that doesn't contain the `@` and `#` symbols. - -```bash -./iroha domain register --id="Soramitsu" -``` - -### Create new Account - -To create an account, specify the entity type (`account`) and the command (`register`). Then define the value of the `id` argument in "signatory@domain" format, where signatory is the account's public key in multihash representation: - -```bash -./iroha account register --id="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" -``` - -### Mint Asset to Account - -To add assets to the account, you must first register an Asset Definition. Specify the `asset` entity and then use the `register` and `mint` commands respectively. Here is an example of adding Assets of the type `Quantity` to the account: - -```bash -./iroha asset register --id="XOR#Soramitsu" --type=Numeric -./iroha asset mint --account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu" --quantity=1010 -``` - -With this, you created `XOR#Soramitsu`, an asset of type `Numeric`, and then gave `1010` units of this asset to the account `ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu`. - -### Query Account Assets Quantity - -You can use Query API to check that your instructions were applied and the _world_ is in the desired state. For example, to know how many units of a particular asset an account has, use `asset get` with the specified account and asset: - -```bash -./iroha asset get --account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu" -``` - -This query returns the quantity of `XOR#Soramitsu` asset for the `ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu` account. - -You can also filter based on either account, asset or domain id by using the filtering API provided by the Iroha client CLI. 
Generally, filtering follows the `./iroha ENTITY list filter PREDICATE` pattern, where ENTITY is asset, account or domain and PREDICATE is condition used for filtering serialized using JSON5 (check `iroha::data_model::predicate::value::ValuePredicate` type). - -Here are some examples of filtering: - -```bash -# Filter domains by id -./iroha domain list filter '{"Identifiable": {"Is": "wonderland"}}' -# Filter accounts by domain -./iroha account list filter '{"Identifiable": {"EndsWith": "@wonderland"}}' -# Filter asset by domain -./iroha asset list filter '{"Or": [{"Identifiable": {"Contains": "#wonderland#"}}, {"And": [{"Identifiable": {"Contains": "##"}}, {"Identifiable": {"EndsWith": "@wonderland"}}]}]}' -``` - -### Execute WASM transaction - -Use `--file` to specify a path to the WASM file: - -```bash -./iroha wasm --file=/path/to/file.wasm -``` - -Or skip `--file` to read WASM from standard input: - -```bash -cat /path/to/file.wasm | ./iroha wasm -``` - -These subcommands submit the provided wasm binary as an `Executable` to be executed outside a trigger context. - -### Execute Multi-instruction Transactions - -The reference implementation of the Rust client, `iroha`, is often used for diagnosing problems in other implementations. - -To test transactions in the JSON format (used in the genesis block and by other SDKs), pipe the transaction into the client and add the `json` subcommand to the arguments: - -```bash -cat /path/to/file.json | ./iroha json transaction -``` - -### Request arbitrary query - -```bash -echo '{ "FindAllParameters": null }' | ./iroha --config client.toml json query -``` +- [Codec](examples/codec.md) tutorial and examples +- [Wasm](examples/wasm.md) basic usage examples +- [Client](examples/client.md) basic usage examples \ No newline at end of file diff --git a/crates/iroha_cli/build.rs b/crates/iroha_cli/build.rs index 92e754aab91..df782490e28 100644 --- a/crates/iroha_cli/build.rs +++ b/crates/iroha_cli/build.rs @@ -1,14 +1,50 @@ //! 
Build script to extract git hash of Iroha build
+use std::{fs, path::PathBuf};
+
 use color_eyre::{
     eyre::{eyre, WrapErr},
     Result,
 };
+use iroha_data_model::{account::NewAccount, domain::NewDomain, prelude::*};
+use parity_scale_codec::Encode;
+use serde::de::DeserializeOwned;
 
 fn main() -> Result<()> {
     vergen::EmitBuilder::builder()
         .git_sha(true)
         .emit()
         .map_err(|err| eyre!(Box::new(err)))
-        .wrap_err("Failed to extract git hash")
+        .wrap_err("Failed to extract git hash")?;
+
+    // Codec: encode the sample JSON files into the SCALE binaries used by the codec examples
+    sample_into_binary_file::<NewAccount>("account").expect("Failed to encode into account.bin.");
+    sample_into_binary_file::<NewDomain>("domain").expect("Failed to encode into domain.bin.");
+    sample_into_binary_file::<Trigger>("trigger").expect("Failed to encode into trigger.bin.");
+
+    Ok(())
+}
+
+fn sample_into_binary_file<T>(filename: &str) -> Result<()>
+where
+    T: Encode + DeserializeOwned,
+{
+    let mut path_to = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+    path_to.push("../../");
+    path_to.push("samples/codec/");
+    path_to.push(filename);
+
+    let path_to_json = path_to.with_extension("json");
+    let path_to_binary = path_to.with_extension("bin");
+
+    println!("cargo:rerun-if-changed={}", path_to_json.to_str().unwrap());
+    let buf = fs::read_to_string(path_to_json)?;
+
+    let sample = serde_json::from_str::<T>(buf.as_str())?;
+
+    let buf = sample.encode();
+
+    fs::write(path_to_binary, buf)?;
+
+    Ok(())
+}
diff --git a/crates/iroha_cli/examples/client.md b/crates/iroha_cli/examples/client.md
new file mode 100644
index 00000000000..fb81ae2c2ac
--- /dev/null
+++ b/crates/iroha_cli/examples/client.md
@@ -0,0 +1,92 @@
+# Examples for `iroha client`
+
+In this section we will show you how to use `iroha client` to do the following:
+
+ - [Create new Domain](#create-new-domain)
+ - [Create new Account](#create-new-account)
+ - [Mint Asset to Account](#mint-asset-to-account)
+ - [Query Account Assets Quantity](#query-account-assets-quantity)
+ - [Execute WASM transaction](#execute-wasm-transaction)
+ - [Execute Multi-instruction Transactions](#execute-multi-instruction-transactions)
+
+### Create new Domain
+
+To create a domain, you need to specify the entity type first (`domain` in our case) and then the command (`register`) with a list of required parameters. For the `domain` entity, you only need to provide the `id` argument as a string that doesn't contain the `@` and `#` symbols.
+
+```bash
+./iroha client domain register --id="Soramitsu"
+```
+
+### Create new Account
+
+To create an account, specify the entity type (`account`) and the command (`register`). Then define the value of the `id` argument in "signatory@domain" format, where signatory is the account's public key in multihash representation:
+
+```bash
+./iroha client account register --id="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu"
+```
+
+### Mint Asset to Account
+
+To add assets to the account, you must first register an Asset Definition. Specify the `asset` entity and then use the `register` and `mint` commands respectively. Here is an example of adding Assets of the type `Numeric` to the account:
+
+```bash
+./iroha client asset register --id="XOR#Soramitsu" --type=Numeric
+./iroha client asset mint --account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu" --quantity=1010
+```
+
+With this, you created `XOR#Soramitsu`, an asset of type `Numeric`, and then gave `1010` units of this asset to the account `ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu`.
+
+### Query Account Assets Quantity
+
+You can use Query API to check that your instructions were applied and the _world_ is in the desired state. For example, to know how many units of a particular asset an account has, use `asset get` with the specified account and asset:
+
+```bash
+./iroha client asset get --account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu"
+```
+
+This query returns the quantity of the `XOR#Soramitsu` asset for the `ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu` account.
+
+You can also filter based on either account, asset or domain id by using the filtering API provided by the Iroha client CLI. Generally, filtering follows the `./iroha client ENTITY list filter PREDICATE` pattern, where ENTITY is asset, account or domain and PREDICATE is a condition used for filtering, serialized as JSON5 (check the `iroha::data_model::query::dsl::CompoundPredicate` type).
+
+Here are some examples of filtering:
+
+```bash
+# Filter domains by id
+./iroha client domain list filter '{"Identifiable": {"Is": "wonderland"}}'
+# Filter accounts by domain
+./iroha client account list filter '{"Identifiable": {"EndsWith": "@wonderland"}}'
+# Filter asset by domain
+./iroha client asset list filter '{"Or": [{"Identifiable": {"Contains": "#wonderland#"}}, {"And": [{"Identifiable": {"Contains": "##"}}, {"Identifiable": {"EndsWith": "@wonderland"}}]}]}'
+```
+
+### Execute WASM transaction
+
+Use `--path` to specify a path to the WASM file:
+
+```bash
+./iroha client wasm --path=/path/to/file.wasm
+```
+
+Or skip `--path` to read WASM from standard input:
+
+```bash
+cat /path/to/file.wasm | ./iroha client wasm
+```
+
+These subcommands submit the provided WASM binary as an `Executable` to be executed outside a trigger context.
+
+### Execute Multi-instruction Transactions
+
+The reference implementation of the Rust client, `iroha`, is often used for diagnosing problems in other implementations.
+
+To test transactions in the JSON format (used in the genesis block and by other SDKs), pipe the transaction into the client and add the `json` subcommand to the arguments:
+
+```bash
+cat /path/to/file.json | ./iroha client json transaction
+```
+
+### Request arbitrary query
+
+```bash
+echo '{ "FindAllParameters": null }' | ./iroha client --config client.toml json query
+```
diff --git a/crates/iroha_cli/examples/codec.md b/crates/iroha_cli/examples/codec.md
new file mode 100644
index 00000000000..a4543e89663
--- /dev/null
+++ b/crates/iroha_cli/examples/codec.md
@@ -0,0 +1,101 @@
+# Examples for `iroha codec`
+
+In this section we will show you how to use `iroha codec` to do the following:
+ - [List available types](#list-available-types)
+ - [Decode SCALE ⇔ JSON](#decode-scale--json)
+ - [Decode SCALE to supported type](#decode-scale-to-supported-type)
+
+## List available types
+
+To list all supported data types, run from the project root directory:
+
+```bash
+./iroha codec list-types
+```
+
+<details> <summary>Expand to see expected output</summary>
+
+```
+Account
+AccountEvent
+AccountEventFilter
+AccountEventSet
+AccountId
+AccountMintBox
+AccountPermissionChanged
+AccountRoleChanged
+Action
+Algorithm
+...
+
+344 types are supported
+```
+
+</details>
+
+## Decode SCALE ⇔ JSON
+
+Commands: `scale-to-json` and `json-to-scale`
+
+Both commands by default read data from `stdin` and print the result to `stdout`.
+The `--input` and `--output` flags can be used to read from and write to files instead.
+
+These commands require the `--type` argument. If the data type is not known, [`scale-to-rust`](#decode-scale-to-supported-type) can be used to detect it.
+
+* Decode the specified data type from a binary:
+
+   ```bash
+   ./iroha codec scale-to-json --input <PATH> --type <TYPE>
+   ```
+
+### `scale-to-json` and `json-to-scale` usage examples
+
+* Decode the `NewAccount` data type from the `samples/codec/account.bin` binary:
+
+   ```bash
+   ./iroha codec scale-to-json --input samples/codec/account.bin --type NewAccount
+   ```
+
+* Encode the `NewAccount` data type from `samples/codec/account.json`:
+
+   ```bash
+   ./iroha codec json-to-scale --input samples/codec/account.json --output result.bin --type NewAccount
+   ```
+
+
+## Decode SCALE to supported type
+
+Command: `scale-to-rust`
+
+Decode the data type from a given binary.
+
+| Option     | Description                                                                          | Type      |
+| ---------- | ------------------------------------------------------------------------------------ | --------- |
+| `--binary` | The path to the binary file with an encoded Iroha structure for the tool to decode.  | File path |
+| `--type`   | The data type that is expected to be encoded in the provided binary. If not specified, the tool tries to guess the type. | String |
+
+* Decode the specified data type from a binary:
+
+   ```bash
+   ./iroha codec scale-to-rust <PATH> --type <TYPE>
+   ```
+
+* If you are not sure which data type is encoded in the binary, run the tool without the `--type` option:
+
+   ```bash
+   ./iroha codec scale-to-rust <PATH>
+   ```
+
+### `scale-to-rust` usage examples
+
+* Decode the `NewAccount` data type from the `samples/codec/account.bin` binary:
+
+   ```bash
+   ./iroha codec scale-to-rust samples/codec/account.bin --type NewAccount
+   ```
+
+* Decode the `NewDomain` data type from the `samples/codec/domain.bin` binary:
+
+   ```bash
+   ./iroha codec scale-to-rust samples/codec/domain.bin --type NewDomain
+   ```
diff --git a/crates/iroha_cli/examples/wasm.md b/crates/iroha_cli/examples/wasm.md
new file mode 100644
index 00000000000..3c5ac89e26e
--- /dev/null
+++ b/crates/iroha_cli/examples/wasm.md
@@ -0,0 +1,27 @@
+# Examples for `iroha wasm`
+
+In this section we will show you how to use `iroha wasm` to do the following:
+
+ - [Check smartcontracts](#check-smartcontracts)
+ - [Build smartcontracts](#build-smartcontracts)
+ - [Test WebAssembly](#test-webassembly)
+
+## Check smartcontracts
+
+```bash
+./iroha wasm check path/to/project
+```
+
+## Build smartcontracts
+
+```bash
+./iroha wasm build path/to/project --out-file ./smartcontract.wasm
+```
+
+**Build with options:**
+
+```bash
+./iroha wasm build path/to/project --optimize --format --out-file ./smartcontract.wasm
+```
+
+## Test WebAssembly
+
+The `test` subcommand copies the functionality of `webassembly-test-runner`, but is able to indicate failure with an exit code.
\ No newline at end of file
diff --git a/crates/iroha_cli/src/client.rs b/crates/iroha_cli/src/client.rs
new file mode 100644
index 00000000000..8effb947e92
--- /dev/null
+++ b/crates/iroha_cli/src/client.rs
@@ -0,0 +1,1615 @@
+//! Client CLI
+
+use std::{
+    fs::{self, read as read_file},
+    io::{stdin, stdout},
+    path::PathBuf,
+    str::FromStr,
+    time::Duration,
+};
+
+use erased_serde::Serialize;
+use error_stack::{IntoReportCompat, ResultExt};
+use eyre::{eyre, Error, Result, WrapErr};
+use futures::TryStreamExt;
+use iroha::{client::Client, config::Config, data_model::prelude::*};
+use iroha_primitives::json::Json;
+use thiserror::Error;
+use tokio::runtime::Runtime;
+
+use crate::options;
+
+/// Re-usable clap `--metadata <PATH>` (`-m`) argument.
+/// Should be combined with `#[command(flatten)]` attr.
+#[derive(clap::Args, Debug, Clone)]
+pub struct MetadataArgs {
+    /// The JSON/JSON5 file with key-value metadata pairs
+    #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))]
+    metadata: Option<PathBuf>,
+}
+
+impl MetadataArgs {
+    fn load(self) -> Result<Metadata> {
+        let value: Option<Metadata> = self
+            .metadata
+            .map(|path| {
+                let content = fs::read_to_string(&path).wrap_err_with(|| {
+                    eyre!("Failed to read the metadata file `{}`", path.display())
+                })?;
+                let metadata: Metadata = json5::from_str(&content).wrap_err_with(|| {
+                    eyre!(
+                        "Failed to deserialize metadata from file `{}`",
+                        path.display()
+                    )
+                })?;
+                Ok::<_, eyre::Report>(metadata)
+            })
+            .transpose()?;
+
+        Ok(value.unwrap_or_default())
+    }
+}
+
+/// Re-usable clap `--value <VALUE>` (`-v`) argument.
+/// Should be combined with `#[command(flatten)]` attr.
+#[derive(clap::Args, Debug, Clone, PartialEq, Eq)]
+pub struct MetadataValueArg {
+    /// Wrapper around `Json` to accept possible values and fall back to JSON.
+    ///
+    /// The following types are supported:
+    /// Numbers: decimal with optional point
+    /// Booleans: false/true
+    /// Objects: e.g. {"Vec":[{"String":"a"},{"String":"b"}]}
{"Vec":[{"String":"a"},{"String":"b"}]} + #[arg(short, long)] + value: Json, +} + +impl FromStr for MetadataValueArg { + type Err = Error; + + fn from_str(s: &str) -> Result { + Ok(MetadataValueArg { + value: Json::from_str(s)?, + }) + } +} + +/// Arguments for client subcommand +#[derive(clap::Args, Debug)] +pub struct Args { + /// Path to the configuration file + #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))] + #[clap(default_value = "client.toml")] + config: PathBuf, + /// More verbose output + #[arg(short, long)] + verbose: bool, + /// Subcommands of client cli + #[command(subcommand)] + subcommand: Subcommand, +} + +#[derive(clap::Subcommand, Debug)] +enum Subcommand { + /// The subcommand related to domains + #[clap(subcommand)] + Domain(domain::Args), + /// The subcommand related to accounts + #[clap(subcommand)] + Account(account::Args), + /// The subcommand related to assets + #[clap(subcommand)] + Asset(asset::Args), + /// The subcommand related to p2p networking + #[clap(subcommand)] + Peer(peer::Args), + /// The subcommand related to event streaming + Events(events::Args), + /// The subcommand related to Wasm + Wasm(wasm::Args), + /// The subcommand related to block streaming + Blocks(blocks::Args), + /// The subcommand related to multi-instructions as Json or Json5 + Json(json::Args), + /// The subcommand related to multisig accounts and transactions + #[clap(subcommand)] + Multisig(multisig::Args), +} + +/// Context inside which command is executed +trait RunContext { + /// Get access to configuration + fn configuration(&self) -> &Config; + + fn client_from_config(&self) -> Client { + Client::new(self.configuration().clone()) + } + + /// Serialize and print data + /// + /// # Errors + /// - if serialization fails + /// - if printing fails + fn print_data(&mut self, data: &dyn Serialize) -> Result<()>; +} + +struct PrintJsonContext { + write: W, + config: Config, +} + +impl RunContext for PrintJsonContext { + fn configuration(&self) -> &Config { + &self.config + } + + fn print_data(&mut self, data: &dyn Serialize) -> Result<()> { + writeln!(&mut self.write, "{}", serde_json::to_string_pretty(data)?)?; + Ok(()) + } +} + +/// Runs subcommand +trait RunArgs { + /// Runs command + /// + /// # Errors + /// if inner command errors + fn run(self, context: &mut dyn RunContext) -> Result<()>; +} + +macro_rules! match_all { + (($self:ident, $context:ident), { $($variants:path),* $(,)?}) => { + match $self { + $($variants(variant) => RunArgs::run(variant, $context),)* + } + }; +} + +impl RunArgs for Subcommand { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + use Subcommand::*; + match_all!((self, context), { Domain, Account, Asset, Peer, Events, Wasm, Blocks, Json, Multisig }) + } +} + +#[derive(Error, Debug)] +enum ExecutionError { + #[error("Failed to load Iroha client configuration")] + Config, + #[error("Failed to serialize config")] + SerializeConfig, +} + +impl options::RunArgs for Args { + fn run(self) -> Result<()> { + let config = Config::load(self.config) + // FIXME: would be nice to NOT change the context, it's unnecessary + .change_context(ExecutionError::Config) + .attach_printable("config path was set by `--config` argument") + .map_err(|e| eyre::eyre!(e))?; + if self.verbose { + eprintln!( + "Configuration: {}", + &serde_json::to_string_pretty(&config) + .change_context(ExecutionError::SerializeConfig) + .attach_printable("caused by `--verbose` argument") + .map_err(|e| eyre::eyre!(e))? 
+            );
+        }
+
+        let mut context = PrintJsonContext {
+            write: stdout(),
+            config,
+        };
+        self.subcommand
+            .run(&mut context)
+            .into_report()
+            .map_err(|e| eyre::eyre!(e))?;
+
+        Ok(())
+    }
+}
+
+/// Submit instruction with metadata to network.
+///
+/// # Errors
+/// Fails if submitting over network fails
+#[allow(clippy::shadow_unrelated)]
+fn submit(
+    instructions: impl Into<Executable>,
+    metadata: Metadata,
+    context: &mut dyn RunContext,
+) -> Result<()> {
+    let client = context.client_from_config();
+    let instructions = instructions.into();
+    let tx = client.build_transaction(instructions, metadata);
+
+    #[cfg(not(debug_assertions))]
+    let err_msg = "Failed to submit transaction.";
+    #[cfg(debug_assertions)]
+    let err_msg = format!("Failed to submit transaction {tx:?}");
+    let hash = client.submit_transaction_blocking(&tx).wrap_err(err_msg)?;
+    context.print_data(&hash)?;
+
+    Ok(())
+}
+
+mod filter {
+    use iroha::data_model::query::dsl::CompoundPredicate;
+    use serde::Deserialize;
+
+    use super::*;
+
+    /// Filter for domain queries
+    #[derive(Clone, Debug, clap::Parser)]
+    pub struct DomainFilter {
+        /// Predicate for filtering given as JSON5 string
+        #[clap(value_parser = parse_json5::<CompoundPredicate<Domain>>)]
+        pub predicate: CompoundPredicate<Domain>,
+    }
+
+    /// Filter for account queries
+    #[derive(Clone, Debug, clap::Parser)]
+    pub struct AccountFilter {
+        /// Predicate for filtering given as JSON5 string
+        #[clap(value_parser = parse_json5::<CompoundPredicate<Account>>)]
+        pub predicate: CompoundPredicate<Account>,
+    }
+
+    /// Filter for asset queries
+    #[derive(Clone, Debug, clap::Parser)]
+    pub struct AssetFilter {
+        /// Predicate for filtering given as JSON5 string
+        #[clap(value_parser = parse_json5::<CompoundPredicate<Asset>>)]
+        pub predicate: CompoundPredicate<Asset>,
+    }
+
+    /// Filter for asset definition queries
+    #[derive(Clone, Debug, clap::Parser)]
+    pub struct AssetDefinitionFilter {
+        /// Predicate for filtering given as JSON5 string
+        #[clap(value_parser = parse_json5::<CompoundPredicate<AssetDefinition>>)]
+        pub predicate: CompoundPredicate<AssetDefinition>,
+    }
+
+    fn parse_json5<T>(s: &str) -> Result<T, String>
+    where
+        T: for<'a> Deserialize<'a>,
+    {
+        json5::from_str(s).map_err(|err| format!("Failed to deserialize filter from JSON5: {err}"))
+    }
+}
+
+mod events {
+
+    use iroha::data_model::events::pipeline::{BlockEventFilter, TransactionEventFilter};
+
+    use super::*;
+
+    #[derive(clap::Args, Debug, Clone, Copy)]
+    pub struct Args {
+        /// Wait timeout
+        #[clap(short, long, global = true)]
+        timeout: Option<humantime::Duration>,
+        #[clap(subcommand)]
+        command: Command,
+    }
+
+    /// Get event stream from Iroha peer
+    #[derive(clap::Subcommand, Debug, Clone, Copy)]
+    enum Command {
+        /// Gets block pipeline events
+        BlockPipeline,
+        /// Gets transaction pipeline events
+        TransactionPipeline,
+        /// Gets data events
+        Data,
+        /// Get execute trigger events
+        ExecuteTrigger,
+        /// Get trigger completed events
+        TriggerCompleted,
+    }
+
+    impl RunArgs for Args {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let timeout: Option<Duration> = self.timeout.map(Into::into);
+
+            match self.command {
+                Command::TransactionPipeline => {
+                    listen(TransactionEventFilter::default(), context, timeout)
+                }
+                Command::BlockPipeline => listen(BlockEventFilter::default(), context, timeout),
+                Command::Data => listen(DataEventFilter::Any, context, timeout),
+                Command::ExecuteTrigger => {
+                    listen(ExecuteTriggerEventFilter::new(), context, timeout)
+                }
+                Command::TriggerCompleted => {
+                    listen(TriggerCompletedEventFilter::new(), context, timeout)
+                }
+            }
+        }
+    }
+
+    fn listen(
+        filter: impl Into<EventFilterBox>,
+        context: &mut dyn RunContext,
+        timeout: Option<Duration>,
+    ) -> Result<()> {
+        let filter = filter.into();
+        let client = context.client_from_config();
+
+        if let Some(timeout) = timeout {
+            eprintln!("Listening to events with filter: {filter:?} and timeout: {timeout:?}");
+            let rt = Runtime::new().wrap_err("Failed to create runtime.")?;
+            rt.block_on(async {
+                let mut stream = client
+                    .listen_for_events_async([filter])
+                    .await
+                    .expect("Failed to listen for events.");
+                while let Ok(event) = tokio::time::timeout(timeout, stream.try_next()).await {
+                    context.print_data(&event?)?;
+                }
+                eprintln!("Timeout period has expired.");
+                Result::<()>::Ok(())
+            })?;
+        } else {
+            eprintln!("Listening to events with filter: {filter:?}");
+            client
+                .listen_for_events([filter])
+                .wrap_err("Failed to listen for events.")?
+                .try_for_each(|event| context.print_data(&event?))?;
+        }
+        Ok(())
+    }
+}
+
+mod blocks {
+    use std::num::NonZeroU64;
+
+    use super::*;
+
+    /// Get block stream from Iroha peer
+    #[derive(clap::Args, Debug, Clone, Copy)]
+    pub struct Args {
+        /// Block height from which to start streaming blocks
+        height: NonZeroU64,
+
+        /// Wait timeout
+        #[clap(short, long)]
+        timeout: Option<humantime::Duration>,
+    }
+
+    impl RunArgs for Args {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Args { height, timeout } = self;
+            let timeout: Option<Duration> = timeout.map(Into::into);
+            listen(height, context, timeout)
+        }
+    }
+
+    fn listen(
+        height: NonZeroU64,
+        context: &mut dyn RunContext,
+        timeout: Option<Duration>,
+    ) -> Result<()> {
+        let client = context.client_from_config();
+        if let Some(timeout) = timeout {
+            eprintln!("Listening to blocks from height: {height} and timeout: {timeout:?}");
+            let rt = Runtime::new().wrap_err("Failed to create runtime.")?;
+            rt.block_on(async {
+                let mut stream = client
+                    .listen_for_blocks_async(height)
+                    .await
+                    .expect("Failed to listen for blocks.");
+                while let Ok(event) = tokio::time::timeout(timeout, stream.try_next()).await {
+                    context.print_data(&event?)?;
+                }
+                eprintln!("Timeout period has expired.");
+                Result::<()>::Ok(())
+            })?;
+        } else {
+            eprintln!("Listening to blocks from height: {height}");
+            client
+                .listen_for_blocks(height)
+                .wrap_err("Failed to listen for blocks.")?
+                .try_for_each(|event| context.print_data(&event?))?;
+        }
+        Ok(())
+    }
+}
+
+mod domain {
+    use super::*;
+
+    /// Arguments for domain subcommand
+    #[derive(Debug, clap::Subcommand)]
+    pub enum Args {
+        /// Register domain
+        Register(Register),
+        /// List domains
+        #[clap(subcommand)]
+        List(List),
+        /// Transfer domain
+        Transfer(Transfer),
+        /// Edit domain metadata
+        #[clap(subcommand)]
+        Metadata(metadata::Args),
+    }
+
+    impl RunArgs for Args {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            match_all!((self, context), { Args::Register, Args::List, Args::Transfer, Args::Metadata, })
+        }
+    }
+
+    /// Add subcommand for domain
+    #[derive(Debug, clap::Args)]
+    pub struct Register {
+        /// Domain name as double-quoted string
+        #[arg(short, long)]
+        pub id: DomainId,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    impl RunArgs for Register {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self { id, metadata } = self;
+            let create_domain = iroha::data_model::isi::Register::domain(Domain::new(id));
+            submit([create_domain], metadata.load()?, context).wrap_err("Failed to create domain")
+        }
+    }
+
+    /// List domains with this command
+    #[derive(clap::Subcommand, Debug, Clone)]
+    pub enum List {
+        /// All domains
+        All,
+        /// Filter domains by given predicate
+        Filter(filter::DomainFilter),
+    }
+
+    impl RunArgs for List {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let client = context.client_from_config();
+
+            let query = client.query(FindDomains::new());
+
+            let query = match self {
+                List::All => query,
+                List::Filter(filter) => query.filter(filter.predicate),
+            };
+
+            let result = query.execute_all().wrap_err("Failed to get all domains")?;
+            context.print_data(&result)?;
+
+            Ok(())
+        }
+    }
+
+    /// Transfer a domain between accounts
+    #[derive(Debug, clap::Args)]
+    pub struct Transfer {
+        /// Domain name as double-quoted string
+        #[arg(short, long)]
+        pub id: DomainId,
+        /// Account from which to transfer (in form `name@domain_name`)
+        #[arg(short, long)]
+        pub from: AccountId,
+        /// Account to which to transfer (in form `name@domain_name`)
+        #[arg(short, long)]
+        pub to: AccountId,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    impl RunArgs for Transfer {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self {
+                id,
+                from,
+                to,
+                metadata,
+            } = self;
+            let transfer_domain = iroha::data_model::isi::Transfer::domain(from, id, to);
+            submit([transfer_domain], metadata.load()?, context)
+                .wrap_err("Failed to transfer domain")
+        }
+    }
+
+    mod metadata {
+        use iroha::data_model::domain::DomainId;
+
+        use super::*;
+
+        /// Edit domain subcommands
+        #[derive(Debug, Clone, clap::Subcommand)]
+        pub enum Args {
+            /// Set domain metadata
+            Set(Set),
+            /// Remove domain metadata
+            Remove(Remove),
+        }
+
+        impl RunArgs for Args {
+            fn run(self, context: &mut dyn RunContext) -> Result<()> {
+                match_all!((self, context), { Args::Set, Args::Remove, })
+            }
+        }
+
+        /// Set metadata into domain
+        #[derive(Debug, Clone, clap::Args)]
+        pub struct Set {
+            /// A domain id to which metadata is to be added
+            #[arg(short, long)]
+            id: DomainId,
+            /// A key of metadata
+            #[arg(short, long)]
+            key: Name,
+            #[command(flatten)]
+            value: MetadataValueArg,
+        }
+
+        impl RunArgs for Set {
+            fn run(self, context: &mut dyn RunContext) -> Result<()> {
+                let Self {
+                    id,
+                    key,
+                    value: MetadataValueArg { value },
+                } = self;
+                let set_key_value = SetKeyValue::domain(id, key, value);
+                submit([set_key_value], Metadata::default(), context)
+                    .wrap_err("Failed to submit Set instruction")
+            }
+        }
+
+        /// Remove metadata from domain by key
+        #[derive(Debug, Clone, clap::Args)]
+        pub struct Remove {
+            /// A domain id from which metadata is to be removed
+            #[arg(short, long)]
+            id: DomainId,
+            /// A key of metadata
+            #[arg(short, long)]
+            key: Name,
+        }
+
+        impl RunArgs for Remove {
+            fn run(self, context: &mut dyn RunContext) -> Result<()> {
+                let Self { id, key } = self;
+                let remove_key_value = RemoveKeyValue::domain(id, key);
+                submit([remove_key_value], Metadata::default(), context)
+                    .wrap_err("Failed to submit Remove instruction")
+            }
+        }
+    }
+}
+
+mod account {
+    use std::fmt::Debug;
+
+    use super::{Permission as DataModelPermission, *};
+
+    /// Subcommands for account subcommand
+    #[derive(clap::Subcommand, Debug)]
+    pub enum Args {
+        /// Register account
+        Register(Register),
+        /// List accounts
+        #[command(subcommand)]
+        List(List),
+        /// Grant a permission to the account
+        Grant(Grant),
+        /// List all account permissions
+        ListPermissions(ListPermissions),
+    }
+
+    impl RunArgs for Args {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            match_all!((self, context), {
+                Args::Register,
+                Args::List,
+                Args::Grant,
+                Args::ListPermissions,
+            })
+        }
+    }
+
+    /// Register account
+    #[derive(clap::Args, Debug)]
+    pub struct Register {
+        /// Id of account in form `name@domain_name`
+        #[arg(short, long)]
+        pub id: AccountId,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    impl RunArgs for Register {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self { id, metadata } = self;
+            let create_account = iroha::data_model::isi::Register::account(Account::new(id));
+            submit([create_account], metadata.load()?, context)
+                .wrap_err("Failed to register account")
+        }
+    }
+
+    /// List accounts with this command
+    #[derive(clap::Subcommand, Debug, Clone)]
+    pub enum List {
+        /// All accounts
+        All,
+        /// Filter accounts by given predicate
+        Filter(filter::AccountFilter),
+    }
+
+    impl RunArgs for List {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let client = context.client_from_config();
+
+            let query = client.query(FindAccounts::new());
+
+            let query = match self {
+                List::All => query,
+                List::Filter(filter) => query.filter(filter.predicate),
+            };
+
+            let result = query.execute_all().wrap_err("Failed to get all accounts")?;
+            context.print_data(&result)?;
+
+            Ok(())
+        }
+    }
+
+    #[derive(clap::Args, Debug)]
+    pub struct Grant {
+        /// Account id
+        #[arg(short, long)]
+        pub id: AccountId,
+        /// The JSON/JSON5 file with a permission token
+        #[arg(short, long)]
+        pub permission: Permission,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    /// [`DataModelPermission`] wrapper implementing [`FromStr`]
+    #[derive(Debug, Clone)]
+    pub struct Permission(DataModelPermission);
+
+    impl FromStr for Permission {
+        type Err = Error;
+
+        fn from_str(s: &str) -> Result<Self> {
+            let content = fs::read_to_string(s)
+                .wrap_err(format!("Failed to read the permission token file {}", &s))?;
+            let permission: DataModelPermission = json5::from_str(&content).wrap_err(format!(
+                "Failed to deserialize the permission token from file {}",
+                &s
+            ))?;
+            Ok(Self(permission))
+        }
+    }
+
+    impl RunArgs for Grant {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self {
+                id,
+                permission,
+                metadata,
+            } = self;
+            let grant = iroha::data_model::isi::Grant::account_permission(permission.0, id);
+            submit([grant], metadata.load()?, context)
+
.wrap_err("Failed to grant the permission to the account") + } + } + + /// List all account permissions + #[derive(clap::Args, Debug)] + pub struct ListPermissions { + /// Account id + #[arg(short, long)] + id: AccountId, + } + + impl RunArgs for ListPermissions { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + let client = context.client_from_config(); + let find_all_permissions = FindPermissionsByAccountId::new(self.id); + let permissions = client + .query(find_all_permissions) + .execute_all() + .wrap_err("Failed to get all account permissions")?; + context.print_data(&permissions)?; + Ok(()) + } + } +} + +mod asset { + use iroha::data_model::name::Name; + + use super::*; + + /// Subcommand for dealing with asset + #[derive(clap::Subcommand, Debug)] + pub enum Args { + /// Command for managing asset definitions + #[clap(subcommand)] + Definition(definition::Args), + /// Command for minting asset in existing Iroha account + Mint(Mint), + /// Command for burning asset in existing Iroha account + Burn(Burn), + /// Transfer asset between accounts + Transfer(Transfer), + /// Get info of asset + Get(Get), + /// List assets + #[clap(subcommand)] + List(List), + /// Get a value from a Store asset + GetKeyValue(GetKeyValue), + /// Set a key-value entry in a Store asset + SetKeyValue(SetKeyValue), + /// Remove a key-value entry from a Store asset + RemoveKeyValue(RemoveKeyValue), + } + + impl RunArgs for Args { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + match_all!( + (self, context), + { Args::Definition, Args::Mint, Args::Burn, Args::Transfer, Args::Get, Args::List, Args::SetKeyValue, Args::RemoveKeyValue, Args::GetKeyValue} + ) + } + } + + mod definition { + use iroha::data_model::asset::{AssetDefinition, AssetDefinitionId, AssetType}; + + use super::*; + + /// Subcommand for managing asset definitions + #[derive(clap::Subcommand, Debug)] + pub enum Args { + /// Command for Registering a new asset + Register(Register), + /// List asset definitions + #[clap(subcommand)] + List(List), + } + + impl RunArgs for Args { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + match_all!( + (self, context), + { Args::Register, Args::List } + ) + } + } + + /// Register subcommand of asset + #[derive(clap::Args, Debug)] + pub struct Register { + /// Asset definition id for registering (in form of `asset#domain_name`) + #[arg(long)] + pub id: AssetDefinitionId, + /// Mintability of asset + #[arg(short, long)] + pub unmintable: bool, + /// Value type stored in asset + #[arg(short, long)] + pub r#type: AssetType, + #[command(flatten)] + pub metadata: MetadataArgs, + } + + impl RunArgs for Register { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + let Self { + id: asset_id, + r#type, + unmintable, + metadata, + } = self; + let mut asset_definition = AssetDefinition::new(asset_id, r#type); + if unmintable { + asset_definition = asset_definition.mintable_once(); + } + let create_asset_definition = + iroha::data_model::isi::Register::asset_definition(asset_definition); + submit([create_asset_definition], metadata.load()?, context) + .wrap_err("Failed to register asset") + } + } + + /// List asset definitions with this command + #[derive(clap::Subcommand, Debug, Clone)] + pub enum List { + /// All asset definitions + All, + /// Filter asset definitions by given predicate + Filter(filter::AssetDefinitionFilter), + } + + impl RunArgs for List { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + let client = context.client_from_config(); + + let 
query = client.query(FindAssetsDefinitions::new());
+
+                let query = match self {
+                    List::All => query,
+                    List::Filter(filter) => query.filter(filter.predicate),
+                };
+
+                let result = query
+                    .execute_all()
+                    .wrap_err("Failed to get all asset definitions")?;
+
+                context.print_data(&result)?;
+                Ok(())
+            }
+        }
+    }
+
+    /// Command for minting asset in existing Iroha account
+    #[derive(clap::Args, Debug)]
+    pub struct Mint {
+        /// Asset id for the asset (in form of `asset##account@domain_name`)
+        #[arg(long)]
+        pub id: AssetId,
+        /// Quantity to mint
+        #[arg(short, long)]
+        pub quantity: Numeric,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    impl RunArgs for Mint {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self {
+                id: asset_id,
+                quantity,
+                metadata,
+            } = self;
+            let mint_asset = iroha::data_model::isi::Mint::asset_numeric(quantity, asset_id);
+            submit([mint_asset], metadata.load()?, context)
+                .wrap_err("Failed to mint asset of type `Numeric`")
+        }
+    }
+
+    /// Command for burning asset in existing Iroha account
+    #[derive(clap::Args, Debug)]
+    pub struct Burn {
+        /// Asset id for the asset (in form of `asset##account@domain_name`)
+        #[arg(long)]
+        pub id: AssetId,
+        /// Quantity to burn
+        #[arg(short, long)]
+        pub quantity: Numeric,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    impl RunArgs for Burn {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self {
+                id: asset_id,
+                quantity,
+                metadata,
+            } = self;
+            let burn_asset = iroha::data_model::isi::Burn::asset_numeric(quantity, asset_id);
+            submit([burn_asset], metadata.load()?, context)
+                .wrap_err("Failed to burn asset of type `Numeric`")
+        }
+    }
+
+    /// Transfer asset between accounts
+    #[derive(clap::Args, Debug)]
+    pub struct Transfer {
+        /// Account to which to transfer (in form `name@domain_name`)
+        #[arg(long)]
+        pub to: AccountId,
+        /// Asset id to transfer (in form like `asset##account@domain_name`)
+        #[arg(long)]
+        pub id: AssetId,
+        /// Quantity of asset as number
+        #[arg(short, long)]
+        pub quantity: Numeric,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    impl RunArgs for Transfer {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self {
+                to,
+                id: asset_id,
+                quantity,
+                metadata,
+            } = self;
+            let transfer_asset =
+                iroha::data_model::isi::Transfer::asset_numeric(asset_id, quantity, to);
+            submit([transfer_asset], metadata.load()?, context).wrap_err("Failed to transfer asset")
+        }
+    }
+
+    /// Get info of asset
+    #[derive(clap::Args, Debug)]
+    pub struct Get {
+        /// Asset id for the asset (in form of `asset##account@domain_name`)
+        #[arg(long)]
+        pub id: AssetId,
+    }
+
+    impl RunArgs for Get {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self { id: asset_id } = self;
+            let client = context.client_from_config();
+            let asset = client
+                .query(FindAssets::new())
+                .filter_with(|asset| asset.id.eq(asset_id))
+                .execute_single()
+                .wrap_err("Failed to get asset.")?;
+            context.print_data(&asset)?;
+            Ok(())
+        }
+    }
+
+    /// List assets with this command
+    #[derive(clap::Subcommand, Debug, Clone)]
+    pub enum List {
+        /// All assets
+        All,
+        /// Filter assets by given predicate
+        Filter(filter::AssetFilter),
+    }
+
+    impl RunArgs for List {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let client = context.client_from_config();
+
+            let query = client.query(FindAssets::new());
+
+            let query = match self {
+                List::All => query,
+                List::Filter(filter) => query.filter(filter.predicate),
+            };
+
+            let result = query.execute_all().wrap_err("Failed to get all assets")?;
+            context.print_data(&result)?;
+
+            Ok(())
+        }
+    }
+
+    #[derive(clap::Args, Debug)]
+    pub struct SetKeyValue {
+        /// Asset id for the Store asset (in form of `asset##account@domain_name`)
+        #[clap(long)]
+        pub id: AssetId,
+        /// The key for the store value
+        #[clap(long)]
+        pub key: Name,
+        #[command(flatten)]
+        pub value: MetadataValueArg,
+    }
+
+    impl RunArgs for SetKeyValue {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self {
+                id: asset_id,
+                key,
+                value: MetadataValueArg { value },
+            } = self;
+
+            let set = iroha::data_model::isi::SetKeyValue::asset(asset_id, key, value);
+            submit([set], Metadata::default(), context)?;
+            Ok(())
+        }
+    }
+    #[derive(clap::Args, Debug)]
+    pub struct RemoveKeyValue {
+        /// Asset id for the Store asset (in form of `asset##account@domain_name`)
+        #[clap(long)]
+        pub id: AssetId,
+        /// The key for the store value
+        #[clap(long)]
+        pub key: Name,
+    }
+
+    impl RunArgs for RemoveKeyValue {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self { id: asset_id, key } = self;
+            let remove = iroha::data_model::isi::RemoveKeyValue::asset(asset_id, key);
+            submit([remove], Metadata::default(), context)?;
+            Ok(())
+        }
+    }
+
+    #[derive(clap::Args, Debug)]
+    pub struct GetKeyValue {
+        /// Asset id for the Store asset (in form of `asset##account@domain_name`)
+        #[clap(long)]
+        pub id: AssetId,
+        /// The key for the store value
+        #[clap(long)]
+        pub key: Name,
+    }
+
+    impl RunArgs for GetKeyValue {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self { id: asset_id, key } = self;
+            let client = context.client_from_config();
+            let asset = client
+                .query(FindAssets)
+                .filter_with(|asset| asset.id.eq(asset_id))
+                .select_with(|asset| asset.value.store.key(key))
+                .execute_single()
+                .wrap_err("Failed to get key-value")?;
+
+            context.print_data(&asset)?;
+            Ok(())
+        }
+    }
+}
+
+mod peer {
+    use super::*;
+
+    /// Subcommand for dealing with peer
+    #[derive(clap::Subcommand, Debug)]
+    pub enum Args {
+        /// Register subcommand of peer
+        Register(Box<Register>),
+        /// Unregister subcommand of peer
+        Unregister(Box<Unregister>),
+    }
+
+    impl RunArgs for Args {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            match self {
+                Args::Register(register) => RunArgs::run(*register, context),
+                Args::Unregister(unregister) => RunArgs::run(*unregister, context),
+            }
+        }
+    }
+
+    /// Register subcommand of peer
+    #[derive(clap::Args, Debug)]
+    pub struct Register {
+        /// Public key of the peer
+        #[arg(short, long)]
+        pub key: PublicKey,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    impl RunArgs for Register {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self { key, metadata } = self;
+            let register_peer = iroha::data_model::isi::Register::peer(key.into());
+            submit([register_peer], metadata.load()?, context).wrap_err("Failed to register peer")
+        }
+    }
+
+    /// Unregister subcommand of peer
+    #[derive(clap::Args, Debug)]
+    pub struct Unregister {
+        /// Public key of the peer
+        #[arg(short, long)]
+        pub key: PublicKey,
+        #[command(flatten)]
+        pub metadata: MetadataArgs,
+    }
+
+    impl RunArgs for Unregister {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let Self { key, metadata } = self;
+            let unregister_peer = iroha::data_model::isi::Unregister::peer(key.into());
+            submit([unregister_peer], metadata.load()?, context)
+                .wrap_err("Failed to unregister peer")
+        }
+    }
+}
+
+mod wasm {
+    use std::{io::Read, path::PathBuf};
+
use super::*; + + /// Subcommand for dealing with Wasm + #[derive(Debug, clap::Args)] + pub struct Args { + /// Specify a path to the Wasm file or skip this flag to read from stdin + #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))] + path: Option, + } + + impl RunArgs for Args { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + let raw_data = if let Some(path) = self.path { + read_file(path).wrap_err("Failed to read a Wasm from the file into the buffer")? + } else { + let mut buf = Vec::::new(); + stdin() + .read_to_end(&mut buf) + .wrap_err("Failed to read a Wasm from stdin into the buffer")?; + buf + }; + + submit( + WasmSmartContract::from_compiled(raw_data), + Metadata::default(), + context, + ) + .wrap_err("Failed to submit a Wasm smart contract") + } + } +} + +mod json { + use std::io::{BufReader, Read as _}; + + use clap::Subcommand; + use iroha::data_model::query::AnyQueryBox; + + use super::*; + + /// Subcommand for submitting multi-instructions + #[derive(Clone, Copy, Debug, clap::Args)] + pub struct Args { + #[clap(subcommand)] + variant: Variant, + } + + #[derive(Clone, Copy, Debug, Subcommand)] + enum Variant { + Transaction, + Query, + } + + impl RunArgs for Args { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + let mut reader = BufReader::new(stdin()); + let mut raw_content = Vec::new(); + reader.read_to_end(&mut raw_content)?; + + let string_content = String::from_utf8(raw_content)?; + + match self.variant { + Variant::Transaction => { + let instructions: Vec = json5::from_str(&string_content)?; + submit(instructions, Metadata::default(), context) + .wrap_err("Failed to submit parsed instructions") + } + Variant::Query => { + let client = Client::new(context.configuration().clone()); + let query: AnyQueryBox = json5::from_str(&string_content)?; + + match query { + AnyQueryBox::Singular(query) => { + let result = client + .query_single(query) + .wrap_err("Failed to query response")?; + + context.print_data(&result)?; + } + AnyQueryBox::Iterable(query) => { + // we can't really do type-erased iterable queries in a nice way right now... 
+                            use iroha::data_model::query::builder::QueryExecutor;
+
+                            let (mut accumulated_batch, _remaining_items, mut continue_cursor) =
+                                client.start_query(query)?;
+
+                            while let Some(cursor) = continue_cursor {
+                                let (next_batch, _remaining_items, next_continue_cursor) =
+                                    <Client as QueryExecutor>::continue_query(cursor)?;
+
+                                accumulated_batch.extend(next_batch);
+                                continue_cursor = next_continue_cursor;
+                            }
+
+                            // for efficiency reasons iroha encodes query results in a columnar format,
+                            // so we need to transpose the batch to get the format that is more natural for humans
+                            let mut batches = vec![Vec::new(); accumulated_batch.len()];
+                            for batch in accumulated_batch {
+                                // downcast to json and extract the actual array
+                                // dynamic typing is just easier to use here than introducing a bunch of new types only for iroha_cli
+                                let batch = serde_json::to_value(batch)?;
+                                let serde_json::Value::Object(batch) = batch else {
+                                    panic!("Expected the batch serialization to be a JSON object");
+                                };
+                                let (_ty, batch) = batch
+                                    .into_iter()
+                                    .next()
+                                    .expect("Expected the batch to have exactly one key");
+                                let serde_json::Value::Array(batch_vec) = batch else {
+                                    panic!("Expected the batch payload to be a JSON array");
+                                };
+                                for (target, value) in batches.iter_mut().zip(batch_vec) {
+                                    target.push(value);
+                                }
+                            }
+
+                            context.print_data(&batches)?;
+                        }
+                    }
+
+                    Ok(())
+                }
+            }
+        }
+    }
+}
+
+mod multisig {
+    use std::{
+        collections::BTreeMap,
+        io::{BufReader, Read as _},
+        num::{NonZeroU16, NonZeroU64},
+        time::{Duration, SystemTime},
+    };
+
+    use derive_more::{Constructor, Display};
+    use iroha::executor_data_model::isi::multisig::*;
+    use serde::Serialize;
+    use serde_with::{serde_as, DisplayFromStr, SerializeDisplay};
+
+    use super::*;
+
+    /// Arguments for multisig subcommand
+    #[derive(Debug, clap::Subcommand)]
+    pub enum Args {
+        /// Register a multisig account
+        Register(Register),
+        /// Propose a multisig transaction, with `Vec<InstructionBox>` stdin
+        Propose(Propose),
+        /// Approve a multisig transaction
+        Approve(Approve),
+        /// List pending multisig transactions relevant to you
+        #[clap(subcommand)]
+        List(List),
+    }
+
+    impl RunArgs for Args {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            match_all!((self, context), { Args::Register, Args::Propose, Args::Approve, Args::List })
+        }
+    }
+    /// Args to register a multisig account
+    #[derive(Debug, clap::Args)]
+    pub struct Register {
+        /// ID of the multisig account to be registered
+        #[arg(short, long)]
+        pub account: AccountId,
+        /// Signatories of the multisig account
+        #[arg(short, long, num_args(2..))]
+        pub signatories: Vec<AccountId>,
+        /// Relative weights of responsibility of respective signatories
+        #[arg(short, long, num_args(2..))]
+        pub weights: Vec<u8>,
+        /// Threshold of total weight at which the multisig is considered authenticated
+        #[arg(short, long)]
+        pub quorum: u16,
+        /// Time-to-live of multisig transactions made by the multisig account
+        #[arg(short, long, default_value_t = default_transaction_ttl())]
+        pub transaction_ttl: humantime::Duration,
+    }
+
+    fn default_transaction_ttl() -> humantime::Duration {
+        std::time::Duration::from_millis(DEFAULT_MULTISIG_TTL_MS).into()
+    }
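+    // Illustrative example with hypothetical account ids: a quorum of 2 where
+    // alice alone can authenticate, while bob and carol must approve together:
+    //   --signatories alice@wonderland bob@wonderland carol@wonderland
+    //   --weights 2 1 1 --quorum 2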
+    impl RunArgs for Register {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            if self.signatories.len() != self.weights.len() {
+                return Err(eyre!("signatories and weights must be equal in length"));
+            }
+            let register_multisig_account = MultisigRegister::new(
+                self.account,
+                MultisigSpec::new(
+                    self.signatories.into_iter().zip(self.weights).collect(),
+                    NonZeroU16::new(self.quorum).expect("quorum should not be 0"),
+                    self.transaction_ttl
+                        .as_millis()
+                        .try_into()
+                        .ok()
+                        .and_then(NonZeroU64::new)
+                        .expect("ttl should be between 1 ms and 584942417 years"),
+                ),
+            );
+
+            submit([register_multisig_account], Metadata::default(), context)
+                .wrap_err("Failed to register multisig account")
+        }
+    }
+
+    /// Args to propose a multisig transaction
+    #[derive(Debug, clap::Args)]
+    pub struct Propose {
+        /// Multisig authority of the multisig transaction
+        #[arg(short, long)]
+        pub account: AccountId,
+        /// Time-to-live of the multisig transaction, overriding (shortening) the account default
+        #[arg(short, long)]
+        pub transaction_ttl: Option<humantime::Duration>,
+    }
+
+    impl RunArgs for Propose {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let instructions: Vec<InstructionBox> = {
+                let mut reader = BufReader::new(stdin());
+                let mut raw_content = Vec::new();
+                reader.read_to_end(&mut raw_content)?;
+                let string_content = String::from_utf8(raw_content)?;
+                json5::from_str(&string_content)?
+            };
+            let transaction_ttl_ms = self.transaction_ttl.map(|duration| {
+                duration
+                    .as_millis()
+                    .try_into()
+                    .ok()
+                    .and_then(NonZeroU64::new)
+                    .expect("ttl should be between 1 ms and 584942417 years")
+            });
+
+            let instructions_hash = HashOf::new(&instructions);
+            println!("{instructions_hash}");
+
+            let propose_multisig_transaction =
+                MultisigPropose::new(self.account, instructions, transaction_ttl_ms);
+
+            submit([propose_multisig_transaction], Metadata::default(), context)
+                .wrap_err("Failed to propose transaction")
+        }
+    }
+
+    /// Args to approve a multisig transaction
+    #[derive(Debug, clap::Args)]
+    pub struct Approve {
+        /// Multisig authority of the multisig transaction
+        #[arg(short, long)]
+        pub account: AccountId,
+        /// Instructions to approve
+        #[arg(short, long)]
+        pub instructions_hash: ProposalKey,
+    }
+
+    impl RunArgs for Approve {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let approve_multisig_transaction =
+                MultisigApprove::new(self.account, self.instructions_hash);
+
+            submit([approve_multisig_transaction], Metadata::default(), context)
+                .wrap_err("Failed to approve transaction")
+        }
+    }
+
+    /// List pending multisig transactions relevant to you
+    #[derive(clap::Subcommand, Debug, Clone)]
+    pub enum List {
+        /// All pending multisig transactions relevant to you
+        All,
+    }
+
+    impl RunArgs for List {
+        fn run(self, context: &mut dyn RunContext) -> Result<()> {
+            let client = context.client_from_config();
+            let me = client.account.clone();
+            let Ok(my_multisig_roles) = client
+                .query(FindRolesByAccountId::new(me.clone()))
+                .filter_with(|role_id| role_id.name.starts_with(MULTISIG_SIGNATORY))
+                .execute_all()
+            else {
+                return Ok(());
+            };
+            let mut stack = my_multisig_roles
+                .iter()
+                .filter_map(multisig_account_from)
+                .map(|account_id| Context::new(me.clone(), account_id, None))
+                .collect();
+            let mut proposals = BTreeMap::new();
+
+            fold_proposals(&mut proposals, &mut stack, &client)?;
+            context.print_data(&proposals)?;
+
+            Ok(())
+        }
+    }
+
+    const DELIMITER: char = '/';
+    const MULTISIG: &str = "multisig";
+    const MULTISIG_SIGNATORY: &str = "MULTISIG_SIGNATORY";
+
+    fn spec_key() -> Name {
+        format!("{MULTISIG}{DELIMITER}spec").parse().unwrap()
+    }
+
+    fn proposal_key_prefix() -> String {
+        format!("{MULTISIG}{DELIMITER}proposals{DELIMITER}")
+    }
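+    // Signatory role names follow the scheme
+    // `MULTISIG_SIGNATORY/<domain>/<multisig account>`, so the function below
+    // recovers the multisig account id as `<multisig account>@<domain>`.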
+    fn multisig_account_from(role: &RoleId) -> Option<AccountId> {
+        role.name()
+            .as_ref()
+            .strip_prefix(MULTISIG_SIGNATORY)?
+            .rsplit_once(DELIMITER)
+            .and_then(|(init, last)| {
+                format!("{last}@{}", init.trim_matches(DELIMITER))
+                    .parse()
+                    .ok()
+            })
+    }
+
+    type PendingProposals = BTreeMap<ProposalKey, ProposalStatus>;
+
+    type ProposalKey = HashOf<Vec<InstructionBox>>;
+
+    #[serde_as]
+    #[derive(Debug, Serialize, Constructor)]
+    struct ProposalStatus {
+        instructions: Vec<InstructionBox>,
+        #[serde_as(as = "DisplayFromStr")]
+        proposed_at: humantime::Timestamp,
+        #[serde_as(as = "DisplayFromStr")]
+        expires_in: humantime::Duration,
+        approval_path: Vec<ApprovalEdge>,
+    }
+
+    impl Default for ProposalStatus {
+        fn default() -> Self {
+            Self::new(
+                Vec::new(),
+                SystemTime::UNIX_EPOCH.into(),
+                Duration::ZERO.into(),
+                Vec::new(),
+            )
+        }
+    }
+
+    #[derive(Debug, SerializeDisplay, Display, Constructor)]
+    #[display(fmt = "{weight} {} [{got}/{quorum}] {target}", "self.relation()")]
+    struct ApprovalEdge {
+        weight: u8,
+        has_approved: bool,
+        got: u16,
+        quorum: u16,
+        target: AccountId,
+    }
+
+    impl ApprovalEdge {
+        fn relation(&self) -> &str {
+            if self.has_approved {
+                "joined"
+            } else {
+                "->"
+            }
+        }
+    }
+
+    #[derive(Debug, Constructor)]
+    struct Context {
+        child: AccountId,
+        this: AccountId,
+        key_span: Option<(ProposalKey, ProposalKey)>,
+    }
+
+    fn fold_proposals(
+        proposals: &mut PendingProposals,
+        stack: &mut Vec<Context>,
+        client: &Client,
+    ) -> Result<()> {
+        let Some(context) = stack.pop() else {
+            return Ok(());
+        };
+        let account = client
+            .query(FindAccounts)
+            .filter_with(|account| account.id.eq(context.this.clone()))
+            .execute_single()?;
+        let spec: MultisigSpec = account
+            .metadata()
+            .get(&spec_key())
+            .unwrap()
+            .try_into_any()?;
+        for (proposal_key, proposal_value) in account
+            .metadata()
+            .iter()
+            .filter_map(|(k, v)| {
+                k.as_ref().strip_prefix(&proposal_key_prefix()).map(|k| {
+                    (
+                        k.parse::<ProposalKey>().unwrap(),
+                        v.try_into_any::<MultisigProposalValue>().unwrap(),
+                    )
+                })
+            })
+            .filter(|(k, _v)| context.key_span.map_or(true, |(_, top)| *k == top))
+        {
+            let mut is_root_proposal = true;
+            for instruction in &proposal_value.instructions {
+                let InstructionBox::Custom(instruction) = instruction else {
+                    continue;
+                };
+                let Ok(MultisigInstructionBox::Approve(approve)) = instruction.payload().try_into()
+                else {
+                    continue;
+                };
+                is_root_proposal = false;
+                let leaf = context.key_span.map_or(proposal_key, |(leaf, _)| leaf);
+                let top = approve.instructions_hash;
+                stack.push(Context::new(
+                    context.this.clone(),
+                    approve.account,
+                    Some((leaf, top)),
+                ));
+            }
+            let proposal_status = match context.key_span {
+                None => proposals.entry(proposal_key).or_default(),
+                Some((leaf, _)) => proposals.get_mut(&leaf).unwrap(),
+            };
+            let edge = ApprovalEdge::new(
+                *spec.signatories.get(&context.child).unwrap(),
+                proposal_value.approvals.contains(&context.child),
+                spec.signatories
+                    .iter()
+                    .filter(|(id, _)| proposal_value.approvals.contains(id))
+                    .map(|(_, weight)| u16::from(*weight))
+                    .sum(),
+                spec.quorum.into(),
+                context.this.clone(),
+            );
+            proposal_status.approval_path.push(edge);
+            if is_root_proposal {
+                proposal_status.instructions = proposal_value.instructions;
+                proposal_status.proposed_at = {
+                    let proposed_at = Duration::from_secs(
+                        Duration::from_millis(proposal_value.proposed_at_ms.into()).as_secs(),
+                    );
+                    SystemTime::UNIX_EPOCH
+                        .checked_add(proposed_at)
+                        .unwrap()
+                        .into()
+                };
+                proposal_status.expires_in = {
+                    let now = SystemTime::now()
+                        .duration_since(SystemTime::UNIX_EPOCH)
+                        .unwrap();
+                    let expires_at = Duration::from_millis(proposal_value.expires_at_ms.into());
+                    Duration::from_secs(expires_at.saturating_sub(now).as_secs()).into()
+                };
+            }
+        }
+
+        
fold_proposals(proposals, stack, client) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_value_arg_cases() { + macro_rules! case { + ($input:expr, $expected:expr) => { + let MetadataValueArg { value } = + $input.parse().expect("should not fail with valid input"); + assert_eq!(value, $expected); + }; + } + + // Boolean values + case!("true", Json::new(true)); + case!("false", Json::new(false)); + + // Numeric values + case!("\"123\"", Json::new(numeric!(123))); + case!("\"123.0\"", Json::new(numeric!(123.0))); + + // JSON Value + let json_str = r#"{"Vec":[{"String":"a"},{"String":"b"}]}"#; + case!(json_str, serde_json::from_str(json_str).unwrap()); + } +} diff --git a/crates/iroha_cli/src/codec.rs b/crates/iroha_cli/src/codec.rs new file mode 100644 index 00000000000..75b2a64daf3 --- /dev/null +++ b/crates/iroha_cli/src/codec.rs @@ -0,0 +1,435 @@ +use core::num::{NonZeroU32, NonZeroU64}; +use std::{ + collections::{BTreeMap, BTreeSet}, + fmt::Debug, + fs, + fs::File, + io, + io::{BufRead, BufReader, BufWriter, Read, Write}, + marker::PhantomData, + path::PathBuf, +}; + +use colored::*; +use eyre::{eyre, Result}; +use iroha_schema_gen::complete_data_model::*; +use parity_scale_codec::{DecodeAll, Encode}; +use serde::{de::DeserializeOwned, Serialize}; + +use crate::options; + +/// Arguments for codec subcommand +#[derive(clap::Args, Debug)] +pub struct Args { + /// Subcommands related to codec + #[clap(subcommand)] + command: Command, +} + +#[derive(clap::Subcommand, Debug)] +enum Command { + /// Show all available types + ListTypes, + /// Decode SCALE to Rust debug format from binary file + ScaleToRust(rust::Args), + /// Decode SCALE to JSON. By default uses stdin and stdout + ScaleToJson(json::Args), + /// Encode JSON as SCALE. By default uses stdin and stdout + JsonToScale(json::Args), +} + +mod rust { + use super::*; + + #[derive(clap::Args, Debug)] + pub struct Args { + /// Path to the binary with encoded Iroha structure + #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))] + pub binary: PathBuf, + /// Type that is expected to be encoded in binary. 
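+        /// Use the `list-types` subcommand to see the supported type names.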
+        /// If not specified then a guess will be attempted
+        #[clap(short, long = "type")]
+        pub type_name: Option<String>,
+    }
+
+    /// Type decoder
+    pub struct ScaleDecoder<'map> {
+        args: Args,
+        map: &'map ConverterMap<dyn Converter>,
+    }
+
+    impl<'map> ScaleDecoder<'map> {
+        /// Create new `Decoder` with `args` and `map`
+        pub fn new(args: Args, map: &'map ConverterMap<dyn Converter>) -> Self {
+            Self { args, map }
+        }
+
+        /// Decode type and print to `writer`
+        pub fn scale_to_rust<W: io::Write>(&self, writer: &mut W) -> Result<()> {
+            let bytes = fs::read(self.args.binary.clone())?;
+
+            if let Some(type_name) = &self.args.type_name {
+                return self.decode_by_type(type_name, &bytes, writer);
+            }
+            self.decode_by_guess(&bytes, writer)
+        }
+
+        /// Decode concrete `type` from `bytes` and print to `writer`
+        fn decode_by_type<W: io::Write>(
+            &self,
+            type_name: &str,
+            bytes: &[u8],
+            writer: &mut W,
+        ) -> Result<()> {
+            self.map.get(type_name).map_or_else(
+                || Err(eyre!("Unknown type: `{type_name}`")),
+                |converter| Self::dump_decoded(converter.as_ref(), bytes, writer),
+            )
+        }
+
+        /// Try to decode every type from `bytes` and print to `writer`
+        // TODO: Can be parallelized when there will be too many types
+        fn decode_by_guess<W: io::Write>(&self, bytes: &[u8], writer: &mut W) -> Result<()> {
+            let count = self
+                .map
+                .iter()
+                .filter_map(|(type_name, converter)| {
+                    let mut buf = Vec::new();
+                    Self::dump_decoded(converter.as_ref(), bytes, &mut buf).ok()?;
+                    let formatted = String::from_utf8(buf).ok()?;
+                    writeln!(writer, "{}:\n{}", type_name.italic().cyan(), formatted).ok()
+                })
+                .count();
+            match count {
+                0 => writeln!(writer, "No compatible types found"),
+                1 => writeln!(writer, "{} compatible type found", "1".bold()),
+                n => writeln!(writer, "{} compatible types found", n.to_string().bold()),
+            }
+            .map_err(Into::into)
+        }
+
+        fn dump_decoded(
+            converter: &dyn Converter,
+            input: &[u8],
+            w: &mut dyn io::Write,
+        ) -> Result<()> {
+            let result = converter.rust_from_scale(input)?;
+            writeln!(w, "{result}")?;
+            Ok(())
+        }
+    }
+
+    pub trait Converter {
+        fn rust_from_scale(&self, input: &[u8]) -> Result<String>;
+    }
+
+    pub struct ConverterImpl<T>(PhantomData<T>);
+    impl<T> ConverterImpl<T> {
+        #[allow(clippy::unnecessary_box_returns)]
+        pub fn new() -> Box<Self> {
+            Box::new(Self(PhantomData))
+        }
+    }
+
+    impl<T> Converter for ConverterImpl<T>
+    where
+        T: Debug + Encode + DecodeAll + Serialize + DeserializeOwned,
+    {
+        fn rust_from_scale(&self, mut input: &[u8]) -> Result<String> {
+            let object = T::decode_all(&mut input)?;
+            Ok(format!("{object:#?}"))
+        }
+    }
+}
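+// Illustrative invocations (subcommand names assume clap's default kebab-case
+// renaming):
+//
+//   iroha codec list-types
+//   iroha codec scale-to-rust --binary samples/codec/account.bin --type NewAccount
+//   iroha codec scale-to-json --input samples/codec/domain.bin --type NewDomain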
+
+mod json {
+    use super::*;
+
+    #[derive(Debug, clap::Args)]
+    pub struct Args {
+        /// Path to the input file
+        #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))]
+        input: Option<PathBuf>,
+        /// Path to the output file
+        #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))]
+        output: Option<PathBuf>,
+        /// Type that is expected to be encoded in input
+        #[clap(short, long = "type")]
+        type_name: String,
+    }
+
+    pub struct ScaleDecoder<'map> {
+        reader: Box<dyn BufRead>,
+        writer: Box<dyn Write>,
+        converter: &'map dyn Converter,
+    }
+
+    impl<'map> ScaleDecoder<'map> {
+        pub fn new(args: Args, map: &'map ConverterMap<dyn Converter>) -> Result<Self> {
+            let reader: Box<dyn BufRead> = match args.input {
+                None => Box::new(io::stdin().lock()),
+                Some(path) => Box::new(BufReader::new(File::open(path)?)),
+            };
+            let writer: Box<dyn Write> = match args.output {
+                None => Box::new(BufWriter::new(io::stdout().lock())),
+                Some(path) => Box::new(BufWriter::new(File::create(path)?)),
+            };
+            let Some(converter) = map.get(&args.type_name) else {
+                return Err(eyre!("Unknown type: `{}`", args.type_name));
+            };
+            Ok(Self {
+                reader,
+                writer,
+                converter: converter.as_ref(),
+            })
+        }
+
+        pub fn scale_to_json(self) -> Result<()> {
+            let Self {
+                mut reader,
+                mut writer,
+                converter,
+            } = self;
+            let mut input = Vec::new();
+            reader.read_to_end(&mut input)?;
+            let output = converter.json_from_scale(&input)?;
+            writeln!(writer, "{output}")?;
+            Ok(())
+        }
+
+        pub fn json_to_scale(self) -> Result<()> {
+            let Self {
+                mut reader,
+                mut writer,
+                converter,
+            } = self;
+            let mut input = String::new();
+            reader.read_to_string(&mut input)?;
+            let output = converter.scale_from_json(&input)?;
+            writer.write_all(&output)?;
+            Ok(())
+        }
+    }
+
+    pub trait Converter {
+        fn json_from_scale(&self, input: &[u8]) -> Result<String>;
+        fn scale_from_json(&self, input: &str) -> Result<Vec<u8>>;
+    }
+
+    pub struct ConverterImpl<T>(PhantomData<T>);
+    impl<T> ConverterImpl<T> {
+        #[allow(clippy::unnecessary_box_returns)]
+        pub fn new() -> Box<Self> {
+            Box::new(Self(PhantomData))
+        }
+    }
+
+    impl<T> Converter for ConverterImpl<T>
+    where
+        T: Debug + Encode + DecodeAll + Serialize + DeserializeOwned,
+    {
+        fn json_from_scale(&self, mut input: &[u8]) -> Result<String> {
+            let object = T::decode_all(&mut input)?;
+            let json = serde_json::to_string(&object)?;
+            Ok(json)
+        }
+
+        fn scale_from_json(&self, input: &str) -> Result<Vec<u8>> {
+            let object: T = serde_json::from_str(input)?;
+            Ok(object.encode())
+        }
+    }
+}
+
+/// Print all supported types from `types` to `writer`
+fn list_types<I, W: io::Write>(types: I, writer: &mut W) -> Result<()>
+where
+    I: Iterator<Item = String>,
+{
+    let mut ntypes = 0;
+    for key in types {
+        writeln!(writer, "{key}")?;
+        ntypes += 1;
+    }
+    if ntypes != 0 {
+        writeln!(writer)?;
+    }
+
+    match ntypes {
+        0 => writeln!(writer, "No type is supported"),
+        1 => writeln!(writer, "{} type is supported", "1".bold()),
+        n => writeln!(writer, "{} types are supported", n.to_string().bold()),
+    }
+    .map_err(Into::into)
+}
+
+type ConverterMap<T> = BTreeMap<String, Box<T>>;
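+// `generate_map!` below instantiates one boxed converter per type exposed by
+// `iroha_schema_gen::map_all_schema_types!`, keyed by the type's schema id, so
+// the decoders can look converters up by type name at runtime.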
+
+macro_rules! generate_map {
+    ($conv:path, $conv_impl:ident) => {{
+        let mut map = ConverterMap::<dyn $conv>::new();
+
+        macro_rules! insert_into_map {
+            ($t:ty) => {{
+                let type_id = <$t as iroha_schema::TypeId>::id();
+                map.insert(type_id, $conv_impl::<$t>::new())
+            }};
+        }
+
+        iroha_schema_gen::map_all_schema_types!(insert_into_map);
+
+        map.insert(
+             as iroha_schema::TypeId>::id(),
+            $conv_impl::::new(),
+        );
+
+        map
+    }};
+}
+
+impl options::RunArgs for Args {
+    fn run(self) -> Result<()> {
+        match self.command {
+            Command::ScaleToRust(args) => {
+                use rust::ConverterImpl;
+                let mut writer = BufWriter::new(io::stdout().lock());
+                let map = generate_map!(rust::Converter, ConverterImpl);
+                let decoder = rust::ScaleDecoder::new(args, &map);
+                decoder.scale_to_rust(&mut writer)
+            }
+            Command::ScaleToJson(args) => {
+                use json::ConverterImpl;
+                let map = generate_map!(json::Converter, ConverterImpl);
+                let decoder = json::ScaleDecoder::new(args, &map)?;
+                decoder.scale_to_json()
+            }
+            Command::JsonToScale(args) => {
+                use json::ConverterImpl;
+                let map = generate_map!(json::Converter, ConverterImpl);
+                let decoder = json::ScaleDecoder::new(args, &map)?;
+                decoder.json_to_scale()
+            }
+            Command::ListTypes => {
+                use rust::ConverterImpl;
+                let mut writer = BufWriter::new(io::stdout().lock());
+                let map = generate_map!(rust::Converter, ConverterImpl);
+                list_types(map.keys().cloned(), &mut writer)
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use iroha_data_model::prelude::*;
+
+    use super::*;
+
+    #[test]
+    fn decode_account_sample() {
+        let account_id =
+            "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland"
+                .parse()
+                .unwrap();
+        let mut metadata = Metadata::default();
+        metadata.insert(
+            "hat".parse().expect("Valid"),
+            "white".parse::<Json>().expect("Valid"),
+        );
+
+        let account = Account::new(account_id).with_metadata(metadata);
+        decode_sample("account.bin", String::from("NewAccount"), &account);
+    }
+
+    #[test]
+    fn decode_domain_sample() {
+        let mut metadata = Metadata::default();
+        metadata.insert("Is_Jabberwocky_alive".parse().expect("Valid"), true);
+        let domain = Domain::new("wonderland".parse().expect("Valid"))
+            .with_logo(
+                "/ipfs/Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu"
+                    .parse()
+                    .expect("Valid"),
+            )
+            .with_metadata(metadata);
+
+        decode_sample("domain.bin", String::from("NewDomain"), &domain);
+    }
+
+    #[test]
+    fn decode_trigger_sample() {
+        let account_id =
+            "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland"
+                .parse::<AccountId>()
+                .unwrap();
+        let rose_definition_id = AssetDefinitionId::new(
+            "wonderland".parse().expect("Valid"),
+            "rose".parse().expect("Valid"),
+        );
+        let rose_id = AssetId::new(rose_definition_id, account_id.clone());
+        let trigger_id = "mint_rose".parse().expect("Valid");
+        let action = Action::new(
+            vec![Mint::asset_numeric(1u32, rose_id)],
+            Repeats::Indefinitely,
+            account_id,
+            DomainEventFilter::new().for_events(DomainEventSet::AnyAccount),
+        );
+
+        let trigger = Trigger::new(trigger_id, action);
+        decode_sample("trigger.bin", String::from("Trigger"), &trigger);
+    }
+
+    fn decode_sample<T: Debug>(sample_path: &str, type_id: String, expected: &T) {
+        let binary = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+            .join("../../samples/codec/")
+            .join(sample_path);
+
+        let args = rust::Args {
+            binary,
+            type_name: Some(type_id),
+        };
+
+        use rust::ConverterImpl;
+        let map = generate_map!(rust::Converter, ConverterImpl);
+        let decoder = rust::ScaleDecoder::new(args, &map);
+        let mut buf = Vec::new();
+        decoder.scale_to_rust(&mut buf).expect("Decoding failed");
+        let output = String::from_utf8(buf).expect("Invalid UTF-8");
+        let expected_output = format!("{expected:#?}\n");
+
+        assert_eq!(output, expected_output);
+    }
+
+    #[test]
+    fn test_decode_encode_account() {
+        test_decode_encode("account.bin", "NewAccount");
+    }
+
+    #[test]
+    fn test_decode_encode_domain() {
+        test_decode_encode("domain.bin", "NewDomain");
+    }
+
+    #[test]
+    fn test_decode_encode_trigger() {
+        test_decode_encode("trigger.bin", "Trigger");
+    }
+
+    fn test_decode_encode(sample_path: &str, type_id: &str) {
+        let binary = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+            .join("../../samples/codec/")
+            .join(sample_path);
+        let scale_expected = fs::read(binary).expect("Couldn't read file");
+
+        use json::ConverterImpl;
+        let map = generate_map!(json::Converter, ConverterImpl);
+        let converter = &map[type_id];
+        let json = converter
+            .json_from_scale(&scale_expected)
+            .expect("Couldn't convert to JSON");
+        let scale_actual = converter
+            .scale_from_json(&json)
+            .expect("Couldn't convert to SCALE");
+        assert_eq!(scale_actual, scale_expected);
+    }
+}
diff --git a/crates/iroha_cli/src/main.rs b/crates/iroha_cli/src/main.rs
index 52ef99b42bd..c31ee4ab4b6 100644
--- a/crates/iroha_cli/src/main.rs
+++ b/crates/iroha_cli/src/main.rs
@@ -1,217 +1,67 @@
-//! Iroha client CLI
+//! Iroha CLI tool
 
-use std::{
-    fs::{self, read as read_file},
-    io::{stdin, stdout},
-    path::PathBuf,
-    str::FromStr,
-    time::Duration,
-};
-
-use erased_serde::Serialize;
-use error_stack::{fmt::ColorMode, IntoReportCompat, ResultExt};
-use eyre::{eyre, Error, Result, WrapErr};
-use futures::TryStreamExt;
-use iroha::{client::Client, config::Config, data_model::prelude::*};
-use iroha_primitives::json::Json;
+use error_stack::{fmt::ColorMode, IntoReportCompat};
+use eyre::Result;
+use options::RunArgs;
 use thiserror::Error;
-use tokio::runtime::Runtime;
-
-/// Re-usable clap `--metadata <PATH>` (`-m`) argument.
-/// Should be combined with `#[command(flatten)]` attr.
-#[derive(clap::Args, Debug, Clone)]
-pub struct MetadataArgs {
-    /// The JSON/JSON5 file with key-value metadata pairs
-    #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))]
-    metadata: Option<PathBuf>,
-}
-impl MetadataArgs {
-    fn load(self) -> Result<Metadata> {
-        let value: Option<Metadata> = self
-            .metadata
-            .map(|path| {
-                let content = fs::read_to_string(&path).wrap_err_with(|| {
-                    eyre!("Failed to read the metadata file `{}`", path.display())
-                })?;
-                let metadata: Metadata = json5::from_str(&content).wrap_err_with(|| {
-                    eyre!(
-                        "Failed to deserialize metadata from file `{}`",
-                        path.display()
-                    )
-                })?;
-                Ok::<_, eyre::Report>(metadata)
-            })
-            .transpose()?;
-
-        Ok(value.unwrap_or_default())
-    }
-}
-
-/// Re-usable clap `--value <VALUE>` (`-v`) argument.
-/// Should be combined with `#[command(flatten)]` attr.
-#[derive(clap::Args, Debug, Clone, PartialEq, Eq)]
-pub struct MetadataValueArg {
-    /// Wrapper around `MetadataValue` to accept possible values and fallback to json.
-    ///
-    /// The following types are supported:
-    /// Numbers: decimal with optional point
-    /// Booleans: false/true
-    /// Objects: e.g. {"Vec":[{"String":"a"},{"String":"b"}]}
-    #[arg(short, long)]
-    value: Json,
-}
-
-impl FromStr for MetadataValueArg {
-    type Err = Error;
-
-    fn from_str(s: &str) -> Result<Self> {
-        Ok(MetadataValueArg {
-            value: Json::from_str(s)?,
-        })
-    }
-}
+mod client;
+mod codec;
+mod options;
+mod wasm;
 
-/// Iroha CLI Client provides an ability to interact with Iroha Peers Web API without direct network usage.
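+// The former `iroha_codec` and `iroha_wasm_builder` binaries are merged into
+// this entry point; illustrative invocations (subcommand names assume clap's
+// default kebab-case renaming):
+//
+//   iroha codec list-types
+//   iroha wasm build ./path/to/executor --out-file executor.wasm
+//   iroha client domain list all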
+/// Top-level arguments for iroha
 #[derive(clap::Parser, Debug)]
 #[command(name = "iroha", version = concat!("version=", env!("CARGO_PKG_VERSION"), " git_commit_sha=", env!("VERGEN_GIT_SHA")), author)]
 struct Args {
-    /// Path to the configuration file
-    #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))]
-    #[clap(default_value = "client.toml")]
-    config: PathBuf,
-    /// More verbose output
-    #[arg(short, long)]
-    verbose: bool,
-    /// Subcommands of client cli
+    /// Subcommands of iroha
     #[command(subcommand)]
-    subcommand: Subcommand,
+    command: Command,
 }
 
+/// Subcommands of the iroha CLI
 #[derive(clap::Subcommand, Debug)]
-enum Subcommand {
-    /// The subcommand related to domains
-    #[clap(subcommand)]
-    Domain(domain::Args),
-    /// The subcommand related to accounts
-    #[clap(subcommand)]
-    Account(account::Args),
-    /// The subcommand related to assets
-    #[clap(subcommand)]
-    Asset(asset::Args),
-    /// The subcommand related to p2p networking
-    #[clap(subcommand)]
-    Peer(peer::Args),
-    /// The subcommand related to event streaming
-    Events(events::Args),
-    /// The subcommand related to Wasm
+enum Command {
+    /// Subcommands related to parity scale encoding
+    Codec(codec::Args),
+    /// Subcommands related to operations with smartcontracts
     Wasm(wasm::Args),
-    /// The subcommand related to block streaming
-    Blocks(blocks::Args),
-    /// The subcommand related to multi-instructions as Json or Json5
-    Json(json::Args),
-    /// The subcommand related to multisig accounts and transactions
-    #[clap(subcommand)]
-    Multisig(multisig::Args),
-}
-
-/// Context inside which command is executed
-trait RunContext {
-    /// Get access to configuration
-    fn configuration(&self) -> &Config;
-
-    fn client_from_config(&self) -> Client {
-        Client::new(self.configuration().clone())
-    }
-
-    /// Serialize and print data
-    ///
-    /// # Errors
-    /// - if serialization fails
-    /// - if printing fails
-    fn print_data(&mut self, data: &dyn Serialize) -> Result<()>;
-}
-
-struct PrintJsonContext<W> {
-    write: W,
-    config: Config,
-}
-
-impl<W: std::io::Write> RunContext for PrintJsonContext<W> {
-    fn configuration(&self) -> &Config {
-        &self.config
-    }
-
-    fn print_data(&mut self, data: &dyn Serialize) -> Result<()> {
-        writeln!(&mut self.write, "{}", serde_json::to_string_pretty(data)?)?;
-        Ok(())
-    }
-}
-
-/// Runs subcommand
-trait RunArgs {
-    /// Runs command
-    ///
-    /// # Errors
-    /// if inner command errors
-    fn run(self, context: &mut dyn RunContext) -> Result<()>;
+    /// Subcommands related to interactions with iroha peers Web API
+    Client(client::Args),
 }
 
 macro_rules! 
match_all { - (($self:ident, $context:ident), { $($variants:path),* $(,)?}) => { + ($self:ident, { $($variants:path),* $(,)?}) => { match $self { - $($variants(variant) => RunArgs::run(variant, $context),)* + $($variants(variant) => RunArgs::run(variant),)* } }; } -impl RunArgs for Subcommand { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - use Subcommand::*; - match_all!((self, context), { Domain, Account, Asset, Peer, Events, Wasm, Blocks, Json, Multisig }) +impl options::RunArgs for Command { + fn run(self) -> Result<()> { + use Command::*; + match_all!(self, { Codec, Wasm, Client }) } } -#[derive(Error, Debug)] -enum MainError { - #[error("Failed to load Iroha client configuration")] - Config, - #[error("Failed to serialize config")] - SerializeConfig, +/// Errors generated by the top-level command +#[derive(Error, Debug, Copy, Clone)] +pub enum CommandError { + /// Executing subcommand #[error("Failed to run the command")] Subcommand, } -fn main() -> error_stack::Result<(), MainError> { - let Args { - config: config_path, - subcommand, - verbose, - } = clap::Parser::parse(); +fn main() -> error_stack::Result<(), CommandError> { + let Args { command } = clap::Parser::parse(); error_stack::Report::set_color_mode(color_mode()); - let config = Config::load(config_path) - // FIXME: would be nice to NOT change the context, it's unnecessary - .change_context(MainError::Config) - .attach_printable("config path was set by `--config` argument")?; - if verbose { - eprintln!( - "Configuration: {}", - &serde_json::to_string_pretty(&config) - .change_context(MainError::SerializeConfig) - .attach_printable("caused by `--verbose` argument")? - ); - } - - let mut context = PrintJsonContext { - write: stdout(), - config, - }; - subcommand - .run(&mut context) + command + .run() .into_report() - .map_err(|report| report.change_context(MainError::Subcommand))?; + .map_err(|report| report.change_context(CommandError::Subcommand))?; Ok(()) } @@ -225,1406 +75,3 @@ fn color_mode() -> ColorMode { ColorMode::None } } - -/// Submit instruction with metadata to network. 
-/// -/// # Errors -/// Fails if submitting over network fails -#[allow(clippy::shadow_unrelated)] -fn submit( - instructions: impl Into, - metadata: Metadata, - context: &mut dyn RunContext, -) -> Result<()> { - let client = context.client_from_config(); - let instructions = instructions.into(); - let tx = client.build_transaction(instructions, metadata); - - #[cfg(not(debug_assertions))] - let err_msg = "Failed to submit transaction."; - #[cfg(debug_assertions)] - let err_msg = format!("Failed to submit transaction {tx:?}"); - let hash = client.submit_transaction_blocking(&tx).wrap_err(err_msg)?; - context.print_data(&hash)?; - - Ok(()) -} - -mod filter { - use iroha::data_model::query::dsl::CompoundPredicate; - use serde::Deserialize; - - use super::*; - - /// Filter for domain queries - #[derive(Clone, Debug, clap::Parser)] - pub struct DomainFilter { - /// Predicate for filtering given as JSON5 string - #[clap(value_parser = parse_json5::>)] - pub predicate: CompoundPredicate, - } - - /// Filter for account queries - #[derive(Clone, Debug, clap::Parser)] - pub struct AccountFilter { - /// Predicate for filtering given as JSON5 string - #[clap(value_parser = parse_json5::>)] - pub predicate: CompoundPredicate, - } - - /// Filter for asset queries - #[derive(Clone, Debug, clap::Parser)] - pub struct AssetFilter { - /// Predicate for filtering given as JSON5 string - #[clap(value_parser = parse_json5::>)] - pub predicate: CompoundPredicate, - } - - /// Filter for asset definition queries - #[derive(Clone, Debug, clap::Parser)] - pub struct AssetDefinitionFilter { - /// Predicate for filtering given as JSON5 string - #[clap(value_parser = parse_json5::>)] - pub predicate: CompoundPredicate, - } - - fn parse_json5(s: &str) -> Result - where - T: for<'a> Deserialize<'a>, - { - json5::from_str(s).map_err(|err| format!("Failed to deserialize filter from JSON5: {err}")) - } -} - -mod events { - - use iroha::data_model::events::pipeline::{BlockEventFilter, TransactionEventFilter}; - - use super::*; - - #[derive(clap::Args, Debug, Clone, Copy)] - pub struct Args { - /// Wait timeout - #[clap(short, long, global = true)] - timeout: Option, - #[clap(subcommand)] - command: Command, - } - - /// Get event stream from Iroha peer - #[derive(clap::Subcommand, Debug, Clone, Copy)] - enum Command { - /// Gets block pipeline events - BlockPipeline, - /// Gets transaction pipeline events - TransactionPipeline, - /// Gets data events - Data, - /// Get execute trigger events - ExecuteTrigger, - /// Get trigger completed events - TriggerCompleted, - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let timeout: Option = self.timeout.map(Into::into); - - match self.command { - Command::TransactionPipeline => { - listen(TransactionEventFilter::default(), context, timeout) - } - Command::BlockPipeline => listen(BlockEventFilter::default(), context, timeout), - Command::Data => listen(DataEventFilter::Any, context, timeout), - Command::ExecuteTrigger => { - listen(ExecuteTriggerEventFilter::new(), context, timeout) - } - Command::TriggerCompleted => { - listen(TriggerCompletedEventFilter::new(), context, timeout) - } - } - } - } - - fn listen( - filter: impl Into, - context: &mut dyn RunContext, - timeout: Option, - ) -> Result<()> { - let filter = filter.into(); - let client = context.client_from_config(); - - if let Some(timeout) = timeout { - eprintln!("Listening to events with filter: {filter:?} and timeout: {timeout:?}"); - let rt = Runtime::new().wrap_err("Failed 
to create runtime.")?; - rt.block_on(async { - let mut stream = client - .listen_for_events_async([filter]) - .await - .expect("Failed to listen for events."); - while let Ok(event) = tokio::time::timeout(timeout, stream.try_next()).await { - context.print_data(&event?)?; - } - eprintln!("Timeout period has expired."); - Result::<()>::Ok(()) - })?; - } else { - eprintln!("Listening to events with filter: {filter:?}"); - client - .listen_for_events([filter]) - .wrap_err("Failed to listen for events.")? - .try_for_each(|event| context.print_data(&event?))?; - } - Ok(()) - } -} - -mod blocks { - use std::num::NonZeroU64; - - use super::*; - - /// Get block stream from Iroha peer - #[derive(clap::Args, Debug, Clone, Copy)] - pub struct Args { - /// Block height from which to start streaming blocks - height: NonZeroU64, - - /// Wait timeout - #[clap(short, long)] - timeout: Option, - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Args { height, timeout } = self; - let timeout: Option = timeout.map(Into::into); - listen(height, context, timeout) - } - } - - fn listen( - height: NonZeroU64, - context: &mut dyn RunContext, - timeout: Option, - ) -> Result<()> { - let client = context.client_from_config(); - if let Some(timeout) = timeout { - eprintln!("Listening to blocks from height: {height} and timeout: {timeout:?}"); - let rt = Runtime::new().wrap_err("Failed to create runtime.")?; - rt.block_on(async { - let mut stream = client - .listen_for_blocks_async(height) - .await - .expect("Failed to listen for blocks."); - while let Ok(event) = tokio::time::timeout(timeout, stream.try_next()).await { - context.print_data(&event?)?; - } - eprintln!("Timeout period has expired."); - Result::<()>::Ok(()) - })?; - } else { - eprintln!("Listening to blocks from height: {height}"); - client - .listen_for_blocks(height) - .wrap_err("Failed to listen for blocks.")? 
- .try_for_each(|event| context.print_data(&event?))?; - } - Ok(()) - } -} - -mod domain { - use super::*; - - /// Arguments for domain subcommand - #[derive(Debug, clap::Subcommand)] - pub enum Args { - /// Register domain - Register(Register), - /// List domains - #[clap(subcommand)] - List(List), - /// Transfer domain - Transfer(Transfer), - /// Edit domain metadata - #[clap(subcommand)] - Metadata(metadata::Args), - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - match_all!((self, context), { Args::Register, Args::List, Args::Transfer, Args::Metadata, }) - } - } - - /// Add subcommand for domain - #[derive(Debug, clap::Args)] - pub struct Register { - /// Domain name as double-quoted string - #[arg(short, long)] - pub id: DomainId, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Register { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { id, metadata } = self; - let create_domain = iroha::data_model::isi::Register::domain(Domain::new(id)); - submit([create_domain], metadata.load()?, context).wrap_err("Failed to create domain") - } - } - - /// List domains with this command - #[derive(clap::Subcommand, Debug, Clone)] - pub enum List { - /// All domains - All, - /// Filter domains by given predicate - Filter(filter::DomainFilter), - } - - impl RunArgs for List { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let client = context.client_from_config(); - - let query = client.query(FindDomains::new()); - - let query = match self { - List::All => query, - List::Filter(filter) => query.filter(filter.predicate), - }; - - let result = query.execute_all().wrap_err("Failed to get all accounts")?; - context.print_data(&result)?; - - Ok(()) - } - } - - /// Transfer a domain between accounts - #[derive(Debug, clap::Args)] - pub struct Transfer { - /// Domain name as double-quited string - #[arg(short, long)] - pub id: DomainId, - /// Account from which to transfer (in form `name@domain_name`) - #[arg(short, long)] - pub from: AccountId, - /// Account to which to transfer (in form `name@domain_name`) - #[arg(short, long)] - pub to: AccountId, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Transfer { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - id, - from, - to, - metadata, - } = self; - let transfer_domain = iroha::data_model::isi::Transfer::domain(from, id, to); - submit([transfer_domain], metadata.load()?, context) - .wrap_err("Failed to transfer domain") - } - } - - mod metadata { - use iroha::data_model::domain::DomainId; - - use super::*; - - /// Edit domain subcommands - #[derive(Debug, Clone, clap::Subcommand)] - pub enum Args { - /// Set domain metadata - Set(Set), - /// Remove domain metadata - Remove(Remove), - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - match_all!((self, context), { Args::Set, Args::Remove, }) - } - } - - /// Set metadata into domain - #[derive(Debug, Clone, clap::Args)] - pub struct Set { - /// A domain id from which metadata is to be removed - #[arg(short, long)] - id: DomainId, - /// A key of metadata - #[arg(short, long)] - key: Name, - #[command(flatten)] - value: MetadataValueArg, - } - - impl RunArgs for Set { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - id, - key, - value: MetadataValueArg { value }, - } = self; - let set_key_value = SetKeyValue::domain(id, key, value); - submit([set_key_value], 
Metadata::default(), context) - .wrap_err("Failed to submit Set instruction") - } - } - - /// Remove metadata into domain by key - #[derive(Debug, Clone, clap::Args)] - pub struct Remove { - /// A domain id from which metadata is to be removed - #[arg(short, long)] - id: DomainId, - /// A key of metadata - #[arg(short, long)] - key: Name, - } - - impl RunArgs for Remove { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { id, key } = self; - let remove_key_value = RemoveKeyValue::domain(id, key); - submit([remove_key_value], Metadata::default(), context) - .wrap_err("Failed to submit Remove instruction") - } - } - } -} - -mod account { - use std::fmt::Debug; - - use super::{Permission as DataModelPermission, *}; - - /// subcommands for account subcommand - #[derive(clap::Subcommand, Debug)] - pub enum Args { - /// Register account - Register(Register), - /// List accounts - #[command(subcommand)] - List(List), - /// Grant a permission to the account - Grant(Grant), - /// List all account permissions - ListPermissions(ListPermissions), - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - match_all!((self, context), { - Args::Register, - Args::List, - Args::Grant, - Args::ListPermissions, - }) - } - } - - /// Register account - #[derive(clap::Args, Debug)] - pub struct Register { - /// Id of account in form `name@domain_name` - #[arg(short, long)] - pub id: AccountId, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Register { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { id, metadata } = self; - let create_account = iroha::data_model::isi::Register::account(Account::new(id)); - submit([create_account], metadata.load()?, context) - .wrap_err("Failed to register account") - } - } - - /// List accounts with this command - #[derive(clap::Subcommand, Debug, Clone)] - pub enum List { - /// All accounts - All, - /// Filter accounts by given predicate - Filter(filter::AccountFilter), - } - - impl RunArgs for List { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let client = context.client_from_config(); - - let query = client.query(FindAccounts::new()); - - let query = match self { - List::All => query, - List::Filter(filter) => query.filter(filter.predicate), - }; - - let result = query.execute_all().wrap_err("Failed to get all accounts")?; - context.print_data(&result)?; - - Ok(()) - } - } - - #[derive(clap::Args, Debug)] - pub struct Grant { - /// Account id - #[arg(short, long)] - pub id: AccountId, - /// The JSON/JSON5 file with a permission token - #[arg(short, long)] - pub permission: Permission, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - /// [`DataModelPermission`] wrapper implementing [`FromStr`] - #[derive(Debug, Clone)] - pub struct Permission(DataModelPermission); - - impl FromStr for Permission { - type Err = Error; - - fn from_str(s: &str) -> Result { - let content = fs::read_to_string(s) - .wrap_err(format!("Failed to read the permission token file {}", &s))?; - let permission: DataModelPermission = json5::from_str(&content).wrap_err(format!( - "Failed to deserialize the permission token from file {}", - &s - ))?; - Ok(Self(permission)) - } - } - - impl RunArgs for Grant { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - id, - permission, - metadata, - } = self; - let grant = iroha::data_model::isi::Grant::account_permission(permission.0, id); - submit([grant], metadata.load()?, context) - 
.wrap_err("Failed to grant the permission to the account") - } - } - - /// List all account permissions - #[derive(clap::Args, Debug)] - pub struct ListPermissions { - /// Account id - #[arg(short, long)] - id: AccountId, - } - - impl RunArgs for ListPermissions { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let client = context.client_from_config(); - let find_all_permissions = FindPermissionsByAccountId::new(self.id); - let permissions = client - .query(find_all_permissions) - .execute_all() - .wrap_err("Failed to get all account permissions")?; - context.print_data(&permissions)?; - Ok(()) - } - } -} - -mod asset { - use iroha::data_model::name::Name; - - use super::*; - - /// Subcommand for dealing with asset - #[derive(clap::Subcommand, Debug)] - pub enum Args { - /// Command for managing asset definitions - #[clap(subcommand)] - Definition(definition::Args), - /// Command for minting asset in existing Iroha account - Mint(Mint), - /// Command for burning asset in existing Iroha account - Burn(Burn), - /// Transfer asset between accounts - Transfer(Transfer), - /// Get info of asset - Get(Get), - /// List assets - #[clap(subcommand)] - List(List), - /// Get a value from a Store asset - GetKeyValue(GetKeyValue), - /// Set a key-value entry in a Store asset - SetKeyValue(SetKeyValue), - /// Remove a key-value entry from a Store asset - RemoveKeyValue(RemoveKeyValue), - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - match_all!( - (self, context), - { Args::Definition, Args::Mint, Args::Burn, Args::Transfer, Args::Get, Args::List, Args::SetKeyValue, Args::RemoveKeyValue, Args::GetKeyValue} - ) - } - } - - mod definition { - use iroha::data_model::asset::{AssetDefinition, AssetDefinitionId, AssetType}; - - use super::*; - - /// Subcommand for managing asset definitions - #[derive(clap::Subcommand, Debug)] - pub enum Args { - /// Command for Registering a new asset - Register(Register), - /// List asset definitions - #[clap(subcommand)] - List(List), - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - match_all!( - (self, context), - { Args::Register, Args::List } - ) - } - } - - /// Register subcommand of asset - #[derive(clap::Args, Debug)] - pub struct Register { - /// Asset definition id for registering (in form of `asset#domain_name`) - #[arg(long)] - pub id: AssetDefinitionId, - /// Mintability of asset - #[arg(short, long)] - pub unmintable: bool, - /// Value type stored in asset - #[arg(short, long)] - pub r#type: AssetType, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Register { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - id: asset_id, - r#type, - unmintable, - metadata, - } = self; - let mut asset_definition = AssetDefinition::new(asset_id, r#type); - if unmintable { - asset_definition = asset_definition.mintable_once(); - } - let create_asset_definition = - iroha::data_model::isi::Register::asset_definition(asset_definition); - submit([create_asset_definition], metadata.load()?, context) - .wrap_err("Failed to register asset") - } - } - - /// List asset definitions with this command - #[derive(clap::Subcommand, Debug, Clone)] - pub enum List { - /// All asset definitions - All, - /// Filter asset definitions by given predicate - Filter(filter::AssetDefinitionFilter), - } - - impl RunArgs for List { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let client = context.client_from_config(); - - let 
query = client.query(FindAssetsDefinitions::new()); - - let query = match self { - List::All => query, - List::Filter(filter) => query.filter(filter.predicate), - }; - - let result = query - .execute_all() - .wrap_err("Failed to get all asset definitions")?; - - context.print_data(&result)?; - Ok(()) - } - } - } - - /// Command for minting asset in existing Iroha account - #[derive(clap::Args, Debug)] - pub struct Mint { - /// Asset id for the asset (in form of `asset##account@domain_name`) - #[arg(long)] - pub id: AssetId, - /// Quantity to mint - #[arg(short, long)] - pub quantity: Numeric, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Mint { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - id: asset_id, - quantity, - metadata, - } = self; - let mint_asset = iroha::data_model::isi::Mint::asset_numeric(quantity, asset_id); - submit([mint_asset], metadata.load()?, context) - .wrap_err("Failed to mint asset of type `Numeric`") - } - } - - /// Command for minting asset in existing Iroha account - #[derive(clap::Args, Debug)] - pub struct Burn { - /// Asset id for the asset (in form of `asset##account@domain_name`) - #[arg(long)] - pub id: AssetId, - /// Quantity to mint - #[arg(short, long)] - pub quantity: Numeric, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Burn { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - id: asset_id, - quantity, - metadata, - } = self; - let burn_asset = iroha::data_model::isi::Burn::asset_numeric(quantity, asset_id); - submit([burn_asset], metadata.load()?, context) - .wrap_err("Failed to burn asset of type `Numeric`") - } - } - - /// Transfer asset between accounts - #[derive(clap::Args, Debug)] - pub struct Transfer { - /// Account to which to transfer (in form `name@domain_name`) - #[arg(long)] - pub to: AccountId, - /// Asset id to transfer (in form like `asset##account@domain_name`) - #[arg(long)] - pub id: AssetId, - /// Quantity of asset as number - #[arg(short, long)] - pub quantity: Numeric, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Transfer { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - to, - id: asset_id, - quantity, - metadata, - } = self; - let transfer_asset = - iroha::data_model::isi::Transfer::asset_numeric(asset_id, quantity, to); - submit([transfer_asset], metadata.load()?, context).wrap_err("Failed to transfer asset") - } - } - - /// Get info of asset - #[derive(clap::Args, Debug)] - pub struct Get { - /// Asset id for the asset (in form of `asset##account@domain_name`) - #[arg(long)] - pub id: AssetId, - } - - impl RunArgs for Get { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { id: asset_id } = self; - let client = context.client_from_config(); - let asset = client - .query(FindAssets::new()) - .filter_with(|asset| asset.id.eq(asset_id)) - .execute_single() - .wrap_err("Failed to get asset.")?; - context.print_data(&asset)?; - Ok(()) - } - } - - /// List assets with this command - #[derive(clap::Subcommand, Debug, Clone)] - pub enum List { - /// All assets - All, - /// Filter assets by given predicate - Filter(filter::AssetFilter), - } - - impl RunArgs for List { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let client = context.client_from_config(); - - let query = client.query(FindAssets::new()); - - let query = match self { - List::All => query, - List::Filter(filter) => query.filter(filter.predicate), - }; - - let 
result = query.execute_all().wrap_err("Failed to get all accounts")?; - context.print_data(&result)?; - - Ok(()) - } - } - - #[derive(clap::Args, Debug)] - pub struct SetKeyValue { - /// Asset id for the Store asset (in form of `asset##account@domain_name`) - #[clap(long)] - pub id: AssetId, - /// The key for the store value - #[clap(long)] - pub key: Name, - #[command(flatten)] - pub value: MetadataValueArg, - } - - impl RunArgs for SetKeyValue { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - id: asset_id, - key, - value: MetadataValueArg { value }, - } = self; - - let set = iroha::data_model::isi::SetKeyValue::asset(asset_id, key, value); - submit([set], Metadata::default(), context)?; - Ok(()) - } - } - #[derive(clap::Args, Debug)] - pub struct RemoveKeyValue { - /// Asset id for the Store asset (in form of `asset##account@domain_name`) - #[clap(long)] - pub id: AssetId, - /// The key for the store value - #[clap(long)] - pub key: Name, - } - - impl RunArgs for RemoveKeyValue { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { id: asset_id, key } = self; - let remove = iroha::data_model::isi::RemoveKeyValue::asset(asset_id, key); - submit([remove], Metadata::default(), context)?; - Ok(()) - } - } - - #[derive(clap::Args, Debug)] - pub struct GetKeyValue { - /// Asset id for the Store asset (in form of `asset##account@domain_name`) - #[clap(long)] - pub id: AssetId, - /// The key for the store value - #[clap(long)] - pub key: Name, - } - - impl RunArgs for GetKeyValue { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { id: asset_id, key } = self; - let client = context.client_from_config(); - let asset = client - .query(FindAssets) - .filter_with(|asset| asset.id.eq(asset_id)) - .select_with(|asset| asset.value.store.key(key)) - .execute_single() - .wrap_err("Failed to get key-value")?; - - context.print_data(&asset)?; - Ok(()) - } - } -} - -mod peer { - use super::*; - - /// Subcommand for dealing with peer - #[derive(clap::Subcommand, Debug)] - pub enum Args { - /// Register subcommand of peer - Register(Box), - /// Unregister subcommand of peer - Unregister(Box), - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - match self { - Args::Register(register) => RunArgs::run(*register, context), - Args::Unregister(unregister) => RunArgs::run(*unregister, context), - } - } - } - - /// Register subcommand of peer - #[derive(clap::Args, Debug)] - pub struct Register { - /// Public key of the peer - #[arg(short, long)] - pub key: PublicKey, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Register { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { key, metadata } = self; - let register_peer = iroha::data_model::isi::Register::peer(key.into()); - submit([register_peer], metadata.load()?, context).wrap_err("Failed to register peer") - } - } - - /// Unregister subcommand of peer - #[derive(clap::Args, Debug)] - pub struct Unregister { - /// Public key of the peer - #[arg(short, long)] - pub key: PublicKey, - #[command(flatten)] - pub metadata: MetadataArgs, - } - - impl RunArgs for Unregister { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { key, metadata } = self; - let unregister_peer = iroha::data_model::isi::Unregister::peer(key.into()); - submit([unregister_peer], metadata.load()?, context) - .wrap_err("Failed to unregister peer") - } - } -} - -mod wasm { - use std::{io::Read, path::PathBuf}; - - 
use super::*; - - /// Subcommand for dealing with Wasm - #[derive(Debug, clap::Args)] - pub struct Args { - /// Specify a path to the Wasm file or skip this flag to read from stdin - #[arg(short, long)] - path: Option, - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let raw_data = if let Some(path) = self.path { - read_file(path).wrap_err("Failed to read a Wasm from the file into the buffer")? - } else { - let mut buf = Vec::::new(); - stdin() - .read_to_end(&mut buf) - .wrap_err("Failed to read a Wasm from stdin into the buffer")?; - buf - }; - - submit( - WasmSmartContract::from_compiled(raw_data), - Metadata::default(), - context, - ) - .wrap_err("Failed to submit a Wasm smart contract") - } - } -} - -mod json { - use std::io::{BufReader, Read as _}; - - use clap::Subcommand; - use iroha::data_model::query::AnyQueryBox; - - use super::*; - - /// Subcommand for submitting multi-instructions - #[derive(Clone, Copy, Debug, clap::Args)] - pub struct Args { - #[clap(subcommand)] - variant: Variant, - } - - #[derive(Clone, Copy, Debug, Subcommand)] - enum Variant { - Transaction, - Query, - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - let mut reader = BufReader::new(stdin()); - let mut raw_content = Vec::new(); - reader.read_to_end(&mut raw_content)?; - - let string_content = String::from_utf8(raw_content)?; - - match self.variant { - Variant::Transaction => { - let instructions: Vec = json5::from_str(&string_content)?; - submit(instructions, Metadata::default(), context) - .wrap_err("Failed to submit parsed instructions") - } - Variant::Query => { - let client = Client::new(context.configuration().clone()); - let query: AnyQueryBox = json5::from_str(&string_content)?; - - match query { - AnyQueryBox::Singular(query) => { - let result = client - .query_single(query) - .wrap_err("Failed to query response")?; - - context.print_data(&result)?; - } - AnyQueryBox::Iterable(query) => { - // we can't really do type-erased iterable queries in a nice way right now... 
- use iroha::data_model::query::builder::QueryExecutor; - - let (mut accumulated_batch, _remaining_items, mut continue_cursor) = - client.start_query(query)?; - - while let Some(cursor) = continue_cursor { - let (next_batch, _remaining_items, next_continue_cursor) = - ::continue_query(cursor)?; - - accumulated_batch.extend(next_batch); - continue_cursor = next_continue_cursor; - } - - // for efficiency reasons iroha encodes query results in a columnar format, - // so we need to transpose the batch to get the format that is more natural for humans - let mut batches = vec![Vec::new(); accumulated_batch.len()]; - for batch in accumulated_batch { - // downcast to json and extract the actual array - // dynamic typing is just easier to use here than introducing a bunch of new types only for iroha_cli - let batch = serde_json::to_value(batch)?; - let serde_json::Value::Object(batch) = batch else { - panic!("Expected the batch serialization to be a JSON object"); - }; - let (_ty, batch) = batch - .into_iter() - .next() - .expect("Expected the batch to have exactly one key"); - let serde_json::Value::Array(batch_vec) = batch else { - panic!("Expected the batch payload to be a JSON array"); - }; - for (target, value) in batches.iter_mut().zip(batch_vec) { - target.push(value); - } - } - - context.print_data(&batches)?; - } - } - - Ok(()) - } - } - } - } -} - -mod multisig { - use std::{ - collections::BTreeMap, - io::{BufReader, Read as _}, - num::{NonZeroU16, NonZeroU64}, - time::{Duration, SystemTime}, - }; - - use derive_more::{Constructor, Display}; - use iroha::executor_data_model::isi::multisig::*; - use serde::Serialize; - use serde_with::{serde_as, DisplayFromStr, SerializeDisplay}; - - use super::*; - - /// Arguments for multisig subcommand - #[derive(Debug, clap::Subcommand)] - pub enum Args { - /// Register a multisig account - Register(Register), - /// Propose a multisig transaction, with `Vec` stdin - Propose(Propose), - /// Approve a multisig transaction - Approve(Approve), - /// List pending multisig transactions relevant to you - #[clap(subcommand)] - List(List), - } - - impl RunArgs for Args { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - match_all!((self, context), { Args::Register, Args::Propose, Args::Approve, Args::List }) - } - } - /// Args to register a multisig account - #[derive(Debug, clap::Args)] - pub struct Register { - /// ID of the multisig account to be registered - #[arg(short, long)] - pub account: AccountId, - /// Signatories of the multisig account - #[arg(short, long, num_args(2..))] - pub signatories: Vec, - /// Relative weights of responsibility of respective signatories - #[arg(short, long, num_args(2..))] - pub weights: Vec, - /// Threshold of total weight at which the multisig is considered authenticated - #[arg(short, long)] - pub quorum: u16, - /// Time-to-live of multisig transactions made by the multisig account - #[arg(short, long, default_value_t = default_transaction_ttl())] - pub transaction_ttl: humantime::Duration, - } - - fn default_transaction_ttl() -> humantime::Duration { - std::time::Duration::from_millis(DEFAULT_MULTISIG_TTL_MS).into() - } - - impl RunArgs for Register { - fn run(self, context: &mut dyn RunContext) -> Result<()> { - if self.signatories.len() != self.weights.len() { - return Err(eyre!("signatories and weights must be equal in length")); - } - let register_multisig_account = MultisigRegister::new( - self.account, - MultisigSpec::new( - self.signatories.into_iter().zip(self.weights).collect(), - 
-                    NonZeroU16::new(self.quorum).expect("quorum should not be 0"),
-                    self.transaction_ttl
-                        .as_millis()
-                        .try_into()
-                        .ok()
-                        .and_then(NonZeroU64::new)
-                        .expect("ttl should be between 1 ms and 584942417 years"),
-                ),
-            );
-
-            submit([register_multisig_account], Metadata::default(), context)
-                .wrap_err("Failed to register multisig account")
-        }
-    }
-
-    /// Args to propose a multisig transaction
-    #[derive(Debug, clap::Args)]
-    pub struct Propose {
-        /// Multisig authority of the multisig transaction
-        #[arg(short, long)]
-        pub account: AccountId,
-        /// Time-to-live of the multisig transaction; overrides the account default (may only shorten it)
-        #[arg(short, long)]
-        pub transaction_ttl: Option<humantime::Duration>,
-    }
-
-    impl RunArgs for Propose {
-        fn run(self, context: &mut dyn RunContext) -> Result<()> {
-            let instructions: Vec<InstructionBox> = {
-                let mut reader = BufReader::new(stdin());
-                let mut raw_content = Vec::new();
-                reader.read_to_end(&mut raw_content)?;
-                let string_content = String::from_utf8(raw_content)?;
-                json5::from_str(&string_content)?
-            };
-            let transaction_ttl_ms = self.transaction_ttl.map(|duration| {
-                duration
-                    .as_millis()
-                    .try_into()
-                    .ok()
-                    .and_then(NonZeroU64::new)
-                    .expect("ttl should be between 1 ms and 584942417 years")
-            });
-
-            let instructions_hash = HashOf::new(&instructions);
-            println!("{instructions_hash}");
-
-            let propose_multisig_transaction =
-                MultisigPropose::new(self.account, instructions, transaction_ttl_ms);
-
-            submit([propose_multisig_transaction], Metadata::default(), context)
-                .wrap_err("Failed to propose transaction")
-        }
-    }
-
-    /// Args to approve a multisig transaction
-    #[derive(Debug, clap::Args)]
-    pub struct Approve {
-        /// Multisig authority of the multisig transaction
-        #[arg(short, long)]
-        pub account: AccountId,
-        /// Instructions to approve
-        #[arg(short, long)]
-        pub instructions_hash: ProposalKey,
-    }
-
-    impl RunArgs for Approve {
-        fn run(self, context: &mut dyn RunContext) -> Result<()> {
-            let approve_multisig_transaction =
-                MultisigApprove::new(self.account, self.instructions_hash);
-
-            submit([approve_multisig_transaction], Metadata::default(), context)
-                .wrap_err("Failed to approve transaction")
-        }
-    }
-
-    /// List pending multisig transactions relevant to you
-    #[derive(clap::Subcommand, Debug, Clone)]
-    pub enum List {
-        /// All pending multisig transactions relevant to you
-        All,
-    }
-
-    impl RunArgs for List {
-        fn run(self, context: &mut dyn RunContext) -> Result<()> {
-            let client = context.client_from_config();
-            let me = client.account.clone();
-            let Ok(my_multisig_roles) = client
-                .query(FindRolesByAccountId::new(me.clone()))
-                .filter_with(|role_id| role_id.name.starts_with(MULTISIG_SIGNATORY))
-                .execute_all()
-            else {
-                return Ok(());
-            };
-            let mut stack = my_multisig_roles
-                .iter()
-                .filter_map(multisig_account_from)
-                .map(|account_id| Context::new(me.clone(), account_id, None))
-                .collect();
-            let mut proposals = BTreeMap::new();
-
-            fold_proposals(&mut proposals, &mut stack, &client)?;
-            context.print_data(&proposals)?;
-
-            Ok(())
-        }
-    }
-
-    const DELIMITER: char = '/';
-    const MULTISIG: &str = "multisig";
-    const MULTISIG_SIGNATORY: &str = "MULTISIG_SIGNATORY";
-
-    fn spec_key() -> Name {
-        format!("{MULTISIG}{DELIMITER}spec").parse().unwrap()
-    }
-
-    fn proposal_key_prefix() -> String {
-        format!("{MULTISIG}{DELIMITER}proposals{DELIMITER}")
-    }
-
-    fn multisig_account_from(role: &RoleId) -> Option<AccountId> {
-        role.name()
-            .as_ref()
-            .strip_prefix(MULTISIG_SIGNATORY)?
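-            // role names look like `MULTISIG_SIGNATORY/<domain>/<signatory>`: rebuild `signatory@domain`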
-            .rsplit_once(DELIMITER)
-            .and_then(|(init, last)| {
-                format!("{last}@{}", init.trim_matches(DELIMITER))
-                    .parse()
-                    .ok()
-            })
-    }
-
-    type PendingProposals = BTreeMap<ProposalKey, ProposalStatus>;
-
-    type ProposalKey = HashOf<Vec<InstructionBox>>;
-
-    #[serde_as]
-    #[derive(Debug, Serialize, Constructor)]
-    struct ProposalStatus {
-        instructions: Vec<InstructionBox>,
-        #[serde_as(as = "DisplayFromStr")]
-        proposed_at: humantime::Timestamp,
-        #[serde_as(as = "DisplayFromStr")]
-        expires_in: humantime::Duration,
-        approval_path: Vec<ApprovalEdge>,
-    }
-
-    impl Default for ProposalStatus {
-        fn default() -> Self {
-            Self::new(
-                Vec::new(),
-                SystemTime::UNIX_EPOCH.into(),
-                Duration::ZERO.into(),
-                Vec::new(),
-            )
-        }
-    }
-
-    #[derive(Debug, SerializeDisplay, Display, Constructor)]
-    #[display(fmt = "{weight} {} [{got}/{quorum}] {target}", "self.relation()")]
-    struct ApprovalEdge {
-        weight: u8,
-        has_approved: bool,
-        got: u16,
-        quorum: u16,
-        target: AccountId,
-    }
-
-    impl ApprovalEdge {
-        fn relation(&self) -> &str {
-            if self.has_approved {
-                "joined"
-            } else {
-                "->"
-            }
-        }
-    }
-
-    #[derive(Debug, Constructor)]
-    struct Context {
-        child: AccountId,
-        this: AccountId,
-        key_span: Option<(ProposalKey, ProposalKey)>,
-    }
-
-    fn fold_proposals(
-        proposals: &mut PendingProposals,
-        stack: &mut Vec<Context>,
-        client: &Client,
-    ) -> Result<()> {
-        let Some(context) = stack.pop() else {
-            return Ok(());
-        };
-        let account = client
-            .query(FindAccounts)
-            .filter_with(|account| account.id.eq(context.this.clone()))
-            .execute_single()?;
-        let spec: MultisigSpec = account
-            .metadata()
-            .get(&spec_key())
-            .unwrap()
-            .try_into_any()?;
-        for (proposal_key, proposal_value) in account
-            .metadata()
-            .iter()
-            .filter_map(|(k, v)| {
-                k.as_ref().strip_prefix(&proposal_key_prefix()).map(|k| {
-                    (
-                        k.parse::<ProposalKey>().unwrap(),
-                        v.try_into_any::<MultisigProposalValue>().unwrap(),
-                    )
-                })
-            })
-            .filter(|(k, _v)| context.key_span.map_or(true, |(_, top)| *k == top))
-        {
-            let mut is_root_proposal = true;
-            for instruction in &proposal_value.instructions {
-                let InstructionBox::Custom(instruction) = instruction else {
-                    continue;
-                };
-                let Ok(MultisigInstructionBox::Approve(approve)) = instruction.payload().try_into()
-                else {
-                    continue;
-                };
-                is_root_proposal = false;
-                let leaf = context.key_span.map_or(proposal_key, |(leaf, _)| leaf);
-                let top = approve.instructions_hash;
-                stack.push(Context::new(
-                    context.this.clone(),
-                    approve.account,
-                    Some((leaf, top)),
-                ));
-            }
-            let proposal_status = match context.key_span {
-                None => proposals.entry(proposal_key).or_default(),
-                Some((leaf, _)) => proposals.get_mut(&leaf).unwrap(),
-            };
-            let edge = ApprovalEdge::new(
-                *spec.signatories.get(&context.child).unwrap(),
-                proposal_value.approvals.contains(&context.child),
-                spec.signatories
-                    .iter()
-                    .filter(|(id, _)| proposal_value.approvals.contains(id))
-                    .map(|(_, weight)| u16::from(*weight))
-                    .sum(),
-                spec.quorum.into(),
-                context.this.clone(),
-            );
-            proposal_status.approval_path.push(edge);
-            if is_root_proposal {
-                proposal_status.instructions = proposal_value.instructions;
-                proposal_status.proposed_at = {
-                    let proposed_at = Duration::from_secs(
-                        Duration::from_millis(proposal_value.proposed_at_ms.into()).as_secs(),
-                    );
-                    SystemTime::UNIX_EPOCH
-                        .checked_add(proposed_at)
-                        .unwrap()
-                        .into()
-                };
-                proposal_status.expires_in = {
-                    let now = SystemTime::now()
-                        .duration_since(SystemTime::UNIX_EPOCH)
-                        .unwrap();
-                    let expires_at = Duration::from_millis(proposal_value.expires_at_ms.into());
-                    Duration::from_secs(expires_at.saturating_sub(now).as_secs()).into()
-                };
-            }
-        }
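-        // depth-first: recurse until the stack of signatory contexts is drained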
-        fold_proposals(proposals, stack, client)
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn parse_value_arg_cases() {
-        macro_rules! case {
-            ($input:expr, $expected:expr) => {
-                let MetadataValueArg { value } =
-                    $input.parse().expect("should not fail with valid input");
-                assert_eq!(value, $expected);
-            };
-        }
-
-        // Boolean values
-        case!("true", Json::new(true));
-        case!("false", Json::new(false));
-
-        // Numeric values
-        case!("\"123\"", Json::new(numeric!(123)));
-        case!("\"123.0\"", Json::new(numeric!(123.0)));
-
-        // JSON Value
-        let json_str = r#"{"Vec":[{"String":"a"},{"String":"b"}]}"#;
-        case!(json_str, serde_json::from_str(json_str).unwrap());
-    }
-}
diff --git a/crates/iroha_cli/src/options.rs b/crates/iroha_cli/src/options.rs
new file mode 100644
index 00000000000..75cdd3e106b
--- /dev/null
+++ b/crates/iroha_cli/src/options.rs
@@ -0,0 +1,10 @@
+use eyre::Result;
+
+/// Runs subcommand
+pub trait RunArgs {
+    /// Runs command
+    ///
+    /// # Errors
+    /// if inner command errors
+    fn run(self) -> Result<()>;
+}
diff --git a/crates/iroha_cli/src/wasm.rs b/crates/iroha_cli/src/wasm.rs
new file mode 100644
index 00000000000..440caa87515
--- /dev/null
+++ b/crates/iroha_cli/src/wasm.rs
@@ -0,0 +1,228 @@
+#![allow(missing_docs)]
+
+use std::path::PathBuf;
+
+use eyre::{eyre, Result, WrapErr};
+use iroha_wasm_builder::Builder;
+use owo_colors::OwoColorize;
+
+use crate::options;
+
+/// Arguments for wasm subcommand
+#[derive(clap::Args, Debug)]
+pub struct Args {
+    /// Subcommands related to smartcontracts
+    #[clap(subcommand)]
+    command: Command,
+}
+
+#[derive(clap::Subcommand, Debug)]
+enum Command {
+    /// Check smartcontract source files (apply cargo check)
+    Check(check::Args),
+    /// Build smartcontract from source files
+    Build(build::Args),
+    /// Run smartcontract tests
+    Test(test::Args),
+}
+
+impl Command {
+    fn run(self) -> Result<()> {
+        use Command::*;
+        match self {
+            Check(args) => args.run(),
+            Build(args) => args.run(),
+            Test(args) => args.run(),
+        }
+    }
+}
+
+mod test {
+    use wasmtime::{Engine, Instance, Module, Store};
+
+    use super::*;
+
+    struct TestMeta<'a> {
+        name: &'a str,
+        ignore: bool,
+    }
+
+    #[derive(clap::Args, Debug)]
+    pub struct Args {
+        #[command(flatten)]
+        common: CommonArgs,
+    }
+
+    impl Args {
+        pub fn run(self) -> Result<()> {
+            // Modules can be compiled through either the text or binary format
+            let engine = Engine::default();
+            let module =
+                Module::from_file(&engine, self.common.path).map_err(|e| eyre::eyre!(e))?;
+            let mut tests = Vec::new();
+            for export in module.exports() {
+                if let Some(name) = export.name().strip_prefix("$webassembly-test$") {
+                    let mut ignore = true;
+                    let name = name.strip_prefix("ignore$").unwrap_or_else(|| {
+                        ignore = false;
+                        name
+                    });
+                    tests.push((export, TestMeta { name, ignore }));
+                }
+            }
+            let total = tests.len();
+
+            eprintln!("\nrunning {total} tests");
+            let mut store = Store::new(&engine, ());
+            let mut instance =
+                Instance::new(&mut store, &module, &[]).map_err(|e| eyre::eyre!(e))?;
+            let mut passed = 0;
+            let mut failed = 0;
+            let mut ignored = 0;
+            for (export, meta) in tests {
+                eprint!("test {} ...", meta.name);
+                if meta.ignore {
+                    ignored += 1;
+                    eprintln!(" ignored");
+                } else {
+                    let f = instance
+                        .get_typed_func::<(), ()>(&mut store, export.name())
+                        .map_err(|e| eyre::eyre!(e))?;
+
+                    let pass = f.call(&mut store, ()).is_ok();
+                    if pass {
+                        passed += 1;
+                        eprintln!(" ok");
+                    } else {
+                        // Reset instance on test failure.
WASM uses `panic=abort`, so + // `Drop`s are not called after test failures, and a failed test + // might leave an instance in an inconsistent state. + store = Store::new(&engine, ()); + instance = + Instance::new(&mut store, &module, &[]).map_err(|e| eyre::eyre!(e))?; + + failed += 1; + eprintln!(" FAILED"); + } + } + } + eprintln!( + "\ntest result: {}. {} passed; {} failed; {} ignored;", + if failed > 0 { "FAILED" } else { "ok" }, + passed, + failed, + ignored, + ); + + if failed > 0 { + Err(eyre!("Some tests failed!")) + } else { + Ok(()) + } + } + } +} + +mod check { + use super::*; + + #[derive(clap::Args, Debug)] + pub struct Args { + #[command(flatten)] + common: CommonArgs, + } + + impl Args { + pub fn run(self) -> Result<()> { + let builder = Builder::new(&self.common.path).show_output(); + builder.check() + } + } +} + +mod build { + use super::*; + + #[derive(clap::Args, Debug)] + pub struct Args { + #[command(flatten)] + common: super::CommonArgs, + /// Optimize WASM output. + #[arg(long)] + optimize: bool, + /// Where to store the output WASM. If the file exists, it will be overwritten. + #[arg(long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))] + out_file: PathBuf, + } + + impl Args { + pub fn run(self) -> Result<()> { + let builder = Builder::new(&self.common.path).show_output(); + + let output = { + // not showing the spinner here, cargo does a progress bar for us + + match builder.build() { + Ok(output) => output, + err => err?, + } + }; + + let output = if self.optimize { + let sp = if std::env::var("CI").is_err() { + Some(spinoff::Spinner::new_with_stream( + spinoff::spinners::Binary, + "Optimizing the output", + None, + spinoff::Streams::Stderr, + )) + } else { + None + }; + + match output.optimize() { + Ok(optimized) => { + if let Some(mut sp) = sp { + sp.success("Output is optimized"); + } + optimized + } + err => { + if let Some(mut sp) = sp { + sp.fail("Optimization failed"); + } + err? 
+ } + } + } else { + output + }; + + std::fs::copy(output.wasm_file_path(), &self.out_file).wrap_err_with(|| { + eyre!( + "Failed to write the resulting file into {}", + self.out_file.display() + ) + })?; + + println!( + "✓ File is written into {}", + self.out_file.display().green().bold() + ); + + Ok(()) + } + } +} + +#[derive(clap::Args, Debug)] +struct CommonArgs { + /// Path to the smartcontract + path: PathBuf, +} + +impl options::RunArgs for Args { + fn run(self) -> Result<()> { + self.command.run() + } +} diff --git a/crates/iroha_codec/Cargo.toml b/crates/iroha_codec/Cargo.toml deleted file mode 100644 index fd1bac9b464..00000000000 --- a/crates/iroha_codec/Cargo.toml +++ /dev/null @@ -1,35 +0,0 @@ -[package] -name = "iroha_codec" - -edition.workspace = true -version.workspace = true -authors.workspace = true - -license.workspace = true - -[lints] -workspace = true - -[dependencies] -iroha_schema = { workspace = true } -iroha_schema_gen = { workspace = true } -iroha_genesis = { workspace = true } - -clap = { workspace = true, features = ["derive", "cargo", "env", "string"] } -eyre = { workspace = true } -parity-scale-codec = { workspace = true } -colored = "2.1.0" -serde_json = { workspace = true, features = ["std"]} -serde = { workspace = true } -supports-color = { workspace = true } - -[dev-dependencies] -iroha_data_model = { workspace = true } - -[build-dependencies] -iroha_data_model = { workspace = true } - -parity-scale-codec = { workspace = true } -serde_json = { workspace = true, features = ["std"]} -serde = { workspace = true } -eyre = { workspace = true } diff --git a/crates/iroha_codec/README.md b/crates/iroha_codec/README.md deleted file mode 100644 index 80cb2dbd6bb..00000000000 --- a/crates/iroha_codec/README.md +++ /dev/null @@ -1,119 +0,0 @@ -# Parity Scale Decoder Tool - -This tool helps you decode **Iroha 2** data types from binaries using [Parity Scale Codec](https://github.com/paritytech/parity-scale-codec). - -## Build - -To build the tool, run: - -```bash -cargo build --bin iroha_codec -``` - -## Usage - -Run Parity Scale Decoder Tool: - -```bash -iroha_codec -``` - -### Subcommands - -| Command | Description | -|-----------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------| -| [`list-types`](#list-types) | List all available data types | -| [`scale-to-json`](#scale-to-json-and-json-to-scale) | Decode the data type from SCALE to JSON | -| [`json-to-scale`](#scale-to-json-and-json-to-scale) | Encode the data type from JSON to SCALE | -| [`scale-to-rust`](#scale-to-rust) | Decode the data type from SCALE binary file to Rust debug format.
Can be used to analyze binary input if data type is not known | -| `help` | Print the help message for the tool or a subcommand | - -## `list-types` - -To list all supported data types, run from the project main directory: - -```bash -./target/debug/iroha_codec list-types -``` - -
<details> <summary>Expand to see expected output</summary>
-
-```
-Account
-AccountEvent
-AccountEventFilter
-AccountEventSet
-AccountId
-AccountMintBox
-AccountPermissionChanged
-AccountRoleChanged
-Action
-Algorithm
-...
-
-344 types are supported
-```
-</details>
-
-## `scale-to-json` and `json-to-scale`
-
-Both commands read data from `stdin` and print the result to `stdout` by default.
-The `--input` and `--output` flags can be used to read from and write to files instead.
-
-These commands require the `--type` argument. If the data type is not known, [`scale-to-rust`](#scale-to-rust) can be used to detect it.
-
-* Decode the specified data type from a binary:
-
-  ```bash
-  ./target/debug/iroha_codec scale-to-json --input <path> --type <type>
-  ```
-
-### `scale-to-json` and `json-to-scale` usage examples
-
-* Decode the `NewAccount` data type from the `samples/account.bin` binary:
-
-  ```bash
-  ./target/debug/iroha_codec scale-to-json --input iroha_codec/samples/account.bin --type NewAccount
-  ```
-
-* Encode the `NewAccount` data type from the `samples/account.json` file:
-
-  ```bash
-  ./target/debug/iroha_codec json-to-scale --input iroha_codec/samples/account.json --output result.bin --type NewAccount
-  ```
-
-## `scale-to-rust`
-
-Decode the data type from a given binary.
-
-| Option     | Description                                                                                                                 | Type                   |
-| ---------- | --------------------------------------------------------------------------------------------------------------------------- | ---------------------- |
-| `--binary` | The path to the binary file with an encoded Iroha structure for the tool to decode.                                        | An owned, mutable path |
-| `--type`   | The data type that is expected to be encoded in the provided binary.<br>If not specified, the tool tries to guess the type. | String                 |
-
-* Decode the specified data type from a binary:
-
-  ```bash
-  ./target/debug/iroha_codec scale-to-rust <path> --type <type>
-  ```
-
-* If you are not sure which data type is encoded in the binary, run the tool without the `--type` option:
-
-  ```bash
-  ./target/debug/iroha_codec scale-to-rust <path>
-  ```
-
-### `scale-to-rust` usage examples
-
-* Decode the `NewAccount` data type from the `samples/account.bin` binary:
-
-  ```bash
-  ./target/debug/iroha_codec scale-to-rust iroha_codec/samples/account.bin --type NewAccount
-  ```
-
-* Decode the `NewDomain` data type from the `samples/domain.bin` binary:
-
-  ```bash
-  ./target/debug/iroha_codec scale-to-rust iroha_codec/samples/domain.bin --type NewDomain
-  ```
diff --git a/crates/iroha_codec/build.rs b/crates/iroha_codec/build.rs
deleted file mode 100644
index 5ad93926c7d..00000000000
--- a/crates/iroha_codec/build.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-//! Build script that auto-updates sample binaries from sources.
-
-use std::{fs, path::PathBuf};
-
-use eyre::Result;
-use iroha_data_model::{account::NewAccount, domain::NewDomain, prelude::*};
-use parity_scale_codec::Encode;
-use serde::de::DeserializeOwned;
-
-fn main() {
-    sample_into_binary_file::<NewAccount>("account").expect("Failed to encode into account.bin.");
-
-    sample_into_binary_file::<NewDomain>("domain").expect("Failed to encode into domain.bin.");
-
-    sample_into_binary_file::<Trigger>("trigger").expect("Failed to encode into trigger.bin.");
-}
-
-fn sample_into_binary_file<T>(filename: &str) -> Result<()>
-where
-    T: Encode + DeserializeOwned,
-{
-    let mut path_to = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-    path_to.push("samples/");
-    path_to.push(filename);
-
-    let path_to_json = path_to.with_extension("json");
-    let path_to_binary = path_to.with_extension("bin");
-
-    println!("cargo:rerun-if-changed={}", path_to_json.to_str().unwrap());
-    let buf = fs::read_to_string(path_to_json)?;
-
-    let sample = serde_json::from_str::<T>(buf.as_str())?;
-
-    let buf = sample.encode();
-
-    fs::write(path_to_binary, buf)?;
-
-    Ok(())
-}
diff --git a/crates/iroha_codec/src/main.rs b/crates/iroha_codec/src/main.rs
deleted file mode 100644
index 2aa7b236eb4..00000000000
--- a/crates/iroha_codec/src/main.rs
+++ /dev/null
@@ -1,433 +0,0 @@
-//! Parity Scale decoder tool for Iroha data types. For usage run with `--help`
-use core::num::{NonZeroU32, NonZeroU64};
-use std::{
-    collections::{BTreeMap, BTreeSet},
-    fmt::Debug,
-    fs,
-    fs::File,
-    io,
-    io::{BufRead, BufReader, BufWriter, Read, Write},
-    marker::PhantomData,
-    path::PathBuf,
-};
-
-use clap::Parser;
-use colored::*;
-use eyre::{eyre, Result};
-use iroha_schema_gen::complete_data_model::*;
-use parity_scale_codec::{DecodeAll, Encode};
-use serde::{de::DeserializeOwned, Serialize};
-
-/// Generate map with types and converter trait object
-fn generate_map() -> ConverterMap {
-    let mut map = ConverterMap::new();
-
-    macro_rules!
insert_into_map { - ($t:ty) => {{ - let type_id = <$t as iroha_schema::TypeId>::id(); - map.insert(type_id, ConverterImpl::<$t>::new()) - }}; - } - - iroha_schema_gen::map_all_schema_types!(insert_into_map); - - map.insert( - as iroha_schema::TypeId>::id(), - ConverterImpl::::new(), - ); - - map -} - -type ConverterMap = BTreeMap>; - -struct ConverterImpl(PhantomData); - -impl ConverterImpl { - #[allow(clippy::unnecessary_box_returns)] - fn new() -> Box { - Box::new(Self(PhantomData)) - } -} - -trait Converter { - fn scale_to_rust(&self, input: &[u8]) -> Result; - fn scale_to_json(&self, input: &[u8]) -> Result; - fn json_to_scale(&self, input: &str) -> Result>; -} - -impl Converter for ConverterImpl -where - T: Debug + Encode + DecodeAll + Serialize + DeserializeOwned, -{ - fn scale_to_rust(&self, mut input: &[u8]) -> Result { - let object = T::decode_all(&mut input)?; - Ok(format!("{object:#?}")) - } - fn scale_to_json(&self, mut input: &[u8]) -> Result { - let object = T::decode_all(&mut input)?; - let json = serde_json::to_string(&object)?; - Ok(json) - } - fn json_to_scale(&self, input: &str) -> Result> { - let object: T = serde_json::from_str(input)?; - Ok(object.encode()) - } -} - -/// Parity Scale decoder tool for Iroha data types -#[derive(Debug, Parser)] -#[clap(version, about, author)] -struct Args { - #[clap(subcommand)] - command: Command, - - /// Whether to enable ANSI colored output or not - /// - /// By default, Iroha determines whether the terminal supports colors or not. - /// - /// In order to disable this flag explicitly, pass `--terminal-colors=false`. - #[arg( - long, - env, - default_missing_value("true"), - default_value(default_terminal_colors_str()), - action(clap::ArgAction::Set), - require_equals(true), - num_args(0..=1), - )] - pub terminal_colors: bool, -} - -#[derive(Debug, Parser)] -enum Command { - /// Show all available types - ListTypes, - /// Decode SCALE to Rust debug format from binary file - ScaleToRust(ScaleToRustArgs), - /// Decode SCALE to JSON. By default uses stdin and stdout - ScaleToJson(ScaleJsonArgs), - /// Encode JSON as SCALE. By default uses stdin and stdout - JsonToScale(ScaleJsonArgs), -} - -#[derive(Debug, clap::Args)] -struct ScaleToRustArgs { - /// Path to the binary with encoded Iroha structure - binary: PathBuf, - /// Type that is expected to be encoded in binary. 
- /// If not specified then a guess will be attempted - #[clap(short, long = "type")] - type_name: Option, -} - -#[derive(Debug, clap::Args)] -struct ScaleJsonArgs { - /// Path to the input file - #[clap(short, long)] - input: Option, - /// Path to the output file - #[clap(short, long)] - output: Option, - /// Type that is expected to be encoded in input - #[clap(short, long = "type")] - type_name: String, -} - -fn is_coloring_supported() -> bool { - supports_color::on(supports_color::Stream::Stdout).is_some() -} - -fn default_terminal_colors_str() -> clap::builder::OsStr { - is_coloring_supported().to_string().into() -} - -fn main() -> Result<()> { - let args = Args::parse(); - - let map = generate_map(); - - match args.command { - Command::ScaleToRust(decode_args) => { - let mut writer = BufWriter::new(io::stdout().lock()); - let decoder = ScaleToRustDecoder::new(decode_args, &map); - decoder.decode(&mut writer) - } - Command::ScaleToJson(args) => { - let decoder = ScaleJsonDecoder::new(args, &map)?; - decoder.scale_to_json() - } - Command::JsonToScale(args) => { - let decoder = ScaleJsonDecoder::new(args, &map)?; - decoder.json_to_scale() - } - Command::ListTypes => { - let mut writer = BufWriter::new(io::stdout().lock()); - list_types(&map, &mut writer) - } - } -} - -/// Type decoder -struct ScaleToRustDecoder<'map> { - args: ScaleToRustArgs, - map: &'map ConverterMap, -} - -impl<'map> ScaleToRustDecoder<'map> { - /// Create new `Decoder` with `args` and `map` - pub fn new(args: ScaleToRustArgs, map: &'map ConverterMap) -> Self { - Self { args, map } - } - - /// Decode type and print to `writer` - pub fn decode(&self, writer: &mut W) -> Result<()> { - let bytes = fs::read(self.args.binary.clone())?; - - if let Some(type_name) = &self.args.type_name { - return self.decode_by_type(type_name, &bytes, writer); - } - self.decode_by_guess(&bytes, writer) - } - - /// Decode concrete `type` from `bytes` and print to `writer` - fn decode_by_type( - &self, - type_name: &str, - bytes: &[u8], - writer: &mut W, - ) -> Result<()> { - self.map.get(type_name).map_or_else( - || Err(eyre!("Unknown type: `{type_name}`")), - |converter| Self::dump_decoded(converter.as_ref(), bytes, writer), - ) - } - - /// Try to decode every type from `bytes` and print to `writer` - // TODO: Can be parallelized when there will be too many types - fn decode_by_guess(&self, bytes: &[u8], writer: &mut W) -> Result<()> { - let count = self - .map - .iter() - .filter_map(|(type_name, converter)| { - let mut buf = Vec::new(); - Self::dump_decoded(converter.as_ref(), bytes, &mut buf).ok()?; - let formatted = String::from_utf8(buf).ok()?; - writeln!(writer, "{}:\n{}", type_name.italic().cyan(), formatted).ok() - }) - .count(); - match count { - 0 => writeln!(writer, "No compatible types found"), - 1 => writeln!(writer, "{} compatible type found", "1".bold()), - n => writeln!(writer, "{} compatible types found", n.to_string().bold()), - } - .map_err(Into::into) - } - - fn dump_decoded(converter: &dyn Converter, input: &[u8], w: &mut dyn io::Write) -> Result<()> { - let result = converter.scale_to_rust(input)?; - writeln!(w, "{result}")?; - Ok(()) - } -} - -struct ScaleJsonDecoder<'map> { - reader: Box, - writer: Box, - converter: &'map dyn Converter, -} - -impl<'map> ScaleJsonDecoder<'map> { - fn new(args: ScaleJsonArgs, map: &'map ConverterMap) -> Result { - let reader: Box = match args.input { - None => Box::new(io::stdin().lock()), - Some(path) => Box::new(BufReader::new(File::open(path)?)), - }; - let writer: Box = match 
args.output { - None => Box::new(BufWriter::new(io::stdout().lock())), - Some(path) => Box::new(BufWriter::new(File::create(path)?)), - }; - let Some(converter) = map.get(&args.type_name) else { - return Err(eyre!("Unknown type: `{}`", args.type_name)); - }; - Ok(Self { - reader, - writer, - converter: converter.as_ref(), - }) - } - - fn scale_to_json(self) -> Result<()> { - let Self { - mut reader, - mut writer, - converter, - } = self; - let mut input = Vec::new(); - reader.read_to_end(&mut input)?; - let output = converter.scale_to_json(&input)?; - writeln!(writer, "{output}")?; - Ok(()) - } - - fn json_to_scale(self) -> Result<()> { - let Self { - mut reader, - mut writer, - converter, - } = self; - let mut input = String::new(); - reader.read_to_string(&mut input)?; - let output = converter.json_to_scale(&input)?; - writer.write_all(&output)?; - Ok(()) - } -} - -/// Print all supported types from `map` to `writer` -fn list_types(map: &ConverterMap, writer: &mut W) -> Result<()> { - for key in map.keys() { - writeln!(writer, "{key}")?; - } - if !map.is_empty() { - writeln!(writer)?; - } - - match map.len() { - 0 => writeln!(writer, "No type is supported"), - 1 => writeln!(writer, "{} type is supported", "1".bold()), - n => writeln!(writer, "{} types are supported", n.to_string().bold()), - } - .map_err(Into::into) -} - -#[cfg(test)] -mod tests { - use iroha_data_model::prelude::*; - - use super::*; - - #[test] - fn decode_account_sample() { - let account_id = - "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland" - .parse() - .unwrap(); - let mut metadata = Metadata::default(); - metadata.insert( - "hat".parse().expect("Valid"), - "white".parse::().expect("Valid"), - ); - - let account = Account::new(account_id).with_metadata(metadata); - decode_sample("account.bin", String::from("NewAccount"), &account); - } - - #[test] - fn decode_domain_sample() { - let mut metadata = Metadata::default(); - metadata.insert("Is_Jabberwocky_alive".parse().expect("Valid"), true); - let domain = Domain::new("wonderland".parse().expect("Valid")) - .with_logo( - "/ipfs/Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu" - .parse() - .expect("Valid"), - ) - .with_metadata(metadata); - - decode_sample("domain.bin", String::from("NewDomain"), &domain); - } - - #[test] - fn decode_trigger_sample() { - let account_id = - "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland" - .parse::() - .unwrap(); - let rose_definition_id = AssetDefinitionId::new( - "wonderland".parse().expect("Valid"), - "rose".parse().expect("Valid"), - ); - let rose_id = AssetId::new(rose_definition_id, account_id.clone()); - let trigger_id = "mint_rose".parse().expect("Valid"); - let action = Action::new( - vec![Mint::asset_numeric(1u32, rose_id)], - Repeats::Indefinitely, - account_id, - DomainEventFilter::new().for_events(DomainEventSet::AnyAccount), - ); - - let trigger = Trigger::new(trigger_id, action); - decode_sample("trigger.bin", String::from("Trigger"), &trigger); - } - - fn decode_sample(sample_path: &str, type_id: String, expected: &T) { - let mut binary = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - binary.push("samples/"); - binary.push(sample_path); - let args = ScaleToRustArgs { - binary, - type_name: Some(type_id), - }; - - let map = generate_map(); - let decoder = ScaleToRustDecoder::new(args, &map); - let mut buf = Vec::new(); - decoder.decode(&mut buf).expect("Decoding failed"); - let output = String::from_utf8(buf).expect("Invalid UTF-8"); - let expected_output 
= format!("{expected:#?}\n"); - - assert_eq!(output, expected_output,); - } - - #[test] - fn test_decode_encode_account() { - test_decode_encode("account.bin", "NewAccount"); - } - - #[test] - fn test_decode_encode_domain() { - test_decode_encode("domain.bin", "NewDomain"); - } - - #[test] - fn test_decode_encode_trigger() { - test_decode_encode("trigger.bin", "Trigger"); - } - - fn test_decode_encode(sample_path: &str, type_id: &str) { - let binary = PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .join("samples/") - .join(sample_path); - let scale_expected = fs::read(binary).expect("Couldn't read file"); - - let map = generate_map(); - let converter = &map[type_id]; - let json = converter - .scale_to_json(&scale_expected) - .expect("Couldn't convert to SCALE"); - let scale_actual = converter - .json_to_scale(&json) - .expect("Couldn't convert to SCALE"); - assert_eq!(scale_actual, scale_expected); - } - - #[test] - fn terminal_colors_works_as_expected() -> eyre::Result<()> { - fn try_with(arg: &str) -> eyre::Result { - // Since arg contains enum Command and we must provide something for it, we use "list-types" - Ok(Args::try_parse_from(["test", arg, "list-types"])?.terminal_colors) - } - - assert_eq!( - Args::try_parse_from(["test", "list-types"])?.terminal_colors, - is_coloring_supported() - ); - assert!(try_with("--terminal-colors")?); - assert!(!try_with("--terminal-colors=false")?); - assert!(try_with("--terminal-colors=true")?); - assert!(try_with("--terminal-colors=random").is_err()); - - Ok(()) - } -} diff --git a/crates/iroha_wasm_builder/README.md b/crates/iroha_wasm_builder/README.md deleted file mode 100644 index cef8a10db00..00000000000 --- a/crates/iroha_wasm_builder/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# `iroha_wasm_builder` - -A CLI for building wasm smartcontracts. - -## Usage - -**Check the smartcontract:** - -```bash -iroha_wasm_builder check path/to/project -``` - -**Build the smartcontract:** - -```bash -iroha_wasm_builder build path/to/project --out-file ./smartcontract.wasm -``` - -**Build with options:** - -```bash -iroha_wasm_builder build path/to/project --optimize --format --out-file ./smartcontract.wasm -``` diff --git a/crates/iroha_wasm_builder/src/main.rs b/crates/iroha_wasm_builder/src/main.rs deleted file mode 100644 index 73b35822d49..00000000000 --- a/crates/iroha_wasm_builder/src/main.rs +++ /dev/null @@ -1,106 +0,0 @@ -#![allow(missing_docs)] - -use std::path::PathBuf; - -use clap::{Args, Parser}; -use color_eyre::eyre::{eyre, Context}; -use iroha_wasm_builder::Builder; -use owo_colors::OwoColorize; - -#[derive(Parser, Debug)] -#[command(name = "iroha_wasm_builder", version, author)] -enum Cli { - /// Apply `cargo check` to the smartcontract - Check { - #[command(flatten)] - common: CommonArgs, - }, - /// Build the smartcontract - Build { - #[command(flatten)] - common: CommonArgs, - /// Optimize WASM output. - #[arg(long)] - optimize: bool, - /// Where to store the output WASM. If the file exists, it will be overwritten. 
- #[arg(long)] - out_file: PathBuf, - }, -} - -#[derive(Args, Debug)] -struct CommonArgs { - /// Path to the smartcontract - path: PathBuf, -} - -fn main() -> color_eyre::Result<()> { - match Cli::parse() { - Cli::Check { - common: CommonArgs { path }, - } => { - let builder = Builder::new(&path).show_output(); - builder.check()?; - } - Cli::Build { - common: CommonArgs { path }, - optimize, - out_file, - } => { - let builder = Builder::new(&path).show_output(); - - let output = { - // not showing the spinner here, cargo does a progress bar for us - - match builder.build() { - Ok(output) => output, - err => err?, - } - }; - - let output = if optimize { - let sp = if std::env::var("CI").is_err() { - Some(spinoff::Spinner::new_with_stream( - spinoff::spinners::Binary, - "Optimizing the output", - None, - spinoff::Streams::Stderr, - )) - } else { - None - }; - - match output.optimize() { - Ok(optimized) => { - if let Some(mut sp) = sp { - sp.success("Output is optimized"); - } - optimized - } - err => { - if let Some(mut sp) = sp { - sp.fail("Optimization failed"); - } - err? - } - } - } else { - output - }; - - std::fs::copy(output.wasm_file_path(), &out_file).wrap_err_with(|| { - eyre!( - "Failed to write the resulting file into {}", - out_file.display() - ) - })?; - - println!( - "✓ File is written into {}", - out_file.display().green().bold() - ); - } - } - - Ok(()) -} diff --git a/crates/iroha_wasm_test_runner/Cargo.toml b/crates/iroha_wasm_test_runner/Cargo.toml deleted file mode 100644 index 1a3499c0553..00000000000 --- a/crates/iroha_wasm_test_runner/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "iroha_wasm_test_runner" - -edition.workspace = true -version.workspace = true -authors.workspace = true -license.workspace = true - -[lints] -workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -wasmtime = { workspace = true } -anyhow = "1.0.86" diff --git a/crates/iroha_wasm_test_runner/src/main.rs b/crates/iroha_wasm_test_runner/src/main.rs deleted file mode 100644 index 951889169e8..00000000000 --- a/crates/iroha_wasm_test_runner/src/main.rs +++ /dev/null @@ -1,82 +0,0 @@ -//! A tool to run `WebAssembly` tests -//! -//! This copies functionality of `webassembly-test-runner`, but with an ability to indicate failure with an exit code. 
-
-use std::process::ExitCode;
-
-use anyhow::{bail, Result};
-use wasmtime::{Engine, Instance, Module, Store};
-
-struct TestMeta<'a> {
-    name: &'a str,
-    ignore: bool,
-}
-
-fn main() -> Result<ExitCode> {
-    let argv0 = std::env::args().next().unwrap();
-
-    let file = match std::env::args().nth(1) {
-        Some(it) => it,
-        None => {
-            bail!("usage: {} tests.wasm", argv0);
-        }
-    };
-    // Modules can be compiled through either the text or binary format
-    let engine = Engine::default();
-    let module = Module::from_file(&engine, file)?;
-    let mut tests = Vec::new();
-    for export in module.exports() {
-        if let Some(name) = export.name().strip_prefix("$webassembly-test$") {
-            let mut ignore = true;
-            let name = name.strip_prefix("ignore$").unwrap_or_else(|| {
-                ignore = false;
-                name
-            });
-            tests.push((export, TestMeta { name, ignore }));
-        }
-    }
-    let total = tests.len();
-
-    eprintln!("\nrunning {total} tests");
-    let mut store = Store::new(&engine, ());
-    let mut instance = Instance::new(&mut store, &module, &[])?;
-    let mut passed = 0;
-    let mut failed = 0;
-    let mut ignored = 0;
-    for (export, meta) in tests {
-        eprint!("test {} ...", meta.name);
-        if meta.ignore {
-            ignored += 1;
-            eprintln!(" ignored");
-        } else {
-            let f = instance.get_typed_func::<(), ()>(&mut store, export.name())?;
-
-            let pass = f.call(&mut store, ()).is_ok();
-            if pass {
-                passed += 1;
-                eprintln!(" ok");
-            } else {
-                // Reset instance on test failure. WASM uses `panic=abort`, so
-                // `Drop`s are not called after test failures, and a failed test
-                // might leave an instance in an inconsistent state.
-                store = Store::new(&engine, ());
-                instance = Instance::new(&mut store, &module, &[])?;
-
-                failed += 1;
-                eprintln!(" FAILED");
-            }
-        }
-    }
-    eprintln!(
-        "\ntest result: {}. {} passed; {} failed; {} ignored;",
-        if failed > 0 { "FAILED" } else { "ok" },
-        passed,
-        failed,
-        ignored,
-    );
-    Ok(if failed > 0 {
-        ExitCode::FAILURE
-    } else {
-        ExitCode::SUCCESS
-    })
-}
diff --git a/flake.nix b/flake.nix
index 74d15a02cd7..b91376730a2 100755
--- a/flake.nix
+++ b/flake.nix
@@ -49,7 +49,6 @@
         "iroha"
         "kagami"
         "kura_inspector"
-        "iroha_codec"
       ];
 
       # HACK: A hook to filter out darwin-specific flags when cross-compiling.
diff --git a/crates/iroha_codec/samples/account.bin b/samples/codec/account.bin similarity index 100% rename from crates/iroha_codec/samples/account.bin rename to samples/codec/account.bin diff --git a/crates/iroha_codec/samples/account.json b/samples/codec/account.json similarity index 100% rename from crates/iroha_codec/samples/account.json rename to samples/codec/account.json diff --git a/crates/iroha_codec/samples/domain.bin b/samples/codec/domain.bin similarity index 100% rename from crates/iroha_codec/samples/domain.bin rename to samples/codec/domain.bin diff --git a/crates/iroha_codec/samples/domain.json b/samples/codec/domain.json similarity index 100% rename from crates/iroha_codec/samples/domain.json rename to samples/codec/domain.json diff --git a/crates/iroha_codec/samples/trigger.bin b/samples/codec/trigger.bin similarity index 100% rename from crates/iroha_codec/samples/trigger.bin rename to samples/codec/trigger.bin diff --git a/crates/iroha_codec/samples/trigger.json b/samples/codec/trigger.json similarity index 100% rename from crates/iroha_codec/samples/trigger.json rename to samples/codec/trigger.json diff --git a/scripts/build_wasm.sh b/scripts/build_wasm.sh index da0ee864647..4dc5d505212 100755 --- a/scripts/build_wasm.sh +++ b/scripts/build_wasm.sh @@ -23,7 +23,7 @@ build() { mkdir -p "$TARGET_DIR/$1" for name in ${NAMES[@]}; do out_file="$TARGET_DIR/$1/$name.wasm" - cargo run --bin iroha_wasm_builder -- build "$CARGO_DIR/$1/$name" --optimize --out-file "$out_file" + cargo run --bin iroha wasm build "$CARGO_DIR/$1/$name" --optimize --out-file "$out_file" done echo "info: WASM $1 build complete" echo "artifacts written to $TARGET_DIR/$1/" diff --git a/wasm/libs/default_executor/README.md b/wasm/libs/default_executor/README.md index d646e872ed7..f27487aa8aa 100644 --- a/wasm/libs/default_executor/README.md +++ b/wasm/libs/default_executor/README.md @@ -1,8 +1,7 @@ # `iroha_default_executor` -Use the [Wasm Builder CLI](../../../crates/iroha_wasm_builder) in order to build it: +Use the [Wasm Builder CLI](../../../crates/iroha) in order to build it: ```bash -cargo run --bin iroha_wasm_builder -- \ - build ./wasm/libs/default_executor --optimize --out-file ./defaults/executor.wasm +cargo run --bin iroha wasm build ./wasm/libs/default_executor --optimize --out-file ./defaults/executor.wasm ``` From 12152f561b01e9931ec9ab35c7703301939a8e8e Mon Sep 17 00:00:00 2001 From: Lohachov Mykhailo Date: Thu, 26 Dec 2024 15:07:29 +0900 Subject: [PATCH 2/2] fix: update doc links Signed-off-by: Lohachov Mykhailo --- docs/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/README.md b/docs/README.md index 457f3a3cf17..e01d2abfafe 100644 --- a/docs/README.md +++ b/docs/README.md @@ -12,6 +12,6 @@ You can also check out [Iroha 2 Whitepaper](./source/iroha_2_whitepaper.md) for In this repository you can find documentation for Iroha 2 tools: +- [Iroha CLI Tool](../crates/iroha_cli/README.md) - [Kagami](../crates/iroha_kagami/README.md) - [Kura Inspector](../crates/kura_inspector/README.md) -- [Parity Scale Decoder Tool](../crates/iroha_codec/README.md)