refactor: migrate to axum (#4718)
* refactor: migrate to axum
* fix: adjust torii pytests
* refactor: exit early if tcp bind fails
* refactor: adjust web socket messages to be more precise
* refactor(torii): add accept header extractor

Signed-off-by: Shanin Roman <[email protected]>
Erigara authored Jul 2, 2024
1 parent 99745dc commit b432910
Showing 21 changed files with 577 additions and 696 deletions.
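
The last commit-message item, the accept header extractor, maps naturally onto axum's extractor model. As a hedged sketch only (the type name and fallback behaviour below are illustrative, not necessarily the code this commit adds), such an extractor can be a FromRequestParts implementation that pulls the Accept header out of the request:

use std::convert::Infallible;

use axum::{
    async_trait,
    extract::FromRequestParts,
    http::{header::ACCEPT, request::Parts},
};

/// Illustrative extractor carrying the raw value of the `Accept` header.
#[derive(Debug, Clone)]
struct Accept(String);

#[async_trait]
impl<S: Send + Sync> FromRequestParts<S> for Accept {
    type Rejection = Infallible;

    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // Fall back to "*/*" when the client sends no Accept header.
        let value = parts
            .headers
            .get(ACCEPT)
            .and_then(|v| v.to_str().ok())
            .unwrap_or("*/*")
            .to_owned();
        Ok(Accept(value))
    }
}

A handler can then take Accept as a parameter and choose the response encoding, for example JSON versus a binary format, based on the negotiated value.
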
272 changes: 165 additions & 107 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 2 additions & 3 deletions Cargo.toml
@@ -106,10 +106,10 @@ getset = "0.1.2"
 hex-literal = "0.4.1"

 rand = { version = "0.8.5", default-features = false, features = ["getrandom", "alloc"] }
-warp = { version = "0.3.7", default-features = false }
+axum = { version = "0.7.5", default-features = false }
 wasmtime = "15.0.1"

-tracing = "0.1.40"
+tracing = { version = "0.1.40", features = ["log"] }
 tracing-subscriber = { version = "0.3.18", default-features = false }

 dashmap = "5.5.3"
@@ -244,7 +244,6 @@ members = [
     "tools/wasm_test_runner",

     "torii",
-    "torii/derive",
     "torii/const",

     "version",
13 changes: 6 additions & 7 deletions cli/src/lib.rs
@@ -391,13 +391,12 @@ impl Iroha {
             metrics_reporter,
         );

-        tokio::spawn(async move {
-            torii
-                .start()
-                .await
-                .into_report()
-                .map_err(|report| report.change_context(StartError::StartTorii))
-        });
+        let run_torii = torii
+            .start()
+            .await
+            .map_err(|report| report.change_context(StartError::StartTorii))?;
+
+        tokio::spawn(run_torii);

        Self::spawn_config_updates_broadcasting(kiso.clone(), logger.clone());

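This hunk is where the "exit early if tcp bind fails" item from the commit message lands: Torii's start() is now awaited in the caller, which suggests it performs the TCP bind before returning the long-running server future, so a bind error propagates through the question-mark operator and only the already-running future is handed to tokio::spawn. A minimal stand-alone sketch of that pattern, assuming axum 0.7 and tokio with the rt-multi-thread, macros and net features; the route and address are illustrative, not Iroha's configuration:

use axum::{routing::get, Router};
use tokio::net::TcpListener;

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    let app = Router::new().route("/health", get(|| async { "Healthy" }));

    // Bind in the caller's context so a failure (e.g. port already in use)
    // propagates through `?` before anything is spawned.
    let listener = TcpListener::bind("127.0.0.1:8080").await?;

    // Only the already-bound listener moves into the background task.
    let server = async move { axum::serve(listener, app).await };
    let handle = tokio::spawn(server);

    // In a real binary the rest of startup would continue here; for the sketch,
    // just wait for the server task to finish.
    handle.await.expect("server task panicked")?;
    Ok(())
}
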
2 changes: 1 addition & 1 deletion client/Cargo.toml
@@ -58,7 +58,7 @@ iroha_primitives = { workspace = true }
 iroha_logger = { workspace = true }
 iroha_telemetry = { workspace = true }
 iroha_torii_const = { workspace = true }
-iroha_version = { workspace = true, features = ["http"] }
+iroha_version = { workspace = true }
 test_samples = { workspace = true }

 attohttpc = { version = "0.28.0", default-features = false }
Binary file modified configs/swarm/executor.wasm
Binary file not shown.
3 changes: 1 addition & 2 deletions data_model/Cargo.toml
@@ -17,7 +17,7 @@ default = ["std"]
 # Please refer to https://docs.rust-embedded.org/book/intro/no-std.html
 std = ["iroha_macro/std", "iroha_version/std", "iroha_crypto/std", "iroha_primitives/std", "thiserror", "displaydoc/std", "strum/std", "once_cell"]
 # Enable API for HTTP requests. Should be activated for HTTP clients
-http = ["std", "warp", "iroha_version/http"]
+http = ["std"]
 # Replace structures and methods with FFI equivalents to facilitate dynamic linkage (mainly used in smartcontracts)
 #ffi_import = ["iroha_ffi", "iroha_primitives/ffi_import", "iroha_crypto/ffi_import"]

@@ -41,7 +41,6 @@ derive_more = { workspace = true, features = ["as_ref", "display", "constructor"
 serde = { workspace = true, features = ["derive"] }
 serde_with = { workspace = true, features = ["macros"] }
 serde_json = { workspace = true }
-warp = { workspace = true, optional = true }
 thiserror = { workspace = true, optional = true }
 displaydoc = { workspace = true }
 getset = { workspace = true }
2 changes: 1 addition & 1 deletion genesis/Cargo.toml
@@ -17,7 +17,7 @@ iroha_data_model = { workspace = true, features = ["http"] }

 derive_more = { workspace = true, features = ["deref"] }
 serde = { workspace = true, features = ["derive"] }
-serde_json = { workspace = true }
+serde_json = { workspace = true, features = ["std"] }
 once_cell = { workspace = true }
 tracing = { workspace = true }
 eyre = { workspace = true }
11 changes: 6 additions & 5 deletions torii/Cargo.toml
@@ -23,29 +23,30 @@ telemetry = ["iroha_telemetry", "iroha_core/telemetry", "serde_json"]
 # Enables profiling endpoint
 profiling = ["pprof"]
 # Enables Data Model Schema endpoint
-schema = ["iroha_schema_gen"]
+schema = ["iroha_schema", "iroha_schema_gen"]

 [dependencies]
-iroha_torii_derive = { path = "derive" }

 iroha_core = { workspace = true }
 iroha_config = { workspace = true }
 iroha_primitives = { workspace = true }
 iroha_logger = { workspace = true }
 iroha_data_model = { workspace = true, features = ["http"] }
-iroha_version = { workspace = true, features = ["http"] }
+iroha_version = { workspace = true }
 iroha_torii_const = { workspace = true }
 iroha_futures = { workspace = true }
 iroha_macro = { workspace = true }
+iroha_schema = { workspace = true, optional = true }
 iroha_schema_gen = { workspace = true, optional = true }
 iroha_telemetry = { workspace = true, optional = true }

 thiserror = { workspace = true }
 displaydoc = { workspace = true }
 futures = { workspace = true, features = ["std", "async-await"] }
-warp = { workspace = true, features = ["multipart", "websocket"] }
+axum = { workspace = true, features = ["multipart", "ws", "query", "json", "tokio", "http1"] }
+tower-http = { version = "0.5.0", features = ["trace", "timeout"] }
 tokio = { workspace = true, features = ["sync", "time", "macros"] }
 eyre = { workspace = true }
 error-stack = { workspace = true, features = ["eyre"] }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true, optional = true }
 async-trait = { workspace = true }
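tower-http enters the dependency tree with the trace and timeout features. As a hedged illustration of what those features provide on an axum Router (the endpoint and the 30-second limit below are assumptions, not values taken from this diff):

use std::time::Duration;

use axum::{routing::get, Router};
use tower_http::{timeout::TimeoutLayer, trace::TraceLayer};

fn app() -> Router {
    Router::new()
        .route("/status", get(|| async { "up" }))
        // Emit a tracing span per request/response pair.
        .layer(TraceLayer::new_for_http())
        // Answer 408 Request Timeout when a request exceeds the limit.
        .layer(TimeoutLayer::new(Duration::from_secs(30)))
}
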
26 changes: 13 additions & 13 deletions torii/const/src/lib.rs
@@ -7,30 +7,30 @@ pub mod uri {
     pub const DEFAULT_API_ADDR: iroha_primitives::addr::SocketAddr =
         iroha_primitives::addr::socket_addr!(127.0.0.1:8080);
     /// Query URI is used to handle incoming Query requests.
-    pub const QUERY: &str = "query";
+    pub const QUERY: &str = "/query";
     /// Transaction URI is used to handle incoming ISI requests.
-    pub const TRANSACTION: &str = "transaction";
+    pub const TRANSACTION: &str = "/transaction";
     /// Block URI is used to handle incoming Block requests.
-    pub const CONSENSUS: &str = "consensus";
+    pub const CONSENSUS: &str = "/consensus";
     /// Health URI is used to handle incoming Healthcheck requests.
-    pub const HEALTH: &str = "health";
+    pub const HEALTH: &str = "/health";
     /// The URI used for block synchronization.
-    pub const BLOCK_SYNC: &str = "block/sync";
+    pub const BLOCK_SYNC: &str = "/block/sync";
     /// The web socket uri used to subscribe to block and transactions statuses.
-    pub const SUBSCRIPTION: &str = "events";
+    pub const SUBSCRIPTION: &str = "/events";
     /// The web socket uri used to subscribe to blocks stream.
-    pub const BLOCKS_STREAM: &str = "block/stream";
+    pub const BLOCKS_STREAM: &str = "/block/stream";
     /// The URI for local config changing inspecting
-    pub const CONFIGURATION: &str = "configuration";
+    pub const CONFIGURATION: &str = "/configuration";
     /// URI to report status for administration
-    pub const STATUS: &str = "status";
+    pub const STATUS: &str = "/status";
     /// Metrics URI is used to export metrics according to [Prometheus
     /// Guidance](https://prometheus.io/docs/instrumenting/writing_exporters/).
-    pub const METRICS: &str = "metrics";
+    pub const METRICS: &str = "/metrics";
     /// URI for retrieving the schema with which Iroha was built.
-    pub const SCHEMA: &str = "schema";
+    pub const SCHEMA: &str = "/schema";
     /// URI for getting the API version currently used
-    pub const API_VERSION: &str = "api_version";
+    pub const API_VERSION: &str = "/api_version";
     /// URI for getting cpu profile
-    pub const PROFILE: &str = "debug/pprof/profile";
+    pub const PROFILE: &str = "/debug/pprof/profile";
 }
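
The leading slash added to every constant matches axum's routing contract: Router::route panics on paths that do not start with a slash, whereas the previous warp filters matched bare path segments. A hedged sketch of plugging such constants into a router; the handlers and HTTP methods are illustrative, only the path values mirror this file:

use axum::{
    routing::{get, post},
    Router,
};

const QUERY: &str = "/query";   // mirrors iroha_torii_const::uri::QUERY
const HEALTH: &str = "/health"; // mirrors iroha_torii_const::uri::HEALTH

async fn handle_query() -> &'static str {
    "query accepted"
}

async fn handle_health() -> &'static str {
    "Healthy"
}

fn router() -> Router {
    // `Router::route` panics on paths without a leading '/', hence the slash
    // baked into the constants above.
    Router::new()
        .route(QUERY, post(handle_query))
        .route(HEALTH, get(handle_health))
}
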
23 changes: 0 additions & 23 deletions torii/derive/Cargo.toml

This file was deleted.

161 changes: 0 additions & 161 deletions torii/derive/src/lib.rs

This file was deleted.

(The remaining changed files are not shown.)

0 comments on commit b432910
