From 34e8e750e37853602a33ff5e57f10ddeaf73754b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marin=20Ver=C5=A1i=C4=87?= Date: Tue, 11 Jul 2023 12:35:06 +0200 Subject: [PATCH] [feature] #3468: implement server-side cursor MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Marin Veršić --- Cargo.lock | 1 + cli/Cargo.toml | 1 + cli/src/torii/mod.rs | 16 +- cli/src/torii/pagination.rs | 2 +- cli/src/torii/routing.rs | 119 +++++++-- cli/src/torii/utils.rs | 2 +- client/src/client.rs | 286 ++++++++++++---------- client/tests/integration/pagination.rs | 2 +- client/tests/integration/tx_history.rs | 2 +- config/src/torii.rs | 5 + core/src/smartcontracts/isi/query.rs | 9 +- core/src/smartcontracts/isi/tx.rs | 8 +- core/src/smartcontracts/mod.rs | 2 +- core/src/tx.rs | 1 + core/src/wsv.rs | 2 +- core/test_network/src/lib.rs | 102 ++++---- data_model/src/isi.rs | 8 +- data_model/src/lib.rs | 14 +- data_model/src/numeric.rs | 2 +- data_model/src/predicate.rs | 30 ++- data_model/src/query/cursor.rs | 6 + data_model/src/{query.rs => query/mod.rs} | 138 +++++++---- data_model/src/{ => query}/pagination.rs | 0 data_model/src/{ => query}/sorting.rs | 2 +- data_model/src/transaction.rs | 27 +- schema/gen/src/lib.rs | 13 +- tools/parity_scale_decoder/src/main.rs | 5 +- wasm/src/lib.rs | 7 +- 28 files changed, 489 insertions(+), 323 deletions(-) create mode 100644 data_model/src/query/cursor.rs rename data_model/src/{query.rs => query/mod.rs} (94%) rename data_model/src/{ => query}/pagination.rs (100%) rename data_model/src/{ => query}/sorting.rs (97%) diff --git a/Cargo.lock b/Cargo.lock index a308139c430..825017219b9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2820,6 +2820,7 @@ version = "2.0.0-pre-rc.16" dependencies = [ "async-trait", "color-eyre", + "dashmap", "eyre", "futures", "iroha_cli_derive", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index f05ff0da5cf..6f7ed12d085 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -55,6 +55,7 @@ iroha_genesis = { version = "=2.0.0-pre-rc.16", path = "../genesis" } iroha_wasm_builder = { version = "=2.0.0-pre-rc.16", path = "../wasm_builder" } +dashmap = "5.4.0" async-trait = "0.1.60" color-eyre = "0.6.2" eyre = "0.6.8" diff --git a/cli/src/torii/mod.rs b/cli/src/torii/mod.rs index 386ec96b4b5..742f59659e8 100644 --- a/cli/src/torii/mod.rs +++ b/cli/src/torii/mod.rs @@ -6,9 +6,11 @@ use std::{ convert::Infallible, fmt::{Debug, Write as _}, net::ToSocketAddrs, + num::NonZeroU64, sync::Arc, }; +use dashmap::DashMap; use futures::{stream::FuturesUnordered, StreamExt}; use iroha_core::{ kura::Kura, @@ -17,6 +19,7 @@ use iroha_core::{ sumeragi::SumeragiHandle, EventsSender, }; +use iroha_data_model::Value; use thiserror::Error; use tokio::sync::Notify; use utils::*; @@ -32,6 +35,9 @@ pub(crate) mod utils; mod pagination; pub mod routing; +type LiveQuery = Box + Send + Sync>; +type LiveQueryStore = DashMap, (LiveQuery, NonZeroU64)>; + /// Main network handler and the only entrypoint of the Iroha. pub struct Torii { iroha_cfg: super::Configuration, @@ -39,6 +45,7 @@ pub struct Torii { events: EventsSender, notify_shutdown: Arc, sumeragi: SumeragiHandle, + query_store: Arc, kura: Arc, } @@ -64,10 +71,13 @@ pub enum Error { /// Error while getting Prometheus metrics #[error("Failed to produce Prometheus metrics")] Prometheus(#[source] eyre::Report), + /// Error while resuming cursor + #[error("Failed to find cursor")] + UnknownCursor, } /// Status code for query error response. 
-pub(crate) fn query_status_code(validation_error: &iroha_data_model::ValidationFail) -> StatusCode { +fn query_status_code(validation_error: &iroha_data_model::ValidationFail) -> StatusCode { use iroha_data_model::{ isi::error::InstructionExecutionError, query::error::QueryExecutionFail::*, ValidationFail::*, @@ -110,7 +120,9 @@ impl Error { use Error::*; match self { Query(e) => query_status_code(e), - AcceptTransaction(_) | ConfigurationReload(_) => StatusCode::BAD_REQUEST, + AcceptTransaction(_) | ConfigurationReload(_) | UnknownCursor => { + StatusCode::BAD_REQUEST + } Config(_) => StatusCode::NOT_FOUND, PushIntoQueue(err) => match **err { queue::Error::Full => StatusCode::INTERNAL_SERVER_ERROR, diff --git a/cli/src/torii/pagination.rs b/cli/src/torii/pagination.rs index 2a20556a686..3cb7e8772bd 100644 --- a/cli/src/torii/pagination.rs +++ b/cli/src/torii/pagination.rs @@ -1,4 +1,4 @@ -use iroha_data_model::prelude::*; +use iroha_data_model::query::Pagination; /// Describes a collection to which pagination can be applied. /// Implemented for the [`Iterator`] implementors. diff --git a/cli/src/torii/routing.rs b/cli/src/torii/routing.rs index bed382b7bc7..9b689e5bed3 100644 --- a/cli/src/torii/routing.rs +++ b/cli/src/torii/routing.rs @@ -5,7 +5,7 @@ // FIXME: This can't be fixed, because one trait in `warp` is private. #![allow(opaque_hidden_inferred_bound)] -use std::cmp::Ordering; +use std::{cmp::Ordering, num::NonZeroUsize}; use eyre::WrapErr; use futures::TryStreamExt; @@ -28,23 +28,30 @@ use iroha_data_model::{ VersionedCommittedBlock, }, prelude::*, + query::{ForwardCursor, Pagination, Sorting}, }; use iroha_logger::prelude::*; #[cfg(feature = "telemetry")] use iroha_telemetry::metrics::Status; use pagination::{paginate, Paginate}; +use parity_scale_codec::Encode; use tokio::task; use super::*; use crate::stream::{Sink, Stream}; /// Filter for warp which extracts sorting -pub fn sorting() -> impl warp::Filter + Copy { +fn sorting() -> impl warp::Filter + Copy { + warp::query() +} + +/// Filter for warp which extracts cursor +fn cursor() -> impl warp::Filter + Copy { warp::query() } #[iroha_futures::telemetry_future] -pub(crate) async fn handle_instructions( +async fn handle_instructions( queue: Arc, sumeragi: SumeragiHandle, transaction: VersionedSignedTransaction, @@ -68,29 +75,84 @@ pub(crate) async fn handle_instructions( } #[iroha_futures::telemetry_future] -pub(crate) async fn handle_queries( +async fn handle_queries( sumeragi: SumeragiHandle, - pagination: Pagination, - sorting: Sorting, + query_store: Arc, + fetch_size: NonZeroU64, + request: VersionedSignedQuery, -) -> Result> { - let mut wsv = sumeragi.wsv_clone(); + sorting: Sorting, + pagination: Pagination, - let valid_request = ValidQueryRequest::validate(request, &mut wsv)?; - let result = valid_request.execute(&wsv).map_err(ValidationFail::from)?; + cursor: ForwardCursor, +) -> Result> { + let encoded_request = (&request, &sorting, &pagination).encode(); - let result = match result { - LazyValue::Value(value) => value, - LazyValue::Iter(iter) => { - Value::Vec(apply_sorting_and_pagination(iter, &sorting, pagination)) + let mut live_query: (_, NonZeroU64) = if let Some(cursor) = cursor { + if let Some((_, live_query)) = query_store.remove(&encoded_request) { + if cursor != live_query.1 { + return Err(Error::UnknownCursor); + } + + live_query + } else { + return Err(Error::UnknownCursor); + } + } else { + let mut wsv = sumeragi.wsv_clone(); + + let valid_request = ValidQueryRequest::validate(request, &mut wsv)?; + 
let res = valid_request.execute(&wsv).map_err(ValidationFail::from)?; + + match res { + LazyValue::Iter(iter) => ( + Box::new(apply_sorting_and_pagination(iter, &sorting, pagination).into_iter()), + NonZeroU64::new(0).expect("Valid"), + ), + LazyValue::Value(result) => { + return Ok(Scale( + QueryResponse { + result, + cursor: None, + pagination, + sorting, + } + .into(), + )); + } } }; - let paginated_result = QueryResult { - result, + let result = live_query + .0 + .by_ref() + .take( + fetch_size + .get() + .try_into() + .expect("u64 larger than usize::MAX"), + ) + .collect::>(); + + let cursor = if result.len() as u64 >= fetch_size.get() { + query_store.insert(encoded_request, live_query); + + cursor.map(|cursor| { + cursor + .checked_add(fetch_size.get()) + .expect("Cursor size too big") + }) + } else { + None + }; + + let paginated_result = QueryResponse { + result: Value::Vec(result), + cursor, pagination, sorting, }; + Ok(Scale(paginated_result.into())) } @@ -297,7 +359,10 @@ mod subscription { /// There should be a [`warp::filters::ws::Message::close()`] /// message to end subscription #[iroha_futures::telemetry_future] - pub async fn handle_subscription(events: EventsSender, stream: WebSocket) -> eyre::Result<()> { + pub(crate) async fn handle_subscription( + events: EventsSender, + stream: WebSocket, + ) -> eyre::Result<()> { let mut consumer = event::Consumer::new(stream).await?; match subscribe_forever(events, &mut consumer).await { @@ -409,6 +474,7 @@ impl Torii { queue, notify_shutdown, sumeragi, + query_store: Arc::default(), kura, } } @@ -448,9 +514,7 @@ impl Torii { } /// Helper function to create router. This router can tested without starting up an HTTP server - pub(crate) fn create_api_router( - &self, - ) -> impl warp::Filter + Clone + Send { + fn create_api_router(&self) -> impl warp::Filter + Clone + Send { let health_route = warp::get() .and(warp::path(uri::HEALTH)) .and_then(|| async { Ok::<_, Infallible>(handle_health()) }); @@ -483,13 +547,18 @@ impl Torii { )) .and(body::versioned()), )) - .or(endpoint4( + .or(endpoint7( handle_queries, warp::path(uri::QUERY) - .and(add_state!(self.sumeragi)) - .and(paginate()) + .and(add_state!( + self.sumeragi, + self.query_store, + self.iroha_cfg.torii.fetch_size, + )) + .and(body::versioned()) .and(sorting()) - .and(body::versioned()), + .and(paginate()) + .and(cursor()), )) .or(endpoint2( handle_post_configuration, @@ -614,7 +683,7 @@ impl Torii { /// # Errors /// Can fail due to listening to network or if http server fails #[iroha_futures::telemetry_future] - pub async fn start(self) -> eyre::Result<()> { + pub(crate) async fn start(self) -> eyre::Result<()> { let mut handles = vec![]; let torii = Arc::new(self); diff --git a/cli/src/torii/utils.rs b/cli/src/torii/utils.rs index 80c0d0028ab..77d31319c06 100644 --- a/cli/src/torii/utils.rs +++ b/cli/src/torii/utils.rs @@ -66,4 +66,4 @@ impl Reply for WarpResult { } } -iroha_cli_derive::generate_endpoints!(2, 3, 4, 5); +iroha_cli_derive::generate_endpoints!(2, 3, 4, 5, 7); diff --git a/client/src/client.rs b/client/src/client.rs index 687f74b8aca..0074847e983 100644 --- a/client/src/client.rs +++ b/client/src/client.rs @@ -9,7 +9,7 @@ use std::{ collections::HashMap, fmt::Debug, marker::PhantomData, - num::{NonZeroU32, NonZeroU64}, + num::{NonZeroU32, NonZeroU64, NonZeroUsize}, thread, time::Duration, }; @@ -21,8 +21,13 @@ use http_default::{AsyncWebSocketStream, WebSocketStream}; use iroha_config::{client::Configuration, torii::uri, GetConfiguration, PostConfiguration}; use 
iroha_crypto::{HashOf, KeyPair}; use iroha_data_model::{ - block::VersionedCommittedBlock, predicate::PredicateBox, prelude::*, - transaction::TransactionPayload, ValidationFail, + block::VersionedCommittedBlock, + isi::Instruction, + predicate::PredicateBox, + prelude::*, + query::{ForwardCursor, Pagination, Query, Sorting}, + transaction::TransactionPayload, + ValidationFail, }; use iroha_logger::prelude::*; use iroha_telemetry::metrics::Status; @@ -40,28 +45,13 @@ use crate::{ const APPLICATION_JSON: &str = "application/json"; -/// General trait for all response handlers -pub trait ResponseHandler> { - /// What is the output of the handler - type Output; - - /// Handles HTTP response - fn handle(self, response: Response) -> Self::Output; -} - /// Phantom struct that handles responses of Query API. /// Depending on input query struct, transforms a response into appropriate output. #[derive(Clone, Copy)] pub struct QueryResponseHandler(PhantomData); -impl Default for QueryResponseHandler { - fn default() -> Self { - Self(PhantomData) - } -} - /// `Result` with [`ClientQueryError`] as an error -pub type QueryHandlerResult = core::result::Result; +pub type QueryResult = core::result::Result; /// Trait for signing transactions pub trait Sign { @@ -94,21 +84,23 @@ impl Sign for VersionedSignedTransaction { } } -impl ResponseHandler for QueryResponseHandler +impl QueryResponseHandler where - R: Query + Debug, + R::Output: QueryOutput, >::Error: Into, { - type Output = QueryHandlerResult>; - - fn handle(self, resp: Response>) -> Self::Output { + fn handle( + self, + req: DefaultRequestBuilder, + resp: Response>, + ) -> QueryResult<::Target> { // Separate-compilation friendly response handling fn _handle_query_response_base( resp: &Response>, - ) -> QueryHandlerResult { + ) -> QueryResult { match resp.status() { StatusCode::OK => { - let res = VersionedQueryResult::decode_all_versioned(resp.body()); + let res = VersionedQueryResponse::decode_all_versioned(resp.body()); res.wrap_err( "Failed to decode response from Iroha. 
\ You are likely using a version of the client library \ @@ -143,9 +135,26 @@ where } } - _handle_query_response_base(&resp).and_then(|VersionedQueryResult::V1(result)| { - ClientQueryRequest::try_from(result).map_err(Into::into) - }) + let response = _handle_query_response_base(&resp) + .map(|VersionedQueryResponse::V1(response)| response)?; + + let cursor = response.cursor; + let value = R::Output::try_from(response.result) + .map_err(Into::into) + .wrap_err("Unexpected type")?; + + let output = QueryOutput::new( + value, + cursor + .map(|cursor| { + cursor + .try_into() + .wrap_err("Query result length exceeds usize::MAX") + }) + .transpose()?, + ); + + Ok(output) } } @@ -171,10 +180,8 @@ impl From for ClientQueryError { #[derive(Clone, Copy)] pub struct TransactionResponseHandler; -impl ResponseHandler for TransactionResponseHandler { - type Output = Result<()>; - - fn handle(self, resp: Response>) -> Self::Output { +impl TransactionResponseHandler { + fn handle(self, resp: Response>) -> Result<()> { if resp.status() == StatusCode::OK { Ok(()) } else { @@ -191,10 +198,8 @@ impl ResponseHandler for TransactionResponseHandler { #[derive(Clone, Copy)] pub struct StatusResponseHandler; -impl ResponseHandler for StatusResponseHandler { - type Output = Result; - - fn handle(self, resp: Response>) -> Self::Output { +impl StatusResponseHandler { + fn handle(self, resp: Response>) -> Result { if resp.status() != StatusCode::OK { return Err( ResponseReport::with_msg("Unexpected status response", &resp) @@ -214,10 +219,7 @@ impl ResponseReport { /// /// # Errors /// If response body isn't a valid utf-8 string - fn with_msg(msg: S, response: &Response>) -> Result - where - S: AsRef, - { + fn with_msg>(msg: S, response: &Response>) -> Result { let status = response.status(); let body = std::str::from_utf8(response.body()); let msg = msg.as_ref(); @@ -238,60 +240,79 @@ impl From for eyre::Report { } } -/// More convenient version of [`iroha_data_model::prelude::QueryResult`]. -/// The only difference is that this struct has `output` field extracted from the result -/// accordingly to the source query. 
-#[derive(Clone, Debug)] -pub struct ClientQueryRequest -where - R: Query + Debug, - >::Error: Into, -{ - /// Query output - pub output: R::Output, - /// See [`iroha_data_model::prelude::QueryResult`] - pub pagination: Pagination, - /// See [`iroha_data_model::prelude::QueryResult`] - pub sorting: Sorting, +pub trait QueryOutput { + type Target: Debug + Clone; + + fn new(value: Self, server_cursor: Option) -> Self::Target; } -impl ClientQueryRequest -where - R: Query + Debug, - >::Error: Into, -{ - /// Extracts output as is - pub fn only_output(self) -> R::Output { - self.output - } +#[derive(Debug, Clone, serde::Serialize)] +pub struct ResultSet { + request: VersionedSignedQuery, + client_cursor: usize, + + iter: Vec, + server_cursor: Option, } -impl TryFrom for ClientQueryRequest -where - R: Query + Debug, - >::Error: Into, -{ - type Error = eyre::Report; +impl Iterator for ResultSet { + type Item = T; - fn try_from( - QueryResult { - result, - pagination, - sorting, - }: QueryResult, - ) -> Result { - let output = R::Output::try_from(result) - .map_err(Into::into) - .wrap_err("Unexpected type")?; + fn next(&mut self) -> Option { + if self.client_cursor >= self.iter.len() { + if let Some(server_cursor) = self.server_cursor { + //response = request_with_cursor(server_cursor); + self.client_cursor = 0; + } else { + return None; + } + } - Ok(Self { - output, - pagination, - sorting, - }) + let item = self.iter.get(self.client_cursor); + self.client_cursor += 1; + item.cloned() + } +} + +impl QueryOutput for Vec { + type Target = ResultSet; + + fn new(value: Self, server_cursor: Option) -> Self::Target { + ResultSet { + request, + iter: value, + server_cursor, + client_cursor: 0, + } } } +macro_rules! impl_query_result { + ( $($ident:ty),+ $(,)? ) => { $( + impl QueryOutput for $ident { + type Target = Self; + + fn new(value: Self, _server_cursor: Option) -> Self::Target { + value + } + } )+ + }; +} +impl_query_result! 
{ + bool, + iroha_data_model::Value, + iroha_data_model::numeric::NumericValue, + iroha_data_model::role::Role, + iroha_data_model::asset::Asset, + iroha_data_model::asset::AssetDefinition, + iroha_data_model::account::Account, + iroha_data_model::domain::Domain, + iroha_data_model::block::BlockHeader, + iroha_data_model::query::MetadataValue, + iroha_data_model::query::TransactionQueryOutput, + iroha_data_model::trigger::Trigger, +} + /// Iroha client #[derive(Clone, DebugCustom, Display)] #[debug( @@ -413,7 +434,7 @@ impl Client { /// /// # Errors /// Fails if signature generation fails - pub fn sign_query(&self, query: QueryBuilder) -> Result { + pub fn sign_query(&self, query: QueryBuilder) -> Result { query .sign(self.key_pair.clone()) .wrap_err("Failed to sign query") @@ -424,10 +445,7 @@ impl Client { /// /// # Errors /// Fails if sending transaction to peer fails or if it response with error - pub fn submit( - &self, - instruction: impl Instruction + Debug, - ) -> Result> { + pub fn submit(&self, instruction: impl Instruction) -> Result> { let isi = instruction.into(); self.submit_all([isi]) } @@ -672,7 +690,7 @@ impl Client { /// ```ignore /// use eyre::Result; /// use iroha_client::{ - /// client::{Client, ResponseHandler}, + /// client::Client, /// http::{RequestBuilder, Response, Method}, /// }; /// use iroha_data_model::{predicate::PredicateBox, prelude::{Account, FindAllAccounts, Pagination}}; @@ -719,10 +737,10 @@ impl Client { /// // Handle response with the handler and get typed result /// let accounts = resp_handler.handle(resp)?; /// - /// Ok(accounts.only_output()) + /// Ok(accounts.output()) /// } /// ``` - pub fn prepare_query_request( + pub fn prepare_query_request( &self, request: R, filter: PredicateBox, @@ -730,14 +748,12 @@ impl Client { sorting: Sorting, ) -> Result<(B, QueryResponseHandler)> where - R: Query + Debug, >::Error: Into, - B: RequestBuilder, { let pagination: Vec<_> = pagination.into(); let sorting: Vec<_> = sorting.into(); let request = QueryBuilder::new(request, self.account_id.clone()).with_filter(filter); - let request: VersionedSignedQuery = self.sign_query(request)?.into(); + let request = self.sign_query(request)?; Ok(( B::new( @@ -748,7 +764,7 @@ impl Client { .params(sorting) .headers(self.headers.clone()) .body(request.encode_versioned()), - QueryResponseHandler::default(), + QueryResponseHandler(PhantomData) )) } @@ -756,37 +772,38 @@ impl Client { /// /// # Errors /// Fails if sending request fails - pub fn request_with_filter_and_pagination_and_sorting( + pub fn request_with_filter_and_pagination_and_sorting( &self, request: R, pagination: Pagination, sorting: Sorting, filter: PredicateBox, - ) -> QueryHandlerResult> + ) -> QueryResult<::Target> where - R: Query + Debug, - >::Error: Into, // Seems redundant + R::Output: QueryOutput, + >::Error: Into, { iroha_logger::trace!(?request, %pagination, ?sorting, ?filter); let (req, resp_handler) = self.prepare_query_request::( request, filter, pagination, sorting, )?; - let response = req.build()?.send()?; - resp_handler.handle(response) + let req = req.build()?; + let response = req.clone().send()?; + resp_handler.handle(req, response) } /// Create a request with pagination and sorting. 
/// /// # Errors /// Fails if sending request fails - pub fn request_with_pagination_and_sorting( + pub fn request_with_pagination_and_sorting( &self, request: R, pagination: Pagination, sorting: Sorting, - ) -> QueryHandlerResult> + ) -> QueryResult<::Target> where - R: Query + Debug, + R::Output: QueryOutput, >::Error: Into, { self.request_with_filter_and_pagination_and_sorting( @@ -801,15 +818,15 @@ impl Client { /// /// # Errors /// Fails if sending request fails - pub fn request_with_filter_and_pagination( + pub fn request_with_filter_and_pagination( &self, request: R, pagination: Pagination, filter: PredicateBox, - ) -> QueryHandlerResult> + ) -> QueryResult<::Target> where - R: Query + Debug, - >::Error: Into, // Seems redundant + R::Output: QueryOutput, + >::Error: Into, { self.request_with_filter_and_pagination_and_sorting( request, @@ -823,15 +840,15 @@ impl Client { /// /// # Errors /// Fails if sending request fails - pub fn request_with_filter_and_sorting( + pub fn request_with_filter_and_sorting( &self, request: R, sorting: Sorting, filter: PredicateBox, - ) -> QueryHandlerResult> + ) -> QueryResult<::Target> where - R: Query + Debug, - >::Error: Into, // Seems redundant + R::Output: QueryOutput, + >::Error: Into, { self.request_with_filter_and_pagination_and_sorting( request, @@ -848,13 +865,13 @@ impl Client { /// /// # Errors /// Fails if sending request fails - pub fn request_with_filter( + pub fn request_with_filter( &self, request: R, filter: PredicateBox, - ) -> QueryHandlerResult> + ) -> QueryResult<::Target> where - R: Query + Debug, + R::Output: QueryOutput, >::Error: Into, { self.request_with_filter_and_pagination(request, Pagination::default(), filter) @@ -867,13 +884,13 @@ impl Client { /// /// # Errors /// Fails if sending request fails - pub fn request_with_pagination( + pub fn request_with_pagination( &self, request: R, pagination: Pagination, - ) -> QueryHandlerResult> + ) -> QueryResult<::Target> where - R: Query + Debug, + R::Output: QueryOutput, >::Error: Into, { self.request_with_filter_and_pagination(request, pagination, PredicateBox::default()) @@ -883,13 +900,13 @@ impl Client { /// /// # Errors /// Fails if sending request fails - pub fn request_with_sorting( + pub fn request_with_sorting( &self, request: R, sorting: Sorting, - ) -> QueryHandlerResult> + ) -> QueryResult<::Target> where - R: Query + Debug, + R::Output: QueryOutput, >::Error: Into, { self.request_with_pagination_and_sorting(request, Pagination::default(), sorting) @@ -899,13 +916,15 @@ impl Client { /// /// # Errors /// Fails if sending request fails - pub fn request(&self, request: R) -> QueryHandlerResult + pub fn request( + &self, + request: R, + ) -> QueryResult<::Target> where - R: Query + Debug, + R::Output: QueryOutput, >::Error: Into, { self.request_with_pagination(request, Pagination::default()) - .map(ClientQueryRequest::only_output) } /// Connect (through `WebSocket`) to listen for `Iroha` `pipeline` and `data` events. 
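Note on the `ResultSet` iterator introduced in the hunks above: the commented-out `//response = request_with_cursor(server_cursor);` line suggests that `next()` is eventually meant to refetch the following batch from the peer once the locally buffered one is exhausted. The snippet below only illustrates that apparent intent — it is not part of this patch, and `fetch_next_batch` is a hypothetical stand-in for the client round-trip that does not exist in the codebase.

// Illustration only: the cursor-following behaviour `ResultSet::next` seems
// to be heading towards (see the commented-out `request_with_cursor` line).
// `fetch_next_batch` is a hypothetical stand-in for the real client
// round-trip; nothing here is part of the patch itself.
struct ResultSetSketch<T> {
    buffer: Vec<T>,
    client_cursor: usize,
    server_cursor: Option<u64>,
}

impl<T: Clone> Iterator for ResultSetSketch<T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        if self.client_cursor >= self.buffer.len() {
            // Local batch exhausted: follow the cursor the server returned
            // with the previous response to fetch the next batch.
            let cursor = self.server_cursor?;
            let (batch, next_cursor) = fetch_next_batch::<T>(cursor);
            if batch.is_empty() {
                return None;
            }
            self.buffer = batch;
            self.server_cursor = next_cursor;
            self.client_cursor = 0;
        }
        let item = self.buffer.get(self.client_cursor).cloned();
        self.client_cursor += 1;
        item
    }
}

// Hypothetical helper: performs the HTTP request that resumes the live query
// on the peer and returns the next batch together with the cursor to use
// after it (None once the server has nothing more to send).
fn fetch_next_batch<T>(_cursor: u64) -> (Vec<T>, Option<u64>) {
    (Vec::new(), None)
}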
@@ -1144,10 +1163,7 @@ impl Client { /// /// # Errors /// Fails if request build fails - pub fn prepare_status_request(&self) -> (B, StatusResponseHandler) - where - B: RequestBuilder, - { + pub fn prepare_status_request(&self) -> (B, StatusResponseHandler) { ( B::new( HttpMethod::GET, @@ -1258,11 +1274,9 @@ pub mod stream_api { /// - Sending failed /// - Message not received in stream during connection or subscription /// - Message is an error - pub async fn new(handler: I) -> Result> - where - I: Init + Send, - I::Next: Send, - { + pub async fn new>( + handler: I, + ) -> Result> { trace!("Creating `AsyncStream`"); let InitData { first_message, diff --git a/client/tests/integration/pagination.rs b/client/tests/integration/pagination.rs index afa827a511e..82c31927a4e 100644 --- a/client/tests/integration/pagination.rs +++ b/client/tests/integration/pagination.rs @@ -21,7 +21,7 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount() -> let vec = client .request_with_pagination(asset::all_definitions(), Pagination::new(Some(5), Some(5)))? - .only_output(); + .output(); assert_eq!(vec.len(), 5); Ok(()) } diff --git a/client/tests/integration/tx_history.rs b/client/tests/integration/tx_history.rs index 45ea0a8eb26..045ca757491 100644 --- a/client/tests/integration/tx_history.rs +++ b/client/tests/integration/tx_history.rs @@ -54,7 +54,7 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> transaction::by_account_id(account_id.clone()), Pagination::new(Some(1), Some(50)), )? - .only_output(); + .output(); assert_eq!(transactions.len(), 50); let mut prev_creation_time = core::time::Duration::from_millis(0); diff --git a/config/src/torii.rs b/config/src/torii.rs index 81ad7d67701..d2fa798364d 100644 --- a/config/src/torii.rs +++ b/config/src/torii.rs @@ -1,5 +1,7 @@ //! `Torii` configuration as well as the default values for the URLs used for the main endpoints: `p2p`, `telemetry`, but not `api`. #![allow(clippy::std_instead_of_core, clippy::arithmetic_side_effects)] +use std::num::NonZeroU64; + use iroha_config_base::derive::{Documented, Proxy}; use iroha_primitives::addr::{socket_addr, SocketAddr}; use serde::{Deserialize, Serialize}; @@ -33,6 +35,8 @@ pub struct Configuration { pub max_transaction_size: u32, /// Maximum number of bytes in raw message. Used to prevent from DOS attacks. pub max_content_len: u32, + /// How many query results are returned in one batch + pub fetch_size: NonZeroU64, } impl Default for ConfigurationProxy { @@ -43,6 +47,7 @@ impl Default for ConfigurationProxy { telemetry_url: None, max_transaction_size: Some(DEFAULT_TORII_MAX_TRANSACTION_SIZE), max_content_len: Some(DEFAULT_TORII_MAX_CONTENT_LENGTH), + fetch_size: NonZeroU64::new(10), } } } diff --git a/core/src/smartcontracts/isi/query.rs b/core/src/smartcontracts/isi/query.rs index a5d7c67da14..91dae16ecc4 100644 --- a/core/src/smartcontracts/isi/query.rs +++ b/core/src/smartcontracts/isi/query.rs @@ -42,7 +42,7 @@ macro_rules! impl_lazy { } impl_lazy! { bool, - NumericValue, + iroha_data_model::numeric::NumericValue, iroha_data_model::role::Role, iroha_data_model::asset::Asset, iroha_data_model::asset::AssetDefinition, @@ -50,12 +50,13 @@ impl_lazy! { iroha_data_model::domain::Domain, iroha_data_model::block::BlockHeader, iroha_data_model::query::MetadataValue, - iroha_data_model::query::TransactionQueryResult, + iroha_data_model::query::TransactionQueryOutput, iroha_data_model::trigger::Trigger, } /// Query Request statefully validated on the Iroha node side. 
#[derive(Debug, Decode, Encode)] +#[repr(transparent)] pub struct ValidQueryRequest(VersionedSignedQuery); impl ValidQueryRequest { @@ -387,7 +388,7 @@ mod tests { #[test] fn find_block_header_by_hash() -> Result<()> { let wsv = wsv_with_test_blocks_and_transactions(1, 1, 1)?; - let block = wsv.all_blocks().into_iter().last().expect("WSV is empty"); + let block = wsv.all_blocks().last().expect("WSV is empty"); assert_eq!( FindBlockHeaderByHash::new(block.hash()).execute(&wsv)?, @@ -463,7 +464,7 @@ mod tests { .sign(ALICE_KEYS.clone())?; let wrong_hash = unapplied_tx.hash(); let not_found = FindTransactionByHash::new(wrong_hash).execute(&wsv); - assert!(matches!(not_found, Err(_))); + assert!(not_found.is_err()); let found_accepted = FindTransactionByHash::new(va_tx.hash()).execute(&wsv)?; if found_accepted.transaction.error.is_none() { diff --git a/core/src/smartcontracts/isi/tx.rs b/core/src/smartcontracts/isi/tx.rs index 2c87fa781cb..7919f060b37 100644 --- a/core/src/smartcontracts/isi/tx.rs +++ b/core/src/smartcontracts/isi/tx.rs @@ -10,7 +10,7 @@ use iroha_data_model::{ prelude::*, query::{ error::{FindError, QueryExecutionFail}, - TransactionQueryResult, + TransactionQueryOutput, }, transaction::TransactionValue, }; @@ -65,7 +65,7 @@ impl ValidQuery for FindAllTransactions { Ok(Box::new( wsv.all_blocks() .flat_map(BlockTransactionIter::new) - .map(|tx| TransactionQueryResult { + .map(|tx| TransactionQueryOutput { block_hash: tx.block_hash(), transaction: tx.value(), }), @@ -88,7 +88,7 @@ impl ValidQuery for FindTransactionsByAccountId { wsv.all_blocks() .flat_map(BlockTransactionIter::new) .filter(move |tx| *tx.authority() == account_id) - .map(|tx| TransactionQueryResult { + .map(|tx| TransactionQueryOutput { block_hash: tx.block_hash(), transaction: tx.value(), }), @@ -122,7 +122,7 @@ impl ValidQuery for FindTransactionByHash { .iter() .find(|transaction| transaction.value.hash() == tx_hash) .cloned() - .map(|transaction| TransactionQueryResult { + .map(|transaction| TransactionQueryOutput { block_hash, transaction, }) diff --git a/core/src/smartcontracts/mod.rs b/core/src/smartcontracts/mod.rs index ebab9f86afe..d4ddbb03201 100644 --- a/core/src/smartcontracts/mod.rs +++ b/core/src/smartcontracts/mod.rs @@ -28,7 +28,7 @@ pub trait Execute { } /// This trait should be implemented for all Iroha Queries. -pub trait ValidQuery: Query +pub trait ValidQuery: iroha_data_model::query::Query where Self::Output: Lazy, { diff --git a/core/src/tx.rs b/core/src/tx.rs index f3456e023ff..e676bdbf2ed 100644 --- a/core/src/tx.rs +++ b/core/src/tx.rs @@ -20,6 +20,7 @@ use eyre::Result; use iroha_crypto::{HashOf, SignatureVerificationFail, SignaturesOf}; pub use iroha_data_model::prelude::*; use iroha_data_model::{ + isi::Instruction, query::error::FindError, transaction::{ error::{TransactionLimitError, TransactionRejectionReason}, diff --git a/core/src/wsv.rs b/core/src/wsv.rs index f5639f2cbff..ff451240c61 100644 --- a/core/src/wsv.rs +++ b/core/src/wsv.rs @@ -402,7 +402,7 @@ impl WorldStateView { macro_rules! update_params { ($ident:ident, $($param:expr => $config:expr),+ $(,)?) 
=> { $(if let Some(param) = self.query_param($param) { - let mut $ident = &mut self.config; + let $ident = &mut self.config; $config = param; })+ diff --git a/core/test_network/src/lib.rs b/core/test_network/src/lib.rs index 161a15c489c..22affa4c029 100644 --- a/core/test_network/src/lib.rs +++ b/core/test_network/src/lib.rs @@ -9,10 +9,10 @@ use std::{ thread, }; -use eyre::{Error, Result}; +use eyre::Result; use futures::{prelude::*, stream::FuturesUnordered}; use iroha::Iroha; -use iroha_client::client::Client; +use iroha_client::client::{Client, QueryOutput}; use iroha_config::{ base::proxy::{LoadFromEnv, Override}, client::Configuration as ClientConfiguration, @@ -20,8 +20,8 @@ use iroha_config::{ sumeragi::Configuration as SumeragiConfiguration, torii::Configuration as ToriiConfiguration, }; -use iroha_core::{prelude::*, smartcontracts::query::Lazy}; -use iroha_data_model::{peer::Peer as DataModelPeer, prelude::*}; +use iroha_core::prelude::*; +use iroha_data_model::{isi::Instruction, peer::Peer as DataModelPeer, prelude::*, query::Query}; use iroha_genesis::{GenesisNetwork, RawGenesisBlock}; use iroha_logger::{Configuration as LoggerConfiguration, InstrumentFutures}; use iroha_primitives::addr::{socket_addr, SocketAddr}; @@ -660,7 +660,7 @@ impl PeerBuilder { type PeerWithRuntimeAndClient = (Runtime, Peer, Client); fn local_unique_port() -> Result { - Ok(socket_addr!(127.0.0.1: unique_port::get_unique_free_port().map_err(Error::msg)?)) + Ok(socket_addr!(127.0.0.1: unique_port::get_unique_free_port().map_err(eyre::Error::msg)?)) } /// Runtime used for testing. @@ -708,61 +708,57 @@ pub trait TestClient: Sized { /// /// # Errors /// If predicate is not satisfied, after maximum retries. - fn submit_till( + fn submit_till( &mut self, - instruction: impl Instruction + Debug, + instruction: impl Instruction + Debug + Clone, request: R, - f: impl Fn(&R::Output) -> bool, - ) -> eyre::Result + f: impl Fn(&::Target) -> bool, + ) -> eyre::Result<::Target> where - R: ValidQuery + Into + Debug + Clone, - >::Error: Into, - R::Output: Lazy + Clone + Debug; + R::Output: QueryOutput, + >::Error: Into; /// Submits instructions with polling /// /// # Errors /// If predicate is not satisfied, after maximum retries. - fn submit_all_till( + fn submit_all_till( &mut self, instructions: Vec, request: R, - f: impl Fn(&R::Output) -> bool, - ) -> eyre::Result + f: impl Fn(&::Target) -> bool, + ) -> eyre::Result<::Target> where - R: ValidQuery + Into + Debug + Clone, - >::Error: Into, - R::Output: Lazy + Clone + Debug; + R::Output: QueryOutput, + >::Error: Into; /// Polls request till predicate `f` is satisfied, with default period and max attempts. /// /// # Errors /// If predicate is not satisfied after maximum retries. - fn poll_request( + fn poll_request( &mut self, request: R, - f: impl Fn(&R::Output) -> bool, - ) -> eyre::Result + f: impl Fn(&::Target) -> bool, + ) -> eyre::Result<::Target> where - R: ValidQuery + Into + Debug + Clone, - >::Error: Into, - R::Output: Lazy + Clone + Debug; + R::Output: QueryOutput, + >::Error: Into; /// Polls request till predicate `f` is satisfied with `period` and `max_attempts` supplied. /// /// # Errors /// If predicate is not satisfied after maximum retries. 
- fn poll_request_with_period( + fn poll_request_with_period( &mut self, request: R, period: Duration, max_attempts: u32, - f: impl Fn(&R::Output) -> bool, - ) -> eyre::Result + f: impl Fn(&::Target) -> bool, + ) -> eyre::Result<::Target> where - R: ValidQuery + Into + Debug + Clone, - >::Error: Into, - R::Output: Lazy + Clone + Debug; + R::Output: QueryOutput, + >::Error: Into; } impl TestRuntime for Runtime { @@ -848,49 +844,46 @@ impl TestClient for Client { } } - fn submit_till( + fn submit_till( &mut self, - instruction: impl Instruction + Debug, + instruction: impl Instruction + Debug + Clone, request: R, - f: impl Fn(&R::Output) -> bool, - ) -> eyre::Result + f: impl Fn(&::Target) -> bool, + ) -> eyre::Result<::Target> where - R: ValidQuery + Into + Debug + Clone, - >::Error: Into, - R::Output: Lazy + Clone + Debug, + R::Output: QueryOutput, + >::Error: Into, { self.submit(instruction) .expect("Failed to submit instruction."); self.poll_request(request, f) } - fn submit_all_till( + fn submit_all_till( &mut self, instructions: Vec, request: R, - f: impl Fn(&R::Output) -> bool, - ) -> eyre::Result + f: impl Fn(&::Target) -> bool, + ) -> eyre::Result<::Target> where - R: ValidQuery + Into + Debug + Clone, - >::Error: Into, - R::Output: Lazy + Clone + Debug, + R::Output: QueryOutput, + >::Error: Into, { self.submit_all(instructions) .expect("Failed to submit instruction."); self.poll_request(request, f) } - fn poll_request_with_period( + fn poll_request_with_period( &mut self, request: R, period: Duration, max_attempts: u32, - f: impl Fn(&R::Output) -> bool, - ) -> eyre::Result + f: impl Fn(&::Target) -> bool, + ) -> eyre::Result<::Target> where - R: ValidQuery + Into + Debug + Clone, - >::Error: Into, - R::Output: Lazy + Clone + Debug, + R::Output: QueryOutput, + >::Error: Into, { let mut query_result = None; for _ in 0..max_attempts { @@ -903,15 +896,14 @@ impl TestClient for Client { Err(eyre::eyre!("Failed to wait for query request completion that would satisfy specified closure. 
Got this query result instead: {:?}", &query_result)) } - fn poll_request( + fn poll_request( &mut self, request: R, - f: impl Fn(&R::Output) -> bool, - ) -> eyre::Result + f: impl Fn(&::Target) -> bool, + ) -> eyre::Result<::Target> where - R: ValidQuery + Into + Debug + Clone, - >::Error: Into, - R::Output: Lazy + Clone + Debug, + R::Output: QueryOutput, + >::Error: Into, { self.poll_request_with_period(request, Configuration::pipeline_time() / 2, 10, f) } diff --git a/data_model/src/isi.rs b/data_model/src/isi.rs index 1ab82f63f5f..448fb2ec555 100644 --- a/data_model/src/isi.rs +++ b/data_model/src/isi.rs @@ -1266,10 +1266,10 @@ pub mod error { pub mod prelude { pub use super::{ Burn, BurnBox, Conditional, ExecuteTrigger, ExecuteTriggerBox, FailBox, Grant, GrantBox, - Instruction, InstructionBox, Mint, MintBox, NewParameter, NewParameterBox, Pair, Register, - RegisterBox, RemoveKeyValue, RemoveKeyValueBox, Revoke, RevokeBox, SequenceBox, - SetKeyValue, SetKeyValueBox, SetParameter, SetParameterBox, Transfer, TransferBox, - Unregister, UnregisterBox, Upgrade, UpgradeBox, + InstructionBox, Mint, MintBox, NewParameter, NewParameterBox, Pair, Register, RegisterBox, + RemoveKeyValue, RemoveKeyValueBox, Revoke, RevokeBox, SequenceBox, SetKeyValue, + SetKeyValueBox, SetParameter, SetParameterBox, Transfer, TransferBox, Unregister, + UnregisterBox, Upgrade, UpgradeBox, }; } diff --git a/data_model/src/lib.rs b/data_model/src/lib.rs index 44b394065cd..5d7148c202b 100644 --- a/data_model/src/lib.rs +++ b/data_model/src/lib.rs @@ -49,7 +49,7 @@ use iroha_primitives::{ use iroha_schema::IntoSchema; pub use numeric::model::NumericValue; use parity_scale_codec::{Decode, Encode}; -use prelude::{Executable, TransactionQueryResult}; +use prelude::{Executable, TransactionQueryOutput}; use serde::{Deserialize, Serialize}; use serde_with::{DeserializeFromStr, SerializeDisplay}; use strum::EnumDiscriminants; @@ -70,16 +70,12 @@ pub mod isi; pub mod metadata; pub mod name; pub mod numeric; -#[cfg(feature = "http")] -pub mod pagination; pub mod peer; pub mod permission; #[cfg(feature = "http")] pub mod predicate; pub mod query; pub mod role; -#[cfg(feature = "http")] -pub mod sorting; pub mod transaction; pub mod trigger; pub mod validator; @@ -828,7 +824,7 @@ pub mod model { Identifiable(IdentifiableBox), PublicKey(PublicKey), SignatureCheckCondition(SignatureCheckCondition), - TransactionQueryResult(TransactionQueryResult), + TransactionQueryOutput(TransactionQueryOutput), PermissionToken(permission::PermissionToken), Hash(HashValue), Block(VersionedCommittedBlockWrapper), @@ -1117,7 +1113,7 @@ impl fmt::Display for Value { Value::Identifiable(v) => fmt::Display::fmt(&v, f), Value::PublicKey(v) => fmt::Display::fmt(&v, f), Value::SignatureCheckCondition(v) => fmt::Display::fmt(&v, f), - Value::TransactionQueryResult(_) => write!(f, "TransactionQueryResult"), + Value::TransactionQueryOutput(_) => write!(f, "TransactionQueryOutput"), Value::PermissionToken(v) => fmt::Display::fmt(&v, f), Value::Hash(v) => fmt::Display::fmt(&v, f), Value::Block(v) => fmt::Display::fmt(&**v, f), @@ -1146,7 +1142,7 @@ impl Value { | Identifiable(_) | String(_) | Name(_) - | TransactionQueryResult(_) + | TransactionQueryOutput(_) | PermissionToken(_) | Hash(_) | Block(_) @@ -1943,6 +1939,4 @@ pub mod prelude { LengthLimits, NumericValue, PredicateTrait, RegistrableBox, ToValue, TriggerBox, TryAsMut, TryAsRef, TryToValue, UpgradableBox, ValidationFail, ValidatorDeny, Value, }; - #[cfg(feature = "http")] - pub use 
super::{pagination::prelude::*, sorting::prelude::*}; } diff --git a/data_model/src/numeric.rs b/data_model/src/numeric.rs index 8164d5a8c34..f85a7131eb1 100644 --- a/data_model/src/numeric.rs +++ b/data_model/src/numeric.rs @@ -17,7 +17,7 @@ use serde::{ Deserializer, }; -use self::model::NumericValue; +pub use self::model::*; use super::{ DebugCustom, Decode, Deserialize, Display, Encode, FromVariant, IntoSchema, Serialize, }; diff --git a/data_model/src/predicate.rs b/data_model/src/predicate.rs index 49f89440b7a..a14c75de318 100644 --- a/data_model/src/predicate.rs +++ b/data_model/src/predicate.rs @@ -10,7 +10,7 @@ use crate::{IdBox, Name, Value}; mod nontrivial { use super::*; /// Struct representing a sequence with at least three elements. - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub struct NonTrivial(Vec); impl NonTrivial { @@ -73,7 +73,7 @@ macro_rules! nontrivial { } /// Predicate combinator enum. -#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] +#[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] // Ideally we would enforce `P: PredicateTrait` here, but I // couldn't find a way to do it without polluting everything // downstream with explicit lifetimes, since we would need to @@ -282,7 +282,7 @@ pub mod string { use super::*; /// Predicate useful for processing [`String`]s and [`Name`]s. - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub enum StringPredicate { /// Forward to [`str::contains()`] Contains(String), @@ -559,7 +559,7 @@ pub mod numerical { use super::*; /// A lower-inclusive range predicate. - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub struct SemiInterval { /// The start of the range (inclusive) start: T, @@ -583,7 +583,7 @@ pub mod numerical { impl Copy for SemiInterval {} /// A both-inclusive range predicate - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub struct Interval { /// The start of the range (inclusive) start: T, @@ -624,7 +624,7 @@ pub mod numerical { /// [`Self`] only applies to `Values` that are variants of /// compatible types. If the [`Range`] variant and the [`Value`] /// variant don't match defaults to `false`. - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub enum SemiRange { /// 32-bit U32(SemiInterval), @@ -642,7 +642,7 @@ pub mod numerical { /// [`Self`] only applies to `Values` that are variants of /// compatible types. If the [`Range`] variant and the [`Value`] /// variant don't match defaults to `false`. - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub enum Range { /// 32-bit U32(Interval), @@ -971,7 +971,7 @@ pub mod value { use super::*; /// A predicate designed for general processing of `Value`. 
- #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub enum ValuePredicate { /// Apply predicate to the [`Identifiable::Id`] and/or [`IdBox`]. Identifiable(string::StringPredicate), @@ -1107,14 +1107,14 @@ pub mod value { } /// A predicate that targets the particular `index` of a collection. - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub struct AtIndex { index: u32, predicate: Box, } /// A predicate that targets the particular `key` of a collection. - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub struct ValueOfKey { key: Name, predicate: Box, @@ -1124,7 +1124,7 @@ pub mod value { /// working with containers. Currently only /// [`Metadata`](crate::metadata::Metadata) and [`Vec`] are /// supported. - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, IntoSchema)] + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub enum Container { /// Forward to [`Iterator::any`] Any(Box), @@ -1267,7 +1267,9 @@ pub mod ip_addr { /// A Predicate containing independent octuplet masks to be /// applied to all elements of an IP version 4 address. - #[derive(Debug, Clone, Copy, Encode, Decode, IntoSchema, Serialize, Deserialize)] + #[derive( + Debug, Clone, Copy, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, + )] pub struct Ipv4Predicate([Mask; 4]); impl PredicateTrait for Ipv4Predicate { @@ -1302,7 +1304,9 @@ pub mod ip_addr { /// A Predicate containing independent _hexadecuplets_ (u16 /// groups) masks to be applied to all elements of an IP version 6 /// address. - #[derive(Debug, Clone, Copy, Encode, Decode, IntoSchema, Serialize, Deserialize)] + #[derive( + Debug, Clone, Copy, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, + )] pub struct Ipv6Predicate([Mask; 8]); impl PredicateTrait for Ipv6Predicate { diff --git a/data_model/src/query/cursor.rs b/data_model/src/query/cursor.rs new file mode 100644 index 00000000000..7d9e36082c3 --- /dev/null +++ b/data_model/src/query/cursor.rs @@ -0,0 +1,6 @@ +//! Structures and traits related to server-side cursor. 
+ +use core::num::NonZeroU64; + +/// Forward-only (a.k.a non-scrollable) cursor +pub type ForwardCursor = Option; diff --git a/data_model/src/query.rs b/data_model/src/query/mod.rs similarity index 94% rename from data_model/src/query.rs rename to data_model/src/query/mod.rs index a704a87c342..7aa35f4ef97 100644 --- a/data_model/src/query.rs +++ b/data_model/src/query/mod.rs @@ -6,14 +6,20 @@ use alloc::{boxed::Box, format, string::String, vec::Vec}; use core::cmp::Ordering; +#[cfg(feature = "http")] +pub use cursor::ForwardCursor; use derive_more::Display; use iroha_crypto::SignatureOf; use iroha_data_model_derive::model; use iroha_macro::FromVariant; use iroha_schema::IntoSchema; use iroha_version::prelude::*; +#[cfg(feature = "http")] +pub use pagination::Pagination; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; +#[cfg(feature = "http")] +pub use sorting::Sorting; pub use self::model::*; use self::{ @@ -28,6 +34,13 @@ use crate::{ Identifiable, Value, }; +#[cfg(feature = "http")] +pub mod cursor; +#[cfg(feature = "http")] +pub mod pagination; +#[cfg(feature = "http")] +pub mod sorting; + macro_rules! queries { ($($($meta:meta)* $item:item)+) => { pub use self::model::*; @@ -170,24 +183,40 @@ pub mod model { )] #[getset(get = "pub")] #[ffi_type] - pub struct TransactionQueryResult { + pub struct TransactionQueryOutput { /// Transaction pub transaction: TransactionValue, /// The hash of the block to which `tx` belongs to pub block_hash: HashOf, } -} -/// Type returned from [`Metadata`] queries -pub struct MetadataValue(Value); + /// Type returned from [`Metadata`] queries + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type] + pub struct MetadataValue(pub Value); +} impl From for Value { + #[inline] fn from(source: MetadataValue) -> Self { source.0 } } impl From for MetadataValue { + #[inline] fn from(source: Value) -> Self { Self(source) } @@ -197,7 +226,7 @@ impl Query for QueryBox { type Output = Value; } -impl TransactionQueryResult { +impl TransactionQueryOutput { #[inline] /// Return payload of the transaction pub fn payload(&self) -> &TransactionPayload { @@ -205,14 +234,14 @@ impl TransactionQueryResult { } } -impl PartialOrd for TransactionQueryResult { +impl PartialOrd for TransactionQueryOutput { #[inline] fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl Ord for TransactionQueryResult { +impl Ord for TransactionQueryOutput { #[inline] fn cmp(&self, other: &Self) -> Ordering { self.payload() @@ -229,6 +258,7 @@ pub mod role { use derive_more::Display; + use super::Query; use crate::prelude::*; queries! { @@ -315,6 +345,7 @@ pub mod permission { use derive_more::Display; + use super::Query; use crate::{permission, prelude::*}; queries! { @@ -399,7 +430,7 @@ pub mod account { use derive_more::Display; - use super::MetadataValue; + use super::{MetadataValue, Query}; use crate::prelude::*; queries! { @@ -561,7 +592,7 @@ pub mod asset { use iroha_data_model_derive::model; pub use self::model::*; - use super::MetadataValue; + use super::{MetadataValue, Query}; use crate::prelude::*; queries! { @@ -910,7 +941,7 @@ pub mod domain { use derive_more::Display; - use super::MetadataValue; + use super::{MetadataValue, Query}; use crate::prelude::*; queries! 
{ @@ -1151,7 +1182,7 @@ pub mod transaction { use derive_more::Display; use iroha_crypto::HashOf; - use super::{Query, TransactionQueryResult}; + use super::{Query, TransactionQueryOutput}; use crate::{ account::AccountId, expression::EvaluatesTo, prelude::Account, transaction::VersionedSignedTransaction, @@ -1190,15 +1221,15 @@ pub mod transaction { } impl Query for FindAllTransactions { - type Output = Vec; + type Output = Vec; } impl Query for FindTransactionsByAccountId { - type Output = Vec; + type Output = Vec; } impl Query for FindTransactionByHash { - type Output = TransactionQueryResult; + type Output = TransactionQueryOutput; } impl FindTransactionsByAccountId { @@ -1296,19 +1327,23 @@ pub mod block { pub mod http { //! Structures related to sending queries over HTTP + use getset::Getters; use iroha_data_model_derive::model; pub use self::model::*; use super::*; - use crate::{ - account::AccountId, pagination::prelude::*, predicate::PredicateBox, sorting::prelude::*, - }; + use crate::{account::AccountId, predicate::PredicateBox}; + + // TODO: Could we make a variant of `Value` that holds only query results? + type QueryResult = Value; declare_versioned_with_scale!(VersionedSignedQuery 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema); - declare_versioned_with_scale!(VersionedQueryResult 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema); + declare_versioned_with_scale!(VersionedQueryResponse 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema); #[model] pub mod model { + use core::num::NonZeroU64; + use super::*; /// I/O ready structure to send queries. @@ -1321,15 +1356,17 @@ pub mod http { } /// Payload of a query. - #[derive(Debug, Clone, Decode, Encode, Deserialize, Serialize, IntoSchema)] + #[derive( + Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, + )] pub(crate) struct QueryPayload { /// Timestamp of the query creation. #[codec(compact)] pub timestamp_ms: u128, - /// Query definition. - pub query: QueryBox, /// Account id of the user who will sign this query. pub authority: AccountId, + /// Query definition. + pub query: QueryBox, /// The filter applied to the result on the server-side. pub filter: PredicateBox, } @@ -1338,10 +1375,27 @@ pub mod http { #[derive(Debug, Clone, Encode, Serialize, IntoSchema)] #[version_with_scale(n = 1, versioned = "VersionedSignedQuery")] pub struct SignedQuery { - /// Payload - pub payload: QueryPayload, /// Signature of the client who sends this query. pub signature: SignatureOf, + /// Payload + pub payload: QueryPayload, + } + + /// [`SignedQuery`] response + #[derive(Debug, Clone, Getters, Decode, Encode, Deserialize, Serialize, IntoSchema)] + #[version_with_scale(n = 1, versioned = "VersionedQueryResponse")] + #[getset(get = "pub")] + pub struct QueryResponse { + /// The result of the query execution. + #[getset(skip)] + pub result: QueryResult, + /// Index of the next element in the result set. Client will use this value + /// in the next request to continue fetching results of the original query + pub cursor: cursor::ForwardCursor, + /// Sorting + pub sorting: sorting::Sorting, + /// Pagination + pub pagination: pagination::Pagination, } } @@ -1369,6 +1423,7 @@ pub mod http { }) } } + impl Decode for SignedQuery { fn decode(input: &mut I) -> Result { SignedQueryCandidate::decode(input)? 
@@ -1376,6 +1431,7 @@ pub mod http { .map_err(Into::into) } } + impl<'de> Deserialize<'de> for SignedQuery { fn deserialize(deserializer: D) -> Result where @@ -1414,19 +1470,6 @@ pub mod http { } } - /// Paginated Query Result - // TODO: This is the only structure whose inner fields are exposed. Wrap it in model macro? - #[derive(Debug, Clone, Decode, Encode, Deserialize, Serialize, IntoSchema)] - #[version_with_scale(n = 1, versioned = "VersionedQueryResult")] - pub struct QueryResult { - /// The result of the query execution. - pub result: Value, - /// pagination - pub pagination: Pagination, - /// sorting - pub sorting: Sorting, - } - impl QueryBuilder { /// Construct a new request with the `query`. pub fn new(query: impl Into, authority: AccountId) -> Self { @@ -1457,11 +1500,20 @@ pub mod http { pub fn sign( self, key_pair: iroha_crypto::KeyPair, - ) -> Result { - SignatureOf::new(key_pair, &self.payload).map(|signature| SignedQuery { - payload: self.payload, - signature, - }) + ) -> Result { + SignatureOf::new(key_pair, &self.payload) + .map(|signature| SignedQuery { + payload: self.payload, + signature, + }) + .map(Into::into) + } + } + + impl From for Value { + #[inline] + fn from(source: QueryResponse) -> Self { + source.result } } @@ -1469,7 +1521,7 @@ pub mod http { //! The prelude re-exports most commonly used traits, structs and macros from this crate. pub use super::{ - QueryBuilder, QueryResult, SignedQuery, VersionedQueryResult, VersionedSignedQuery, + QueryBuilder, QueryResponse, SignedQuery, VersionedQueryResponse, VersionedSignedQuery, }; } } @@ -1637,6 +1689,6 @@ pub mod prelude { pub use super::{ account::prelude::*, asset::prelude::*, block::prelude::*, domain::prelude::*, peer::prelude::*, permission::prelude::*, role::prelude::*, transaction::*, - trigger::prelude::*, Query, QueryBox, TransactionQueryResult, + trigger::prelude::*, QueryBox, TransactionQueryOutput, }; } diff --git a/data_model/src/pagination.rs b/data_model/src/query/pagination.rs similarity index 100% rename from data_model/src/pagination.rs rename to data_model/src/query/pagination.rs diff --git a/data_model/src/sorting.rs b/data_model/src/query/sorting.rs similarity index 97% rename from data_model/src/sorting.rs rename to data_model/src/query/sorting.rs index 0ceac5a86e2..b1e5f5e797e 100644 --- a/data_model/src/sorting.rs +++ b/data_model/src/query/sorting.rs @@ -21,7 +21,7 @@ const SORT_BY_KEY: &str = "sort_by_metadata_key"; pub mod model { use super::*; - /// Enum for sorting requests + /// Struct for sorting requests #[derive(Debug, Clone, Default, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub struct Sorting { /// Sort query result using [`Name`] of the key in [`Asset`]'s metadata. diff --git a/data_model/src/transaction.rs b/data_model/src/transaction.rs index b49775f0e46..1b8b81a9e28 100644 --- a/data_model/src/transaction.rs +++ b/data_model/src/transaction.rs @@ -22,8 +22,11 @@ use serde::{Deserialize, Serialize}; pub use self::model::*; use crate::{ - account::AccountId, isi::InstructionBox, metadata::UnlimitedMetadata, name::Name, - prelude::Instruction, Value, + account::AccountId, + isi::{Instruction, InstructionBox}, + metadata::UnlimitedMetadata, + name::Name, + Value, }; #[model] @@ -99,20 +102,20 @@ pub mod model { #[getset(get = "pub")] #[ffi_type] pub struct TransactionPayload { - /// Account ID of transaction creator. - pub authority: AccountId, /// Creation timestamp (unix time in milliseconds). 
#[getset(skip)] pub creation_time_ms: u64, + /// Account ID of transaction creator. + pub authority: AccountId, + /// ISI or a `WebAssembly` smartcontract. + pub instructions: Executable, + /// If transaction is not committed by this time it will be dropped. + #[getset(skip)] + pub time_to_live_ms: Option, /// Random value to make different hashes for transactions which occur repeatedly and simultaneously. // TODO: Only temporary #[getset(skip)] pub nonce: Option, - /// If transaction is not committed by this time it will be dropped. - #[getset(skip)] - pub time_to_live_ms: Option, - /// ISI or a `WebAssembly` smartcontract. - pub instructions: Executable, /// Store for additional information. #[getset(skip)] pub metadata: UnlimitedMetadata, @@ -160,10 +163,10 @@ pub mod model { #[ffi_type] // TODO: All fields in this struct should be private pub struct SignedTransaction { - /// [`Transaction`] payload. - pub payload: TransactionPayload, /// [`iroha_crypto::SignatureOf`]<[`TransactionPayload`]>. pub signatures: SignaturesOf, + /// [`Transaction`] payload. + pub payload: TransactionPayload, } /// Transaction Value used in Instructions and Queries @@ -371,8 +374,8 @@ mod candidate { #[derive(Decode, Deserialize)] struct SignedTransactionCandidate { - payload: TransactionPayload, signatures: SignaturesOf, + payload: TransactionPayload, } impl SignedTransactionCandidate { diff --git a/schema/gen/src/lib.rs b/schema/gen/src/lib.rs index 8543817b32b..5fb04d4dc6d 100644 --- a/schema/gen/src/lib.rs +++ b/schema/gen/src/lib.rs @@ -47,7 +47,7 @@ pub fn build_schemas() -> MetaMap { VersionedBlockSubscriptionRequest, VersionedEventMessage, VersionedEventSubscriptionRequest, - VersionedQueryResult, + VersionedQueryResponse, VersionedSignedQuery, // Never referenced, but present in type signature. 
Like `PhantomData` @@ -282,7 +282,7 @@ types!( OriginFilter, OriginFilter, OriginFilter, - QueryResult, + QueryResponse, Pagination, Pair, Parameter, @@ -348,7 +348,7 @@ types!( TransactionLimitError, TransactionLimits, TransactionPayload, - TransactionQueryResult, + TransactionQueryOutput, TransactionRejectionReason, TransactionValue, TransferBox, @@ -378,7 +378,7 @@ types!( VersionedCommittedBlockWrapper, VersionedEventMessage, VersionedEventSubscriptionRequest, - VersionedQueryResult, + VersionedQueryResponse, VersionedSignedQuery, VersionedSignedTransaction, WasmExecutionFail, @@ -429,7 +429,10 @@ mod tests { GenericPredicateBox, NonTrivial, PredicateBox, }, prelude::*, - query::error::{FindError, QueryExecutionFail}, + query::{ + error::{FindError, QueryExecutionFail}, + ForwardCursor, Pagination, Sorting, + }, transaction::{error::TransactionLimitError, SignedTransaction, TransactionLimits}, validator::Validator, ValueKind, VersionedCommittedBlockWrapper, diff --git a/tools/parity_scale_decoder/src/main.rs b/tools/parity_scale_decoder/src/main.rs index b80d6578fe4..88a0586ec5b 100644 --- a/tools/parity_scale_decoder/src/main.rs +++ b/tools/parity_scale_decoder/src/main.rs @@ -40,7 +40,10 @@ use iroha_data_model::{ GenericPredicateBox, NonTrivial, PredicateBox, }, prelude::*, - query::error::{FindError, QueryExecutionFail}, + query::{ + error::{FindError, QueryExecutionFail}, + ForwardCursor, Pagination, Sorting, + }, transaction::{error::TransactionLimitError, SignedTransaction, TransactionLimits}, validator::Validator, ValueKind, VersionedCommittedBlockWrapper, diff --git a/wasm/src/lib.rs b/wasm/src/lib.rs index e478b27eac3..5ae2b7fff31 100644 --- a/wasm/src/lib.rs +++ b/wasm/src/lib.rs @@ -14,7 +14,12 @@ extern crate alloc; use alloc::{boxed::Box, collections::BTreeMap, format, vec::Vec}; use core::ops::RangeFrom; -use data_model::{prelude::*, query::QueryBox, validator::NeedsValidationBox}; +use data_model::{ + prelude::*, + isi::Instruction, + query::{Query, QueryBox}, + validator::NeedsValidationBox, +}; use debug::DebugExpectExt as _; pub use iroha_data_model as data_model; pub use iroha_wasm_derive::main;
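
As a consumer-side illustration of the change (not part of the patch): the integration tests above switch from `.only_output()` to `.output()`, and `Vec` query outputs now come back as a `ResultSet` that implements `Iterator`. A minimal sketch of a call site, using only names that appear in this diff — module paths and exact signatures may still shift while the patch is in review:

use eyre::Result;
use iroha_client::client::{asset, Client};
use iroha_data_model::{prelude::*, query::Pagination};

// Pages through every asset definition. The returned `ResultSet` is backed
// by the server-side cursor, so iterating it is expected to pull batches of
// `torii.fetch_size` results until the live query on the peer is drained.
fn all_asset_definitions(client: &Client) -> Result<Vec<AssetDefinition>> {
    let definitions = client
        .request_with_pagination(asset::all_definitions(), Pagination::default())?
        .collect();

    Ok(definitions)
}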