From e4ba1f325b14a2b47ddb395a51ad839e0a0110fe Mon Sep 17 00:00:00 2001
From: Calvin Prewitt
Date: Thu, 24 Aug 2023 11:36:02 -0500
Subject: [PATCH 01/26] added registry log index for operator and package records on the fetch logs

---
 .gitignore                                  |  1 +
 crates/api/src/v1/fetch.rs                  |  6 +-
 crates/api/src/v1/package.rs                |  4 +-
 crates/client/src/lib.rs                    | 11 ++--
 crates/protocol/src/lib.rs                  |  4 +-
 crates/protocol/src/proto_envelope.rs       | 57 +++++++++++++++++++
 crates/server/openapi.yaml                  | 22 ++++++-
 crates/server/src/api/v1/fetch.rs           |  6 +-
 crates/server/src/api/v1/package.rs         |  4 +-
 crates/server/src/datastore/memory.rs       | 44 +++++++-------
 crates/server/src/datastore/mod.rs          |  8 +--
 crates/server/src/datastore/postgres/mod.rs | 33 ++++++-----
 .../server/src/datastore/postgres/schema.rs |  7 ++-
 13 files changed, 151 insertions(+), 56 deletions(-)

diff --git a/.gitignore b/.gitignore
index 83abdf7b..b566778e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 /content
 /target
 *.swp
+*.swo
diff --git a/crates/api/src/v1/fetch.rs b/crates/api/src/v1/fetch.rs
index baf5dd81..9478c4c8 100644
--- a/crates/api/src/v1/fetch.rs
+++ b/crates/api/src/v1/fetch.rs
@@ -7,7 +7,7 @@ use thiserror::Error;
 use warg_crypto::hash::AnyHash;
 use warg_protocol::{
     registry::{LogId, RecordId},
-    ProtoEnvelopeBody,
+    PublishedProtoEnvelopeBody,
 };

 /// Represents a fetch logs request.
@@ -36,10 +36,10 @@ pub struct FetchLogsResponse {
     pub more: bool,
     /// The operator records appended since the last known operator record.
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub operator: Vec<ProtoEnvelopeBody>,
+    pub operator: Vec<PublishedProtoEnvelopeBody>,
     /// The package records appended since last known package record ids.
     #[serde(default, skip_serializing_if = "HashMap::is_empty")]
-    pub packages: HashMap<LogId, Vec<ProtoEnvelopeBody>>,
+    pub packages: HashMap<LogId, Vec<PublishedProtoEnvelopeBody>>,
 }

 /// Represents a fetch API error.
diff --git a/crates/api/src/v1/package.rs b/crates/api/src/v1/package.rs
index 0701c7e9..5eebbbce 100644
--- a/crates/api/src/v1/package.rs
+++ b/crates/api/src/v1/package.rs
@@ -105,10 +105,10 @@ pub enum PackageRecordState {
     Published {
         /// The envelope of the package record.
         record: ProtoEnvelopeBody,
-        /// The index of the record in the registry log.
-        registry_log_index: u32,
         /// The content sources of the record.
         content_sources: HashMap<AnyHash, Vec<ContentSource>>,
+        /// The published index of the record in the registry log.
+ index: u32, }, } diff --git a/crates/client/src/lib.rs b/crates/client/src/lib.rs index 1e04b03a..aa428501 100644 --- a/crates/client/src/lib.rs +++ b/crates/client/src/lib.rs @@ -26,7 +26,7 @@ use warg_crypto::{ use warg_protocol::{ operator, package, registry::{LogId, LogLeaf, PackageId, RecordId, TimestampedCheckpoint}, - ProtoEnvelope, SerdeEnvelope, Version, VersionReq, + PublishedProtoEnvelope, SerdeEnvelope, Version, VersionReq, }; pub mod api; @@ -404,10 +404,10 @@ impl Client { })?; for record in response.operator { - let record: ProtoEnvelope = record.try_into()?; + let record: PublishedProtoEnvelope = record.try_into()?; operator .state - .validate(&record) + .validate(&record.envelope) .map_err(|inner| ClientError::OperatorValidationFailed { inner })?; } @@ -417,8 +417,9 @@ impl Client { })?; for record in records { - let record: ProtoEnvelope = record.try_into()?; - package.state.validate(&record).map_err(|inner| { + let record: PublishedProtoEnvelope = + record.try_into()?; + package.state.validate(&record.envelope).map_err(|inner| { ClientError::PackageValidationFailed { id: package.id.clone(), inner, diff --git a/crates/protocol/src/lib.rs b/crates/protocol/src/lib.rs index ba7ddbe9..e3e7ffba 100644 --- a/crates/protocol/src/lib.rs +++ b/crates/protocol/src/lib.rs @@ -8,7 +8,9 @@ mod proto_envelope; pub mod registry; mod serde_envelope; -pub use proto_envelope::{ProtoEnvelope, ProtoEnvelopeBody}; +pub use proto_envelope::{ + ProtoEnvelope, ProtoEnvelopeBody, PublishedProtoEnvelope, PublishedProtoEnvelopeBody, +}; pub use semver::{Version, VersionReq}; pub use serde_envelope::SerdeEnvelope; diff --git a/crates/protocol/src/proto_envelope.rs b/crates/protocol/src/proto_envelope.rs index 42df72d8..e5c26006 100644 --- a/crates/protocol/src/proto_envelope.rs +++ b/crates/protocol/src/proto_envelope.rs @@ -8,6 +8,15 @@ use thiserror::Error; use warg_crypto::{hash::AnyHashError, signing, Decode, Signable}; use warg_protobuf::protocol as protobuf; +/// The ProtoEnvelope with the published registry log index. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PublishedProtoEnvelope { + /// The wrapped ProtoEnvelope + pub envelope: ProtoEnvelope, + /// The published registry log index for the record + pub index: u32, +} + /// The envelope struct is used to keep around the original /// bytes that the content was serialized into in case /// the serialization is not canonical. 
@@ -163,3 +172,51 @@ impl fmt::Debug for ProtoEnvelopeBody { .finish() } } + +#[serde_as] +#[derive(Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PublishedProtoEnvelopeBody { + /// The ProtoEnvelopeBody flattened + #[serde(flatten)] + pub envelope: ProtoEnvelopeBody, + /// The index of the published record in the registry log + pub index: u32, +} + +impl TryFrom for PublishedProtoEnvelope +where + Content: Decode, +{ + type Error = Error; + + fn try_from(value: PublishedProtoEnvelopeBody) -> Result { + Ok(PublishedProtoEnvelope { + envelope: ProtoEnvelope::::try_from(value.envelope)?, + index: value.index, + }) + } +} + +impl From> for PublishedProtoEnvelopeBody { + fn from(value: PublishedProtoEnvelope) -> Self { + PublishedProtoEnvelopeBody { + envelope: ProtoEnvelopeBody::from(value.envelope), + index: value.index, + } + } +} + +impl fmt::Debug for PublishedProtoEnvelopeBody { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("PublishedProtoEnvelopeBody") + .field( + "content_bytes", + &STANDARD.encode(&self.envelope.content_bytes), + ) + .field("key_id", &self.envelope.key_id) + .field("signature", &self.envelope.signature) + .field("index", &self.index) + .finish() + } +} diff --git a/crates/server/openapi.yaml b/crates/server/openapi.yaml index 1b327ef8..bad3617d 100644 --- a/crates/server/openapi.yaml +++ b/crates/server/openapi.yaml @@ -545,7 +545,7 @@ components: description: The operator log records for the given checkpoint since the last known record. maxItems: 1000 items: - $ref: "#/components/schemas/EnvelopeBody" + $ref: "#/components/schemas/PublishedEnvelopeBody" packages: type: object description: The map of package log identifier to package records. @@ -555,19 +555,22 @@ components: description: The package log records for the given checkpoint since the last known record. maxItems: 1000 items: - $ref: "#/components/schemas/EnvelopeBody" + $ref: "#/components/schemas/PublishedEnvelopeBody" example: ? "sha256:7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730" : - contentBytes: "ZXhhbXBsZQ==" keyId: "sha256:7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730" signature: "ecdsa-p256:MEUCIQCzWZBW6ux9LecP66Y+hjmLZTP/hZVz7puzlPTXcRT2wwIgQZO7nxP0nugtw18MwHZ26ROFWcJmgCtKOguK031Y1D0=" + index: 101 - contentBytes: "ZXhhbXBsZQ==" keyId: "sha256:7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730" signature: "ecdsa-p256:MEUCIQCzWZBW6ux9LecP66Y+hjmLZTP/hZVz7puzlPTXcRT2wwIgQZO7nxP0nugtw18MwHZ26ROFWcJmgCtKOguK031Y1D0=" + index: 305 ? "sha256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c" : - contentBytes: "ZXhhbXBsZQ==" keyId: "sha256:7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730" signature: "ecdsa-p256:MEUCIQCzWZBW6ux9LecP66Y+hjmLZTP/hZVz7puzlPTXcRT2wwIgQZO7nxP0nugtw18MwHZ26ROFWcJmgCtKOguK031Y1D0=" + index: 732 PublishPackageRecordRequest: type: object description: A request to publish a record to a package log. @@ -747,7 +750,7 @@ components: description: The state of the package record. enum: [published] example: published - registryLogIndex: + index: type: number description: The index of the record in the registry log. record: @@ -835,6 +838,19 @@ components: maxLength: 1048576 example: "ZXhhbXBsZQ==" - $ref: "#/components/schemas/Signature" + PublishedEnvelopeBody: + description: A signed envelope body with the published registry log index. 
+ allOf: + - type: object + required: + - index + properties: + index: + type: integer + description: The index of the published record in the registry log. + example: 42 + - $ref: "#/components/schemas/EnvelopeBody" + - $ref: "#/components/schemas/Signature" Signature: type: object description: Represents a signature of content. diff --git a/crates/server/src/api/v1/fetch.rs b/crates/server/src/api/v1/fetch.rs index 6cc34942..458eb5ac 100644 --- a/crates/server/src/api/v1/fetch.rs +++ b/crates/server/src/api/v1/fetch.rs @@ -13,7 +13,7 @@ use std::collections::HashMap; use warg_api::v1::fetch::{FetchError, FetchLogsRequest, FetchLogsResponse}; use warg_crypto::hash::Sha256; use warg_protocol::registry::{LogId, TimestampedCheckpoint}; -use warg_protocol::{ProtoEnvelopeBody, SerdeEnvelope}; +use warg_protocol::{PublishedProtoEnvelopeBody, SerdeEnvelope}; const DEFAULT_RECORDS_LIMIT: u16 = 100; const MAX_RECORDS_LIMIT: u16 = 1000; @@ -85,7 +85,7 @@ async fn fetch_logs( ))); } - let operator: Vec = config + let operator: Vec = config .core_service .store() .get_operator_records( @@ -104,7 +104,7 @@ async fn fetch_logs( let mut map = HashMap::new(); let packages = body.packages.into_owned(); for (id, since) in packages { - let records: Vec = config + let records: Vec = config .core_service .store() .get_package_records(&id, &body.checkpoint_id, since.as_ref(), limit) diff --git a/crates/server/src/api/v1/package.rs b/crates/server/src/api/v1/package.rs index 8e0f2a5c..3bc592b2 100644 --- a/crates/server/src/api/v1/package.rs +++ b/crates/server/src/api/v1/package.rs @@ -306,13 +306,13 @@ async fn get_record( }) .collect(); - let registry_log_index = record.registry_log_index.unwrap().try_into().unwrap(); + let index = record.index.unwrap().try_into().unwrap(); Ok(Json(PackageRecord { id: record_id, state: PackageRecordState::Published { record: record.envelope.into(), - registry_log_index, + index, content_sources, }, })) diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index af7998d2..e6c78db3 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -12,11 +12,11 @@ use warg_protocol::{ operator, package::{self, PackageEntry}, registry::{LogId, LogLeaf, PackageId, RecordId, TimestampedCheckpoint}, - ProtoEnvelope, SerdeEnvelope, + ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; struct Entry { - registry_index: u64, + registry_index: u32, record_content: ProtoEnvelope, } @@ -41,7 +41,7 @@ struct Record { /// Index in the log's entries. index: usize, /// Index in the registry's log. 
- registry_index: u64, + registry_index: u32, } enum PendingRecord { @@ -194,12 +194,12 @@ impl DataStore for MemoryDataStore { Ok(_) => { let index = log.entries.len(); log.entries.push(Entry { - registry_index: registry_log_index, + registry_index: registry_log_index as u32, record_content: record, }); *status = RecordStatus::Validated(Record { index, - registry_index: registry_log_index, + registry_index: registry_log_index as u32, }); Ok(()) } @@ -303,12 +303,12 @@ impl DataStore for MemoryDataStore { Ok(_) => { let index = log.entries.len(); log.entries.push(Entry { - registry_index: registry_log_index, + registry_index: registry_log_index as u32, record_content: record, }); *status = RecordStatus::Validated(Record { index, - registry_index: registry_log_index, + registry_index: registry_log_index as u32, }); Ok(()) } @@ -415,7 +415,7 @@ impl DataStore for MemoryDataStore { checkpoint_id: &AnyHash, since: Option<&RecordId>, limit: u16, - ) -> Result>, DataStoreError> { + ) -> Result>, DataStoreError> { let state = self.0.read().await; let log = state @@ -434,14 +434,17 @@ impl DataStore for MemoryDataStore { }, None => 0, }; - let end_registry_idx = checkpoint.as_ref().checkpoint.log_length as u64; + let end_registry_idx = checkpoint.as_ref().checkpoint.log_length; Ok(log .entries .iter() .skip(start_log_idx) .take_while(|entry| entry.registry_index < end_registry_idx) - .map(|entry| entry.record_content.clone()) + .map(|entry| PublishedProtoEnvelope { + envelope: entry.record_content.clone(), + index: entry.registry_index, + }) .take(limit as usize) .collect()) } @@ -452,7 +455,7 @@ impl DataStore for MemoryDataStore { checkpoint_id: &AnyHash, since: Option<&RecordId>, limit: u16, - ) -> Result>, DataStoreError> { + ) -> Result>, DataStoreError> { let state = self.0.read().await; let log = state @@ -471,14 +474,17 @@ impl DataStore for MemoryDataStore { }, None => 0, }; - let end_registry_idx = checkpoint.as_ref().checkpoint.log_length as u64; + let end_registry_idx = checkpoint.as_ref().checkpoint.log_length; Ok(log .entries .iter() .skip(start_log_idx) .take_while(|entry| entry.registry_index < end_registry_idx) - .map(|entry| entry.record_content.clone()) + .map(|entry| PublishedProtoEnvelope { + envelope: entry.record_content.clone(), + index: entry.registry_index, + }) .take(limit as usize) .collect()) } @@ -496,7 +502,7 @@ impl DataStore for MemoryDataStore { .get(record_id) .ok_or_else(|| DataStoreError::RecordNotFound(record_id.clone()))?; - let (status, envelope, registry_log_index) = match status { + let (status, envelope, index) = match status { RecordStatus::Pending(PendingRecord::Operator { record, .. }) => { (super::RecordStatus::Pending, record.clone().unwrap(), None) } @@ -515,7 +521,7 @@ impl DataStore for MemoryDataStore { .checkpoints .last() .map(|(_, c)| c.as_ref().checkpoint.log_length) - .unwrap_or_default() as u64; + .unwrap_or_default(); ( if r.registry_index < published_length { @@ -533,7 +539,7 @@ impl DataStore for MemoryDataStore { Ok(super::Record { status, envelope, - registry_log_index, + index, }) } @@ -550,7 +556,7 @@ impl DataStore for MemoryDataStore { .get(record_id) .ok_or_else(|| DataStoreError::RecordNotFound(record_id.clone()))?; - let (status, envelope, registry_log_index) = match status { + let (status, envelope, index) = match status { RecordStatus::Pending(PendingRecord::Package { record, .. 
}) => { (super::RecordStatus::Pending, record.clone().unwrap(), None) } @@ -569,7 +575,7 @@ impl DataStore for MemoryDataStore { .checkpoints .last() .map(|(_, c)| c.as_ref().checkpoint.log_length) - .unwrap_or_default() as u64; + .unwrap_or_default(); ( if r.registry_index < published_length { @@ -587,7 +593,7 @@ impl DataStore for MemoryDataStore { Ok(super::Record { status, envelope, - registry_log_index, + index, }) } diff --git a/crates/server/src/datastore/mod.rs b/crates/server/src/datastore/mod.rs index 599fb094..5e99aedc 100644 --- a/crates/server/src/datastore/mod.rs +++ b/crates/server/src/datastore/mod.rs @@ -5,7 +5,7 @@ use warg_crypto::{hash::AnyHash, signing::KeyID}; use warg_protocol::{ operator, package, registry::{LogId, LogLeaf, PackageId, RecordId, TimestampedCheckpoint}, - ProtoEnvelope, SerdeEnvelope, + ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; mod memory; @@ -90,7 +90,7 @@ where /// The index of the record in the registry log. /// /// This is `None` if the record is not published. - pub registry_log_index: Option, + pub index: Option, } /// Implemented by data stores. @@ -222,7 +222,7 @@ pub trait DataStore: Send + Sync { checkpoint_id: &AnyHash, since: Option<&RecordId>, limit: u16, - ) -> Result>, DataStoreError>; + ) -> Result>, DataStoreError>; /// Gets the package records for the given registry checkpoint ID hash. async fn get_package_records( @@ -231,7 +231,7 @@ pub trait DataStore: Send + Sync { checkpoint_id: &AnyHash, since: Option<&RecordId>, limit: u16, - ) -> Result>, DataStoreError>; + ) -> Result>, DataStoreError>; /// Gets an operator record. async fn get_operator_record( diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index 5d93e94f..460f948b 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -22,7 +22,7 @@ use warg_protocol::{ operator, package::{self, PackageEntry}, registry::{Checkpoint, LogId, LogLeaf, PackageId, RecordId, TimestampedCheckpoint}, - ProtoEnvelope, Record as _, SerdeEnvelope, Validator, + ProtoEnvelope, PublishedProtoEnvelope, Record as _, SerdeEnvelope, Validator, }; mod models; @@ -34,7 +34,7 @@ async fn get_records( checkpoint_id: &AnyHash, since: Option<&RecordId>, limit: i64, -) -> Result>, DataStoreError> { +) -> Result>, DataStoreError> { let checkpoint_length = schema::checkpoints::table .select(schema::checkpoints::log_length) .filter(schema::checkpoints::checkpoint_id.eq(TextRef(checkpoint_id))) @@ -45,7 +45,11 @@ async fn get_records( let mut query = schema::records::table .into_boxed() - .select((schema::records::record_id, schema::records::content)) + .select(( + schema::records::record_id, + schema::records::content, + schema::records::registry_log_index, + )) .order_by(schema::records::id.asc()) .limit(limit) .filter( @@ -68,15 +72,18 @@ async fn get_records( } query - .load::<(ParsedText, Vec)>(conn) + .load::<(ParsedText, Vec, Option)>(conn) .await? 
.into_iter() - .map(|(record_id, c)| { - ProtoEnvelope::from_protobuf(c).map_err(|e| DataStoreError::InvalidRecordContents { - record_id: record_id.0.into(), - message: e.to_string(), - }) - }) + .map( + |(record_id, c, index)| match ProtoEnvelope::from_protobuf(c) { + Ok(envelope) => Ok(PublishedProtoEnvelope { envelope, index: index.unwrap() as u32 }), + Err(e) => Err(DataStoreError::InvalidRecordContents { + record_id: record_id.0.into(), + message: e.to_string(), + }), + }, + ) .collect::>() } @@ -331,7 +338,7 @@ where message: e.to_string(), } })?, - registry_log_index: record.registry_log_index.map(|idx| idx.try_into().unwrap()), + index: record.registry_log_index.map(|idx| idx.try_into().unwrap()), }) } @@ -738,7 +745,7 @@ impl DataStore for PostgresDataStore { checkpoint_id: &AnyHash, since: Option<&RecordId>, limit: u16, - ) -> Result>, DataStoreError> { + ) -> Result>, DataStoreError> { let mut conn = self.pool.get().await?; let log_id = schema::logs::table .select(schema::logs::id) @@ -757,7 +764,7 @@ impl DataStore for PostgresDataStore { checkpoint_id: &AnyHash, since: Option<&RecordId>, limit: u16, - ) -> Result>, DataStoreError> { + ) -> Result>, DataStoreError> { let mut conn = self.pool.get().await?; let log_id = schema::logs::table .select(schema::logs::id) diff --git a/crates/server/src/datastore/postgres/schema.rs b/crates/server/src/datastore/postgres/schema.rs index f25b2839..17bafe3a 100644 --- a/crates/server/src/datastore/postgres/schema.rs +++ b/crates/server/src/datastore/postgres/schema.rs @@ -63,4 +63,9 @@ diesel::table! { diesel::joinable!(contents -> records (record_id)); diesel::joinable!(records -> logs (log_id)); -diesel::allow_tables_to_appear_in_same_query!(checkpoints, contents, logs, records,); +diesel::allow_tables_to_appear_in_same_query!( + checkpoints, + contents, + logs, + records, +); From 71f9ab578b2d0be280dcd3129eb5db3b1e48b6a8 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 16:41:15 -0500 Subject: [PATCH 02/26] refactored proof endpoints to use log_length and log index --- crates/api/src/v1/proof.rs | 96 ++++++------------- crates/client/src/api.rs | 6 +- crates/client/src/lib.rs | 21 ++-- crates/client/src/storage.rs | 7 ++ crates/server/openapi.yaml | 8 +- crates/server/src/api/v1/proof.rs | 39 +++----- crates/server/src/datastore/memory.rs | 27 +++++- crates/server/src/datastore/mod.rs | 9 ++ crates/server/src/datastore/postgres/mod.rs | 31 +++++- .../server/src/datastore/postgres/schema.rs | 7 +- crates/server/src/services/core.rs | 57 +++++------ 11 files changed, 162 insertions(+), 146 deletions(-) diff --git a/crates/api/src/v1/proof.rs b/crates/api/src/v1/proof.rs index d7f18311..ae7a5976 100644 --- a/crates/api/src/v1/proof.rs +++ b/crates/api/src/v1/proof.rs @@ -1,12 +1,12 @@ //! Types relating to the proof API. use crate::Status; -use serde::{de::Unexpected, Deserialize, Serialize, Serializer}; +use serde::{Deserialize, Serialize, Serializer}; use serde_with::{base64::Base64, serde_as}; use std::borrow::Cow; use thiserror::Error; use warg_crypto::hash::AnyHash; -use warg_protocol::registry::{Checkpoint, LogId, LogLeaf}; +use warg_protocol::registry::{LogId}; /// Represents a consistency proof request. #[derive(Serialize, Deserialize)] @@ -31,11 +31,11 @@ pub struct ConsistencyResponse { /// Represents an inclusion proof request. #[derive(Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct InclusionRequest<'a> { - /// The checkpoint to check for inclusion. 
- pub checkpoint: Cow<'a, Checkpoint>, - /// The log leafs to check for inclusion. - pub leafs: Cow<'a, [LogLeaf]>, +pub struct InclusionRequest { + /// The log length to check for inclusion. + pub log_length: u32, + /// The log leaf indexes in the registry log to check for inclusion. + pub leafs: Vec, } /// Represents an inclusion proof response. @@ -55,12 +55,12 @@ pub struct InclusionResponse { #[non_exhaustive] #[derive(Debug, Error)] pub enum ProofError { - /// The provided log root was not found. - #[error("log root `{0}` was not found")] - RootNotFound(AnyHash), + /// The checkpoint could not be found for the provided log length. + #[error("checkpoint not found for log length {0}")] + CheckpointNotFound(u32), /// The provided log leaf was not found. - #[error("log leaf `{}:{}` was not found", .0.log_id, .0.record_id)] - LeafNotFound(LogLeaf), + #[error("log leaf `{0}` exceeds the registry log length")] + LeafNotFound(u32), /// Failed to prove inclusion of a package. #[error("failed to prove inclusion of package log `{0}`")] PackageLogNotIncluded(LogId), @@ -89,7 +89,7 @@ impl ProofError { /// Returns the HTTP status code of the error. pub fn status(&self) -> u16 { match self { - Self::RootNotFound(_) | Self::LeafNotFound(_) => 404, + Self::CheckpointNotFound(_) | Self::LeafNotFound(_) => 404, Self::BundleFailure(_) | Self::PackageLogNotIncluded(_) | Self::IncorrectProof { .. } => 422, @@ -101,7 +101,7 @@ impl ProofError { #[derive(Serialize, Deserialize)] #[serde(rename_all = "camelCase")] enum EntityType { - LogRoot, + LogLength, Leaf, } @@ -122,16 +122,13 @@ enum BundleError<'a> { #[derive(Serialize, Deserialize)] #[serde(untagged, rename_all = "camelCase")] -enum RawError<'a, T> -where - T: Clone + ToOwned, - ::Owned: Serialize + for<'b> Deserialize<'b>, +enum RawError<'a> { NotFound { status: Status<404>, #[serde(rename = "type")] ty: EntityType, - id: Cow<'a, T>, + id: u32, }, BundleError { status: Status<422>, @@ -147,26 +144,26 @@ where impl Serialize for ProofError { fn serialize(&self, serializer: S) -> Result { match self { - Self::RootNotFound(root) => RawError::NotFound { + Self::CheckpointNotFound(log_length) => RawError::NotFound { status: Status::<404>, - ty: EntityType::LogRoot, - id: Cow::Borrowed(root), + ty: EntityType::LogLength, + id: *log_length, } .serialize(serializer), - Self::LeafNotFound(leaf) => RawError::NotFound:: { + Self::LeafNotFound(leaf_index) => RawError::NotFound { status: Status::<404>, ty: EntityType::Leaf, - id: Cow::Owned(format!("{}|{}", leaf.log_id, leaf.record_id)), + id: *leaf_index, } .serialize(serializer), - Self::PackageLogNotIncluded(log_id) => RawError::BundleError::<()> { + Self::PackageLogNotIncluded(log_id) => RawError::BundleError { status: Status::<422>, error: BundleError::PackageNotIncluded { log_id: Cow::Borrowed(log_id), }, } .serialize(serializer), - Self::IncorrectProof { root, found } => RawError::BundleError::<()> { + Self::IncorrectProof { root, found } => RawError::BundleError { status: Status::<422>, error: BundleError::IncorrectProof { root: Cow::Borrowed(root), @@ -174,14 +171,14 @@ impl Serialize for ProofError { }, } .serialize(serializer), - Self::BundleFailure(message) => RawError::BundleError::<()> { + Self::BundleFailure(message) => RawError::BundleError { status: Status::<422>, error: BundleError::Failure { message: Cow::Borrowed(message), }, } .serialize(serializer), - Self::Message { status, message } => RawError::Message::<()> { + Self::Message { status, message } => RawError::Message { status: *status, 
message: Cow::Borrowed(message), } @@ -195,47 +192,12 @@ impl<'de> Deserialize<'de> for ProofError { where D: serde::Deserializer<'de>, { - match RawError::::deserialize(deserializer)? { + match RawError::deserialize(deserializer)? { RawError::NotFound { status: _, ty, id } => match ty { - EntityType::LogRoot => { - Ok(Self::RootNotFound(id.parse::().map_err(|_| { - serde::de::Error::invalid_value( - Unexpected::Str(&id), - &"a valid checkpoint id", - ) - })?)) + EntityType::LogLength => { + Ok(Self::CheckpointNotFound(id)) } - EntityType::Leaf => Ok(Self::LeafNotFound( - id.split_once('|') - .map(|(log_id, record_id)| { - Ok(LogLeaf { - log_id: log_id - .parse::() - .map_err(|_| { - serde::de::Error::invalid_value( - Unexpected::Str(log_id), - &"a valid log id", - ) - })? - .into(), - record_id: record_id - .parse::() - .map_err(|_| { - serde::de::Error::invalid_value( - Unexpected::Str(record_id), - &"a valid record id", - ) - })? - .into(), - }) - }) - .ok_or_else(|| { - serde::de::Error::invalid_value( - Unexpected::Str(&id), - &"a valid leaf id", - ) - })??, - )), + EntityType::Leaf => Ok(Self::LeafNotFound(id)), }, RawError::BundleError { status: _, error } => match error { BundleError::PackageNotIncluded { log_id } => { diff --git a/crates/client/src/api.rs b/crates/client/src/api.rs index 6ae05355..6dc78fab 100644 --- a/crates/client/src/api.rs +++ b/crates/client/src/api.rs @@ -237,7 +237,7 @@ impl Client { } /// Proves the inclusion of the given package log heads in the registry. - pub async fn prove_inclusion(&self, request: InclusionRequest<'_>) -> Result<(), ClientError> { + pub async fn prove_inclusion(&self, request: InclusionRequest, checkpoint: &Checkpoint, leafs: &[LogLeaf]) -> Result<(), ClientError> { let url = self.url.join(paths::prove_inclusion()); tracing::debug!("proving checkpoint inclusion at `{url}`"); @@ -248,8 +248,8 @@ impl Client { Self::validate_inclusion_response( response, - request.checkpoint.as_ref(), - request.leafs.as_ref(), + checkpoint, + leafs, ) } diff --git a/crates/client/src/lib.rs b/crates/client/src/lib.rs index aa428501..8c4fd9ec 100644 --- a/crates/client/src/lib.rs +++ b/crates/client/src/lib.rs @@ -409,6 +409,7 @@ impl Client { .state .validate(&record.envelope) .map_err(|inner| ClientError::OperatorValidationFailed { inner })?; + operator.latest_index = Some(record.index); } for (log_id, records) in response.packages { @@ -425,6 +426,7 @@ impl Client { inner, } })?; + package.latest_index = Some(record.index); } // At this point, the package log should not be empty @@ -446,19 +448,22 @@ impl Client { } // Prove inclusion for the current log heads - let mut leafs = Vec::with_capacity(packages.len() + 1 /* for operator */); - if let Some(head) = operator.state.head() { + let mut leaf_indices = Vec::with_capacity(packages.len() + 1 /* for operator */); + let mut leafs = Vec::with_capacity(leaf_indices.len()); + if let Some(index) = operator.latest_index { + leaf_indices.push(index); leafs.push(LogLeaf { log_id: LogId::operator_log::(), - record_id: head.digest.clone(), + record_id: operator.state.head().as_ref().unwrap().digest.clone(), }); } for (log_id, package) in &packages { - if let Some(head) = package.state.head() { + if let Some(index) = package.latest_index { + leaf_indices.push(index); leafs.push(LogLeaf { log_id: log_id.clone(), - record_id: head.digest.clone(), + record_id: package.state.head().as_ref().unwrap().digest.clone(), }); } } @@ -466,9 +471,9 @@ impl Client { if !leafs.is_empty() { self.api 
.prove_inclusion(InclusionRequest { - checkpoint: Cow::Borrowed(checkpoint), - leafs: Cow::Borrowed(&leafs), - }) + log_length: checkpoint.log_length, + leafs: leaf_indices, + }, &checkpoint, &leafs) .await?; } diff --git a/crates/client/src/storage.rs b/crates/client/src/storage.rs index ee25ef89..7dd0a5f8 100644 --- a/crates/client/src/storage.rs +++ b/crates/client/src/storage.rs @@ -109,6 +109,9 @@ pub struct OperatorInfo { /// The current operator log state #[serde(default)] pub state: operator::LogState, + /// The registry log index of the most recent record + #[serde(default)] + pub latest_index: Option, } /// Represents information about a registry package. @@ -123,6 +126,9 @@ pub struct PackageInfo { /// The current package log state #[serde(default)] pub state: package::LogState, + /// The registry log index of the most recent record + #[serde(default)] + pub latest_index: Option, } impl PackageInfo { @@ -132,6 +138,7 @@ impl PackageInfo { id: id.into(), checkpoint: None, state: package::LogState::default(), + latest_index: None, } } } diff --git a/crates/server/openapi.yaml b/crates/server/openapi.yaml index bad3617d..b328f726 100644 --- a/crates/server/openapi.yaml +++ b/crates/server/openapi.yaml @@ -439,14 +439,12 @@ paths: type: type: string description: The type of entity that was not found. - enum: [checkpoint, leaf] - example: checkpoint + enum: [logLength, leaf] + example: logLength id: - "$ref": "#/components/schemas/AnyHash" + type: integer description: | The identifier of the entity that was not found. - - For leafs, the format is `|`. "422": description: The proof bundle could not be generated. content: diff --git a/crates/server/src/api/v1/proof.rs b/crates/server/src/api/v1/proof.rs index b3d9017f..126e504a 100644 --- a/crates/server/src/api/v1/proof.rs +++ b/crates/server/src/api/v1/proof.rs @@ -27,20 +27,13 @@ impl Config { struct ProofApiError(ProofError); -impl ProofApiError { - fn bad_request(message: impl ToString) -> Self { - Self(ProofError::Message { - status: StatusCode::BAD_REQUEST.as_u16(), - message: message.to_string(), - }) - } -} - impl From for ProofApiError { fn from(value: CoreServiceError) -> Self { Self(match value { - CoreServiceError::RootNotFound(root) => ProofError::RootNotFound(root), - CoreServiceError::LeafNotFound(leaf) => ProofError::LeafNotFound(leaf), + CoreServiceError::CheckpointNotFound(log_length) => { + ProofError::CheckpointNotFound(log_length as u32) + } + //CoreServiceError::LeafNotFound(leaf) => ProofError::LeafNotFound(leaf), CoreServiceError::BundleFailure(e) => ProofError::BundleFailure(e.to_string()), CoreServiceError::PackageNotIncluded(id) => ProofError::PackageLogNotIncluded(id), CoreServiceError::IncorrectProof { root, found } => { @@ -81,24 +74,18 @@ async fn prove_consistency( #[debug_handler] async fn prove_inclusion( State(config): State, - Json(body): Json>, + Json(body): Json, ) -> Result, ProofApiError> { - let checkpoint = body.checkpoint.into_owned(); - let log_length = checkpoint.log_length; - let map_root = checkpoint - .map_root - .try_into() - .map_err(ProofApiError::bad_request)?; + let log_length = body.log_length as usize; + let leafs = body + .leafs + .into_iter() + .map(|index| index as usize) + .collect::>(); - let log_bundle = config - .core - .log_inclusion_proofs(log_length as usize, &body.leafs) - .await?; + let log_bundle = config.core.log_inclusion_proofs(log_length, &leafs).await?; - let map_bundle = config - .core - .map_inclusion_proofs(&map_root, &body.leafs) - .await?; + let map_bundle = 
config.core.map_inclusion_proofs(log_length, &leafs).await?; Ok(Json(InclusionResponse { log: log_bundle.encode(), diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index e6c78db3..396c2ecc 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -78,6 +78,7 @@ struct State { package_ids: BTreeSet, checkpoints: IndexMap>, records: HashMap>, + log_leafs: HashMap, } /// Represents an in-memory data store. @@ -118,6 +119,23 @@ impl DataStore for MemoryDataStore { Ok(Box::pin(futures::stream::empty())) } + async fn get_log_leafs_from_registry_index( + &self, + entries: &[usize], + ) -> Result, DataStoreError> { + let state = self.0.read().await; + + let mut leafs = Vec::with_capacity(entries.len()); + for entry in entries { + match state.log_leafs.get(&(*entry as u32)) { + Some(log_leaf) => leafs.push(log_leaf.clone()), + None => return Err(DataStoreError::LogLeafNotFound(*entry)), + } + } + + Ok(leafs) + } + async fn store_operator_record( &self, log_id: &LogId, @@ -173,7 +191,7 @@ impl DataStore for MemoryDataStore { let mut state = self.0.write().await; let State { - operators, records, .. + operators, records, log_leafs, .. } = &mut *state; let status = records @@ -201,6 +219,7 @@ impl DataStore for MemoryDataStore { index, registry_index: registry_log_index as u32, }); + log_leafs.insert(registry_log_index as u32, LogLeaf { log_id: log_id.clone(), record_id: record_id.clone() }); Ok(()) } Err(e) => { @@ -282,7 +301,7 @@ impl DataStore for MemoryDataStore { let mut state = self.0.write().await; let State { - packages, records, .. + packages, records, log_leafs, .. } = &mut *state; let status = records @@ -310,6 +329,10 @@ impl DataStore for MemoryDataStore { index, registry_index: registry_log_index as u32, }); + log_leafs.insert(registry_log_index as u32, LogLeaf { + log_id: log_id.clone(), + record_id: record_id.clone(), + }); Ok(()) } Err(e) => { diff --git a/crates/server/src/datastore/mod.rs b/crates/server/src/datastore/mod.rs index 5e99aedc..ffd161e3 100644 --- a/crates/server/src/datastore/mod.rs +++ b/crates/server/src/datastore/mod.rs @@ -30,6 +30,9 @@ pub enum DataStoreError { #[error("record `{0}` was not found")] RecordNotFound(RecordId), + #[error("log leaf {0} was not found")] + LogLeafNotFound(usize), + #[error("record `{0}` cannot be validated as it is not in a pending state")] RecordNotPending(RecordId), @@ -113,6 +116,12 @@ pub trait DataStore: Send + Sync { &self, ) -> Result> + Send>>, DataStoreError>; + /// Looks up the log_id and record_id from the registry log index. + async fn get_log_leafs_from_registry_index( + &self, + entries: &[usize], + ) -> Result, DataStoreError>; + /// Stores the given operator record. 
async fn store_operator_record( &self, diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index 460f948b..fe48b114 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -77,7 +77,10 @@ async fn get_records( .into_iter() .map( |(record_id, c, index)| match ProtoEnvelope::from_protobuf(c) { - Ok(envelope) => Ok(PublishedProtoEnvelope { envelope, index: index.unwrap() as u32 }), + Ok(envelope) => Ok(PublishedProtoEnvelope { + envelope, + index: index.unwrap() as u32, + }), Err(e) => Err(DataStoreError::InvalidRecordContents { record_id: record_id.0.into(), message: e.to_string(), @@ -440,6 +443,32 @@ impl DataStore for PostgresDataStore { )) } + async fn get_log_leafs_from_registry_index( + &self, + entries: &[usize], + ) -> Result, DataStoreError> { + let mut conn = self.pool.get().await?; + + let leafs = schema::records::table + .inner_join(schema::logs::table) + .select((schema::logs::log_id, schema::records::record_id, schema::records::registry_log_index)) + .filter(schema::records::registry_log_index.eq_any(entries.iter().map(|i| *i as i64).collect::>())) + .load::<(ParsedText, ParsedText, Option)>(&mut conn) + .await? + .into_iter() + .map(|(log_id, record_id, index)| (index.unwrap() as u32, LogLeaf { + log_id: log_id.0.into(), + record_id: record_id.0.into(), + })) + .collect::>(); + + if leafs.len() < entries.len() { + return Err(DataStoreError::LogLeafNotFound(0)); // TODO + } + + Ok(leafs.into_iter().map(|(index, log_leaf)| log_leaf).collect::>()) + } + async fn store_operator_record( &self, log_id: &LogId, diff --git a/crates/server/src/datastore/postgres/schema.rs b/crates/server/src/datastore/postgres/schema.rs index 17bafe3a..f25b2839 100644 --- a/crates/server/src/datastore/postgres/schema.rs +++ b/crates/server/src/datastore/postgres/schema.rs @@ -63,9 +63,4 @@ diesel::table! { diesel::joinable!(contents -> records (record_id)); diesel::joinable!(records -> logs (log_id)); -diesel::allow_tables_to_appear_in_same_query!( - checkpoints, - contents, - logs, - records, -); +diesel::allow_tables_to_appear_in_same_query!(checkpoints, contents, logs, records,); diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 2c786433..5cfcd339 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -85,20 +85,14 @@ impl CoreService { pub async fn log_inclusion_proofs( &self, log_length: usize, - entries: &[LogLeaf], + entries: &[usize], ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; let proofs = entries .iter() - .map(|entry| { - let node = state - .leaf_index - .get(entry) - .ok_or_else(|| CoreServiceError::LeafNotFound(entry.clone()))?; - Ok(state.log.prove_inclusion(*node, log_length)) - }) - .collect::, CoreServiceError>>()?; + .map(|index| state.log.prove_inclusion(Node(*index), log_length)) + .collect(); LogProofBundle::bundle(vec![], proofs, &state.log).map_err(CoreServiceError::BundleFailure) } @@ -106,20 +100,27 @@ impl CoreService { /// Constructs map inclusion proofs for the given entries at the given map tree root. 
pub async fn map_inclusion_proofs( &self, - root: &Hash, - entries: &[LogLeaf], + log_length: usize, + entries: &[usize], ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; - let map = state + let (map_root, map) = state .map_index - .get(root) - .ok_or_else(|| CoreServiceError::RootNotFound(root.into()))?; + .get(&log_length) + .ok_or_else(|| CoreServiceError::CheckpointNotFound(log_length))?; - let proofs = entries + let indexes = self + .inner + .store + .get_log_leafs_from_registry_index(entries) + .await + .map_err(CoreServiceError::DataStore)?; + + let proofs = indexes .iter() - .map(|entry| { - let LogLeaf { log_id, record_id } = entry; + .map(|log_leaf| { + let LogLeaf { log_id, record_id } = log_leaf; let proof = map .prove(log_id.clone()) @@ -129,9 +130,9 @@ impl CoreService { record_id: record_id.clone(), }; let found_root = proof.evaluate(log_id, &map_leaf); - if &found_root != root { + if &found_root != map_root { return Err(CoreServiceError::IncorrectProof { - root: root.into(), + root: map_root.into(), found: found_root.into(), }); } @@ -345,8 +346,8 @@ struct State { // The verifiable map of package logs' latest entries (log_id -> record_id) map: VerifiableMap, - // Index verifiable map snapshots by root (at checkpoints only) - map_index: HashMap, VerifiableMap>, + // Index verifiable map snapshots by log length (at checkpoints only) + map_index: HashMap, VerifiableMap)>, } impl State { @@ -369,14 +370,16 @@ impl State { fn checkpoint(&mut self) -> Checkpoint { let log_checkpoint = self.log.checkpoint(); let map_root = self.map.root(); + let log_length = log_checkpoint.length(); // Update map snapshot - if log_checkpoint.length() > 0 { - self.map_index.insert(map_root.clone(), self.map.clone()); + if log_length > 0 { + self.map_index + .insert(log_length, (map_root.clone(), self.map.clone())); } Checkpoint { - log_length: log_checkpoint.length().try_into().unwrap(), + log_length: log_length.try_into().unwrap(), log_root: log_checkpoint.root().into(), map_root: map_root.into(), } @@ -385,10 +388,8 @@ impl State { #[derive(Debug, Error)] pub enum CoreServiceError { - #[error("root `{0}` was not found")] - RootNotFound(AnyHash), - #[error("log leaf `{}:{}` was not found", .0.log_id, .0.record_id)] - LeafNotFound(LogLeaf), + #[error("checkpoint at log length `{0}` was not found")] + CheckpointNotFound(usize), #[error("failed to bundle proofs: `{0}`")] BundleFailure(anyhow::Error), #[error("failed to prove inclusion of package `{0}`")] From 0cb1a12fe92835e940e8395cd78e56577894f064 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 16:45:47 -0500 Subject: [PATCH 03/26] updated openapi.yaml --- crates/server/openapi.yaml | 21 +++++---------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/crates/server/openapi.yaml b/crates/server/openapi.yaml index b328f726..1d208387 100644 --- a/crates/server/openapi.yaml +++ b/crates/server/openapi.yaml @@ -654,26 +654,15 @@ components: - checkpoint - leafs properties: - checkpoint: - $ref: "#/components/schemas/Checkpoint" - description: The checkpoint to prove the inclusion for. + logLength: + type: integer + description: The checkpoint log length to prove the inclusion for. leafs: type: array maxItems: 1000 - description: The log leafs to prove the inclusion for. + description: The log leaf registry log index to prove the inclusion for. items: - type: object - description: A log leaf. 
- required: - - logId - - recordId - properties: - logId: - $ref: "#/components/schemas/AnyHash" - description: The log identifier. - recordId: - $ref: "#/components/schemas/AnyHash" - description: The record identifier. + type: integer ProveInclusionResponse: type: object description: A response containing the inclusion proof bundle. From 327e4b1a8bcfa205f95158c5b0396f397ce6066c Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 20:22:50 -0500 Subject: [PATCH 04/26] fixed bug assuming the Node(usize) was the registry log index --- crates/server/src/datastore/memory.rs | 35 +++++++++---- crates/server/src/datastore/mod.rs | 5 +- crates/server/src/datastore/postgres/mod.rs | 56 +++++++++++++++------ crates/server/src/services/core.rs | 27 ++++++---- 4 files changed, 89 insertions(+), 34 deletions(-) diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index 396c2ecc..7cd50247 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -114,8 +114,10 @@ impl DataStore for MemoryDataStore { async fn get_all_validated_records( &self, - ) -> Result> + Send>>, DataStoreError> - { + ) -> Result< + Pin> + Send>>, + DataStoreError, + > { Ok(Box::pin(futures::stream::empty())) } @@ -191,7 +193,10 @@ impl DataStore for MemoryDataStore { let mut state = self.0.write().await; let State { - operators, records, log_leafs, .. + operators, + records, + log_leafs, + .. } = &mut *state; let status = records @@ -219,7 +224,13 @@ impl DataStore for MemoryDataStore { index, registry_index: registry_log_index as u32, }); - log_leafs.insert(registry_log_index as u32, LogLeaf { log_id: log_id.clone(), record_id: record_id.clone() }); + log_leafs.insert( + registry_log_index as u32, + LogLeaf { + log_id: log_id.clone(), + record_id: record_id.clone(), + }, + ); Ok(()) } Err(e) => { @@ -301,7 +312,10 @@ impl DataStore for MemoryDataStore { let mut state = self.0.write().await; let State { - packages, records, log_leafs, .. + packages, + records, + log_leafs, + .. } = &mut *state; let status = records @@ -329,10 +343,13 @@ impl DataStore for MemoryDataStore { index, registry_index: registry_log_index as u32, }); - log_leafs.insert(registry_log_index as u32, LogLeaf { - log_id: log_id.clone(), - record_id: record_id.clone(), - }); + log_leafs.insert( + registry_log_index as u32, + LogLeaf { + log_id: log_id.clone(), + record_id: record_id.clone(), + }, + ); Ok(()) } Err(e) => { diff --git a/crates/server/src/datastore/mod.rs b/crates/server/src/datastore/mod.rs index ffd161e3..2118dc56 100644 --- a/crates/server/src/datastore/mod.rs +++ b/crates/server/src/datastore/mod.rs @@ -114,7 +114,10 @@ pub trait DataStore: Send + Sync { /// This is an expensive operation and should only be performed on startup. async fn get_all_validated_records( &self, - ) -> Result> + Send>>, DataStoreError>; + ) -> Result< + Pin> + Send>>, + DataStoreError, + >; /// Looks up the log_id and record_id from the registry log index. 
async fn get_log_leafs_from_registry_index( diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index fe48b114..208a4ab1 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -417,8 +417,10 @@ impl DataStore for PostgresDataStore { async fn get_all_validated_records( &self, - ) -> Result> + Send>>, DataStoreError> - { + ) -> Result< + Pin> + Send>>, + DataStoreError, + > { // The returned future will keep the connection from the pool until dropped let mut conn = self.pool.get().await?; @@ -429,15 +431,24 @@ impl DataStore for PostgresDataStore { Ok(Box::pin( schema::records::table .inner_join(schema::logs::table) - .select((schema::logs::log_id, schema::records::record_id)) + .select(( + schema::logs::log_id, + schema::records::record_id, + schema::records::registry_log_index, + )) .filter(schema::records::status.eq(RecordStatus::Validated)) - .order_by(schema::records::id) - .load_stream::<(ParsedText, ParsedText)>(&mut conn) + .order_by(schema::records::registry_log_index) + .load_stream::<(ParsedText, ParsedText, Option)>(&mut conn) .await? .map(|r| { - r.map_err(Into::into).map(|(log_id, record_id)| LogLeaf { - log_id: log_id.0.into(), - record_id: record_id.0.into(), + r.map_err(Into::into).map(|(log_id, record_id, index)| { + ( + index.unwrap() as usize, + LogLeaf { + log_id: log_id.0.into(), + record_id: record_id.0.into(), + }, + ) }) }), )) @@ -451,22 +462,37 @@ impl DataStore for PostgresDataStore { let leafs = schema::records::table .inner_join(schema::logs::table) - .select((schema::logs::log_id, schema::records::record_id, schema::records::registry_log_index)) - .filter(schema::records::registry_log_index.eq_any(entries.iter().map(|i| *i as i64).collect::>())) + .select(( + schema::logs::log_id, + schema::records::record_id, + schema::records::registry_log_index, + )) + .filter( + schema::records::registry_log_index + .eq_any(entries.iter().map(|i| *i as i64).collect::>()), + ) .load::<(ParsedText, ParsedText, Option)>(&mut conn) .await? 
.into_iter() - .map(|(log_id, record_id, index)| (index.unwrap() as u32, LogLeaf { - log_id: log_id.0.into(), - record_id: record_id.0.into(), - })) + .map(|(log_id, record_id, index)| { + ( + index.unwrap() as u32, + LogLeaf { + log_id: log_id.0.into(), + record_id: record_id.0.into(), + }, + ) + }) .collect::>(); if leafs.len() < entries.len() { return Err(DataStoreError::LogLeafNotFound(0)); // TODO } - Ok(leafs.into_iter().map(|(index, log_leaf)| log_leaf).collect::>()) + Ok(leafs + .into_iter() + .map(|(index, log_leaf)| log_leaf) + .collect::>()) } async fn store_operator_record( diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 5cfcd339..996b413d 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -91,8 +91,14 @@ impl CoreService { let proofs = entries .iter() - .map(|index| state.log.prove_inclusion(Node(*index), log_length)) - .collect(); + .map(|index| { + let node = state + .leaf_index + .get(index) + .ok_or_else(|| CoreServiceError::LeafNotFound(*index))?; + Ok(state.log.prove_inclusion(*node, log_length)) + }) + .collect::, CoreServiceError>>()?; LogProofBundle::bundle(vec![], proofs, &state.log).map_err(CoreServiceError::BundleFailure) } @@ -194,7 +200,8 @@ impl Inner { let state = self.state.get_mut(); while let Some(entry) = published.next().await { - state.push_entry(entry?); + let (index, log_leaf) = entry?; + state.push_entry(index, log_leaf); if let Some(stored_checkpoint) = checkpoints_by_len.get(&state.log.length()) { // Validate stored checkpoint (and update internal state as a side-effect) let computed_checkpoint = state.checkpoint(); @@ -233,7 +240,7 @@ impl Inner { // Update state with init record let entry = LogLeaf { log_id, record_id }; - state.push_entry(entry.clone()); + state.push_entry(0, entry.clone()); // "zero" checkpoint to be updated let mut checkpoint = Checkpoint { @@ -305,7 +312,7 @@ impl Inner { return; } - state.push_entry(entry.clone()); + state.push_entry(registry_log_index as usize, entry.clone()); } // Store a checkpoint including the given new entries @@ -339,8 +346,8 @@ type VerifiableMap = Map; struct State { // The verifiable log of all package log entries log: VecLog, - // Index log tree nodes by entry - leaf_index: HashMap, + // Index log tree nodes by registry log index of the record + leaf_index: HashMap, // Index log size by log tree root root_index: HashMap, usize>, @@ -351,9 +358,9 @@ struct State { } impl State { - fn push_entry(&mut self, entry: LogLeaf) { + fn push_entry(&mut self, registry_log_index: usize, entry: LogLeaf) { let node = self.log.push(&entry); - self.leaf_index.insert(entry.clone(), node); + self.leaf_index.insert(registry_log_index, node); let log_checkpoint = self.log.checkpoint(); self.root_index @@ -390,6 +397,8 @@ impl State { pub enum CoreServiceError { #[error("checkpoint at log length `{0}` was not found")] CheckpointNotFound(usize), + #[error("log leaf `{0}` was not found")] + LeafNotFound(usize), #[error("failed to bundle proofs: `{0}`")] BundleFailure(anyhow::Error), #[error("failed to prove inclusion of package `{0}`")] From eb8ffb93de04f0bd09dfd2019dfeb7b4a5a2320f Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 20:47:44 -0500 Subject: [PATCH 05/26] fixed error --- crates/server/src/datastore/postgres/mod.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index 208a4ab1..cb718894 
100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -471,6 +471,7 @@ impl DataStore for PostgresDataStore { schema::records::registry_log_index .eq_any(entries.iter().map(|i| *i as i64).collect::>()), ) + .order(schema::records::registry_log_index.asc()) .load::<(ParsedText, ParsedText, Option)>(&mut conn) .await? .into_iter() @@ -486,7 +487,15 @@ impl DataStore for PostgresDataStore { .collect::>(); if leafs.len() < entries.len() { - return Err(DataStoreError::LogLeafNotFound(0)); // TODO + let mut input = entries.to_vec(); + input.sort_unstable(); + + for (i, (index, _)) in leafs.iter().enumerate() { + let input_entry = input.get(i).unwrap(); + if *index as usize != *input_entry { + return Err(DataStoreError::LogLeafNotFound(*input_entry)); + } + } } Ok(leafs From 863991535eabb77a43e8390efba06479b3c05c5a Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 22:23:21 -0500 Subject: [PATCH 06/26] added type alias for log_length and log_index and fixed issues --- crates/api/src/v1/proof.rs | 16 ++++---- crates/client/src/storage.rs | 6 +-- crates/protocol/src/proto_envelope.rs | 5 ++- crates/protocol/src/registry.rs | 5 ++- crates/server/src/api/v1/proof.rs | 11 +++--- crates/server/src/datastore/memory.rs | 44 ++++++++++----------- crates/server/src/datastore/mod.rs | 17 ++++---- crates/server/src/datastore/postgres/mod.rs | 26 ++++++------ crates/server/src/services/core.rs | 44 ++++++++++----------- 9 files changed, 88 insertions(+), 86 deletions(-) diff --git a/crates/api/src/v1/proof.rs b/crates/api/src/v1/proof.rs index ae7a5976..f9af6594 100644 --- a/crates/api/src/v1/proof.rs +++ b/crates/api/src/v1/proof.rs @@ -6,16 +6,16 @@ use serde_with::{base64::Base64, serde_as}; use std::borrow::Cow; use thiserror::Error; use warg_crypto::hash::AnyHash; -use warg_protocol::registry::{LogId}; +use warg_protocol::registry::{LogId, LogIndex}; /// Represents a consistency proof request. #[derive(Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ConsistencyRequest { /// The starting log length to check for consistency. - pub from: u32, + pub from: LogIndex, /// The ending log length to check for consistency. - pub to: u32, + pub to: LogIndex, } /// Represents a consistency proof response. @@ -33,9 +33,9 @@ pub struct ConsistencyResponse { #[serde(rename_all = "camelCase")] pub struct InclusionRequest { /// The log length to check for inclusion. - pub log_length: u32, + pub log_length: LogIndex, /// The log leaf indexes in the registry log to check for inclusion. - pub leafs: Vec, + pub leafs: Vec, } /// Represents an inclusion proof response. @@ -57,10 +57,10 @@ pub struct InclusionResponse { pub enum ProofError { /// The checkpoint could not be found for the provided log length. #[error("checkpoint not found for log length {0}")] - CheckpointNotFound(u32), + CheckpointNotFound(LogIndex), /// The provided log leaf was not found. #[error("log leaf `{0}` exceeds the registry log length")] - LeafNotFound(u32), + LeafNotFound(LogIndex), /// Failed to prove inclusion of a package. 
#[error("failed to prove inclusion of package log `{0}`")] PackageLogNotIncluded(LogId), @@ -128,7 +128,7 @@ enum RawError<'a> status: Status<404>, #[serde(rename = "type")] ty: EntityType, - id: u32, + id: LogIndex, }, BundleError { status: Status<422>, diff --git a/crates/client/src/storage.rs b/crates/client/src/storage.rs index 7dd0a5f8..b99048f0 100644 --- a/crates/client/src/storage.rs +++ b/crates/client/src/storage.rs @@ -13,7 +13,7 @@ use warg_crypto::{ use warg_protocol::{ operator, package::{self, PackageRecord, PACKAGE_RECORD_VERSION}, - registry::{Checkpoint, PackageId, RecordId, TimestampedCheckpoint}, + registry::{Checkpoint, LogIndex, PackageId, RecordId, TimestampedCheckpoint}, ProtoEnvelope, SerdeEnvelope, Version, }; @@ -111,7 +111,7 @@ pub struct OperatorInfo { pub state: operator::LogState, /// The registry log index of the most recent record #[serde(default)] - pub latest_index: Option, + pub latest_index: Option, } /// Represents information about a registry package. @@ -128,7 +128,7 @@ pub struct PackageInfo { pub state: package::LogState, /// The registry log index of the most recent record #[serde(default)] - pub latest_index: Option, + pub latest_index: Option, } impl PackageInfo { diff --git a/crates/protocol/src/proto_envelope.rs b/crates/protocol/src/proto_envelope.rs index e5c26006..7fc7908d 100644 --- a/crates/protocol/src/proto_envelope.rs +++ b/crates/protocol/src/proto_envelope.rs @@ -7,6 +7,7 @@ use std::fmt; use thiserror::Error; use warg_crypto::{hash::AnyHashError, signing, Decode, Signable}; use warg_protobuf::protocol as protobuf; +use super::registry::LogIndex; /// The ProtoEnvelope with the published registry log index. #[derive(Debug, Clone, PartialEq, Eq)] @@ -14,7 +15,7 @@ pub struct PublishedProtoEnvelope { /// The wrapped ProtoEnvelope pub envelope: ProtoEnvelope, /// The published registry log index for the record - pub index: u32, + pub index: LogIndex, } /// The envelope struct is used to keep around the original @@ -181,7 +182,7 @@ pub struct PublishedProtoEnvelopeBody { #[serde(flatten)] pub envelope: ProtoEnvelopeBody, /// The index of the published record in the registry log - pub index: u32, + pub index: LogIndex, } impl TryFrom for PublishedProtoEnvelope diff --git a/crates/protocol/src/registry.rs b/crates/protocol/src/registry.rs index 10dc8782..30de43d5 100644 --- a/crates/protocol/src/registry.rs +++ b/crates/protocol/src/registry.rs @@ -9,11 +9,14 @@ use warg_crypto::prefix::VisitPrefixEncode; use warg_crypto::{prefix, ByteVisitor, Signable, VisitBytes}; use wasmparser::names::KebabStr; +/// Type alias for log index and log length +pub type LogIndex = usize; + #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Checkpoint { pub log_root: AnyHash, - pub log_length: u32, + pub log_length: LogIndex, pub map_root: AnyHash, } diff --git a/crates/server/src/api/v1/proof.rs b/crates/server/src/api/v1/proof.rs index 126e504a..503edde6 100644 --- a/crates/server/src/api/v1/proof.rs +++ b/crates/server/src/api/v1/proof.rs @@ -6,6 +6,7 @@ use axum::{ use warg_api::v1::proof::{ ConsistencyRequest, ConsistencyResponse, InclusionRequest, InclusionResponse, ProofError, }; +use warg_protocol::registry::LogIndex; #[derive(Clone)] pub struct Config { @@ -31,7 +32,7 @@ impl From for ProofApiError { fn from(value: CoreServiceError) -> Self { Self(match value { CoreServiceError::CheckpointNotFound(log_length) => { - ProofError::CheckpointNotFound(log_length as u32) + 
ProofError::CheckpointNotFound(log_length) } //CoreServiceError::LeafNotFound(leaf) => ProofError::LeafNotFound(leaf), CoreServiceError::BundleFailure(e) => ProofError::BundleFailure(e.to_string()), @@ -63,7 +64,7 @@ async fn prove_consistency( ) -> Result, ProofApiError> { let bundle = config .core - .log_consistency_proof(body.from as usize, body.to as usize) + .log_consistency_proof(body.from as LogIndex, body.to as LogIndex) .await?; Ok(Json(ConsistencyResponse { @@ -76,12 +77,12 @@ async fn prove_inclusion( State(config): State, Json(body): Json, ) -> Result, ProofApiError> { - let log_length = body.log_length as usize; + let log_length = body.log_length as LogIndex; let leafs = body .leafs .into_iter() - .map(|index| index as usize) - .collect::>(); + .map(|index| index as LogIndex) + .collect::>(); let log_bundle = config.core.log_inclusion_proofs(log_length, &leafs).await?; diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index 7cd50247..3ab1a22d 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -11,12 +11,12 @@ use warg_crypto::{hash::AnyHash, Signable}; use warg_protocol::{ operator, package::{self, PackageEntry}, - registry::{LogId, LogLeaf, PackageId, RecordId, TimestampedCheckpoint}, + registry::{LogId, LogLeaf, LogIndex, PackageId, RecordId, TimestampedCheckpoint}, ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; struct Entry { - registry_index: u32, + registry_index: LogIndex, record_content: ProtoEnvelope, } @@ -39,9 +39,9 @@ where struct Record { /// Index in the log's entries. - index: usize, + index: LogIndex, /// Index in the registry's log. - registry_index: u32, + registry_index: LogIndex, } enum PendingRecord { @@ -78,7 +78,7 @@ struct State { package_ids: BTreeSet, checkpoints: IndexMap>, records: HashMap>, - log_leafs: HashMap, + log_leafs: HashMap, } /// Represents an in-memory data store. 
@@ -115,7 +115,7 @@ impl DataStore for MemoryDataStore { async fn get_all_validated_records( &self, ) -> Result< - Pin> + Send>>, + Pin> + Send>>, DataStoreError, > { Ok(Box::pin(futures::stream::empty())) @@ -123,13 +123,13 @@ impl DataStore for MemoryDataStore { async fn get_log_leafs_from_registry_index( &self, - entries: &[usize], + entries: &[LogIndex], ) -> Result, DataStoreError> { let state = self.0.read().await; let mut leafs = Vec::with_capacity(entries.len()); for entry in entries { - match state.log_leafs.get(&(*entry as u32)) { + match state.log_leafs.get(entry) { Some(log_leaf) => leafs.push(log_leaf.clone()), None => return Err(DataStoreError::LogLeafNotFound(*entry)), } @@ -188,7 +188,7 @@ impl DataStore for MemoryDataStore { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: u64, + registry_log_index: LogIndex, ) -> Result<(), DataStoreError> { let mut state = self.0.write().await; @@ -217,15 +217,15 @@ impl DataStore for MemoryDataStore { Ok(_) => { let index = log.entries.len(); log.entries.push(Entry { - registry_index: registry_log_index as u32, + registry_index: registry_log_index, record_content: record, }); *status = RecordStatus::Validated(Record { - index, - registry_index: registry_log_index as u32, + index: index as LogIndex, + registry_index: registry_log_index, }); log_leafs.insert( - registry_log_index as u32, + registry_log_index, LogLeaf { log_id: log_id.clone(), record_id: record_id.clone(), @@ -307,7 +307,7 @@ impl DataStore for MemoryDataStore { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: u64, + registry_log_index: LogIndex, ) -> Result<(), DataStoreError> { let mut state = self.0.write().await; @@ -336,15 +336,15 @@ impl DataStore for MemoryDataStore { Ok(_) => { let index = log.entries.len(); log.entries.push(Entry { - registry_index: registry_log_index as u32, + registry_index: registry_log_index, record_content: record, }); *status = RecordStatus::Validated(Record { - index, - registry_index: registry_log_index as u32, + index: index as LogIndex, + registry_index: registry_log_index, }); log_leafs.insert( - registry_log_index as u32, + registry_log_index, LogLeaf { log_id: log_id.clone(), record_id: record_id.clone(), @@ -479,7 +479,7 @@ impl DataStore for MemoryDataStore { Ok(log .entries .iter() - .skip(start_log_idx) + .skip(start_log_idx as usize) .take_while(|entry| entry.registry_index < end_registry_idx) .map(|entry| PublishedProtoEnvelope { envelope: entry.record_content.clone(), @@ -519,7 +519,7 @@ impl DataStore for MemoryDataStore { Ok(log .entries .iter() - .skip(start_log_idx) + .skip(start_log_idx as usize) .take_while(|entry| entry.registry_index < end_registry_idx) .map(|entry| PublishedProtoEnvelope { envelope: entry.record_content.clone(), @@ -569,7 +569,7 @@ impl DataStore for MemoryDataStore { } else { super::RecordStatus::Validated }, - log.entries[r.index].record_content.clone(), + log.entries[r.index as usize].record_content.clone(), Some(r.registry_index), ) } @@ -623,7 +623,7 @@ impl DataStore for MemoryDataStore { } else { super::RecordStatus::Validated }, - log.entries[r.index].record_content.clone(), + log.entries[r.index as usize].record_content.clone(), Some(r.registry_index), ) } diff --git a/crates/server/src/datastore/mod.rs b/crates/server/src/datastore/mod.rs index 2118dc56..7af6dd62 100644 --- a/crates/server/src/datastore/mod.rs +++ b/crates/server/src/datastore/mod.rs @@ -4,7 +4,7 @@ use thiserror::Error; use warg_crypto::{hash::AnyHash, signing::KeyID}; use warg_protocol::{ 
operator, package, - registry::{LogId, LogLeaf, PackageId, RecordId, TimestampedCheckpoint}, + registry::{LogId, LogLeaf, LogIndex, PackageId, RecordId, TimestampedCheckpoint}, ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; @@ -31,7 +31,7 @@ pub enum DataStoreError { RecordNotFound(RecordId), #[error("log leaf {0} was not found")] - LogLeafNotFound(usize), + LogLeafNotFound(LogIndex), #[error("record `{0}` cannot be validated as it is not in a pending state")] RecordNotPending(RecordId), @@ -93,7 +93,7 @@ where /// The index of the record in the registry log. /// /// This is `None` if the record is not published. - pub index: Option, + pub index: Option, } /// Implemented by data stores. @@ -114,15 +114,12 @@ pub trait DataStore: Send + Sync { /// This is an expensive operation and should only be performed on startup. async fn get_all_validated_records( &self, - ) -> Result< - Pin> + Send>>, - DataStoreError, - >; + ) -> Result> + Send>>, DataStoreError>; /// Looks up the log_id and record_id from the registry log index. async fn get_log_leafs_from_registry_index( &self, - entries: &[usize], + entries: &[LogIndex], ) -> Result, DataStoreError>; /// Stores the given operator record. @@ -152,7 +149,7 @@ pub trait DataStore: Send + Sync { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: u64, + registry_log_index: LogIndex, ) -> Result<(), DataStoreError>; /// Stores the given package record. @@ -187,7 +184,7 @@ pub trait DataStore: Send + Sync { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: u64, + registry_log_index: LogIndex, ) -> Result<(), DataStoreError>; /// Determines if the given content digest is missing for the record. diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index cb718894..58e77f85 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -21,7 +21,7 @@ use warg_crypto::{hash::AnyHash, Decode, Signable}; use warg_protocol::{ operator, package::{self, PackageEntry}, - registry::{Checkpoint, LogId, LogLeaf, PackageId, RecordId, TimestampedCheckpoint}, + registry::{Checkpoint, LogId, LogLeaf, LogIndex, PackageId, RecordId, TimestampedCheckpoint}, ProtoEnvelope, PublishedProtoEnvelope, Record as _, SerdeEnvelope, Validator, }; @@ -79,7 +79,7 @@ async fn get_records( |(record_id, c, index)| match ProtoEnvelope::from_protobuf(c) { Ok(envelope) => Ok(PublishedProtoEnvelope { envelope, - index: index.unwrap() as u32, + index: index.unwrap() as LogIndex, }), Err(e) => Err(DataStoreError::InvalidRecordContents { record_id: record_id.0.into(), @@ -204,7 +204,7 @@ async fn commit_record( conn: &mut AsyncPgConnection, log_id: i32, record_id: &RecordId, - registry_log_index: u64, + registry_log_index: LogIndex, ) -> Result<(), DataStoreError> where V: Validator + 'static, @@ -405,7 +405,7 @@ impl DataStore for PostgresDataStore { Ok(TimestampedCheckpoint { checkpoint: Checkpoint { log_root: checkpoint.log_root.0, - log_length: checkpoint.log_length as u32, + log_length: checkpoint.log_length as LogIndex, map_root: checkpoint.map_root.0, }, timestamp: checkpoint.timestamp.try_into().unwrap(), @@ -418,7 +418,7 @@ impl DataStore for PostgresDataStore { async fn get_all_validated_records( &self, ) -> Result< - Pin> + Send>>, + Pin> + Send>>, DataStoreError, > { // The returned future will keep the connection from the pool until dropped @@ -443,7 +443,7 @@ impl DataStore for PostgresDataStore { .map(|r| { r.map_err(Into::into).map(|(log_id, 
record_id, index)| { ( - index.unwrap() as usize, + index.unwrap() as LogIndex, LogLeaf { log_id: log_id.0.into(), record_id: record_id.0.into(), @@ -456,7 +456,7 @@ impl DataStore for PostgresDataStore { async fn get_log_leafs_from_registry_index( &self, - entries: &[usize], + entries: &[LogIndex], ) -> Result, DataStoreError> { let mut conn = self.pool.get().await?; @@ -477,14 +477,14 @@ impl DataStore for PostgresDataStore { .into_iter() .map(|(log_id, record_id, index)| { ( - index.unwrap() as u32, + index.unwrap() as LogIndex, LogLeaf { log_id: log_id.0.into(), record_id: record_id.0.into(), }, ) }) - .collect::>(); + .collect::>(); if leafs.len() < entries.len() { let mut input = entries.to_vec(); @@ -492,7 +492,7 @@ impl DataStore for PostgresDataStore { for (i, (index, _)) in leafs.iter().enumerate() { let input_entry = input.get(i).unwrap(); - if *index as usize != *input_entry { + if *index != *input_entry { return Err(DataStoreError::LogLeafNotFound(*input_entry)); } } @@ -500,7 +500,7 @@ impl DataStore for PostgresDataStore { Ok(leafs .into_iter() - .map(|(index, log_leaf)| log_leaf) + .map(|(_, log_leaf)| log_leaf) .collect::>()) } @@ -544,7 +544,7 @@ impl DataStore for PostgresDataStore { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: u64, + registry_log_index: LogIndex, ) -> Result<(), DataStoreError> { let mut conn = self.pool.get().await?; let log_id = schema::logs::table @@ -613,7 +613,7 @@ impl DataStore for PostgresDataStore { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: u64, + registry_log_index: LogIndex, ) -> Result<(), DataStoreError> { let mut conn = self.pool.get().await?; let log_id = schema::logs::table diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 996b413d..03d85cbc 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -17,7 +17,7 @@ use warg_crypto::{ }; use warg_protocol::{ operator, - registry::{Checkpoint, LogId, LogLeaf, MapLeaf, RecordId, TimestampedCheckpoint}, + registry::{Checkpoint, LogId, LogIndex, LogLeaf, MapLeaf, RecordId, TimestampedCheckpoint}, ProtoEnvelope, SerdeEnvelope, }; use warg_transparency::{ @@ -71,12 +71,12 @@ impl CoreService { /// Constructs a log consistency proof between the given log tree roots. pub async fn log_consistency_proof( &self, - from_log_length: usize, - to_log_length: usize, + from_log_length: LogIndex, + to_log_length: LogIndex, ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; - let proof = state.log.prove_consistency(from_log_length, to_log_length); + let proof = state.log.prove_consistency(from_log_length as usize, to_log_length as usize); LogProofBundle::bundle(vec![proof], vec![], &state.log) .map_err(CoreServiceError::BundleFailure) } @@ -84,8 +84,8 @@ impl CoreService { /// Constructs log inclusion proofs for the given entries at the given log tree root. pub async fn log_inclusion_proofs( &self, - log_length: usize, - entries: &[usize], + log_length: LogIndex, + entries: &[LogIndex], ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; @@ -96,7 +96,7 @@ impl CoreService { .leaf_index .get(index) .ok_or_else(|| CoreServiceError::LeafNotFound(*index))?; - Ok(state.log.prove_inclusion(*node, log_length)) + Ok(state.log.prove_inclusion(*node, log_length as usize)) }) .collect::, CoreServiceError>>()?; @@ -106,8 +106,8 @@ impl CoreService { /// Constructs map inclusion proofs for the given entries at the given map tree root. 
pub async fn map_inclusion_proofs( &self, - log_length: usize, - entries: &[usize], + log_length: LogIndex, + entries: &[LogIndex], ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; @@ -192,17 +192,17 @@ impl Inner { // Reconstruct internal state from previously-stored data let mut checkpoints = self.store.get_all_checkpoints().await?; - let mut checkpoints_by_len: HashMap = Default::default(); + let mut checkpoints_by_len: HashMap = Default::default(); while let Some(checkpoint) = checkpoints.next().await { let checkpoint = checkpoint?.checkpoint; - checkpoints_by_len.insert(checkpoint.log_length as usize, checkpoint); + checkpoints_by_len.insert(checkpoint.log_length, checkpoint); } let state = self.state.get_mut(); while let Some(entry) = published.next().await { let (index, log_leaf) = entry?; state.push_entry(index, log_leaf); - if let Some(stored_checkpoint) = checkpoints_by_len.get(&state.log.length()) { + if let Some(stored_checkpoint) = checkpoints_by_len.get(&(state.log.length() as LogIndex)) { // Validate stored checkpoint (and update internal state as a side-effect) let computed_checkpoint = state.checkpoint(); assert!(stored_checkpoint == &computed_checkpoint); @@ -312,7 +312,7 @@ impl Inner { return; } - state.push_entry(registry_log_index as usize, entry.clone()); + state.push_entry(registry_log_index as LogIndex, entry.clone()); } // Store a checkpoint including the given new entries @@ -320,7 +320,7 @@ impl Inner { { // Recalculate the checkpoint if necessary let mut state = self.state.write().await; - if state.log.length() != (checkpoint.log_length as usize) { + if state.log.length() as LogIndex != checkpoint.log_length { *checkpoint = state.checkpoint(); tracing::debug!("Updating to checkpoint {checkpoint:?}"); } @@ -347,24 +347,24 @@ struct State { // The verifiable log of all package log entries log: VecLog, // Index log tree nodes by registry log index of the record - leaf_index: HashMap, + leaf_index: HashMap, // Index log size by log tree root - root_index: HashMap, usize>, + root_index: HashMap, LogIndex>, // The verifiable map of package logs' latest entries (log_id -> record_id) map: VerifiableMap, // Index verifiable map snapshots by log length (at checkpoints only) - map_index: HashMap, VerifiableMap)>, + map_index: HashMap, VerifiableMap)>, } impl State { - fn push_entry(&mut self, registry_log_index: usize, entry: LogLeaf) { + fn push_entry(&mut self, registry_log_index: LogIndex, entry: LogLeaf) { let node = self.log.push(&entry); self.leaf_index.insert(registry_log_index, node); let log_checkpoint = self.log.checkpoint(); self.root_index - .insert(log_checkpoint.root(), log_checkpoint.length()); + .insert(log_checkpoint.root(), log_checkpoint.length() as LogIndex); self.map = self.map.insert( entry.log_id.clone(), @@ -377,7 +377,7 @@ impl State { fn checkpoint(&mut self) -> Checkpoint { let log_checkpoint = self.log.checkpoint(); let map_root = self.map.root(); - let log_length = log_checkpoint.length(); + let log_length = log_checkpoint.length() as LogIndex; // Update map snapshot if log_length > 0 { @@ -396,9 +396,9 @@ impl State { #[derive(Debug, Error)] pub enum CoreServiceError { #[error("checkpoint at log length `{0}` was not found")] - CheckpointNotFound(usize), + CheckpointNotFound(LogIndex), #[error("log leaf `{0}` was not found")] - LeafNotFound(usize), + LeafNotFound(LogIndex), #[error("failed to bundle proofs: `{0}`")] BundleFailure(anyhow::Error), #[error("failed to prove inclusion of package `{0}`")] From 
e3f6fe560feb674db3a70076b2a49a57a2dffaa7 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 23:01:42 -0500 Subject: [PATCH 07/26] renamed fields --- crates/api/src/v1/package.rs | 4 +- crates/api/src/v1/proof.rs | 16 +++---- crates/client/src/lib.rs | 8 ++-- crates/client/src/storage.rs | 8 ++-- crates/protocol/src/proto_envelope.rs | 12 +++--- crates/protocol/src/registry.rs | 4 +- crates/server/openapi.yaml | 4 +- crates/server/src/api/v1/package.rs | 4 +- crates/server/src/api/v1/proof.rs | 10 ++--- crates/server/src/datastore/memory.rs | 48 ++++++++++----------- crates/server/src/datastore/mod.rs | 16 +++---- crates/server/src/datastore/postgres/mod.rs | 34 +++++++-------- crates/server/src/services/core.rs | 46 ++++++++++---------- 13 files changed, 107 insertions(+), 107 deletions(-) diff --git a/crates/api/src/v1/package.rs b/crates/api/src/v1/package.rs index 5eebbbce..090d3ae7 100644 --- a/crates/api/src/v1/package.rs +++ b/crates/api/src/v1/package.rs @@ -6,7 +6,7 @@ use std::{borrow::Cow, collections::HashMap}; use thiserror::Error; use warg_crypto::hash::AnyHash; use warg_protocol::{ - registry::{LogId, PackageId, RecordId}, + registry::{LogId, PackageId, RecordId, RegistryIndex}, ProtoEnvelopeBody, }; @@ -108,7 +108,7 @@ pub enum PackageRecordState { /// The content sources of the record. content_sources: HashMap>, /// The published index of the record in the registry log. - index: u32, + registry_index: RegistryIndex, }, } diff --git a/crates/api/src/v1/proof.rs b/crates/api/src/v1/proof.rs index f9af6594..c66cfbef 100644 --- a/crates/api/src/v1/proof.rs +++ b/crates/api/src/v1/proof.rs @@ -6,16 +6,16 @@ use serde_with::{base64::Base64, serde_as}; use std::borrow::Cow; use thiserror::Error; use warg_crypto::hash::AnyHash; -use warg_protocol::registry::{LogId, LogIndex}; +use warg_protocol::registry::{LogId, RegistryIndex}; /// Represents a consistency proof request. #[derive(Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ConsistencyRequest { /// The starting log length to check for consistency. - pub from: LogIndex, + pub from: RegistryIndex, /// The ending log length to check for consistency. - pub to: LogIndex, + pub to: RegistryIndex, } /// Represents a consistency proof response. @@ -33,9 +33,9 @@ pub struct ConsistencyResponse { #[serde(rename_all = "camelCase")] pub struct InclusionRequest { /// The log length to check for inclusion. - pub log_length: LogIndex, + pub log_length: RegistryIndex, /// The log leaf indexes in the registry log to check for inclusion. - pub leafs: Vec, + pub leafs: Vec, } /// Represents an inclusion proof response. @@ -57,10 +57,10 @@ pub struct InclusionResponse { pub enum ProofError { /// The checkpoint could not be found for the provided log length. #[error("checkpoint not found for log length {0}")] - CheckpointNotFound(LogIndex), + CheckpointNotFound(RegistryIndex), /// The provided log leaf was not found. #[error("log leaf `{0}` exceeds the registry log length")] - LeafNotFound(LogIndex), + LeafNotFound(RegistryIndex), /// Failed to prove inclusion of a package. 
#[error("failed to prove inclusion of package log `{0}`")] PackageLogNotIncluded(LogId), @@ -128,7 +128,7 @@ enum RawError<'a> status: Status<404>, #[serde(rename = "type")] ty: EntityType, - id: LogIndex, + id: RegistryIndex, }, BundleError { status: Status<422>, diff --git a/crates/client/src/lib.rs b/crates/client/src/lib.rs index 8c4fd9ec..19c55a5f 100644 --- a/crates/client/src/lib.rs +++ b/crates/client/src/lib.rs @@ -409,7 +409,7 @@ impl Client { .state .validate(&record.envelope) .map_err(|inner| ClientError::OperatorValidationFailed { inner })?; - operator.latest_index = Some(record.index); + operator.head_registry_index = Some(record.registry_index); } for (log_id, records) in response.packages { @@ -426,7 +426,7 @@ impl Client { inner, } })?; - package.latest_index = Some(record.index); + package.head_registry_index = Some(record.registry_index); } // At this point, the package log should not be empty @@ -450,7 +450,7 @@ impl Client { // Prove inclusion for the current log heads let mut leaf_indices = Vec::with_capacity(packages.len() + 1 /* for operator */); let mut leafs = Vec::with_capacity(leaf_indices.len()); - if let Some(index) = operator.latest_index { + if let Some(index) = operator.head_registry_index { leaf_indices.push(index); leafs.push(LogLeaf { log_id: LogId::operator_log::(), @@ -459,7 +459,7 @@ impl Client { } for (log_id, package) in &packages { - if let Some(index) = package.latest_index { + if let Some(index) = package.head_registry_index { leaf_indices.push(index); leafs.push(LogLeaf { log_id: log_id.clone(), diff --git a/crates/client/src/storage.rs b/crates/client/src/storage.rs index b99048f0..3668891b 100644 --- a/crates/client/src/storage.rs +++ b/crates/client/src/storage.rs @@ -13,7 +13,7 @@ use warg_crypto::{ use warg_protocol::{ operator, package::{self, PackageRecord, PACKAGE_RECORD_VERSION}, - registry::{Checkpoint, LogIndex, PackageId, RecordId, TimestampedCheckpoint}, + registry::{Checkpoint, RegistryIndex, PackageId, RecordId, TimestampedCheckpoint}, ProtoEnvelope, SerdeEnvelope, Version, }; @@ -111,7 +111,7 @@ pub struct OperatorInfo { pub state: operator::LogState, /// The registry log index of the most recent record #[serde(default)] - pub latest_index: Option, + pub head_registry_index: Option, } /// Represents information about a registry package. @@ -128,7 +128,7 @@ pub struct PackageInfo { pub state: package::LogState, /// The registry log index of the most recent record #[serde(default)] - pub latest_index: Option, + pub head_registry_index: Option, } impl PackageInfo { @@ -138,7 +138,7 @@ impl PackageInfo { id: id.into(), checkpoint: None, state: package::LogState::default(), - latest_index: None, + head_registry_index: None, } } } diff --git a/crates/protocol/src/proto_envelope.rs b/crates/protocol/src/proto_envelope.rs index 7fc7908d..fc3bb6b4 100644 --- a/crates/protocol/src/proto_envelope.rs +++ b/crates/protocol/src/proto_envelope.rs @@ -7,7 +7,7 @@ use std::fmt; use thiserror::Error; use warg_crypto::{hash::AnyHashError, signing, Decode, Signable}; use warg_protobuf::protocol as protobuf; -use super::registry::LogIndex; +use super::registry::RegistryIndex; /// The ProtoEnvelope with the published registry log index. 
#[derive(Debug, Clone, PartialEq, Eq)] @@ -15,7 +15,7 @@ pub struct PublishedProtoEnvelope { /// The wrapped ProtoEnvelope pub envelope: ProtoEnvelope, /// The published registry log index for the record - pub index: LogIndex, + pub registry_index: RegistryIndex, } /// The envelope struct is used to keep around the original @@ -182,7 +182,7 @@ pub struct PublishedProtoEnvelopeBody { #[serde(flatten)] pub envelope: ProtoEnvelopeBody, /// The index of the published record in the registry log - pub index: LogIndex, + pub registry_index: RegistryIndex, } impl TryFrom for PublishedProtoEnvelope @@ -194,7 +194,7 @@ where fn try_from(value: PublishedProtoEnvelopeBody) -> Result { Ok(PublishedProtoEnvelope { envelope: ProtoEnvelope::::try_from(value.envelope)?, - index: value.index, + registry_index: value.registry_index, }) } } @@ -203,7 +203,7 @@ impl From> for PublishedProtoEnvelopeBo fn from(value: PublishedProtoEnvelope) -> Self { PublishedProtoEnvelopeBody { envelope: ProtoEnvelopeBody::from(value.envelope), - index: value.index, + registry_index: value.registry_index, } } } @@ -217,7 +217,7 @@ impl fmt::Debug for PublishedProtoEnvelopeBody { ) .field("key_id", &self.envelope.key_id) .field("signature", &self.envelope.signature) - .field("index", &self.index) + .field("registry_index", &self.registry_index) .finish() } } diff --git a/crates/protocol/src/registry.rs b/crates/protocol/src/registry.rs index 30de43d5..840433ca 100644 --- a/crates/protocol/src/registry.rs +++ b/crates/protocol/src/registry.rs @@ -10,13 +10,13 @@ use warg_crypto::{prefix, ByteVisitor, Signable, VisitBytes}; use wasmparser::names::KebabStr; /// Type alias for log index and log length -pub type LogIndex = usize; +pub type RegistryIndex = usize; #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Checkpoint { pub log_root: AnyHash, - pub log_length: LogIndex, + pub log_length: RegistryIndex, pub map_root: AnyHash, } diff --git a/crates/server/openapi.yaml b/crates/server/openapi.yaml index 1d208387..a34d2447 100644 --- a/crates/server/openapi.yaml +++ b/crates/server/openapi.yaml @@ -737,7 +737,7 @@ components: description: The state of the package record. enum: [published] example: published - index: + registryIndex: type: number description: The index of the record in the registry log. record: @@ -832,7 +832,7 @@ components: required: - index properties: - index: + registryIndex: type: integer description: The index of the published record in the registry log. 
example: 42 diff --git a/crates/server/src/api/v1/package.rs b/crates/server/src/api/v1/package.rs index 3bc592b2..0a469373 100644 --- a/crates/server/src/api/v1/package.rs +++ b/crates/server/src/api/v1/package.rs @@ -306,13 +306,13 @@ async fn get_record( }) .collect(); - let index = record.index.unwrap().try_into().unwrap(); + let registry_index = record.registry_index.unwrap().try_into().unwrap(); Ok(Json(PackageRecord { id: record_id, state: PackageRecordState::Published { record: record.envelope.into(), - index, + registry_index, content_sources, }, })) diff --git a/crates/server/src/api/v1/proof.rs b/crates/server/src/api/v1/proof.rs index 503edde6..0e9711a9 100644 --- a/crates/server/src/api/v1/proof.rs +++ b/crates/server/src/api/v1/proof.rs @@ -6,7 +6,7 @@ use axum::{ use warg_api::v1::proof::{ ConsistencyRequest, ConsistencyResponse, InclusionRequest, InclusionResponse, ProofError, }; -use warg_protocol::registry::LogIndex; +use warg_protocol::registry::RegistryIndex; #[derive(Clone)] pub struct Config { @@ -64,7 +64,7 @@ async fn prove_consistency( ) -> Result, ProofApiError> { let bundle = config .core - .log_consistency_proof(body.from as LogIndex, body.to as LogIndex) + .log_consistency_proof(body.from as RegistryIndex, body.to as RegistryIndex) .await?; Ok(Json(ConsistencyResponse { @@ -77,12 +77,12 @@ async fn prove_inclusion( State(config): State, Json(body): Json, ) -> Result, ProofApiError> { - let log_length = body.log_length as LogIndex; + let log_length = body.log_length as RegistryIndex; let leafs = body .leafs .into_iter() - .map(|index| index as LogIndex) - .collect::>(); + .map(|index| index as RegistryIndex) + .collect::>(); let log_bundle = config.core.log_inclusion_proofs(log_length, &leafs).await?; diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index 3ab1a22d..53d752df 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -11,12 +11,12 @@ use warg_crypto::{hash::AnyHash, Signable}; use warg_protocol::{ operator, package::{self, PackageEntry}, - registry::{LogId, LogLeaf, LogIndex, PackageId, RecordId, TimestampedCheckpoint}, + registry::{LogId, LogLeaf, RegistryIndex, PackageId, RecordId, TimestampedCheckpoint}, ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; struct Entry { - registry_index: LogIndex, + registry_index: RegistryIndex, record_content: ProtoEnvelope, } @@ -39,9 +39,9 @@ where struct Record { /// Index in the log's entries. - index: LogIndex, + index: usize, /// Index in the registry's log. - registry_index: LogIndex, + registry_index: RegistryIndex, } enum PendingRecord { @@ -78,7 +78,7 @@ struct State { package_ids: BTreeSet, checkpoints: IndexMap>, records: HashMap>, - log_leafs: HashMap, + log_leafs: HashMap, } /// Represents an in-memory data store. 
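As a rough illustration (not part of the patch): a published record body flattens the envelope fields and appends the registry index, so a single JSON object carries the envelope fields plus registryIndex, matching the openapi examples. A serde sketch with simplified String fields standing in for the real base64 and hash types:

use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct EnvelopeBodySketch {
    content_bytes: String, // base64-encoded bytes in the real API
    key_id: String,
    signature: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct PublishedBodySketch {
    #[serde(flatten)]
    envelope: EnvelopeBodySketch,
    registry_index: usize,
}

// Serializing a PublishedBodySketch with serde_json yields, for example:
// {"contentBytes":"ZXhhbXBsZQ==","keyId":"sha256:...","signature":"ecdsa-p256:...","registryIndex":101}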
@@ -115,15 +115,15 @@ impl DataStore for MemoryDataStore { async fn get_all_validated_records( &self, ) -> Result< - Pin> + Send>>, + Pin> + Send>>, DataStoreError, > { Ok(Box::pin(futures::stream::empty())) } - async fn get_log_leafs_from_registry_index( + async fn get_log_leafs_with_registry_index( &self, - entries: &[LogIndex], + entries: &[RegistryIndex], ) -> Result, DataStoreError> { let state = self.0.read().await; @@ -188,7 +188,7 @@ impl DataStore for MemoryDataStore { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: LogIndex, + registry_index: RegistryIndex, ) -> Result<(), DataStoreError> { let mut state = self.0.write().await; @@ -217,15 +217,15 @@ impl DataStore for MemoryDataStore { Ok(_) => { let index = log.entries.len(); log.entries.push(Entry { - registry_index: registry_log_index, + registry_index, record_content: record, }); *status = RecordStatus::Validated(Record { - index: index as LogIndex, - registry_index: registry_log_index, + index: index, + registry_index, }); log_leafs.insert( - registry_log_index, + registry_index, LogLeaf { log_id: log_id.clone(), record_id: record_id.clone(), @@ -307,7 +307,7 @@ impl DataStore for MemoryDataStore { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: LogIndex, + registry_index: RegistryIndex, ) -> Result<(), DataStoreError> { let mut state = self.0.write().await; @@ -336,15 +336,15 @@ impl DataStore for MemoryDataStore { Ok(_) => { let index = log.entries.len(); log.entries.push(Entry { - registry_index: registry_log_index, + registry_index, record_content: record, }); *status = RecordStatus::Validated(Record { - index: index as LogIndex, - registry_index: registry_log_index, + index: index, + registry_index, }); log_leafs.insert( - registry_log_index, + registry_index, LogLeaf { log_id: log_id.clone(), record_id: record_id.clone(), @@ -483,7 +483,7 @@ impl DataStore for MemoryDataStore { .take_while(|entry| entry.registry_index < end_registry_idx) .map(|entry| PublishedProtoEnvelope { envelope: entry.record_content.clone(), - index: entry.registry_index, + registry_index: entry.registry_index, }) .take(limit as usize) .collect()) @@ -523,7 +523,7 @@ impl DataStore for MemoryDataStore { .take_while(|entry| entry.registry_index < end_registry_idx) .map(|entry| PublishedProtoEnvelope { envelope: entry.record_content.clone(), - index: entry.registry_index, + registry_index: entry.registry_index, }) .take(limit as usize) .collect()) @@ -542,7 +542,7 @@ impl DataStore for MemoryDataStore { .get(record_id) .ok_or_else(|| DataStoreError::RecordNotFound(record_id.clone()))?; - let (status, envelope, index) = match status { + let (status, envelope, registry_index) = match status { RecordStatus::Pending(PendingRecord::Operator { record, .. }) => { (super::RecordStatus::Pending, record.clone().unwrap(), None) } @@ -579,7 +579,7 @@ impl DataStore for MemoryDataStore { Ok(super::Record { status, envelope, - index, + registry_index, }) } @@ -596,7 +596,7 @@ impl DataStore for MemoryDataStore { .get(record_id) .ok_or_else(|| DataStoreError::RecordNotFound(record_id.clone()))?; - let (status, envelope, index) = match status { + let (status, envelope, registry_index) = match status { RecordStatus::Pending(PendingRecord::Package { record, .. 
}) => { (super::RecordStatus::Pending, record.clone().unwrap(), None) } @@ -633,7 +633,7 @@ impl DataStore for MemoryDataStore { Ok(super::Record { status, envelope, - index, + registry_index, }) } diff --git a/crates/server/src/datastore/mod.rs b/crates/server/src/datastore/mod.rs index 7af6dd62..12264730 100644 --- a/crates/server/src/datastore/mod.rs +++ b/crates/server/src/datastore/mod.rs @@ -4,7 +4,7 @@ use thiserror::Error; use warg_crypto::{hash::AnyHash, signing::KeyID}; use warg_protocol::{ operator, package, - registry::{LogId, LogLeaf, LogIndex, PackageId, RecordId, TimestampedCheckpoint}, + registry::{LogId, LogLeaf, RegistryIndex, PackageId, RecordId, TimestampedCheckpoint}, ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; @@ -31,7 +31,7 @@ pub enum DataStoreError { RecordNotFound(RecordId), #[error("log leaf {0} was not found")] - LogLeafNotFound(LogIndex), + LogLeafNotFound(RegistryIndex), #[error("record `{0}` cannot be validated as it is not in a pending state")] RecordNotPending(RecordId), @@ -93,7 +93,7 @@ where /// The index of the record in the registry log. /// /// This is `None` if the record is not published. - pub index: Option, + pub registry_index: Option, } /// Implemented by data stores. @@ -114,12 +114,12 @@ pub trait DataStore: Send + Sync { /// This is an expensive operation and should only be performed on startup. async fn get_all_validated_records( &self, - ) -> Result> + Send>>, DataStoreError>; + ) -> Result> + Send>>, DataStoreError>; /// Looks up the log_id and record_id from the registry log index. - async fn get_log_leafs_from_registry_index( + async fn get_log_leafs_with_registry_index( &self, - entries: &[LogIndex], + entries: &[RegistryIndex], ) -> Result, DataStoreError>; /// Stores the given operator record. @@ -149,7 +149,7 @@ pub trait DataStore: Send + Sync { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: LogIndex, + registry_index: RegistryIndex, ) -> Result<(), DataStoreError>; /// Stores the given package record. @@ -184,7 +184,7 @@ pub trait DataStore: Send + Sync { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: LogIndex, + registry_index: RegistryIndex, ) -> Result<(), DataStoreError>; /// Determines if the given content digest is missing for the record. 
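A minimal sketch of the lookup contract implied by the renamed get_log_leafs_with_registry_index method (simplified types; the real implementations are the memory and Postgres stores in this patch): given requested registry indexes, return the matching leafs in the same order and fail on the first index with no published record.

use std::collections::HashMap;

#[derive(Clone, Debug)]
struct ToyLeaf {
    log_id: String,
    record_id: String,
}

/// Returns the leaf for each requested registry index, in request order,
/// or the first index that has no published record.
fn leafs_for_indexes(
    published: &HashMap<usize, ToyLeaf>,
    entries: &[usize],
) -> Result<Vec<ToyLeaf>, usize> {
    entries
        .iter()
        .map(|index| published.get(index).cloned().ok_or(*index))
        .collect()
}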
diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index 58e77f85..d06ecdee 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -21,7 +21,7 @@ use warg_crypto::{hash::AnyHash, Decode, Signable}; use warg_protocol::{ operator, package::{self, PackageEntry}, - registry::{Checkpoint, LogId, LogLeaf, LogIndex, PackageId, RecordId, TimestampedCheckpoint}, + registry::{Checkpoint, LogId, LogLeaf, RegistryIndex, PackageId, RecordId, TimestampedCheckpoint}, ProtoEnvelope, PublishedProtoEnvelope, Record as _, SerdeEnvelope, Validator, }; @@ -79,7 +79,7 @@ async fn get_records( |(record_id, c, index)| match ProtoEnvelope::from_protobuf(c) { Ok(envelope) => Ok(PublishedProtoEnvelope { envelope, - index: index.unwrap() as LogIndex, + registry_index: index.unwrap() as RegistryIndex, }), Err(e) => Err(DataStoreError::InvalidRecordContents { record_id: record_id.0.into(), @@ -204,14 +204,14 @@ async fn commit_record( conn: &mut AsyncPgConnection, log_id: i32, record_id: &RecordId, - registry_log_index: LogIndex, + registry_index: RegistryIndex, ) -> Result<(), DataStoreError> where V: Validator + 'static, ::Error: ToString + Send + Sync, DataStoreError: From<::Error>, { - let registry_log_index: i64 = registry_log_index.try_into().unwrap(); + let registry_index: i64 = registry_index.try_into().unwrap(); conn.transaction::<_, DataStoreError, _>(|conn| { async move { // Get the record content and validator @@ -256,7 +256,7 @@ where .filter(schema::records::id.eq(id)) .set(( schema::records::status.eq(RecordStatus::Validated), - schema::records::registry_log_index.eq(Some(registry_log_index)), + schema::records::registry_log_index.eq(Some(registry_index)), )) .execute(conn) .await?; @@ -341,7 +341,7 @@ where message: e.to_string(), } })?, - index: record.registry_log_index.map(|idx| idx.try_into().unwrap()), + registry_index: record.registry_log_index.map(|idx| idx.try_into().unwrap()), }) } @@ -405,7 +405,7 @@ impl DataStore for PostgresDataStore { Ok(TimestampedCheckpoint { checkpoint: Checkpoint { log_root: checkpoint.log_root.0, - log_length: checkpoint.log_length as LogIndex, + log_length: checkpoint.log_length as RegistryIndex, map_root: checkpoint.map_root.0, }, timestamp: checkpoint.timestamp.try_into().unwrap(), @@ -418,7 +418,7 @@ impl DataStore for PostgresDataStore { async fn get_all_validated_records( &self, ) -> Result< - Pin> + Send>>, + Pin> + Send>>, DataStoreError, > { // The returned future will keep the connection from the pool until dropped @@ -443,7 +443,7 @@ impl DataStore for PostgresDataStore { .map(|r| { r.map_err(Into::into).map(|(log_id, record_id, index)| { ( - index.unwrap() as LogIndex, + index.unwrap() as RegistryIndex, LogLeaf { log_id: log_id.0.into(), record_id: record_id.0.into(), @@ -454,9 +454,9 @@ impl DataStore for PostgresDataStore { )) } - async fn get_log_leafs_from_registry_index( + async fn get_log_leafs_with_registry_index( &self, - entries: &[LogIndex], + entries: &[RegistryIndex], ) -> Result, DataStoreError> { let mut conn = self.pool.get().await?; @@ -477,14 +477,14 @@ impl DataStore for PostgresDataStore { .into_iter() .map(|(log_id, record_id, index)| { ( - index.unwrap() as LogIndex, + index.unwrap() as RegistryIndex, LogLeaf { log_id: log_id.0.into(), record_id: record_id.0.into(), }, ) }) - .collect::>(); + .collect::>(); if leafs.len() < entries.len() { let mut input = entries.to_vec(); @@ -544,7 +544,7 @@ impl DataStore for PostgresDataStore { 
&self, log_id: &LogId, record_id: &RecordId, - registry_log_index: LogIndex, + registry_index: RegistryIndex, ) -> Result<(), DataStoreError> { let mut conn = self.pool.get().await?; let log_id = schema::logs::table @@ -559,7 +559,7 @@ impl DataStore for PostgresDataStore { conn.as_mut(), log_id, record_id, - registry_log_index, + registry_index, ) .await { @@ -613,7 +613,7 @@ impl DataStore for PostgresDataStore { &self, log_id: &LogId, record_id: &RecordId, - registry_log_index: LogIndex, + registry_index: RegistryIndex, ) -> Result<(), DataStoreError> { let mut conn = self.pool.get().await?; let log_id = schema::logs::table @@ -628,7 +628,7 @@ impl DataStore for PostgresDataStore { conn.as_mut(), log_id, record_id, - registry_log_index, + registry_index, ) .await { diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 03d85cbc..08c8b6a4 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -17,7 +17,7 @@ use warg_crypto::{ }; use warg_protocol::{ operator, - registry::{Checkpoint, LogId, LogIndex, LogLeaf, MapLeaf, RecordId, TimestampedCheckpoint}, + registry::{Checkpoint, LogId, RegistryIndex, LogLeaf, MapLeaf, RecordId, TimestampedCheckpoint}, ProtoEnvelope, SerdeEnvelope, }; use warg_transparency::{ @@ -71,8 +71,8 @@ impl CoreService { /// Constructs a log consistency proof between the given log tree roots. pub async fn log_consistency_proof( &self, - from_log_length: LogIndex, - to_log_length: LogIndex, + from_log_length: RegistryIndex, + to_log_length: RegistryIndex, ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; @@ -84,8 +84,8 @@ impl CoreService { /// Constructs log inclusion proofs for the given entries at the given log tree root. pub async fn log_inclusion_proofs( &self, - log_length: LogIndex, - entries: &[LogIndex], + log_length: RegistryIndex, + entries: &[RegistryIndex], ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; @@ -106,8 +106,8 @@ impl CoreService { /// Constructs map inclusion proofs for the given entries at the given map tree root. 
pub async fn map_inclusion_proofs( &self, - log_length: LogIndex, - entries: &[LogIndex], + log_length: RegistryIndex, + entries: &[RegistryIndex], ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; @@ -119,7 +119,7 @@ impl CoreService { let indexes = self .inner .store - .get_log_leafs_from_registry_index(entries) + .get_log_leafs_with_registry_index(entries) .await .map_err(CoreServiceError::DataStore)?; @@ -192,7 +192,7 @@ impl Inner { // Reconstruct internal state from previously-stored data let mut checkpoints = self.store.get_all_checkpoints().await?; - let mut checkpoints_by_len: HashMap = Default::default(); + let mut checkpoints_by_len: HashMap = Default::default(); while let Some(checkpoint) = checkpoints.next().await { let checkpoint = checkpoint?.checkpoint; checkpoints_by_len.insert(checkpoint.log_length, checkpoint); @@ -202,7 +202,7 @@ impl Inner { while let Some(entry) = published.next().await { let (index, log_leaf) = entry?; state.push_entry(index, log_leaf); - if let Some(stored_checkpoint) = checkpoints_by_len.get(&(state.log.length() as LogIndex)) { + if let Some(stored_checkpoint) = checkpoints_by_len.get(&(state.log.length() as RegistryIndex)) { // Validate stored checkpoint (and update internal state as a side-effect) let computed_checkpoint = state.checkpoint(); assert!(stored_checkpoint == &computed_checkpoint); @@ -289,10 +289,10 @@ impl Inner { let LogLeaf { log_id, record_id } = entry; // Validate and commit the package entry to the store - let registry_log_index = state.log.length().try_into().unwrap(); + let registry_index = state.log.length().try_into().unwrap(); let commit_res = self .store - .commit_package_record(log_id, record_id, registry_log_index) + .commit_package_record(log_id, record_id, registry_index) .await; if let Err(err) = commit_res { @@ -312,7 +312,7 @@ impl Inner { return; } - state.push_entry(registry_log_index as LogIndex, entry.clone()); + state.push_entry(registry_index as RegistryIndex, entry.clone()); } // Store a checkpoint including the given new entries @@ -320,7 +320,7 @@ impl Inner { { // Recalculate the checkpoint if necessary let mut state = self.state.write().await; - if state.log.length() as LogIndex != checkpoint.log_length { + if state.log.length() as RegistryIndex != checkpoint.log_length { *checkpoint = state.checkpoint(); tracing::debug!("Updating to checkpoint {checkpoint:?}"); } @@ -347,24 +347,24 @@ struct State { // The verifiable log of all package log entries log: VecLog, // Index log tree nodes by registry log index of the record - leaf_index: HashMap, + leaf_index: HashMap, // Index log size by log tree root - root_index: HashMap, LogIndex>, + root_index: HashMap, RegistryIndex>, // The verifiable map of package logs' latest entries (log_id -> record_id) map: VerifiableMap, // Index verifiable map snapshots by log length (at checkpoints only) - map_index: HashMap, VerifiableMap)>, + map_index: HashMap, VerifiableMap)>, } impl State { - fn push_entry(&mut self, registry_log_index: LogIndex, entry: LogLeaf) { + fn push_entry(&mut self, registry_index: RegistryIndex, entry: LogLeaf) { let node = self.log.push(&entry); - self.leaf_index.insert(registry_log_index, node); + self.leaf_index.insert(registry_index, node); let log_checkpoint = self.log.checkpoint(); self.root_index - .insert(log_checkpoint.root(), log_checkpoint.length() as LogIndex); + .insert(log_checkpoint.root(), log_checkpoint.length() as RegistryIndex); self.map = self.map.insert( entry.log_id.clone(), @@ -377,7 +377,7 @@ 
impl State { fn checkpoint(&mut self) -> Checkpoint { let log_checkpoint = self.log.checkpoint(); let map_root = self.map.root(); - let log_length = log_checkpoint.length() as LogIndex; + let log_length = log_checkpoint.length() as RegistryIndex; // Update map snapshot if log_length > 0 { @@ -396,9 +396,9 @@ impl State { #[derive(Debug, Error)] pub enum CoreServiceError { #[error("checkpoint at log length `{0}` was not found")] - CheckpointNotFound(LogIndex), + CheckpointNotFound(RegistryIndex), #[error("log leaf `{0}` was not found")] - LeafNotFound(LogIndex), + LeafNotFound(RegistryIndex), #[error("failed to bundle proofs: `{0}`")] BundleFailure(anyhow::Error), #[error("failed to prove inclusion of package `{0}`")] From 2c80fea030a6f642e6022719b9c940d17e508d37 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 23:25:51 -0500 Subject: [PATCH 08/26] fixed debug endpoints --- crates/server/src/api/debug/mod.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/server/src/api/debug/mod.rs b/crates/server/src/api/debug/mod.rs index 035a82f9..36b2c073 100644 --- a/crates/server/src/api/debug/mod.rs +++ b/crates/server/src/api/debug/mod.rs @@ -105,15 +105,17 @@ async fn get_package_info( let records = records .into_iter() .map(|record| { - package_state.validate(&record).context("validate")?; - let record_id = RecordId::package_record::(&record); + package_state.validate(&record.envelope).context("validate")?; + let record_id = RecordId::package_record::(&record.envelope); let timestamp = record + .envelope .as_ref() .timestamp .duration_since(SystemTime::UNIX_EPOCH) .context("duration_since")? .as_secs(); let entries = record + .envelope .as_ref() .entries .iter() From bbfe6cc4bb730f99d24f64562927e5f92317a62f Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 23:27:20 -0500 Subject: [PATCH 09/26] cargo fmt --- crates/api/src/v1/proof.rs | 7 ++----- crates/client/src/api.rs | 13 ++++++------ crates/client/src/lib.rs | 12 +++++++---- crates/client/src/storage.rs | 2 +- crates/protocol/src/proto_envelope.rs | 2 +- crates/server/src/api/debug/mod.rs | 4 +++- crates/server/src/datastore/memory.rs | 2 +- crates/server/src/datastore/mod.rs | 7 +++++-- crates/server/src/datastore/postgres/mod.rs | 22 +++++++-------------- crates/server/src/services/core.rs | 18 ++++++++++++----- 10 files changed, 48 insertions(+), 41 deletions(-) diff --git a/crates/api/src/v1/proof.rs b/crates/api/src/v1/proof.rs index c66cfbef..f32d2ce4 100644 --- a/crates/api/src/v1/proof.rs +++ b/crates/api/src/v1/proof.rs @@ -122,8 +122,7 @@ enum BundleError<'a> { #[derive(Serialize, Deserialize)] #[serde(untagged, rename_all = "camelCase")] -enum RawError<'a> -{ +enum RawError<'a> { NotFound { status: Status<404>, #[serde(rename = "type")] @@ -194,9 +193,7 @@ impl<'de> Deserialize<'de> for ProofError { { match RawError::deserialize(deserializer)? { RawError::NotFound { status: _, ty, id } => match ty { - EntityType::LogLength => { - Ok(Self::CheckpointNotFound(id)) - } + EntityType::LogLength => Ok(Self::CheckpointNotFound(id)), EntityType::Leaf => Ok(Self::LeafNotFound(id)), }, RawError::BundleError { status: _, error } => match error { diff --git a/crates/client/src/api.rs b/crates/client/src/api.rs index 6dc78fab..7f379590 100644 --- a/crates/client/src/api.rs +++ b/crates/client/src/api.rs @@ -237,7 +237,12 @@ impl Client { } /// Proves the inclusion of the given package log heads in the registry. 
- pub async fn prove_inclusion(&self, request: InclusionRequest, checkpoint: &Checkpoint, leafs: &[LogLeaf]) -> Result<(), ClientError> { + pub async fn prove_inclusion( + &self, + request: InclusionRequest, + checkpoint: &Checkpoint, + leafs: &[LogLeaf], + ) -> Result<(), ClientError> { let url = self.url.join(paths::prove_inclusion()); tracing::debug!("proving checkpoint inclusion at `{url}`"); @@ -246,11 +251,7 @@ impl Client { ) .await?; - Self::validate_inclusion_response( - response, - checkpoint, - leafs, - ) + Self::validate_inclusion_response(response, checkpoint, leafs) } /// Proves consistency between two log roots. diff --git a/crates/client/src/lib.rs b/crates/client/src/lib.rs index 19c55a5f..456c3443 100644 --- a/crates/client/src/lib.rs +++ b/crates/client/src/lib.rs @@ -470,10 +470,14 @@ impl Client { if !leafs.is_empty() { self.api - .prove_inclusion(InclusionRequest { - log_length: checkpoint.log_length, - leafs: leaf_indices, - }, &checkpoint, &leafs) + .prove_inclusion( + InclusionRequest { + log_length: checkpoint.log_length, + leafs: leaf_indices, + }, + &checkpoint, + &leafs, + ) .await?; } diff --git a/crates/client/src/storage.rs b/crates/client/src/storage.rs index 3668891b..c1260079 100644 --- a/crates/client/src/storage.rs +++ b/crates/client/src/storage.rs @@ -13,7 +13,7 @@ use warg_crypto::{ use warg_protocol::{ operator, package::{self, PackageRecord, PACKAGE_RECORD_VERSION}, - registry::{Checkpoint, RegistryIndex, PackageId, RecordId, TimestampedCheckpoint}, + registry::{Checkpoint, PackageId, RecordId, RegistryIndex, TimestampedCheckpoint}, ProtoEnvelope, SerdeEnvelope, Version, }; diff --git a/crates/protocol/src/proto_envelope.rs b/crates/protocol/src/proto_envelope.rs index fc3bb6b4..1856aa99 100644 --- a/crates/protocol/src/proto_envelope.rs +++ b/crates/protocol/src/proto_envelope.rs @@ -1,3 +1,4 @@ +use super::registry::RegistryIndex; use anyhow::Error; use base64::{engine::general_purpose::STANDARD, Engine}; use prost::Message; @@ -7,7 +8,6 @@ use std::fmt; use thiserror::Error; use warg_crypto::{hash::AnyHashError, signing, Decode, Signable}; use warg_protobuf::protocol as protobuf; -use super::registry::RegistryIndex; /// The ProtoEnvelope with the published registry log index. 
#[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/server/src/api/debug/mod.rs b/crates/server/src/api/debug/mod.rs index 36b2c073..f13146fe 100644 --- a/crates/server/src/api/debug/mod.rs +++ b/crates/server/src/api/debug/mod.rs @@ -105,7 +105,9 @@ async fn get_package_info( let records = records .into_iter() .map(|record| { - package_state.validate(&record.envelope).context("validate")?; + package_state + .validate(&record.envelope) + .context("validate")?; let record_id = RecordId::package_record::(&record.envelope); let timestamp = record .envelope diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index 53d752df..c306723a 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -11,7 +11,7 @@ use warg_crypto::{hash::AnyHash, Signable}; use warg_protocol::{ operator, package::{self, PackageEntry}, - registry::{LogId, LogLeaf, RegistryIndex, PackageId, RecordId, TimestampedCheckpoint}, + registry::{LogId, LogLeaf, PackageId, RecordId, RegistryIndex, TimestampedCheckpoint}, ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; diff --git a/crates/server/src/datastore/mod.rs b/crates/server/src/datastore/mod.rs index 12264730..6a6c7007 100644 --- a/crates/server/src/datastore/mod.rs +++ b/crates/server/src/datastore/mod.rs @@ -4,7 +4,7 @@ use thiserror::Error; use warg_crypto::{hash::AnyHash, signing::KeyID}; use warg_protocol::{ operator, package, - registry::{LogId, LogLeaf, RegistryIndex, PackageId, RecordId, TimestampedCheckpoint}, + registry::{LogId, LogLeaf, PackageId, RecordId, RegistryIndex, TimestampedCheckpoint}, ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; @@ -114,7 +114,10 @@ pub trait DataStore: Send + Sync { /// This is an expensive operation and should only be performed on startup. async fn get_all_validated_records( &self, - ) -> Result> + Send>>, DataStoreError>; + ) -> Result< + Pin> + Send>>, + DataStoreError, + >; /// Looks up the log_id and record_id from the registry log index. async fn get_log_leafs_with_registry_index( diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index d06ecdee..2289132e 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -21,7 +21,9 @@ use warg_crypto::{hash::AnyHash, Decode, Signable}; use warg_protocol::{ operator, package::{self, PackageEntry}, - registry::{Checkpoint, LogId, LogLeaf, RegistryIndex, PackageId, RecordId, TimestampedCheckpoint}, + registry::{ + Checkpoint, LogId, LogLeaf, PackageId, RecordId, RegistryIndex, TimestampedCheckpoint, + }, ProtoEnvelope, PublishedProtoEnvelope, Record as _, SerdeEnvelope, Validator, }; @@ -555,13 +557,8 @@ impl DataStore for PostgresDataStore { .optional()? .ok_or_else(|| DataStoreError::LogNotFound(log_id.clone()))?; - match commit_record::( - conn.as_mut(), - log_id, - record_id, - registry_index, - ) - .await + match commit_record::(conn.as_mut(), log_id, record_id, registry_index) + .await { Ok(()) => Ok(()), Err(e) => { @@ -624,13 +621,8 @@ impl DataStore for PostgresDataStore { .optional()? 
.ok_or_else(|| DataStoreError::LogNotFound(log_id.clone()))?; - match commit_record::( - conn.as_mut(), - log_id, - record_id, - registry_index, - ) - .await + match commit_record::(conn.as_mut(), log_id, record_id, registry_index) + .await { Ok(()) => Ok(()), Err(e) => { diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 08c8b6a4..13ab575a 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -17,7 +17,9 @@ use warg_crypto::{ }; use warg_protocol::{ operator, - registry::{Checkpoint, LogId, RegistryIndex, LogLeaf, MapLeaf, RecordId, TimestampedCheckpoint}, + registry::{ + Checkpoint, LogId, LogLeaf, MapLeaf, RecordId, RegistryIndex, TimestampedCheckpoint, + }, ProtoEnvelope, SerdeEnvelope, }; use warg_transparency::{ @@ -76,7 +78,9 @@ impl CoreService { ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; - let proof = state.log.prove_consistency(from_log_length as usize, to_log_length as usize); + let proof = state + .log + .prove_consistency(from_log_length as usize, to_log_length as usize); LogProofBundle::bundle(vec![proof], vec![], &state.log) .map_err(CoreServiceError::BundleFailure) } @@ -202,7 +206,9 @@ impl Inner { while let Some(entry) = published.next().await { let (index, log_leaf) = entry?; state.push_entry(index, log_leaf); - if let Some(stored_checkpoint) = checkpoints_by_len.get(&(state.log.length() as RegistryIndex)) { + if let Some(stored_checkpoint) = + checkpoints_by_len.get(&(state.log.length() as RegistryIndex)) + { // Validate stored checkpoint (and update internal state as a side-effect) let computed_checkpoint = state.checkpoint(); assert!(stored_checkpoint == &computed_checkpoint); @@ -363,8 +369,10 @@ impl State { self.leaf_index.insert(registry_index, node); let log_checkpoint = self.log.checkpoint(); - self.root_index - .insert(log_checkpoint.root(), log_checkpoint.length() as RegistryIndex); + self.root_index.insert( + log_checkpoint.root(), + log_checkpoint.length() as RegistryIndex, + ); self.map = self.map.insert( entry.log_id.clone(), From fd7a13287f4e553d52a6c31f600014941e835f55 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 23:31:16 -0500 Subject: [PATCH 10/26] fixed required field in openapi.yaml --- crates/server/openapi.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/server/openapi.yaml b/crates/server/openapi.yaml index a34d2447..f2067185 100644 --- a/crates/server/openapi.yaml +++ b/crates/server/openapi.yaml @@ -651,7 +651,7 @@ components: description: A request to prove the inclusion of log leafs in a checkpoint. 
additionalProperties: false required: - - checkpoint + - logLength - leafs properties: logLength: From 8ba0ea8b8b3d22ad95fc97c2c7b41a8cf6109aa7 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 23:34:01 -0500 Subject: [PATCH 11/26] fix openapi.yaml --- crates/server/openapi.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/server/openapi.yaml b/crates/server/openapi.yaml index f2067185..ddd3d614 100644 --- a/crates/server/openapi.yaml +++ b/crates/server/openapi.yaml @@ -559,16 +559,16 @@ components: : - contentBytes: "ZXhhbXBsZQ==" keyId: "sha256:7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730" signature: "ecdsa-p256:MEUCIQCzWZBW6ux9LecP66Y+hjmLZTP/hZVz7puzlPTXcRT2wwIgQZO7nxP0nugtw18MwHZ26ROFWcJmgCtKOguK031Y1D0=" - index: 101 + registryIndex: 101 - contentBytes: "ZXhhbXBsZQ==" keyId: "sha256:7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730" signature: "ecdsa-p256:MEUCIQCzWZBW6ux9LecP66Y+hjmLZTP/hZVz7puzlPTXcRT2wwIgQZO7nxP0nugtw18MwHZ26ROFWcJmgCtKOguK031Y1D0=" - index: 305 + registryIndex: 305 ? "sha256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c" : - contentBytes: "ZXhhbXBsZQ==" keyId: "sha256:7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730" signature: "ecdsa-p256:MEUCIQCzWZBW6ux9LecP66Y+hjmLZTP/hZVz7puzlPTXcRT2wwIgQZO7nxP0nugtw18MwHZ26ROFWcJmgCtKOguK031Y1D0=" - index: 732 + registryIndex: 732 PublishPackageRecordRequest: type: object description: A request to publish a record to a package log. @@ -830,7 +830,7 @@ components: allOf: - type: object required: - - index + - registryIndex properties: registryIndex: type: integer From 5d723627b52bffd16913badb56de5791b5c214eb Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 23:37:18 -0500 Subject: [PATCH 12/26] more fixes for openapi.yaml --- crates/server/openapi.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/server/openapi.yaml b/crates/server/openapi.yaml index ddd3d614..b22a0a93 100644 --- a/crates/server/openapi.yaml +++ b/crates/server/openapi.yaml @@ -729,7 +729,7 @@ components: description: A record that has been published to the log. required: - state - - checkpoint + - registryIndex - record properties: state: @@ -738,7 +738,7 @@ components: enum: [published] example: published registryIndex: - type: number + type: integer description: The index of the record in the registry log. 
record: "$ref": "#/components/schemas/EnvelopeBody" From 9f1e6aec5985dfed56a7431849d8ade7ed8894c5 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Thu, 24 Aug 2023 23:59:27 -0500 Subject: [PATCH 13/26] fixing type in postgres tests --- tests/postgres/mod.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/postgres/mod.rs b/tests/postgres/mod.rs index 0d8d29a1..9b65e62d 100644 --- a/tests/postgres/mod.rs +++ b/tests/postgres/mod.rs @@ -5,6 +5,7 @@ use anyhow::{Context, Result}; use testresult::TestResult; use warg_client::api; use warg_server::datastore::{DataStore, PostgresDataStore}; +use warg_protocol::registry::RegistryIndex; fn data_store() -> Result> { Ok(Box::new(PostgresDataStore::new( @@ -65,7 +66,7 @@ async fn it_works_with_postgres() -> TestResult { let ts_checkpoint = client.latest_checkpoint().await?; assert_eq!( ts_checkpoint.as_ref().checkpoint.log_length, - packages.len() as u32 + 2, /* publishes + initial checkpoint + yank */ + packages.len() as RegistryIndex + 2, /* publishes + initial checkpoint + yank */ "expected {len} packages plus the initial checkpoint and yank", len = packages.len() ); @@ -83,7 +84,7 @@ async fn it_works_with_postgres() -> TestResult { let ts_checkpoint = client.latest_checkpoint().await?; assert_eq!( ts_checkpoint.as_ref().checkpoint.log_length, - packages.len() as u32 + 2, /* publishes + initial checkpoint + yank*/ + packages.len() as RegistryIndex + 2, /* publishes + initial checkpoint + yank*/ "expected {len} packages plus the initial checkpoint and yank", len = packages.len() ); From 9d8bb339a727eb00ca8731e3920d29c3af546849 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Fri, 25 Aug 2023 00:02:39 -0500 Subject: [PATCH 14/26] fixed fmt again --- tests/postgres/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/postgres/mod.rs b/tests/postgres/mod.rs index 9b65e62d..a71da6e9 100644 --- a/tests/postgres/mod.rs +++ b/tests/postgres/mod.rs @@ -4,8 +4,8 @@ use super::{support::*, *}; use anyhow::{Context, Result}; use testresult::TestResult; use warg_client::api; -use warg_server::datastore::{DataStore, PostgresDataStore}; use warg_protocol::registry::RegistryIndex; +use warg_server::datastore::{DataStore, PostgresDataStore}; fn data_store() -> Result> { Ok(Box::new(PostgresDataStore::new( From becad3d86c3ee3ee81bfc9c444a692404d5305fd Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Fri, 25 Aug 2023 01:02:41 -0500 Subject: [PATCH 15/26] set order on the sql more explicitly --- crates/server/src/datastore/postgres/mod.rs | 2 +- crates/server/src/services/core.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index 2289132e..99a60eab 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -439,7 +439,7 @@ impl DataStore for PostgresDataStore { schema::records::registry_log_index, )) .filter(schema::records::status.eq(RecordStatus::Validated)) - .order_by(schema::records::registry_log_index) + .order(schema::records::registry_log_index.asc()) .load_stream::<(ParsedText, ParsedText, Option)>(&mut conn) .await? 
.map(|r| { diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 13ab575a..5dfa8d62 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -204,8 +204,8 @@ impl Inner { let state = self.state.get_mut(); while let Some(entry) = published.next().await { - let (index, log_leaf) = entry?; - state.push_entry(index, log_leaf); + let (registry_index, log_leaf) = entry?; + state.push_entry(registry_index, log_leaf); if let Some(stored_checkpoint) = checkpoints_by_len.get(&(state.log.length() as RegistryIndex)) { From 8e0fcbd3621b5d0e583f3be024951b6122bd9d70 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Fri, 25 Aug 2023 01:13:52 -0500 Subject: [PATCH 16/26] removed unused field --- crates/server/src/services/core.rs | 8 -------- 1 file changed, 8 deletions(-) diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 5dfa8d62..878d3c25 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -354,8 +354,6 @@ struct State { log: VecLog, // Index log tree nodes by registry log index of the record leaf_index: HashMap, - // Index log size by log tree root - root_index: HashMap, RegistryIndex>, // The verifiable map of package logs' latest entries (log_id -> record_id) map: VerifiableMap, @@ -368,12 +366,6 @@ impl State { let node = self.log.push(&entry); self.leaf_index.insert(registry_index, node); - let log_checkpoint = self.log.checkpoint(); - self.root_index.insert( - log_checkpoint.root(), - log_checkpoint.length() as RegistryIndex, - ); - self.map = self.map.insert( entry.log_id.clone(), MapLeaf { From 31ea3e44128058619a24fc803a3106e69b35a566 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Fri, 25 Aug 2023 19:16:19 -0500 Subject: [PATCH 17/26] fixed map inclusion proof; the order of the requested logleafs must be consistent throughout the proof bundle creation --- crates/server/src/api/v1/proof.rs | 1 - crates/server/src/datastore/postgres/mod.rs | 56 ++++++++++----------- 2 files changed, 26 insertions(+), 31 deletions(-) diff --git a/crates/server/src/api/v1/proof.rs b/crates/server/src/api/v1/proof.rs index 0e9711a9..985eb665 100644 --- a/crates/server/src/api/v1/proof.rs +++ b/crates/server/src/api/v1/proof.rs @@ -85,7 +85,6 @@ async fn prove_inclusion( .collect::>(); let log_bundle = config.core.log_inclusion_proofs(log_length, &leafs).await?; - let map_bundle = config.core.map_inclusion_proofs(log_length, &leafs).await?; Ok(Json(InclusionResponse { diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index 99a60eab..752ab8e6 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -16,7 +16,10 @@ use diesel_migrations::{ }; use futures::{Stream, StreamExt}; use secrecy::{ExposeSecret, SecretString}; -use std::{collections::HashSet, pin::Pin}; +use std::{ + collections::{HashMap, HashSet}, + pin::Pin, +}; use warg_crypto::{hash::AnyHash, Decode, Signable}; use warg_protocol::{ operator, @@ -443,26 +446,28 @@ impl DataStore for PostgresDataStore { .load_stream::<(ParsedText, ParsedText, Option)>(&mut conn) .await? 
.map(|r| { - r.map_err(Into::into).map(|(log_id, record_id, index)| { - ( - index.unwrap() as RegistryIndex, - LogLeaf { - log_id: log_id.0.into(), - record_id: record_id.0.into(), - }, - ) - }) + r.map_err(Into::into) + .map(|(log_id, record_id, registry_index)| { + ( + registry_index.unwrap() as RegistryIndex, + LogLeaf { + log_id: log_id.0.into(), + record_id: record_id.0.into(), + }, + ) + }) }), )) } + // Note: order of the entries is expected to match to the corresponding returned log leafs. async fn get_log_leafs_with_registry_index( &self, entries: &[RegistryIndex], ) -> Result, DataStoreError> { let mut conn = self.pool.get().await?; - let leafs = schema::records::table + let mut leafs_map = schema::records::table .inner_join(schema::logs::table) .select(( schema::logs::log_id, @@ -473,7 +478,6 @@ impl DataStore for PostgresDataStore { schema::records::registry_log_index .eq_any(entries.iter().map(|i| *i as i64).collect::>()), ) - .order(schema::records::registry_log_index.asc()) .load::<(ParsedText, ParsedText, Option)>(&mut conn) .await? .into_iter() @@ -486,24 +490,16 @@ impl DataStore for PostgresDataStore { }, ) }) - .collect::>(); - - if leafs.len() < entries.len() { - let mut input = entries.to_vec(); - input.sort_unstable(); - - for (i, (index, _)) in leafs.iter().enumerate() { - let input_entry = input.get(i).unwrap(); - if *index != *input_entry { - return Err(DataStoreError::LogLeafNotFound(*input_entry)); - } - } - } - - Ok(leafs - .into_iter() - .map(|(_, log_leaf)| log_leaf) - .collect::>()) + .collect::>(); + + Ok(entries + .iter() + .map(|registry_index| { + leafs_map + .remove(registry_index) + .ok_or(DataStoreError::LogLeafNotFound(*registry_index)) + }) + .collect::, _>>()?) } async fn store_operator_record( From 53ca289679430ab91d89e9c6a50bc7c6d89892c9 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Fri, 25 Aug 2023 19:38:27 -0500 Subject: [PATCH 18/26] added another alias type for RegistryLen as well as RegistryIndex --- crates/api/src/v1/proof.rs | 12 ++++++------ crates/protocol/src/registry.rs | 7 +++++-- crates/server/src/api/v1/proof.rs | 6 +++--- crates/server/src/services/core.rs | 22 +++++++++++----------- tests/postgres/mod.rs | 6 +++--- 5 files changed, 28 insertions(+), 25 deletions(-) diff --git a/crates/api/src/v1/proof.rs b/crates/api/src/v1/proof.rs index f32d2ce4..aa843cb2 100644 --- a/crates/api/src/v1/proof.rs +++ b/crates/api/src/v1/proof.rs @@ -6,16 +6,16 @@ use serde_with::{base64::Base64, serde_as}; use std::borrow::Cow; use thiserror::Error; use warg_crypto::hash::AnyHash; -use warg_protocol::registry::{LogId, RegistryIndex}; +use warg_protocol::registry::{LogId, RegistryLen, RegistryIndex}; /// Represents a consistency proof request. #[derive(Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ConsistencyRequest { /// The starting log length to check for consistency. - pub from: RegistryIndex, + pub from: RegistryLen, /// The ending log length to check for consistency. - pub to: RegistryIndex, + pub to: RegistryLen, } /// Represents a consistency proof response. @@ -33,7 +33,7 @@ pub struct ConsistencyResponse { #[serde(rename_all = "camelCase")] pub struct InclusionRequest { /// The log length to check for inclusion. - pub log_length: RegistryIndex, + pub log_length: RegistryLen, /// The log leaf indexes in the registry log to check for inclusion. pub leafs: Vec, } @@ -57,9 +57,9 @@ pub struct InclusionResponse { pub enum ProofError { /// The checkpoint could not be found for the provided log length. 
#[error("checkpoint not found for log length {0}")] - CheckpointNotFound(RegistryIndex), + CheckpointNotFound(RegistryLen), /// The provided log leaf was not found. - #[error("log leaf `{0}` exceeds the registry log length")] + #[error("log leaf `{0}` not found")] LeafNotFound(RegistryIndex), /// Failed to prove inclusion of a package. #[error("failed to prove inclusion of package log `{0}`")] diff --git a/crates/protocol/src/registry.rs b/crates/protocol/src/registry.rs index 840433ca..0bdd48fc 100644 --- a/crates/protocol/src/registry.rs +++ b/crates/protocol/src/registry.rs @@ -9,14 +9,17 @@ use warg_crypto::prefix::VisitPrefixEncode; use warg_crypto::{prefix, ByteVisitor, Signable, VisitBytes}; use wasmparser::names::KebabStr; -/// Type alias for log index and log length +/// Type alias for registry log index pub type RegistryIndex = usize; +/// Type alias for registry log length +pub type RegistryLen = RegistryIndex; + #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Checkpoint { pub log_root: AnyHash, - pub log_length: RegistryIndex, + pub log_length: RegistryLen, pub map_root: AnyHash, } diff --git a/crates/server/src/api/v1/proof.rs b/crates/server/src/api/v1/proof.rs index 985eb665..e03deb0c 100644 --- a/crates/server/src/api/v1/proof.rs +++ b/crates/server/src/api/v1/proof.rs @@ -6,7 +6,7 @@ use axum::{ use warg_api::v1::proof::{ ConsistencyRequest, ConsistencyResponse, InclusionRequest, InclusionResponse, ProofError, }; -use warg_protocol::registry::RegistryIndex; +use warg_protocol::registry::{RegistryIndex, RegistryLen}; #[derive(Clone)] pub struct Config { @@ -64,7 +64,7 @@ async fn prove_consistency( ) -> Result, ProofApiError> { let bundle = config .core - .log_consistency_proof(body.from as RegistryIndex, body.to as RegistryIndex) + .log_consistency_proof(body.from as RegistryLen, body.to as RegistryLen) .await?; Ok(Json(ConsistencyResponse { @@ -77,7 +77,7 @@ async fn prove_inclusion( State(config): State, Json(body): Json, ) -> Result, ProofApiError> { - let log_length = body.log_length as RegistryIndex; + let log_length = body.log_length as RegistryLen; let leafs = body .leafs .into_iter() diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 878d3c25..2ada21b7 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -18,7 +18,7 @@ use warg_crypto::{ use warg_protocol::{ operator, registry::{ - Checkpoint, LogId, LogLeaf, MapLeaf, RecordId, RegistryIndex, TimestampedCheckpoint, + Checkpoint, LogId, LogLeaf, MapLeaf, RecordId, RegistryIndex, RegistryLen, TimestampedCheckpoint, }, ProtoEnvelope, SerdeEnvelope, }; @@ -73,8 +73,8 @@ impl CoreService { /// Constructs a log consistency proof between the given log tree roots. pub async fn log_consistency_proof( &self, - from_log_length: RegistryIndex, - to_log_length: RegistryIndex, + from_log_length: RegistryLen, + to_log_length: RegistryLen, ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; @@ -88,7 +88,7 @@ impl CoreService { /// Constructs log inclusion proofs for the given entries at the given log tree root. pub async fn log_inclusion_proofs( &self, - log_length: RegistryIndex, + log_length: RegistryLen, entries: &[RegistryIndex], ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; @@ -110,7 +110,7 @@ impl CoreService { /// Constructs map inclusion proofs for the given entries at the given map tree root. 
pub async fn map_inclusion_proofs( &self, - log_length: RegistryIndex, + log_length: RegistryLen, entries: &[RegistryIndex], ) -> Result, CoreServiceError> { let state = self.inner.state.read().await; @@ -196,7 +196,7 @@ impl Inner { // Reconstruct internal state from previously-stored data let mut checkpoints = self.store.get_all_checkpoints().await?; - let mut checkpoints_by_len: HashMap = Default::default(); + let mut checkpoints_by_len: HashMap = Default::default(); while let Some(checkpoint) = checkpoints.next().await { let checkpoint = checkpoint?.checkpoint; checkpoints_by_len.insert(checkpoint.log_length, checkpoint); @@ -207,7 +207,7 @@ impl Inner { let (registry_index, log_leaf) = entry?; state.push_entry(registry_index, log_leaf); if let Some(stored_checkpoint) = - checkpoints_by_len.get(&(state.log.length() as RegistryIndex)) + checkpoints_by_len.get(&(state.log.length() as RegistryLen)) { // Validate stored checkpoint (and update internal state as a side-effect) let computed_checkpoint = state.checkpoint(); @@ -326,7 +326,7 @@ impl Inner { { // Recalculate the checkpoint if necessary let mut state = self.state.write().await; - if state.log.length() as RegistryIndex != checkpoint.log_length { + if state.log.length() as RegistryLen != checkpoint.log_length { *checkpoint = state.checkpoint(); tracing::debug!("Updating to checkpoint {checkpoint:?}"); } @@ -358,7 +358,7 @@ struct State { // The verifiable map of package logs' latest entries (log_id -> record_id) map: VerifiableMap, // Index verifiable map snapshots by log length (at checkpoints only) - map_index: HashMap, VerifiableMap)>, + map_index: HashMap, VerifiableMap)>, } impl State { @@ -377,7 +377,7 @@ impl State { fn checkpoint(&mut self) -> Checkpoint { let log_checkpoint = self.log.checkpoint(); let map_root = self.map.root(); - let log_length = log_checkpoint.length() as RegistryIndex; + let log_length = log_checkpoint.length() as RegistryLen; // Update map snapshot if log_length > 0 { @@ -396,7 +396,7 @@ impl State { #[derive(Debug, Error)] pub enum CoreServiceError { #[error("checkpoint at log length `{0}` was not found")] - CheckpointNotFound(RegistryIndex), + CheckpointNotFound(RegistryLen), #[error("log leaf `{0}` was not found")] LeafNotFound(RegistryIndex), #[error("failed to bundle proofs: `{0}`")] diff --git a/tests/postgres/mod.rs b/tests/postgres/mod.rs index a71da6e9..8ed5939e 100644 --- a/tests/postgres/mod.rs +++ b/tests/postgres/mod.rs @@ -4,7 +4,7 @@ use super::{support::*, *}; use anyhow::{Context, Result}; use testresult::TestResult; use warg_client::api; -use warg_protocol::registry::RegistryIndex; +use warg_protocol::registry::RegistryLen; use warg_server::datastore::{DataStore, PostgresDataStore}; fn data_store() -> Result> { @@ -66,7 +66,7 @@ async fn it_works_with_postgres() -> TestResult { let ts_checkpoint = client.latest_checkpoint().await?; assert_eq!( ts_checkpoint.as_ref().checkpoint.log_length, - packages.len() as RegistryIndex + 2, /* publishes + initial checkpoint + yank */ + packages.len() as RegistryLen + 2, /* publishes + initial checkpoint + yank */ "expected {len} packages plus the initial checkpoint and yank", len = packages.len() ); @@ -84,7 +84,7 @@ async fn it_works_with_postgres() -> TestResult { let ts_checkpoint = client.latest_checkpoint().await?; assert_eq!( ts_checkpoint.as_ref().checkpoint.log_length, - packages.len() as RegistryIndex + 2, /* publishes + initial checkpoint + yank*/ + packages.len() as RegistryLen + 2, /* publishes + initial checkpoint + yank*/ "expected 
{len} packages plus the initial checkpoint and yank",
         len = packages.len()
     );

From 8c253d58a6c23d3ff30f231739320fcd7f61d111 Mon Sep 17 00:00:00 2001
From: Calvin Prewitt
Date: Fri, 25 Aug 2023 19:42:42 -0500
Subject: [PATCH 19/26] uncommented out proof error enum type

---
 crates/server/src/api/v1/proof.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/server/src/api/v1/proof.rs b/crates/server/src/api/v1/proof.rs
index e03deb0c..bc2086d2 100644
--- a/crates/server/src/api/v1/proof.rs
+++ b/crates/server/src/api/v1/proof.rs
@@ -34,7 +34,7 @@ impl From<CoreServiceError> for ProofApiError {
             CoreServiceError::CheckpointNotFound(log_length) => {
                 ProofError::CheckpointNotFound(log_length)
             }
-            //CoreServiceError::LeafNotFound(leaf) => ProofError::LeafNotFound(leaf),
+            CoreServiceError::LeafNotFound(leaf) => ProofError::LeafNotFound(leaf),
             CoreServiceError::BundleFailure(e) => ProofError::BundleFailure(e.to_string()),
             CoreServiceError::PackageNotIncluded(id) => ProofError::PackageLogNotIncluded(id),
             CoreServiceError::IncorrectProof { root, found } => {

From 521277f2dda2085ace2f70c5be1a4deae77a4b5f Mon Sep 17 00:00:00 2001
From: Calvin Prewitt
Date: Fri, 25 Aug 2023 19:43:23 -0500
Subject: [PATCH 20/26] cargo fmt

---
 crates/api/src/v1/proof.rs         | 2 +-
 crates/server/src/services/core.rs | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/crates/api/src/v1/proof.rs b/crates/api/src/v1/proof.rs
index aa843cb2..f71ac381 100644
--- a/crates/api/src/v1/proof.rs
+++ b/crates/api/src/v1/proof.rs
@@ -6,7 +6,7 @@ use serde_with::{base64::Base64, serde_as};
 use std::borrow::Cow;
 use thiserror::Error;
 use warg_crypto::hash::AnyHash;
-use warg_protocol::registry::{LogId, RegistryLen, RegistryIndex};
+use warg_protocol::registry::{LogId, RegistryIndex, RegistryLen};

 /// Represents a consistency proof request.
#[derive(Serialize, Deserialize)] diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 2ada21b7..04b3af39 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -18,7 +18,8 @@ use warg_crypto::{ use warg_protocol::{ operator, registry::{ - Checkpoint, LogId, LogLeaf, MapLeaf, RecordId, RegistryIndex, RegistryLen, TimestampedCheckpoint, + Checkpoint, LogId, LogLeaf, MapLeaf, RecordId, RegistryIndex, RegistryLen, + TimestampedCheckpoint, }, ProtoEnvelope, SerdeEnvelope, }; From 58905e4066a9b72998854d2addf66ee4abf13058 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Sat, 26 Aug 2023 13:06:41 -0500 Subject: [PATCH 21/26] changed the fetch logs endpoint to use log length instead of checkpoint id --- crates/api/src/v1/fetch.rs | 39 +++++---- crates/client/src/lib.rs | 2 +- crates/server/openapi.yaml | 93 +++++++++++++-------- crates/server/src/api/v1/fetch.rs | 4 +- crates/server/src/datastore/memory.rs | 28 +++---- crates/server/src/datastore/mod.rs | 16 ++-- crates/server/src/datastore/postgres/mod.rs | 21 ++--- 7 files changed, 115 insertions(+), 88 deletions(-) diff --git a/crates/api/src/v1/fetch.rs b/crates/api/src/v1/fetch.rs index 9478c4c8..b2c48f75 100644 --- a/crates/api/src/v1/fetch.rs +++ b/crates/api/src/v1/fetch.rs @@ -6,7 +6,7 @@ use std::{borrow::Cow, collections::HashMap}; use thiserror::Error; use warg_crypto::hash::AnyHash; use warg_protocol::{ - registry::{LogId, RecordId}, + registry::{LogId, RecordId, RegistryLen}, PublishedProtoEnvelopeBody, }; @@ -14,8 +14,8 @@ use warg_protocol::{ #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct FetchLogsRequest<'a> { - /// The root checkpoint ID hash of the registry. - pub checkpoint_id: Cow<'a, AnyHash>, + /// The checkpoint log length. + pub log_length: RegistryLen, /// The limit for the number of operator and package records to fetch. #[serde(skip_serializing_if = "Option::is_none")] pub limit: Option, @@ -47,8 +47,8 @@ pub struct FetchLogsResponse { #[derive(Debug, Error)] pub enum FetchError { /// The provided checkpoint was not found. - #[error("checkpoint `{0}` was not found")] - CheckpointNotFound(AnyHash), + #[error("checkpoint log length `{0}` was not found")] + CheckpointNotFound(RegistryLen), /// The provided log was not found. #[error("log `{0}` was not found")] LogNotFound(LogId), @@ -78,7 +78,7 @@ impl FetchError { #[derive(Serialize, Deserialize)] #[serde(rename_all = "camelCase")] enum EntityType { - Checkpoint, + LogLength, Log, Record, } @@ -90,6 +90,12 @@ where T: Clone + ToOwned, ::Owned: Serialize + for<'b> Deserialize<'b>, { + CheckpointNotFound { + status: Status<404>, + #[serde(rename = "type")] + ty: EntityType, + id: RegistryLen, + }, NotFound { status: Status<404>, #[serde(rename = "type")] @@ -105,10 +111,10 @@ where impl Serialize for FetchError { fn serialize(&self, serializer: S) -> Result { match self { - Self::CheckpointNotFound(checkpoint) => RawError::NotFound { + Self::CheckpointNotFound(log_length) => RawError::CheckpointNotFound:: { status: Status::<404>, - ty: EntityType::Checkpoint, - id: Cow::Borrowed(checkpoint), + ty: EntityType::LogLength, + id: *log_length, } .serialize(serializer), Self::LogNotFound(log_id) => RawError::NotFound { @@ -138,15 +144,10 @@ impl<'de> Deserialize<'de> for FetchError { D: serde::Deserializer<'de>, { match RawError::::deserialize(deserializer)? 
{ + RawError::CheckpointNotFound { status: _, ty: _, id } => { + Ok(Self::CheckpointNotFound(id)) + }, RawError::NotFound { status: _, ty, id } => match ty { - EntityType::Checkpoint => { - Ok(Self::CheckpointNotFound(id.parse().map_err(|_| { - serde::de::Error::invalid_value( - Unexpected::Str(&id), - &"a valid checkpoint hash", - ) - })?)) - } EntityType::Log => Ok(Self::LogNotFound( id.parse::() .map_err(|_| { @@ -164,6 +165,10 @@ impl<'de> Deserialize<'de> for FetchError { })? .into(), )), + _ => Err(serde::de::Error::invalid_value( + Unexpected::Str(&id), + &"a valid log length", + )), }, RawError::Message { status, message } => Ok(Self::Message { status, diff --git a/crates/client/src/lib.rs b/crates/client/src/lib.rs index 456c3443..b696820e 100644 --- a/crates/client/src/lib.rs +++ b/crates/client/src/lib.rs @@ -387,7 +387,7 @@ impl Client { let response: FetchLogsResponse = self .api .fetch_logs(FetchLogsRequest { - checkpoint_id: Cow::Borrowed(&checkpoint_id), + log_length: checkpoint.log_length, operator: operator .state .head() diff --git a/crates/server/openapi.yaml b/crates/server/openapi.yaml index b22a0a93..b632bd6a 100644 --- a/crates/server/openapi.yaml +++ b/crates/server/openapi.yaml @@ -62,26 +62,9 @@ paths: content: application/json: schema: - type: object - additionalProperties: false - required: - - status - - type - - id - properties: - status: - type: integer - description: The HTTP status code for the error. - example: 404 - type: - type: string - description: The type of entity that was not found. - enum: [checkpoint, log, record] - example: checkpoint - id: - type: string - description: The identifier of the entity that was not found. - example: sha256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c + oneOf: + - "$ref": "#/components/schemas/FetchLogsIDNotFoundError" + - "$ref": "#/components/schemas/FetchLogsLogLengthNotFoundError" default: description: An error occurred when processing the request. content: @@ -339,8 +322,6 @@ paths: - proof description: | Proves the consistency of the registry between two specified checkpoints. - - TODO: document the consistency proof bundle format. requestBody: content: application/json: @@ -372,12 +353,11 @@ paths: type: type: string description: The type of entity that was not found. - enum: [checkpoint] - example: checkpoint + enum: [logLength] + example: logLength id: - "$ref": "#/components/schemas/AnyHash" - description: | - The identifier of the entity that was not found. + type: integer + description: The identifier of the entity that was not found. "422": description: The proof bundle could not be generated. content: @@ -404,10 +384,6 @@ paths: - proof description: | Proves that the given log leafs are present in the given registry checkpoint. - - TODO: document the log inclusion proof bundle format. - - TODO: document the map inclusion proof bundle format. requestBody: content: application/json: @@ -443,8 +419,7 @@ paths: example: logLength id: type: integer - description: | - The identifier of the entity that was not found. + description: The identifier of the entity that was not found. "422": description: The proof bundle could not be generated. content: @@ -494,11 +469,13 @@ components: description: A request to fetch logs from the registry. additionalProperties: false required: - - checkpointId + - logLength properties: - checkpointId: - $ref: "#/components/schemas/AnyHash" - description: The registry checkpoint ID hash to fetch from. 
+ logLength: + type: integer + description: The registry checkpoint log length to fetch from. + example: 101 + minimum: 1 limit: type: integer description: The limit of operator and packages records to return for the fetch request. @@ -993,3 +970,45 @@ components: type: string description: The failure error message. example: bundle must contain proofs for the same root + FetchLogsIDNotFoundError: + type: object + additionalProperties: false + required: + - status + - type + - id + properties: + status: + type: integer + description: The HTTP status code for the error. + example: 404 + type: + type: string + description: The type of entity that was not found. + enum: [log, record] + example: log + id: + type: string + description: The identifier of the entity that was not found. + example: sha256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c + FetchLogsLogLengthNotFoundError: + type: object + additionalProperties: false + required: + - status + - type + - id + properties: + status: + type: integer + description: The HTTP status code for the error. + example: 404 + type: + type: string + description: The type of entity that was not found. + enum: [logLength] + example: log + id: + type: integer + description: The log length that was not found. + example: 1001 diff --git a/crates/server/src/api/v1/fetch.rs b/crates/server/src/api/v1/fetch.rs index 458eb5ac..32ca984f 100644 --- a/crates/server/src/api/v1/fetch.rs +++ b/crates/server/src/api/v1/fetch.rs @@ -90,7 +90,7 @@ async fn fetch_logs( .store() .get_operator_records( &LogId::operator_log::(), - &body.checkpoint_id, + body.log_length, body.operator.as_deref(), limit, ) @@ -107,7 +107,7 @@ async fn fetch_logs( let records: Vec = config .core_service .store() - .get_package_records(&id, &body.checkpoint_id, since.as_ref(), limit) + .get_package_records(&id, body.log_length, since.as_ref(), limit) .await? 
.into_iter() .map(Into::into) diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index c306723a..15baed6a 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -11,7 +11,9 @@ use warg_crypto::{hash::AnyHash, Signable}; use warg_protocol::{ operator, package::{self, PackageEntry}, - registry::{LogId, LogLeaf, PackageId, RecordId, RegistryIndex, TimestampedCheckpoint}, + registry::{ + LogId, LogLeaf, PackageId, RecordId, RegistryIndex, RegistryLen, TimestampedCheckpoint, + }, ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; @@ -76,7 +78,7 @@ struct State { operators: HashMap>, packages: HashMap>, package_ids: BTreeSet, - checkpoints: IndexMap>, + checkpoints: IndexMap>, records: HashMap>, log_leafs: HashMap, } @@ -429,14 +431,14 @@ impl DataStore for MemoryDataStore { async fn store_checkpoint( &self, - checkpoint_id: &AnyHash, + _checkpoint_id: &AnyHash, ts_checkpoint: SerdeEnvelope, ) -> Result<(), DataStoreError> { let mut state = self.0.write().await; state .checkpoints - .insert(checkpoint_id.clone(), ts_checkpoint); + .insert(ts_checkpoint.as_ref().checkpoint.log_length, ts_checkpoint); Ok(()) } @@ -452,7 +454,7 @@ impl DataStore for MemoryDataStore { async fn get_operator_records( &self, log_id: &LogId, - checkpoint_id: &AnyHash, + registry_log_length: RegistryLen, since: Option<&RecordId>, limit: u16, ) -> Result>, DataStoreError> { @@ -463,8 +465,8 @@ impl DataStore for MemoryDataStore { .get(log_id) .ok_or_else(|| DataStoreError::LogNotFound(log_id.clone()))?; - let Some(checkpoint) = state.checkpoints.get(checkpoint_id) else { - return Err(DataStoreError::CheckpointNotFound(checkpoint_id.clone())); + if !state.checkpoints.contains_key(®istry_log_length) { + return Err(DataStoreError::CheckpointNotFound(registry_log_length)); }; let start_log_idx = match since { @@ -474,13 +476,12 @@ impl DataStore for MemoryDataStore { }, None => 0, }; - let end_registry_idx = checkpoint.as_ref().checkpoint.log_length; Ok(log .entries .iter() .skip(start_log_idx as usize) - .take_while(|entry| entry.registry_index < end_registry_idx) + .take_while(|entry| entry.registry_index < registry_log_length) .map(|entry| PublishedProtoEnvelope { envelope: entry.record_content.clone(), registry_index: entry.registry_index, @@ -492,7 +493,7 @@ impl DataStore for MemoryDataStore { async fn get_package_records( &self, log_id: &LogId, - checkpoint_id: &AnyHash, + registry_log_length: RegistryLen, since: Option<&RecordId>, limit: u16, ) -> Result>, DataStoreError> { @@ -503,8 +504,8 @@ impl DataStore for MemoryDataStore { .get(log_id) .ok_or_else(|| DataStoreError::LogNotFound(log_id.clone()))?; - let Some(checkpoint) = state.checkpoints.get(checkpoint_id) else { - return Err(DataStoreError::CheckpointNotFound(checkpoint_id.clone())); + if !state.checkpoints.contains_key(®istry_log_length) { + return Err(DataStoreError::CheckpointNotFound(registry_log_length)); }; let start_log_idx = match since { @@ -514,13 +515,12 @@ impl DataStore for MemoryDataStore { }, None => 0, }; - let end_registry_idx = checkpoint.as_ref().checkpoint.log_length; Ok(log .entries .iter() .skip(start_log_idx as usize) - .take_while(|entry| entry.registry_index < end_registry_idx) + .take_while(|entry| entry.registry_index < registry_log_length) .map(|entry| PublishedProtoEnvelope { envelope: entry.record_content.clone(), registry_index: entry.registry_index, diff --git a/crates/server/src/datastore/mod.rs b/crates/server/src/datastore/mod.rs index 
6a6c7007..f48d2fcd 100644 --- a/crates/server/src/datastore/mod.rs +++ b/crates/server/src/datastore/mod.rs @@ -4,7 +4,9 @@ use thiserror::Error; use warg_crypto::{hash::AnyHash, signing::KeyID}; use warg_protocol::{ operator, package, - registry::{LogId, LogLeaf, PackageId, RecordId, RegistryIndex, TimestampedCheckpoint}, + registry::{ + LogId, LogLeaf, PackageId, RecordId, RegistryIndex, RegistryLen, TimestampedCheckpoint, + }, ProtoEnvelope, PublishedProtoEnvelope, SerdeEnvelope, }; @@ -21,8 +23,8 @@ pub enum DataStoreError { #[error("a conflicting operation was processed: update to the latest checkpoint and try the operation again")] Conflict, - #[error("checkpoint `{0}` was not found")] - CheckpointNotFound(AnyHash), + #[error("checkpoint log length `{0}` was not found")] + CheckpointNotFound(RegistryLen), #[error("log `{0}` was not found")] LogNotFound(LogId), @@ -227,20 +229,20 @@ pub trait DataStore: Send + Sync { &self, ) -> Result, DataStoreError>; - /// Gets the operator records for the given registry checkpoint ID hash. + /// Gets the operator records for the given registry log length. async fn get_operator_records( &self, log_id: &LogId, - checkpoint_id: &AnyHash, + registry_log_length: RegistryLen, since: Option<&RecordId>, limit: u16, ) -> Result>, DataStoreError>; - /// Gets the package records for the given registry checkpoint ID hash. + /// Gets the package records for the given registry log length. async fn get_package_records( &self, log_id: &LogId, - checkpoint_id: &AnyHash, + registry_log_length: RegistryLen, since: Option<&RecordId>, limit: u16, ) -> Result>, DataStoreError>; diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index 752ab8e6..7ab2cda5 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -25,7 +25,8 @@ use warg_protocol::{ operator, package::{self, PackageEntry}, registry::{ - Checkpoint, LogId, LogLeaf, PackageId, RecordId, RegistryIndex, TimestampedCheckpoint, + Checkpoint, LogId, LogLeaf, PackageId, RecordId, RegistryIndex, RegistryLen, + TimestampedCheckpoint, }, ProtoEnvelope, PublishedProtoEnvelope, Record as _, SerdeEnvelope, Validator, }; @@ -36,17 +37,17 @@ mod schema; async fn get_records( conn: &mut AsyncPgConnection, log_id: i32, - checkpoint_id: &AnyHash, + registry_log_length: RegistryLen, since: Option<&RecordId>, limit: i64, ) -> Result>, DataStoreError> { - let checkpoint_length = schema::checkpoints::table + schema::checkpoints::table .select(schema::checkpoints::log_length) - .filter(schema::checkpoints::checkpoint_id.eq(TextRef(checkpoint_id))) + .filter(schema::checkpoints::log_length.eq(registry_log_length as i64)) .first::(conn) .await .optional()? 
- .ok_or_else(|| DataStoreError::CheckpointNotFound(checkpoint_id.clone()))?; + .ok_or_else(|| DataStoreError::CheckpointNotFound(registry_log_length))?; let mut query = schema::records::table .into_boxed() @@ -60,7 +61,7 @@ async fn get_records( .filter( schema::records::log_id .eq(log_id) - .and(schema::records::registry_log_index.lt(checkpoint_length)) + .and(schema::records::registry_log_index.lt(registry_log_length as i64)) .and(schema::records::status.eq(RecordStatus::Validated)), ); @@ -794,7 +795,7 @@ impl DataStore for PostgresDataStore { async fn get_operator_records( &self, log_id: &LogId, - checkpoint_id: &AnyHash, + registry_log_length: RegistryLen, since: Option<&RecordId>, limit: u16, ) -> Result>, DataStoreError> { @@ -807,13 +808,13 @@ impl DataStore for PostgresDataStore { .optional()? .ok_or_else(|| DataStoreError::LogNotFound(log_id.clone()))?; - get_records(&mut conn, log_id, checkpoint_id, since, limit as i64).await + get_records(&mut conn, log_id, registry_log_length, since, limit as i64).await } async fn get_package_records( &self, log_id: &LogId, - checkpoint_id: &AnyHash, + registry_log_length: RegistryLen, since: Option<&RecordId>, limit: u16, ) -> Result>, DataStoreError> { @@ -826,7 +827,7 @@ impl DataStore for PostgresDataStore { .optional()? .ok_or_else(|| DataStoreError::LogNotFound(log_id.clone()))?; - get_records(&mut conn, log_id, checkpoint_id, since, limit as i64).await + get_records(&mut conn, log_id, registry_log_length, since, limit as i64).await } async fn get_operator_record( From c914061ce7b39bea6c26799074d7cff7b71bd066 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Sat, 26 Aug 2023 13:08:15 -0500 Subject: [PATCH 22/26] cargo fmt --- crates/api/src/v1/fetch.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/crates/api/src/v1/fetch.rs b/crates/api/src/v1/fetch.rs index b2c48f75..dd77496d 100644 --- a/crates/api/src/v1/fetch.rs +++ b/crates/api/src/v1/fetch.rs @@ -144,9 +144,11 @@ impl<'de> Deserialize<'de> for FetchError { D: serde::Deserializer<'de>, { match RawError::::deserialize(deserializer)? 
{ - RawError::CheckpointNotFound { status: _, ty: _, id } => { - Ok(Self::CheckpointNotFound(id)) - }, + RawError::CheckpointNotFound { + status: _, + ty: _, + id, + } => Ok(Self::CheckpointNotFound(id)), RawError::NotFound { status: _, ty, id } => match ty { EntityType::Log => Ok(Self::LogNotFound( id.parse::() @@ -166,9 +168,9 @@ impl<'de> Deserialize<'de> for FetchError { .into(), )), _ => Err(serde::de::Error::invalid_value( - Unexpected::Str(&id), - &"a valid log length", - )), + Unexpected::Str(&id), + &"a valid log length", + )), }, RawError::Message { status, message } => Ok(Self::Message { status, From 2c13f63c867f202cdcd6f2bc34879568981bd211 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Sat, 26 Aug 2023 13:12:13 -0500 Subject: [PATCH 23/26] fixed debug endpoints --- crates/server/src/api/debug/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/server/src/api/debug/mod.rs b/crates/server/src/api/debug/mod.rs index f13146fe..cd3ace9d 100644 --- a/crates/server/src/api/debug/mod.rs +++ b/crates/server/src/api/debug/mod.rs @@ -92,11 +92,11 @@ async fn get_package_info( .get_latest_checkpoint() .await .context("get_latest_checkpoint")?; - let checkpoint_id = Hash::::of(&checkpoint.as_ref().checkpoint).into(); + let checkpoint_log_length = checkpoint.as_ref().checkpoint.log_length; let log_id = LogId::package_log::(&package_id); let records = store - .get_package_records(&log_id, &checkpoint_id, None, u16::MAX) + .get_package_records(&log_id, checkpoint_log_length, None, u16::MAX) .await .context("get_package_records")?; From 163369c0b75d7d627a3f891a8789a5002ca7283a Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Sat, 26 Aug 2023 13:12:57 -0500 Subject: [PATCH 24/26] removed unused import --- crates/server/src/api/debug/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/server/src/api/debug/mod.rs b/crates/server/src/api/debug/mod.rs index cd3ace9d..94c62bed 100644 --- a/crates/server/src/api/debug/mod.rs +++ b/crates/server/src/api/debug/mod.rs @@ -11,7 +11,7 @@ use axum::{ }; use serde::Serialize; use warg_crypto::{ - hash::{AnyHash, Hash, Sha256}, + hash::{AnyHash, Sha256}, signing::KeyID, }; use warg_protocol::{ From 5bff826f16dd0c0e054ed727bd3ba15365586650 Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Mon, 28 Aug 2023 08:54:50 -0500 Subject: [PATCH 25/26] clippy fixes --- crates/api/src/v1/fetch.rs | 6 +----- crates/client/src/lib.rs | 2 +- crates/server/src/api/v1/package.rs | 2 +- crates/server/src/datastore/memory.rs | 12 ++++++------ crates/server/src/services/core.rs | 4 ++-- 5 files changed, 11 insertions(+), 15 deletions(-) diff --git a/crates/api/src/v1/fetch.rs b/crates/api/src/v1/fetch.rs index dd77496d..7bd59695 100644 --- a/crates/api/src/v1/fetch.rs +++ b/crates/api/src/v1/fetch.rs @@ -144,11 +144,7 @@ impl<'de> Deserialize<'de> for FetchError { D: serde::Deserializer<'de>, { match RawError::::deserialize(deserializer)? { - RawError::CheckpointNotFound { - status: _, - ty: _, - id, - } => Ok(Self::CheckpointNotFound(id)), + RawError::CheckpointNotFound { id, .. 
} => Ok(Self::CheckpointNotFound(id)), RawError::NotFound { status: _, ty, id } => match ty { EntityType::Log => Ok(Self::LogNotFound( id.parse::() diff --git a/crates/client/src/lib.rs b/crates/client/src/lib.rs index b696820e..bd5cfb4b 100644 --- a/crates/client/src/lib.rs +++ b/crates/client/src/lib.rs @@ -475,7 +475,7 @@ impl Client { log_length: checkpoint.log_length, leafs: leaf_indices, }, - &checkpoint, + checkpoint, &leafs, ) .await?; diff --git a/crates/server/src/api/v1/package.rs b/crates/server/src/api/v1/package.rs index 0a469373..e1b28cc9 100644 --- a/crates/server/src/api/v1/package.rs +++ b/crates/server/src/api/v1/package.rs @@ -306,7 +306,7 @@ async fn get_record( }) .collect(); - let registry_index = record.registry_index.unwrap().try_into().unwrap(); + let registry_index = record.registry_index.unwrap(); Ok(Json(PackageRecord { id: record_id, diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index 15baed6a..364d6795 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -223,7 +223,7 @@ impl DataStore for MemoryDataStore { record_content: record, }); *status = RecordStatus::Validated(Record { - index: index, + index, registry_index, }); log_leafs.insert( @@ -342,7 +342,7 @@ impl DataStore for MemoryDataStore { record_content: record, }); *status = RecordStatus::Validated(Record { - index: index, + index, registry_index, }); log_leafs.insert( @@ -480,7 +480,7 @@ impl DataStore for MemoryDataStore { Ok(log .entries .iter() - .skip(start_log_idx as usize) + .skip(start_log_idx) .take_while(|entry| entry.registry_index < registry_log_length) .map(|entry| PublishedProtoEnvelope { envelope: entry.record_content.clone(), @@ -519,7 +519,7 @@ impl DataStore for MemoryDataStore { Ok(log .entries .iter() - .skip(start_log_idx as usize) + .skip(start_log_idx) .take_while(|entry| entry.registry_index < registry_log_length) .map(|entry| PublishedProtoEnvelope { envelope: entry.record_content.clone(), @@ -569,7 +569,7 @@ impl DataStore for MemoryDataStore { } else { super::RecordStatus::Validated }, - log.entries[r.index as usize].record_content.clone(), + log.entries[r.index].record_content.clone(), Some(r.registry_index), ) } @@ -623,7 +623,7 @@ impl DataStore for MemoryDataStore { } else { super::RecordStatus::Validated }, - log.entries[r.index as usize].record_content.clone(), + log.entries[r.index].record_content.clone(), Some(r.registry_index), ) } diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 04b3af39..379b62a0 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -296,7 +296,7 @@ impl Inner { let LogLeaf { log_id, record_id } = entry; // Validate and commit the package entry to the store - let registry_index = state.log.length().try_into().unwrap(); + let registry_index = state.log.length() as RegistryIndex; let commit_res = self .store .commit_package_record(log_id, record_id, registry_index) @@ -387,7 +387,7 @@ impl State { } Checkpoint { - log_length: log_length.try_into().unwrap(), + log_length, log_root: log_checkpoint.root().into(), map_root: map_root.into(), } From 6d5088e3b5d29d3a026b52413fd55058e606693d Mon Sep 17 00:00:00 2001 From: Calvin Prewitt Date: Mon, 28 Aug 2023 09:28:44 -0500 Subject: [PATCH 26/26] simplified datastore service get_all_validated_records --- crates/server/src/datastore/memory.rs | 6 ++-- crates/server/src/datastore/mod.rs | 5 +-- crates/server/src/datastore/postgres/mod.rs | 28 
+++++----------- crates/server/src/services/core.rs | 37 +++++++++------------ 4 files changed, 27 insertions(+), 49 deletions(-) diff --git a/crates/server/src/datastore/memory.rs b/crates/server/src/datastore/memory.rs index 364d6795..22aa8adc 100644 --- a/crates/server/src/datastore/memory.rs +++ b/crates/server/src/datastore/memory.rs @@ -116,10 +116,8 @@ impl DataStore for MemoryDataStore { async fn get_all_validated_records( &self, - ) -> Result< - Pin> + Send>>, - DataStoreError, - > { + ) -> Result> + Send>>, DataStoreError> + { Ok(Box::pin(futures::stream::empty())) } diff --git a/crates/server/src/datastore/mod.rs b/crates/server/src/datastore/mod.rs index f48d2fcd..7b069e33 100644 --- a/crates/server/src/datastore/mod.rs +++ b/crates/server/src/datastore/mod.rs @@ -116,10 +116,7 @@ pub trait DataStore: Send + Sync { /// This is an expensive operation and should only be performed on startup. async fn get_all_validated_records( &self, - ) -> Result< - Pin> + Send>>, - DataStoreError, - >; + ) -> Result> + Send>>, DataStoreError>; /// Looks up the log_id and record_id from the registry log index. async fn get_log_leafs_with_registry_index( diff --git a/crates/server/src/datastore/postgres/mod.rs b/crates/server/src/datastore/postgres/mod.rs index 7ab2cda5..126c2e90 100644 --- a/crates/server/src/datastore/postgres/mod.rs +++ b/crates/server/src/datastore/postgres/mod.rs @@ -423,10 +423,8 @@ impl DataStore for PostgresDataStore { async fn get_all_validated_records( &self, - ) -> Result< - Pin> + Send>>, - DataStoreError, - > { + ) -> Result> + Send>>, DataStoreError> + { // The returned future will keep the connection from the pool until dropped let mut conn = self.pool.get().await?; @@ -437,26 +435,16 @@ impl DataStore for PostgresDataStore { Ok(Box::pin( schema::records::table .inner_join(schema::logs::table) - .select(( - schema::logs::log_id, - schema::records::record_id, - schema::records::registry_log_index, - )) + .select((schema::logs::log_id, schema::records::record_id)) .filter(schema::records::status.eq(RecordStatus::Validated)) .order(schema::records::registry_log_index.asc()) - .load_stream::<(ParsedText, ParsedText, Option)>(&mut conn) + .load_stream::<(ParsedText, ParsedText)>(&mut conn) .await? 
.map(|r| { - r.map_err(Into::into) - .map(|(log_id, record_id, registry_index)| { - ( - registry_index.unwrap() as RegistryIndex, - LogLeaf { - log_id: log_id.0.into(), - record_id: record_id.0.into(), - }, - ) - }) + r.map_err(Into::into).map(|(log_id, record_id)| LogLeaf { + log_id: log_id.0.into(), + record_id: record_id.0.into(), + }) }), )) } diff --git a/crates/server/src/services/core.rs b/crates/server/src/services/core.rs index 379b62a0..74f871c4 100644 --- a/crates/server/src/services/core.rs +++ b/crates/server/src/services/core.rs @@ -97,11 +97,12 @@ impl CoreService { let proofs = entries .iter() .map(|index| { - let node = state - .leaf_index - .get(index) - .ok_or_else(|| CoreServiceError::LeafNotFound(*index))?; - Ok(state.log.prove_inclusion(*node, log_length as usize)) + let node = if *index < state.leaf_index.len() as RegistryIndex { + state.leaf_index[*index as usize] + } else { + return Err(CoreServiceError::LeafNotFound(*index)); + }; + Ok(state.log.prove_inclusion(node, log_length as usize)) }) .collect::, CoreServiceError>>()?; @@ -205,8 +206,7 @@ impl Inner { let state = self.state.get_mut(); while let Some(entry) = published.next().await { - let (registry_index, log_leaf) = entry?; - state.push_entry(registry_index, log_leaf); + state.push_entry(entry?); if let Some(stored_checkpoint) = checkpoints_by_len.get(&(state.log.length() as RegistryLen)) { @@ -246,8 +246,7 @@ impl Inner { .await?; // Update state with init record - let entry = LogLeaf { log_id, record_id }; - state.push_entry(0, entry.clone()); + state.push_entry(LogLeaf { log_id, record_id }); // "zero" checkpoint to be updated let mut checkpoint = Checkpoint { @@ -319,7 +318,7 @@ impl Inner { return; } - state.push_entry(registry_index as RegistryIndex, entry.clone()); + state.push_entry(entry.clone()); } // Store a checkpoint including the given new entries @@ -354,7 +353,7 @@ struct State { // The verifiable log of all package log entries log: VecLog, // Index log tree nodes by registry log index of the record - leaf_index: HashMap, + leaf_index: Vec, // The verifiable map of package logs' latest entries (log_id -> record_id) map: VerifiableMap, @@ -363,16 +362,12 @@ struct State { } impl State { - fn push_entry(&mut self, registry_index: RegistryIndex, entry: LogLeaf) { - let node = self.log.push(&entry); - self.leaf_index.insert(registry_index, node); - - self.map = self.map.insert( - entry.log_id.clone(), - MapLeaf { - record_id: entry.record_id.clone(), - }, - ); + fn push_entry(&mut self, log_leaf: LogLeaf) { + let node = self.log.push(&log_leaf); + self.leaf_index.push(node); + + let LogLeaf { log_id, record_id } = log_leaf; + self.map = self.map.insert(log_id, MapLeaf { record_id }); } fn checkpoint(&mut self) -> Checkpoint {