diff --git a/Cargo.lock b/Cargo.lock
index 682c702dd4..42d1d010d7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1751,10 +1751,12 @@ dependencies = [
 name = "dal-macros"
 version = "0.1.0"
 dependencies = [
+ "dal",
  "darling",
  "manyhow",
  "proc-macro2",
  "quote",
+ "si-events",
  "syn 2.0.79",
  "trybuild",
 ]
diff --git a/lib/dal-macros/Cargo.toml b/lib/dal-macros/Cargo.toml
index 35a9c72f63..f4b7d751c0 100644
--- a/lib/dal-macros/Cargo.toml
+++ b/lib/dal-macros/Cargo.toml
@@ -19,4 +19,6 @@ quote = { workspace = true }
 syn = { workspace = true }
 
 [dev-dependencies]
+dal = { path = "../dal" }
+si-events = { path = "../si-events-rs" }
 trybuild = { version = "1.0.99", features = ["diff"] }
diff --git a/lib/dal-macros/src/lib.rs b/lib/dal-macros/src/lib.rs
index dd2d44481d..d54dbbe469 100644
--- a/lib/dal-macros/src/lib.rs
+++ b/lib/dal-macros/src/lib.rs
@@ -10,3 +10,12 @@ pub fn si_versioned_node_weight(
 ) -> manyhow::Result {
     node_weight::versioned::derive_si_versioned_node_weight(input, errors)
 }
+
+#[manyhow]
+#[proc_macro_derive(SiNodeWeight, attributes(si_node_weight))]
+pub fn si_node_weight(
+    input: proc_macro::TokenStream,
+    errors: &mut manyhow::Emitter,
+) -> manyhow::Result {
+    node_weight::derive_si_node_weight(input, errors)
+}
diff --git a/lib/dal-macros/src/node_weight.rs b/lib/dal-macros/src/node_weight.rs
index 009f289f44..f1d768bd54 100644
--- a/lib/dal-macros/src/node_weight.rs
+++ b/lib/dal-macros/src/node_weight.rs
@@ -1 +1,299 @@
+use std::collections::HashSet;
+
+use darling::{util::IdentString, FromAttributes, FromMeta};
+use manyhow::{bail, emit};
+use proc_macro2::TokenStream;
+use quote::{quote, ToTokens};
+use syn::{Data, DeriveInput, Expr, Path};
+
 pub(crate) mod versioned;
+
+#[derive(Debug, PartialEq, Eq, Hash)]
+enum SkipOption {
+    ContentHash,
+    Id,
+    LineageId,
+    MerkleTreeHash,
+    NodeHash,
+    NodeWeightDiscriminants,
+    SetId,
+    SetLineageId,
+    SetMerkleTreeHash,
+}
+
+impl FromMeta for SkipOption {
+    fn from_string(value: &str) -> darling::Result<Self> {
+        match value {
+            "content_hash" => Ok(Self::ContentHash),
+            "id" => Ok(Self::Id),
+            "lineage_id" => Ok(Self::LineageId),
+            "merkle_tree_hash" => Ok(Self::MerkleTreeHash),
+            "node_hash" => Ok(Self::NodeHash),
+            "node_weight_discriminants" => Ok(Self::NodeWeightDiscriminants),
+            "set_id" => Ok(Self::SetId),
+            "set_lineage_id" => Ok(Self::SetLineageId),
+            "set_merkle_tree_hash" => Ok(Self::SetMerkleTreeHash),
+            v => Err(darling::Error::unknown_value(v)),
+        }
+    }
+}
+
+#[derive(Debug, Default)]
+struct SkipOptionSet {
+    options: HashSet<SkipOption>,
+}
+
+impl SkipOptionSet {
+    fn should_skip(&self, option: SkipOption) -> bool {
+        self.options.contains(&option)
+    }
+
+    fn add_skip_option(&mut self, option: SkipOption) {
+        self.options.insert(option);
+    }
+}
+
+impl FromMeta for SkipOptionSet {
+    fn from_list(items: &[darling::ast::NestedMeta]) -> darling::Result<Self> {
+        let mut new_option_set = SkipOptionSet::default();
+        for item in items {
+            new_option_set.add_skip_option(SkipOption::from_nested_meta(item)?);
+        }
+
+        Ok(new_option_set)
+    }
+}
+
+#[derive(FromAttributes)]
+#[darling(attributes(si_node_weight))]
+struct SiNodeWeightOptions {
+    discriminant: Option<Path>,
+    #[darling(default)]
+    skip: SkipOptionSet,
+}
+
+#[derive(Debug, Default)]
+struct NodeHashParticipation {
+    participant: bool,
+    custom_code: Option<String>,
+}
+
+impl FromMeta for NodeHashParticipation {
+    fn from_word() -> darling::Result<Self> {
+        Ok(Self {
+            participant: true,
+            custom_code: None,
+        })
+    }
+
+    fn from_value(value: &syn::Lit) -> darling::Result<Self> {
+        match value {
+            syn::Lit::Str(custom_str) => Ok(Self {
+                participant: true,
+                custom_code: Some(custom_str.value()),
+            }),
+            _ => Err(darling::Error::unexpected_lit_type(value)),
+        }
+    }
+}
+
+#[derive(Debug, Default, FromAttributes)]
+#[darling(attributes(si_node_weight))]
+struct NodeWeightFieldAttrs {
+    #[darling(default, rename = "node_hash")]
+    node_hash_participation: NodeHashParticipation,
+}
+
+pub fn derive_si_node_weight(
+    input: proc_macro::TokenStream,
+    errors: &mut manyhow::Emitter,
+) -> manyhow::Result {
+    let input = syn::parse::<DeriveInput>(input)?;
+    let DeriveInput {
+        ident,
+        data: type_data,
+        attrs,
+        ..
+    } = input.clone();
+    let struct_options = SiNodeWeightOptions::from_attributes(&attrs)?;
+
+    let node_weight_discriminant = if let Some(discriminant) = struct_options.discriminant {
+        discriminant
+    } else {
+        emit!(errors, input, "No NodeWeightDiscriminants was specified.");
+        syn::parse_str::<Path>("")?
+    };
+
+    let struct_data = match &type_data {
+        Data::Struct(data) => data,
+        _ => {
+            bail!(input, "SiNodeWeight must be derived on a struct.");
+        }
+    };
+
+    let mut struct_has_content_address = false;
+    let mut node_hash_parts = Vec::new();
+    for field in &struct_data.fields {
+        if let Some(field_ident) = &field.ident {
+            let ident_string: IdentString = field_ident.clone().into();
+            if ident_string == "content_address" {
+                struct_has_content_address = true;
+            }
+        } else {
+            emit!(errors, field, "No identifier found for field.");
+            continue;
+        }
+
+        let field_attrs = NodeWeightFieldAttrs::from_attributes(&field.attrs)?;
+        if field_attrs.node_hash_participation.participant {
+            let hasher_update =
+                if let Some(custom_code) = field_attrs.node_hash_participation.custom_code {
+                    match syn::parse_str::<Expr>(&custom_code) {
+                        Ok(expr) => expr.to_token_stream(),
+                        Err(e) => {
+                            emit!(
+                                errors,
+                                syn::Error::new_spanned(
+                                    field,
+                                    format!("Invalid custom code for node_hash calculation: {e}")
+                                )
+                            );
+                            continue;
+                        }
+                    }
+                } else {
+                    let field_ident = field.ident.clone();
+                    quote! {
+                        self.#field_ident.as_bytes()
+                    }
+                };
+
+            node_hash_parts.push(hasher_update);
+        }
+    }
+    errors.into_result()?;
+
+    let id_fn = if struct_options.skip.should_skip(SkipOption::Id) {
+        quote! {}
+    } else {
+        quote! {
+            fn id(&self) -> Ulid {
+                self.id
+            }
+        }
+    };
+
+    let lineage_id_fn = if struct_options.skip.should_skip(SkipOption::LineageId) {
+        quote! {}
+    } else {
+        quote! {
+            fn lineage_id(&self) -> Ulid {
+                self.lineage_id
+            }
+        }
+    };
+
+    let content_hash_fn = if struct_options.skip.should_skip(SkipOption::ContentHash) {
+        quote! {}
+    } else {
+        let content_hash_location = if struct_has_content_address {
+            quote! { self.content_address.content_hash() }
+        } else {
+            quote! { self.node_hash() }
+        };
+
+        quote! {
+            fn content_hash(&self) -> ContentHash {
+                #content_hash_location
+            }
+        }
+    };
+
+    let merkle_tree_hash_fn = if struct_options.skip.should_skip(SkipOption::MerkleTreeHash) {
+        quote! {}
+    } else {
+        quote! {
+            fn merkle_tree_hash(&self) -> MerkleTreeHash {
+                self.merkle_tree_hash
+            }
+        }
+    };
+
+    let node_hash_fn = if struct_options.skip.should_skip(SkipOption::NodeHash) {
+        quote! {}
+    } else {
+        let mut hash_updates = TokenStream::new();
+        for update in node_hash_parts {
+            let full_update = quote! { content_hasher.update(#update); };
+            hash_updates.extend(full_update);
+        }
+
+        quote! {
+            fn node_hash(&self) -> ContentHash {
+                let mut content_hasher = ContentHash::hasher();
+                #hash_updates
+
+                content_hasher.finalize()
+            }
+        }
+    };
+
+    let node_weight_discriminant_fn = if struct_options
+        .skip
+        .should_skip(SkipOption::NodeWeightDiscriminants)
+    {
+        quote! {}
+    } else {
+        quote! {
+            fn node_weight_discriminant(&self) -> NodeWeightDiscriminants {
+                #node_weight_discriminant
+            }
+        }
+    };
+
+    let set_id_fn = if struct_options.skip.should_skip(SkipOption::SetId) {
+        quote! {}
+    } else {
+        quote! {
+            fn set_id(&mut self, new_id: Ulid) {
+                self.id = new_id;
+            }
+        }
+    };
+
+    let set_lineage_id_fn = if struct_options.skip.should_skip(SkipOption::SetLineageId) {
+        quote! {}
+    } else {
+        quote! {
+            fn set_lineage_id(&mut self, new_lineage_id: Ulid) {
+                self.lineage_id = new_lineage_id;
+            }
+        }
+    };
+
+    let set_merkle_tree_hash_fn = if struct_options
+        .skip
+        .should_skip(SkipOption::SetMerkleTreeHash)
+    {
+        quote! {}
+    } else {
+        quote! {
+            fn set_merkle_tree_hash(&mut self, new_merkle_tree_hash: MerkleTreeHash) {
+                self.merkle_tree_hash = new_merkle_tree_hash;
+            }
+        }
+    };
+
+    let output = quote! {
+        impl SiNodeWeight for #ident {
+            #id_fn
+            #lineage_id_fn
+            #content_hash_fn
+            #merkle_tree_hash_fn
+            #node_hash_fn
+            #node_weight_discriminant_fn
+            #set_id_fn
+            #set_lineage_id_fn
+            #set_merkle_tree_hash_fn
+        }
+    };
+
+    Ok(output.into())
+}
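Note: as a concrete reference for the quote! fragments above, this is approximately the impl the derive emits for the minimal TestingNodeWeight fixture used in the trybuild pass test below (no content_address field, no node_hash participants, no skip options). It is a hand-assembled sketch read off the fragments, not exact macro output:

    impl SiNodeWeight for TestingNodeWeight {
        fn id(&self) -> Ulid {
            self.id
        }

        fn lineage_id(&self) -> Ulid {
            self.lineage_id
        }

        // No content_address field was found on the struct, so content_hash()
        // falls back to node_hash().
        fn content_hash(&self) -> ContentHash {
            self.node_hash()
        }

        fn merkle_tree_hash(&self) -> MerkleTreeHash {
            self.merkle_tree_hash
        }

        // No field opted into node_hash participation, so the hasher is
        // finalized without any updates.
        fn node_hash(&self) -> ContentHash {
            let mut content_hasher = ContentHash::hasher();
            content_hasher.finalize()
        }

        fn node_weight_discriminant(&self) -> NodeWeightDiscriminants {
            NodeWeightDiscriminants::InputSocket
        }

        fn set_id(&mut self, new_id: Ulid) {
            self.id = new_id;
        }

        fn set_lineage_id(&mut self, new_lineage_id: Ulid) {
            self.lineage_id = new_lineage_id;
        }

        fn set_merkle_tree_hash(&mut self, new_merkle_tree_hash: MerkleTreeHash) {
            self.merkle_tree_hash = new_merkle_tree_hash;
        }
    }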
diff --git a/lib/dal-macros/tests/ui/06-node_weight-basic-tests-pass.rs b/lib/dal-macros/tests/ui/06-node_weight-basic-tests-pass.rs
new file mode 100644
index 0000000000..9a602bea70
--- /dev/null
+++ b/lib/dal-macros/tests/ui/06-node_weight-basic-tests-pass.rs
@@ -0,0 +1,25 @@
+use dal::workspace_snapshot::{
+    edge_weight::EdgeWeightKindDiscriminants,
+    node_weight::{
+        traits::{CorrectExclusiveOutgoingEdge, CorrectTransforms, SiNodeWeight},
+        NodeWeightDiscriminants,
+    },
+};
+use si_events::{merkle_tree_hash::MerkleTreeHash, ulid::Ulid, ContentHash};
+
+#[derive(dal_macros::SiNodeWeight)]
+#[si_node_weight(discriminant = NodeWeightDiscriminants::InputSocket)]
+pub struct TestingNodeWeight {
+    id: Ulid,
+    lineage_id: Ulid,
+    merkle_tree_hash: MerkleTreeHash,
+}
+
+impl CorrectTransforms for TestingNodeWeight {}
+impl CorrectExclusiveOutgoingEdge for TestingNodeWeight {
+    fn exclusive_outgoing_edges(&self) -> &[EdgeWeightKindDiscriminants] {
+        todo!()
+    }
+}
+
+fn main() {}
diff --git a/lib/dal-macros/tests/ui/07-node_weight-no-discriminant-specified-fail.rs b/lib/dal-macros/tests/ui/07-node_weight-no-discriminant-specified-fail.rs
new file mode 100644
index 0000000000..99081ce292
--- /dev/null
+++ b/lib/dal-macros/tests/ui/07-node_weight-no-discriminant-specified-fail.rs
@@ -0,0 +1,25 @@
+use dal::workspace_snapshot::{
+    edge_weight::EdgeWeightKindDiscriminants,
+    node_weight::{
+        traits::{CorrectExclusiveOutgoingEdge, CorrectTransforms, SiNodeWeight},
+        NodeWeightDiscriminants,
+    },
+};
+use si_events::{merkle_tree_hash::MerkleTreeHash, ulid::Ulid, ContentHash};
+
+#[derive(dal_macros::SiNodeWeight)]
+#[si_node_weight]
+pub struct TestingNodeWeight {
+    id: Ulid,
+    lineage_id: Ulid,
+    merkle_tree_hash: MerkleTreeHash,
+}
+
+impl CorrectTransforms for TestingNodeWeight {}
+impl CorrectExclusiveOutgoingEdge for TestingNodeWeight {
+    fn exclusive_outgoing_edges(&self) -> &[EdgeWeightKindDiscriminants] {
+        &[]
+    }
+}
+
+fn main() {}
diff --git a/lib/dal-macros/tests/ui/07-node_weight-no-discriminant-specified-fail.stderr b/lib/dal-macros/tests/ui/07-node_weight-no-discriminant-specified-fail.stderr
new file mode 100644
index 0000000000..ae53be0151
--- /dev/null
+++ b/lib/dal-macros/tests/ui/07-node_weight-no-discriminant-specified-fail.stderr
@@ -0,0 +1,54 @@
+error: No NodeWeightDiscriminants was specified.
+  --> tests/ui/07-node_weight-no-discriminant-specified-fail.rs:11:1
+   |
+11 | / #[si_node_weight]
+12 | | pub struct TestingNodeWeight {
+13 | |     id: Ulid,
+14 | |     lineage_id: Ulid,
+15 | |     merkle_tree_hash: MerkleTreeHash,
+16 | | }
+   | |_^
+
+error: unexpected end of input, expected identifier
+  --> tests/ui/07-node_weight-no-discriminant-specified-fail.rs:10:10
+   |
+10 | #[derive(dal_macros::SiNodeWeight)]
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: this error originates in the derive macro `dal_macros::SiNodeWeight` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+warning: unused imports: `NodeWeightDiscriminants`, `SiNodeWeight`
+ --> tests/ui/07-node_weight-no-discriminant-specified-fail.rs:4:67
+  |
+4 |         traits::{CorrectExclusiveOutgoingEdge, CorrectTransforms, SiNodeWeight},
+  |                                                                   ^^^^^^^^^^^^
+5 |         NodeWeightDiscriminants,
+  |         ^^^^^^^^^^^^^^^^^^^^^^^
+  |
+  = note: `#[warn(unused_imports)]` on by default
+
+warning: unused import: `ContentHash`
+ --> tests/ui/07-node_weight-no-discriminant-specified-fail.rs:8:63
+  |
+8 | use si_events::{merkle_tree_hash::MerkleTreeHash, ulid::Ulid, ContentHash};
+  |                                                               ^^^^^^^^^^^
+
+error[E0277]: the trait bound `TestingNodeWeight: SiVersionedNodeWeight` is not satisfied
+  --> tests/ui/07-node_weight-no-discriminant-specified-fail.rs:19:6
+   |
+19 | impl CorrectExclusiveOutgoingEdge for TestingNodeWeight {
+   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `SiVersionedNodeWeight` is not implemented for `TestingNodeWeight`, which is required by `for<'a> NodeInformation: From<&'a TestingNodeWeight>`
+   |
+   = help: the following other types implement trait `SiVersionedNodeWeight`:
+             dal::workspace_snapshot::node_weight::InputSocketNodeWeight
+             dal::workspace_snapshot::node_weight::SchemaVariantNodeWeight
+   = note: required for `TestingNodeWeight` to implement `SiNodeWeight`
+   = note: required for `NodeInformation` to implement `for<'a> From<&'a TestingNodeWeight>`
+note: required by a bound in `CorrectExclusiveOutgoingEdge`
+  --> $WORKSPACE/lib/dal/src/workspace_snapshot/node_weight/traits/correct_exclusive_outgoing_edge.rs
+   |
+   | pub trait CorrectExclusiveOutgoingEdge
+   |           ---------------------------- required by a bound in this trait
+   | where
+   |     NodeInformation: for<'a> From<&'a Self>,
+   |                      ^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `CorrectExclusiveOutgoingEdge`
diff --git a/lib/dal-macros/tests/ui/08-node_weight-bad-node_hash-custom-code-fail.rs b/lib/dal-macros/tests/ui/08-node_weight-bad-node_hash-custom-code-fail.rs
new file mode 100644
index 0000000000..4ea6385d1e
--- /dev/null
+++ b/lib/dal-macros/tests/ui/08-node_weight-bad-node_hash-custom-code-fail.rs
@@ -0,0 +1,26 @@
+use dal::workspace_snapshot::{
+    edge_weight::EdgeWeightKindDiscriminants,
+    node_weight::{
+        traits::{CorrectExclusiveOutgoingEdge, CorrectTransforms, SiNodeWeight},
+        NodeWeightDiscriminants,
+    },
+};
+use si_events::{merkle_tree_hash::MerkleTreeHash, ulid::Ulid, ContentHash};
+
+#[derive(dal_macros::SiNodeWeight)]
+#[si_node_weight(discriminant = NodeWeightDiscriminants::InputSocket)]
+pub struct TestingNodeWeight {
+    #[si_node_weight(node_hash = "invalid code")]
+    id: Ulid,
+    lineage_id: Ulid,
+    merkle_tree_hash: MerkleTreeHash,
+}
+
+impl CorrectTransforms for TestingNodeWeight {}
+impl CorrectExclusiveOutgoingEdge for TestingNodeWeight {
+    fn exclusive_outgoing_edges(&self) -> &[EdgeWeightKindDiscriminants] {
+        todo!()
+    }
+}
+
+fn main() {}
diff --git a/lib/dal-macros/tests/ui/08-node_weight-bad-node_hash-custom-code-fail.stderr b/lib/dal-macros/tests/ui/08-node_weight-bad-node_hash-custom-code-fail.stderr
new file mode 100644
index 0000000000..b7c6363169
--- /dev/null
+++ b/lib/dal-macros/tests/ui/08-node_weight-bad-node_hash-custom-code-fail.stderr
@@ -0,0 +1,42 @@
+error: Invalid custom code for node_hash calculation: unexpected token
+  --> tests/ui/08-node_weight-bad-node_hash-custom-code-fail.rs:13:5
+   |
+13 | /     #[si_node_weight(node_hash = "invalid code")]
+14 | |     id: Ulid,
+   | |____________^
+
+warning: unused imports: `NodeWeightDiscriminants`, `SiNodeWeight`
+ --> tests/ui/08-node_weight-bad-node_hash-custom-code-fail.rs:4:67
+  |
+4 |         traits::{CorrectExclusiveOutgoingEdge, CorrectTransforms, SiNodeWeight},
+  |                                                                   ^^^^^^^^^^^^
+5 |         NodeWeightDiscriminants,
+  |         ^^^^^^^^^^^^^^^^^^^^^^^
+  |
+  = note: `#[warn(unused_imports)]` on by default
+
+warning: unused import: `ContentHash`
+ --> tests/ui/08-node_weight-bad-node_hash-custom-code-fail.rs:8:63
+  |
+8 | use si_events::{merkle_tree_hash::MerkleTreeHash, ulid::Ulid, ContentHash};
+  |                                                               ^^^^^^^^^^^
+
+error[E0277]: the trait bound `TestingNodeWeight: SiVersionedNodeWeight` is not satisfied
+  --> tests/ui/08-node_weight-bad-node_hash-custom-code-fail.rs:20:6
+   |
+20 | impl CorrectExclusiveOutgoingEdge for TestingNodeWeight {
+   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `SiVersionedNodeWeight` is not implemented for `TestingNodeWeight`, which is required by `for<'a> NodeInformation: From<&'a TestingNodeWeight>`
+   |
+   = help: the following other types implement trait `SiVersionedNodeWeight`:
+             dal::workspace_snapshot::node_weight::InputSocketNodeWeight
+             dal::workspace_snapshot::node_weight::SchemaVariantNodeWeight
+   = note: required for `TestingNodeWeight` to implement `SiNodeWeight`
+   = note: required for `NodeInformation` to implement `for<'a> From<&'a TestingNodeWeight>`
+note: required by a bound in `CorrectExclusiveOutgoingEdge`
+  --> $WORKSPACE/lib/dal/src/workspace_snapshot/node_weight/traits/correct_exclusive_outgoing_edge.rs
+   |
+   | pub trait CorrectExclusiveOutgoingEdge
+   |           ---------------------------- required by a bound in this trait
+   | where
+   |     NodeInformation: for<'a> From<&'a Self>,
+   |                      ^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `CorrectExclusiveOutgoingEdge`
diff --git a/lib/dal/src/workspace_snapshot/node_weight/input_socket_node_weight/v1.rs b/lib/dal/src/workspace_snapshot/node_weight/input_socket_node_weight/v1.rs
index 43bd09257d..275ab3f55e 100644
--- a/lib/dal/src/workspace_snapshot/node_weight/input_socket_node_weight/v1.rs
+++ b/lib/dal/src/workspace_snapshot/node_weight/input_socket_node_weight/v1.rs
@@ -19,12 +19,15 @@ use crate::{
 
 use super::{InputSocketNodeWeight, InputSocketNodeWeightError, InputSocketNodeWeightResult};
 
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, dal_macros::SiNodeWeight)]
+#[si_node_weight(discriminant = NodeWeightDiscriminants::InputSocket)]
 pub struct InputSocketNodeWeightV1 {
     pub id: Ulid,
     pub lineage_id: LineageId,
     merkle_tree_hash: MerkleTreeHash,
+    #[si_node_weight(node_hash = "self.arity.to_string().as_bytes()")]
     arity: SocketArity,
+    #[si_node_weight(node_hash = "self.content_address.content_hash().as_bytes()")]
     content_address: ContentAddress,
     timestamp: Timestamp,
 }
@@ -135,48 +138,6 @@ impl InputSocketNodeWeightV1 {
     }
 }
 
-impl SiNodeWeight for InputSocketNodeWeightV1 {
-    fn content_hash(&self) -> ContentHash {
-        self.content_address.content_hash()
-    }
-
-    fn id(&self) -> Ulid {
-        self.id
-    }
-
-    fn lineage_id(&self) -> Ulid {
-        self.lineage_id
-    }
-
-    fn merkle_tree_hash(&self) -> MerkleTreeHash {
-        self.merkle_tree_hash
-    }
-
-    fn node_hash(&self) -> ContentHash {
-        let mut content_hasher = ContentHash::hasher();
-        content_hasher.update(self.arity.to_string().as_bytes());
-        content_hasher.update(self.content_address.content_hash().as_bytes());
-
-        content_hasher.finalize()
-    }
-
-    fn node_weight_discriminant(&self) -> NodeWeightDiscriminants {
-        NodeWeightDiscriminants::InputSocket
-    }
-
-    fn set_id(&mut self, new_id: Ulid) {
-        self.id = new_id;
-    }
-
-    fn set_lineage_id(&mut self, new_lineage_id: Ulid) {
-        self.lineage_id = new_lineage_id;
-    }
-
-    fn set_merkle_tree_hash(&mut self, new_hash: MerkleTreeHash) {
-        self.merkle_tree_hash = new_hash
-    }
-}
-
 impl CorrectTransforms for InputSocketNodeWeightV1 {}
 
 impl CorrectExclusiveOutgoingEdge for InputSocketNodeWeightV1 {
diff --git a/lib/dal/src/workspace_snapshot/node_weight/schema_variant_node_weight/v1.rs b/lib/dal/src/workspace_snapshot/node_weight/schema_variant_node_weight/v1.rs
index 0f1d151427..582db18a0d 100644
--- a/lib/dal/src/workspace_snapshot/node_weight/schema_variant_node_weight/v1.rs
+++ b/lib/dal/src/workspace_snapshot/node_weight/schema_variant_node_weight/v1.rs
@@ -23,12 +23,15 @@ use crate::{
     Timestamp, WorkspaceSnapshotGraphV3, WorkspaceSnapshotGraphVCurrent,
 };
 
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, dal_macros::SiNodeWeight)]
+#[si_node_weight(discriminant = NodeWeightDiscriminants::SchemaVariant)]
 pub struct SchemaVariantNodeWeightV1 {
     pub id: Ulid,
     pub lineage_id: LineageId,
     merkle_tree_hash: MerkleTreeHash,
+    #[si_node_weight(node_hash = "&[u8::from(self.is_locked)]")]
     is_locked: bool,
+    #[si_node_weight(node_hash = "self.content_address.content_hash().as_bytes()")]
     content_address: ContentAddress,
     timestamp: Timestamp,
 }
@@ -164,48 +167,6 @@ impl SchemaVariantNodeWeightV1 {
     }
 }
 
-impl SiNodeWeight for SchemaVariantNodeWeightV1 {
-    fn content_hash(&self) -> ContentHash {
-        self.content_address.content_hash()
-    }
-
-    fn id(&self) -> Ulid {
-        self.id
-    }
-
-    fn lineage_id(&self) -> Ulid {
-        self.lineage_id
-    }
-
-    fn merkle_tree_hash(&self) -> MerkleTreeHash {
-        self.merkle_tree_hash
-    }
-
-    fn node_hash(&self) -> ContentHash {
-        let mut content_hasher = ContentHash::hasher();
-        content_hasher.update(&[u8::from(self.is_locked)]);
-        content_hasher.update(self.content_address.content_hash().as_bytes());
-
-        content_hasher.finalize()
-    }
-
-    fn node_weight_discriminant(&self) -> NodeWeightDiscriminants {
-        NodeWeightDiscriminants::SchemaVariant
-    }
-
-    fn set_id(&mut self, new_id: Ulid) {
-        self.id = new_id;
-    }
-
-    fn set_lineage_id(&mut self, new_id: Ulid) {
-        self.lineage_id = new_id;
-    }
-
-    fn set_merkle_tree_hash(&mut self, new_hash: MerkleTreeHash) {
-        self.merkle_tree_hash = new_hash
-    }
-}
-
 fn update_unlocks(
     updates: &[Update],
     update_idx: usize,
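Note: the trybuild harness that drives the new tests/ui cases is not part of this diff. Assuming dal-macros follows trybuild's standard runner pattern (the TestCases API below is real; the test function name and its location are assumptions), the runner would look roughly like:

    #[test]
    fn ui_tests() {
        // trybuild compiles each case against the current macro crate. For
        // compile_fail cases, rustc's output is compared with the checked-in
        // .stderr files; the "diff" feature enabled in Cargo.toml prints a
        // readable report on mismatch.
        let t = trybuild::TestCases::new();
        t.pass("tests/ui/06-node_weight-basic-tests-pass.rs");
        t.compile_fail("tests/ui/07-node_weight-no-discriminant-specified-fail.rs");
        t.compile_fail("tests/ui/08-node_weight-bad-node_hash-custom-code-fail.rs");
    }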