
Commit

Merge pull request #2 from Jac0xb/jacob/v1-rewrite
Jac0xb authored Dec 18, 2023
2 parents de039d0 + fa77f78 commit 51c68e9
Showing 44 changed files with 1,906 additions and 1,260 deletions.
4 changes: 4 additions & 0 deletions README.md
@@ -81,3 +81,7 @@ Lighthouse is provided "as is", with no warranties regarding its efficacy in com
- Save on transaction space by only needing to call write once.
- Auto-increment validation
- Check to make sure transactions are run in sequence or they fail (see the sketch after this list)
- Decide on using CPI events vs program logs events
- Extra account overhead for the CPI events PDA
- Program log concatenation
- Do we even need logs :P
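
The auto-increment item above is only a note in this commit; as a rough illustration, here is a minimal sketch of a counter-gated check. `SequenceAccount` and `assert_and_increment` are hypothetical names, reusing the `AssertionFailed` and `OutOfRange` variants from error.rs in this diff:

// Hypothetical sketch, not part of this commit: fail unless transactions arrive in order.
use anchor_lang::prelude::*;
use crate::error::ProgramError;

#[account]
pub struct SequenceAccount {
    pub counter: u64,
}

pub fn assert_and_increment(seq: &mut SequenceAccount, expected: u64) -> Result<()> {
    // Reject out-of-order transactions.
    if seq.counter != expected {
        return Err(ProgramError::AssertionFailed.into());
    }
    // Advance the counter, guarding against overflow.
    seq.counter = seq.counter.checked_add(1).ok_or(ProgramError::OutOfRange)?;
    Ok(())
}
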
47 changes: 47 additions & 0 deletions macros/Cargo.lock


12 changes: 12 additions & 0 deletions macros/Cargo.toml
@@ -0,0 +1,12 @@
[package]
name = "macros"
version = "0.1.0"
edition = "2021"

[lib]
proc-macro = true

[dependencies]
syn = { version = "1.0", features = ["full"] }
quote = "1.0"
proc-macro2 = "1.0"
119 changes: 119 additions & 0 deletions macros/src/lib.rs
@@ -0,0 +1,119 @@
extern crate proc_macro;

use proc_macro::TokenStream;
use quote::quote;
use syn::{
parse_macro_input, punctuated::Punctuated, token::Comma, Data, DataStruct, DeriveInput, Field,
Fields,
};

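/// Derives `Optional{Name}`: a copy of the struct with every field wrapped in
/// `Option<T>`, re-applying any `#[derive(...)]` attributes it finds on the input.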
#[proc_macro_derive(Optionize)]
pub fn optionize(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let name = &input.ident;
let optional_name = syn::Ident::new(&format!("Optional{}", name), name.span());
let attrs = &input.attrs;

let fields = match &input.data {
Data::Struct(data_struct) => &data_struct.fields,
_ => panic!("Optionize macro only works with structs"),
};

let optional_fields = fields.iter().map(|f| {
let name = &f.ident;
let ty = &f.ty;
quote! { pub #name: Option<#ty>, }
});

let derive_attrs: Vec<_> = attrs
.iter()
.filter(|attr| attr.path.is_ident("derive"))
.collect();

let expanded = quote! {
// Original struct with its attributes
// #( #attrs )*
// pub struct #name {
// #( #fields, )*
// }

// Optional variant of the struct with the same derive attributes
#( #derive_attrs )*
pub struct #optional_name {
#( #optional_fields )*
}
};

TokenStream::from(expanded)
}

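/// Derives `{Name}Fields`: an enum with one variant per named field of the struct.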
#[proc_macro_derive(FieldEnum)]
pub fn field_enum(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree
let input = parse_macro_input!(input as DeriveInput);

// Extract the struct name and data
let name = input.ident;
let data = input.data;

match data {
Data::Struct(DataStruct {
fields: Fields::Named(fields),
..
}) => {
let field_names = fields.named.iter().map(|f| &f.ident);

// Generate enum variants from field names
let enum_name = quote::format_ident!("{}Fields", name);
let enum_tokens = quote! {
pub enum #enum_name {
#( #field_names ),*
}
};

// Convert generated enum into a TokenStream and return it
TokenStream::from(enum_tokens)
}
_ => panic!("FieldEnum macro only works with structs with named fields"),
}
}

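/// Derives a `get_field_offset(&str) -> Option<usize>` associated function. Note that,
/// as written, it returns the size of the matching field's type (`std::mem::size_of`),
/// not its byte offset within the struct.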
#[proc_macro_derive(FieldOffset)]
pub fn field_offset(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let struct_name = input.ident;
let field_names: Punctuated<Field, Comma>;

let fields = if let syn::Data::Struct(data_struct) = input.data {
match data_struct.fields {
Fields::Named(fields) => {
field_names = fields.named.clone();
field_names.iter().map(|f| {
let field_name = &f.ident;
let ty = &f.ty;
return quote! {
if field == stringify!(#field_name) {
return Some(std::mem::size_of::<#ty>());
}
};
})
}
_ => unimplemented!("FieldOffset only supports named fields"),
}
} else {
unimplemented!("FieldOffset only supports structs");
};

let expanded = quote! {
impl #struct_name {
pub fn get_field_offset(field: &str) -> Option<usize> {
#(#fields)*
None
}
}
};

TokenStream::from(expanded)
}
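
A minimal usage sketch of the three derives above; `ExampleState` and its fields are illustrative names, not types from this commit:

// Illustrative only: applying the derives from macros/src/lib.rs to a sample struct.
use macros::{FieldEnum, FieldOffset, Optionize};

#[derive(Clone, Optionize, FieldEnum, FieldOffset)]
pub struct ExampleState {
    pub owner: [u8; 32],
    pub lamports: u64,
}

fn demo() {
    // Optionize emits `OptionalExampleState` with every field wrapped in Option.
    let partial = OptionalExampleState { owner: None, lamports: Some(5) };
    let _ = partial.lamports;

    // FieldEnum emits `enum ExampleStateFields { owner, lamports }`.
    let _field = ExampleStateFields::lamports;

    // FieldOffset emits `get_field_offset`, which returns the size of the field's
    // type (size_of::<u64>() == 8 here), not its byte offset within the struct.
    assert_eq!(ExampleState::get_field_offset("lamports"), Some(8));
}
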
11 changes: 11 additions & 0 deletions programs/lighthouse/Cargo.lock


5 changes: 2 additions & 3 deletions programs/lighthouse/program/Cargo.toml
@@ -26,17 +26,16 @@ bytemuck = {version = "1.4.0", features = ["derive", "min_const_generics"]}
mpl-token-metadata = { version = "2.0.0-beta.1", features = ["no-entrypoint"] }
num-traits = "0.2.15"
solana-program = "~1.16.5"
# spl-account-compression = { version="0.2.0", features = ["cpi"] }
spl-associated-token-account = { version = ">= 1.1.3, < 3.0", features = ["no-entrypoint"] }
spl-token = { version = ">= 3.5.0, < 5.0", features = ["no-entrypoint"] }
macros = { path = "../../../macros" }

[dev-dependencies]
async-trait = "0.1.71"
# mpl-token-auth-rules = { version = "1.4.3", features = ["no-entrypoint"] }
solana-program-test = "~1.16.5"
solana-sdk = "~1.16.5"
spl-concurrent-merkle-tree = "0.2.0"
spl-merkle-tree-reference = "0.1.0"
spl-noop = { version = "0.1.3", features = ["no-entrypoint"] }
solana-banks-interface = "1.14.10"
# solana-banks-interface = { version = "^1.14.18", features = ["no-entrypoint"] }
regex = "1.5.4"
27 changes: 23 additions & 4 deletions programs/lighthouse/program/src/error.rs
@@ -1,6 +1,4 @@
use anchor_lang::prelude::*;
use mpl_token_metadata::error::MetadataError;
use num_traits::FromPrimitive;

#[error_code]
pub enum ProgramError {
@@ -10,8 +8,29 @@ pub enum ProgramError {
AssertionFailed,
#[msg("NotEnoughAccounts")]
NotEnoughAccounts,
#[msg("BorshValueMismatch")]
BorshValueMismatch,
#[msg("DataValueMismatch")]
DataValueMismatch,
#[msg("UnsupportedOperator")]
UnsupportedOperator,
#[msg("OutOfRange")]
OutOfRange,
#[msg("AccountBorrowFailed")]
AccountBorrowFailed,
#[msg("InvalidAccount")]
InvalidAccount,

#[msg("InvalidDataLength")]
InvalidDataLength,

#[msg("AccountOutOfRange")]
AccountOutOfRange,

#[msg("AccountOwnerValidationFailed")]
AccountOwnerValidationFailed,

#[msg("AccountFundedValidationFailed")]
AccountFundedValidationFailed,

#[msg("AccountDiscriminatorValidationFailed")]
AccountDiscriminatorValidationFailed,
}
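
For context, a hedged sketch (not from this commit) of how one of these variants might surface from a handler; `require_funded` is an illustrative helper:

// Illustrative only: converting an error_code variant into an Anchor error.
use anchor_lang::prelude::*;
use crate::error::ProgramError;

pub fn require_funded(account: &AccountInfo<'_>) -> Result<()> {
    if account.lamports() == 0 {
        return Err(ProgramError::AccountFundedValidationFailed.into());
    }
    Ok(())
}
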
6 changes: 3 additions & 3 deletions programs/lighthouse/program/src/lib.rs
@@ -2,7 +2,7 @@
#![allow(clippy::too_many_arguments)]

use anchor_lang::prelude::*;
use borsh::{BorshDeserialize, BorshSerialize};
use borsh::BorshDeserialize;

pub mod error;
pub mod processor;
@@ -35,7 +35,7 @@ pub mod lighthouse {
pub fn write_v1<'info>(
ctx: Context<'_, '_, '_, 'info, WriteV1<'info>>,
cache_index: u8,
write_type: WriteType,
write_type: WriteTypeParameter,
) -> Result<()> {
processor::v1::write(ctx, cache_index, write_type)
}
@@ -44,7 +44,7 @@
ctx: Context<'_, '_, '_, 'info, AssertV1<'info>>,
assertions: Vec<Assertion>,
logical_expression: Option<Vec<Expression>>,
options: Option<Config>,
options: Option<AssertionConfig>,
) -> Result<()> {
processor::assert(ctx, assertions, logical_expression, options)
}