diff --git a/sdk-libs/compressed-token-sdk/Cargo.toml b/sdk-libs/compressed-token-sdk/Cargo.toml index af5bb0d9fd..1c2d0cf63c 100644 --- a/sdk-libs/compressed-token-sdk/Cargo.toml +++ b/sdk-libs/compressed-token-sdk/Cargo.toml @@ -7,9 +7,8 @@ license = "Apache-2.0" repository = "https://github.com/Lightprotocol/light-protocol" [features] -default = ["anchor-discriminator"] +default = [] v1 = [] -anchor-discriminator = ["light-sdk/anchor-discriminator"] anchor = ["anchor-lang", "light-token-types/anchor", "light-token-interface/anchor"] # idl-build feature enables IDL generation for client-side tooling and SDK generation idl-build = ["anchor", "anchor-lang/idl-build", "light-sdk/idl-build"] diff --git a/sdk-libs/compressed-token-sdk/src/lib.rs b/sdk-libs/compressed-token-sdk/src/lib.rs index 8cbcd0b623..78775f600a 100644 --- a/sdk-libs/compressed-token-sdk/src/lib.rs +++ b/sdk-libs/compressed-token-sdk/src/lib.rs @@ -9,7 +9,6 @@ //! //! - `v1` - Enable v1 compressed token support //! - `anchor` - Enable Anchor framework integration -//! - `anchor-discriminator` - Use Anchor-style discriminators (default) //! //! ## Modules //! 
diff --git a/sdk-libs/macros/Cargo.toml b/sdk-libs/macros/Cargo.toml index a030be3876..20807439b5 100644 --- a/sdk-libs/macros/Cargo.toml +++ b/sdk-libs/macros/Cargo.toml @@ -8,7 +8,6 @@ edition = "2021" [features] -anchor-discriminator = [] [dependencies] proc-macro2 = { workspace = true } diff --git a/sdk-libs/macros/src/discriminator.rs b/sdk-libs/macros/src/discriminator.rs index d9796bfc6f..5de023c5d5 100644 --- a/sdk-libs/macros/src/discriminator.rs +++ b/sdk-libs/macros/src/discriminator.rs @@ -3,13 +3,10 @@ use proc_macro2::TokenStream; use quote::quote; use syn::{ItemStruct, Result}; -pub(crate) fn discriminator(input: ItemStruct) -> Result<TokenStream> { +/// Light discriminator: SHA256("{name}")[0..8] +/// Implements LightDiscriminator trait +pub(crate) fn light_discriminator(input: ItemStruct) -> Result<TokenStream> { let account_name = &input.ident; - // When anchor-discriminator-compat feature is enabled, use "account:" prefix like Anchor does - #[cfg(feature = "anchor-discriminator")] - let hash_input = format!("account:{}", account_name); - - #[cfg(not(feature = "anchor-discriminator"))] let hash_input = account_name.to_string(); let (impl_gen, type_gen, where_clause) = input.generics.split_for_impl(); @@ -30,16 +27,58 @@ pub(crate) fn discriminator(input: ItemStruct) -> Result<TokenStream> { }) } +/// Anchor discriminator: SHA256("account:{name}")[0..8] +/// Implements the SAME LightDiscriminator trait, just with different hash input +pub(crate) fn anchor_discriminator(input: ItemStruct) -> Result<TokenStream> { + let account_name = &input.ident; + let hash_input = format!("account:{}", account_name); + + let (impl_gen, type_gen, where_clause) = input.generics.split_for_impl(); + + let mut discriminator = [0u8; 8]; + discriminator.copy_from_slice(&Sha256::hash(hash_input.as_bytes()).unwrap()[..8]); + let discriminator: proc_macro2::TokenStream = format!("{discriminator:?}").parse().unwrap(); + + // Same trait, different value + Ok(quote! 
{ + impl #impl_gen LightDiscriminator for #account_name #type_gen #where_clause { + const LIGHT_DISCRIMINATOR: [u8; 8] = #discriminator; + const LIGHT_DISCRIMINATOR_SLICE: &'static [u8] = &Self::LIGHT_DISCRIMINATOR; + + fn discriminator() -> [u8; 8] { + Self::LIGHT_DISCRIMINATOR + } + } + }) +} + #[cfg(test)] mod tests { + use syn::parse_quote; + + use super::*; - #[cfg(not(feature = "anchor-discriminator"))] #[test] - fn test_discriminator() { - use syn::parse_quote; + fn test_light_discriminator() { + let input: ItemStruct = parse_quote! { + struct MyAccount { + a: u32, + b: i32, + c: u64, + d: i64, + } + }; - use super::*; + let output = light_discriminator(input).unwrap(); + let output = output.to_string(); + assert!(output.contains("impl LightDiscriminator for MyAccount")); + // SHA256("MyAccount")[0..8] + assert!(output.contains("[181 , 255 , 112 , 42 , 17 , 188 , 66 , 199]")); + } + + #[test] + fn test_anchor_discriminator() { let input: ItemStruct = parse_quote! { struct MyAccount { a: u32, @@ -49,10 +88,11 @@ mod tests { } }; - let output = discriminator(input).unwrap(); + let output = anchor_discriminator(input).unwrap(); let output = output.to_string(); assert!(output.contains("impl LightDiscriminator for MyAccount")); - assert!(output.contains("[181 , 255 , 112 , 42 , 17 , 188 , 66 , 199]")); + // SHA256("account:MyAccount")[0..8] = f6 1c 06 57 fb 2d 32 2a + assert!(output.contains("[246 , 28 , 6 , 87 , 251 , 45 , 50 , 42]")); } } diff --git a/sdk-libs/macros/src/hasher/light_hasher.rs b/sdk-libs/macros/src/hasher/light_hasher.rs index 20f07b28c9..0a5792e8f5 100644 --- a/sdk-libs/macros/src/hasher/light_hasher.rs +++ b/sdk-libs/macros/src/hasher/light_hasher.rs @@ -652,7 +652,7 @@ impl ::light_hasher::DataHasher for OuterStruct { "SHA256 hasher should work with large structs" ); - let sha_discriminator_result = crate::discriminator::discriminator(input.clone()); + let sha_discriminator_result = crate::discriminator::light_discriminator(input.clone()); 
assert!( sha_discriminator_result.is_ok(), "SHA256 discriminator should work with large structs" @@ -736,7 +736,7 @@ impl ::light_hasher::DataHasher for OuterStruct { "SHA256 hasher must handle complex real-world structs" ); - let sha_discriminator_result = crate::discriminator::discriminator(input.clone()); + let sha_discriminator_result = crate::discriminator::light_discriminator(input.clone()); assert!( sha_discriminator_result.is_ok(), "SHA256 discriminator must handle complex real-world structs" diff --git a/sdk-libs/macros/src/lib.rs b/sdk-libs/macros/src/lib.rs index 992ff3b382..7d560c0095 100644 --- a/sdk-libs/macros/src/lib.rs +++ b/sdk-libs/macros/src/lib.rs @@ -1,5 +1,5 @@ extern crate proc_macro; -use discriminator::discriminator; +use discriminator::{anchor_discriminator, light_discriminator}; use hasher::{derive_light_hasher, derive_light_hasher_sha}; use proc_macro::TokenStream; use syn::{parse_macro_input, DeriveInput, ItemStruct}; @@ -15,10 +15,50 @@ mod utils; #[cfg(test)] mod light_pdas_tests; +/// Derives a discriminator using SHA256("{struct_name}")[0..8]. +/// +/// This is the Light Protocol native discriminator format. +/// Use this for new Light Protocol accounts that don't need Anchor compatibility. +/// +/// ## Example +/// +/// ```ignore +/// use light_sdk::LightDiscriminator; +/// +/// #[derive(LightDiscriminator)] +/// pub struct MyAccount { +/// pub owner: Pubkey, +/// pub counter: u64, +/// } +/// // MyAccount::LIGHT_DISCRIMINATOR = SHA256("MyAccount")[0..8] +/// ``` #[proc_macro_derive(LightDiscriminator)] -pub fn light_discriminator(input: TokenStream) -> TokenStream { +pub fn light_discriminator_derive(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as ItemStruct); + into_token_stream(light_discriminator(input)) +} + +/// Derives a discriminator using SHA256("account:{struct_name}")[0..8]. +/// +/// This is the Anchor-compatible discriminator format. 
+/// Use this when you need compatibility with Anchor's account discriminator format. +/// +/// ## Example +/// +/// ```ignore +/// use light_sdk::AnchorDiscriminator; +/// +/// #[derive(AnchorDiscriminator)] +/// pub struct MyAccount { +/// pub owner: Pubkey, +/// pub counter: u64, +/// } +/// // MyAccount::LIGHT_DISCRIMINATOR = SHA256("account:MyAccount")[0..8] +/// ``` +#[proc_macro_derive(AnchorDiscriminator)] +pub fn anchor_discriminator_derive(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as ItemStruct); - into_token_stream(discriminator(input)) + into_token_stream(anchor_discriminator(input)) } /// Makes the annotated struct hashable by implementing the following traits: diff --git a/sdk-libs/macros/src/light_pdas/account/light_compressible.rs b/sdk-libs/macros/src/light_pdas/account/light_compressible.rs index 13cd796cf0..c059c1e3c3 100644 --- a/sdk-libs/macros/src/light_pdas/account/light_compressible.rs +++ b/sdk-libs/macros/src/light_pdas/account/light_compressible.rs @@ -11,7 +11,7 @@ use quote::quote; use syn::{DeriveInput, Fields, ItemStruct, Result}; use crate::{ - discriminator::discriminator, + discriminator::light_discriminator, hasher::derive_light_hasher_sha, light_pdas::account::{pack_unpack::derive_compressible_pack, traits::derive_compressible}, }; @@ -61,7 +61,7 @@ pub fn derive_light_account(input: DeriveInput) -> Result { let hasher_impl = derive_light_hasher_sha(item_struct.clone())?; // Generate LightDiscriminator implementation - let discriminator_impl = discriminator(item_struct)?; + let discriminator_impl = light_discriminator(item_struct)?; // Generate Compressible implementation (HasCompressionInfo + CompressAs + Size + CompressedInitSpace) let compressible_impl = derive_compressible(input.clone())?; diff --git a/sdk-libs/sdk/Cargo.toml b/sdk-libs/sdk/Cargo.toml index 63adf65865..93ace946f1 100644 --- a/sdk-libs/sdk/Cargo.toml +++ b/sdk-libs/sdk/Cargo.toml @@ -26,7 +26,6 @@ poseidon = 
["light-hasher/poseidon", "light-compressed-account/poseidon"] keccak = ["light-hasher/keccak", "light-compressed-account/keccak"] sha256 = ["light-hasher/sha256", "light-compressed-account/sha256"] merkle-tree = ["light-concurrent-merkle-tree/solana"] -anchor-discriminator = ["light-sdk-macros/anchor-discriminator"] custom-heap = ["light-heap"] [dependencies] @@ -62,8 +61,8 @@ light-heap = { workspace = true, optional = true } [dev-dependencies] num-bigint = { workspace = true } -light-compressed-account = { workspace = true, features = ["new-unique"] } -light-hasher = { workspace = true, features = ["keccak"] } +light-compressed-account = { workspace = true, features = ["new-unique", "sha256", "poseidon"] } +light-hasher = { workspace = true, features = ["keccak", "sha256", "poseidon"] } anchor-lang = { workspace = true } [lints.rust.unexpected_cfgs] diff --git a/sdk-libs/sdk/src/lib.rs b/sdk-libs/sdk/src/lib.rs index fce8b42a5d..17f7d7d96e 100644 --- a/sdk-libs/sdk/src/lib.rs +++ b/sdk-libs/sdk/src/lib.rs @@ -206,8 +206,8 @@ pub extern crate light_hasher; use light_hasher::DataHasher; pub use light_macros::{derive_light_cpi_signer, derive_light_cpi_signer_pda}; pub use light_sdk_macros::{ - derive_light_rent_sponsor, derive_light_rent_sponsor_pda, LightDiscriminator, LightHasher, - LightHasherSha, + derive_light_rent_sponsor, derive_light_rent_sponsor_pda, AnchorDiscriminator, + LightDiscriminator, LightHasher, LightHasherSha, }; pub use light_sdk_types::{constants, instruction::PackedAddressTreeInfoExt, CpiSigner}; use solana_account_info::AccountInfo; diff --git a/sdk-libs/sdk/tests/address_derivation.rs b/sdk-libs/sdk/tests/address_derivation.rs new file mode 100644 index 0000000000..5247634b49 --- /dev/null +++ b/sdk-libs/sdk/tests/address_derivation.rs @@ -0,0 +1,379 @@ +//! Regression tests for address derivation functions. +//! +//! These tests ensure that address derivation produces stable, expected results +//! across SDK versions. 
Any change to these values indicates a breaking change +//! in address derivation. + +use light_sdk::address::{v1, AddressSeed}; +use solana_pubkey::Pubkey; + +// ============================================================================ +// V1 Address Derivation Tests +// ============================================================================ + +/// Regression test for v1::derive_address_seed with single seed. +#[test] +fn test_v1_derive_address_seed_single() { + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, 121, + 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); // "7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz" + + let address_seed = v1::derive_address_seed(&[b"counter"], &program_id); + + let expected_seed: [u8; 32] = [ + 0, 245, 19, 201, 93, 115, 34, 4, 40, 137, 210, 14, 49, 244, 116, 217, 75, 141, 75, 174, 91, + 204, 52, 232, 23, 205, 206, 11, 156, 153, 138, 2, + ]; + + assert_eq!( + address_seed, + AddressSeed::from(expected_seed), + "v1::derive_address_seed should produce expected hash for single seed" + ); +} + +/// Regression test for v1::derive_address_seed with multiple seeds. +#[test] +fn test_v1_derive_address_seed_multiple() { + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, 121, + 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); // "7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz" + + let address_seed = v1::derive_address_seed(&[b"foo", b"bar"], &program_id); + + let expected_seed: [u8; 32] = [ + 0, 144, 35, 68, 111, 204, 23, 151, 120, 31, 223, 158, 197, 136, 5, 247, 175, 29, 75, 0, 98, + 141, 6, 70, 59, 251, 227, 126, 157, 101, 113, 15, + ]; + + assert_eq!( + address_seed, + AddressSeed::from(expected_seed), + "v1::derive_address_seed should produce expected hash for multiple seeds" + ); +} + +/// Regression test for v1::derive_address (full address derivation). 
+#[test] +fn test_v1_derive_address() { + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, 121, + 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); // "7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz" + + let address_tree_pubkey = Pubkey::new_from_array([0u8; 32]); + + let (address, address_seed) = + v1::derive_address(&[b"foo", b"bar"], &address_tree_pubkey, &program_id); + + let expected_seed: [u8; 32] = [ + 0, 144, 35, 68, 111, 204, 23, 151, 120, 31, 223, 158, 197, 136, 5, 247, 175, 29, 75, 0, 98, + 141, 6, 70, 59, 251, 227, 126, 157, 101, 113, 15, + ]; + + let expected_address: [u8; 32] = [ + 0, 76, 248, 62, 238, 197, 1, 141, 147, 231, 141, 73, 114, 55, 148, 180, 248, 40, 93, 185, + 22, 21, 249, 166, 123, 52, 176, 211, 176, 181, 40, 137, + ]; + + assert_eq!( + address_seed, + AddressSeed::from(expected_seed), + "v1::derive_address should produce expected seed" + ); + assert_eq!( + address, expected_address, + "v1::derive_address should produce expected address" + ); +} + +/// Regression test for v1::derive_address with non-zero address tree. 
+#[test] +fn test_v1_derive_address_nonzero_tree() { + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, 121, + 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); // "7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz" + + // Non-zero address tree pubkey + let address_tree_pubkey = Pubkey::new_from_array([1u8; 32]); + + let (address, address_seed) = + v1::derive_address(&[b"foo", b"bar"], &address_tree_pubkey, &program_id); + + let expected_seed: [u8; 32] = [ + 0, 144, 35, 68, 111, 204, 23, 151, 120, 31, 223, 158, 197, 136, 5, 247, 175, 29, 75, 0, 98, + 141, 6, 70, 59, 251, 227, 126, 157, 101, 113, 15, + ]; + + let expected_address: [u8; 32] = [ + 0, 255, 198, 80, 93, 192, 235, 41, 155, 22, 132, 77, 249, 213, 151, 62, 5, 48, 131, 228, + 84, 7, 246, 208, 228, 186, 166, 253, 226, 207, 140, 63, + ]; + + assert_eq!( + address_seed, + AddressSeed::from(expected_seed), + "Seed should be independent of address tree" + ); + assert_eq!( + address, expected_address, + "Address should change with different tree" + ); +} + +// ============================================================================ +// V2 Address Derivation Tests (requires v2 feature) +// ============================================================================ + +#[cfg(feature = "v2")] +mod v2_tests { + use light_sdk::address::{v2, AddressSeed}; + use solana_pubkey::Pubkey; + + /// Regression test for v2::derive_address_seed with single seed. 
+ #[test] + fn test_v2_derive_address_seed_single() { + let address_seed = v2::derive_address_seed(&[b"counter"]); + + let expected_seed: [u8; 32] = [ + 0, 165, 27, 203, 187, 69, 194, 192, 180, 210, 48, 0, 52, 246, 251, 212, 224, 61, 66, + 41, 49, 191, 123, 103, 166, 56, 32, 4, 195, 249, 84, 184, + ]; + + assert_eq!( + address_seed, + AddressSeed::from(expected_seed), + "v2::derive_address_seed should produce expected hash for single seed" + ); + } + + /// Regression test for v2::derive_address_seed with multiple seeds. + #[test] + fn test_v2_derive_address_seed_multiple() { + let address_seed = v2::derive_address_seed(&[b"foo", b"bar"]); + + let expected_seed: [u8; 32] = [ + 0, 177, 134, 198, 24, 76, 116, 207, 56, 127, 189, 181, 87, 237, 154, 181, 246, 54, 131, + 21, 150, 248, 106, 75, 26, 80, 147, 245, 3, 23, 136, 56, + ]; + + assert_eq!( + address_seed, + AddressSeed::from(expected_seed), + "v2::derive_address_seed should produce expected hash for multiple seeds" + ); + } + + /// Regression test for v2::derive_address_from_seed. 
+ #[test] + fn test_v2_derive_address_from_seed() { + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, + 121, 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); // "7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz" + + let address_tree_pubkey = Pubkey::new_from_array([0u8; 32]); + + // Pre-computed seed for ["foo", "bar"] + let address_seed = AddressSeed::from([ + 0, 177, 134, 198, 24, 76, 116, 207, 56, 127, 189, 181, 87, 237, 154, 181, 246, 54, 131, + 21, 150, 248, 106, 75, 26, 80, 147, 245, 3, 23, 136, 56, + ]); + + let address = + v2::derive_address_from_seed(&address_seed, &address_tree_pubkey, &program_id); + + let expected_address: [u8; 32] = [ + 0, 132, 78, 228, 232, 12, 252, 191, 251, 208, 23, 174, 212, 63, 254, 118, 101, 12, 78, + 228, 149, 165, 165, 63, 78, 36, 207, 250, 77, 97, 137, 145, + ]; + + assert_eq!( + address, expected_address, + "v2::derive_address_from_seed should produce expected address" + ); + } + + /// Regression test for v2::derive_address (full address derivation). 
+ #[test] + fn test_v2_derive_address() { + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, + 121, 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); // "7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz" + + let address_tree_pubkey = Pubkey::new_from_array([0u8; 32]); + + let (address, address_seed) = + v2::derive_address(&[b"foo", b"bar"], &address_tree_pubkey, &program_id); + + let expected_seed: [u8; 32] = [ + 0, 177, 134, 198, 24, 76, 116, 207, 56, 127, 189, 181, 87, 237, 154, 181, 246, 54, 131, + 21, 150, 248, 106, 75, 26, 80, 147, 245, 3, 23, 136, 56, + ]; + + let expected_address: [u8; 32] = [ + 0, 132, 78, 228, 232, 12, 252, 191, 251, 208, 23, 174, 212, 63, 254, 118, 101, 12, 78, + 228, 149, 165, 165, 63, 78, 36, 207, 250, 77, 97, 137, 145, + ]; + + assert_eq!( + address_seed, + AddressSeed::from(expected_seed), + "v2::derive_address should produce expected seed" + ); + assert_eq!( + address, expected_address, + "v2::derive_address should produce expected address" + ); + } + + /// Regression test for v2::derive_compressed_address (PDA-based derivation). 
+ #[test] + fn test_v2_derive_compressed_address() { + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, + 121, 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); // "7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz" + + let address_tree_pubkey = Pubkey::new_from_array([0u8; 32]); + + // Use a PDA-like account address + let account_address = Pubkey::new_from_array([42u8; 32]); + + let address = + v2::derive_compressed_address(&account_address, &address_tree_pubkey, &program_id); + + let expected_address: [u8; 32] = [ + 0, 105, 30, 171, 212, 105, 4, 106, 75, 153, 240, 54, 131, 59, 249, 62, 190, 30, 127, + 237, 32, 34, 95, 178, 183, 217, 64, 102, 144, 199, 78, 77, + ]; + + assert_eq!( + address, expected_address, + "v2::derive_compressed_address should produce expected address" + ); + } + + /// Regression test for v2::derive_address with different tree. + #[test] + fn test_v2_derive_address_different_tree() { + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, + 121, 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); // "7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz" + + // Non-zero address tree + let address_tree_pubkey = Pubkey::new_from_array([1u8; 32]); + + let (address, address_seed) = + v2::derive_address(&[b"foo", b"bar"], &address_tree_pubkey, &program_id); + + let expected_seed: [u8; 32] = [ + 0, 177, 134, 198, 24, 76, 116, 207, 56, 127, 189, 181, 87, 237, 154, 181, 246, 54, 131, + 21, 150, 248, 106, 75, 26, 80, 147, 245, 3, 23, 136, 56, + ]; + + let expected_address: [u8; 32] = [ + 0, 206, 50, 238, 53, 179, 169, 71, 26, 123, 239, 155, 15, 63, 61, 61, 211, 48, 90, 217, + 119, 136, 77, 242, 208, 202, 252, 217, 54, 19, 114, 55, + ]; + + assert_eq!( + address_seed, + AddressSeed::from(expected_seed), + "v2 seed should be independent of address tree" + ); + assert_eq!( + address, 
expected_address, + "v2 address should change with different tree" + ); + } + + /// Verify v1 and v2 produce DIFFERENT results for same inputs. + /// This documents the intentional difference between versions. + #[test] + fn test_v1_v2_differ() { + use light_sdk::address::v1; + + let program_id = Pubkey::new_from_array([ + 100, 107, 175, 177, 40, 13, 216, 39, 157, 127, 44, 88, 81, 65, 139, 243, 208, 214, 99, + 121, 7, 157, 114, 42, 73, 26, 197, 102, 50, 36, 40, 122, + ]); + + let seeds: &[&[u8]] = &[b"foo", b"bar"]; + + let v1_seed = v1::derive_address_seed(seeds, &program_id); + let v2_seed = v2::derive_address_seed(seeds); + + // V1 and V2 use different hashing schemes + assert_ne!( + v1_seed, v2_seed, + "v1 and v2 should produce different seeds (v1 includes program_id, v2 does not)" + ); + } +} + +// ============================================================================ +// Edge Cases +// ============================================================================ + +/// Test that byte 0 is always 0 (BN254 field size constraint). +#[test] +fn test_address_seed_first_byte_zero() { + let program_id = Pubkey::new_from_array([255u8; 32]); + + // Try various seeds to ensure first byte is always 0 + for i in 0..10 { + let seed = format!("test_seed_{}", i); + let address_seed = v1::derive_address_seed(&[seed.as_bytes()], &program_id); + assert_eq!( + address_seed.0[0], 0, + "First byte must be 0 for BN254 compatibility" + ); + } +} + +/// Test that address first byte is within BN254 field (can be non-zero but < 48). 
+#[test] +fn test_address_first_byte_bn254() { + let program_id = Pubkey::new_from_array([1u8; 32]); + let address_tree_pubkey = Pubkey::new_from_array([2u8; 32]); + + // The address derivation uses a different truncation that allows first byte < 48 + for i in 0..10 { + let seed = format!("test_seed_{}", i); + let (address, _) = + v1::derive_address(&[seed.as_bytes()], &address_tree_pubkey, &program_id); + // BN254 field modulus starts with ~48, so first byte should be < 48 + assert!( + address[0] < 48, + "First byte must be < 48 for BN254 compatibility, got {}", + address[0] + ); + } +} + +/// Test empty seeds behavior. +#[test] +fn test_empty_seeds() { + let program_id = Pubkey::new_from_array([1u8; 32]); + + let address_seed = v1::derive_address_seed(&[], &program_id); + + // Empty seeds should still produce a valid hash + assert_eq!( + address_seed.0[0], 0, + "First byte must be 0 even with empty seeds" + ); + + // Should be deterministic + let address_seed2 = v1::derive_address_seed(&[], &program_id); + assert_eq!( + address_seed, address_seed2, + "Same inputs should produce same output" + ); +} diff --git a/sdk-libs/sdk/tests/discriminator.rs b/sdk-libs/sdk/tests/discriminator.rs new file mode 100644 index 0000000000..41554a286a --- /dev/null +++ b/sdk-libs/sdk/tests/discriminator.rs @@ -0,0 +1,94 @@ +//! Tests for LightDiscriminator and AnchorDiscriminator derive macros. +//! +//! Verifies that both discriminator formats produce expected values +//! and that they differ from each other. 
+ +use light_sdk::{AnchorDiscriminator, LightDiscriminator}; + +/// Struct using Light discriminator format (SHA256("{name}")[0..8]) +#[derive(LightDiscriminator)] +pub struct LightFormatAccount; + +/// Struct using Anchor discriminator format (SHA256("account:{name}")[0..8]) +#[derive(AnchorDiscriminator)] +pub struct AnchorFormatAccount; + +/// Struct for testing both formats produce different values +#[derive(LightDiscriminator)] +pub struct TestAccount; + +/// Same name but with Anchor format to compare +#[derive(AnchorDiscriminator)] +pub struct TestAccountAnchor; + +#[test] +fn test_light_discriminator_format() { + // SHA256("LightFormatAccount")[0..8] = f9 30 5f 8c 86 2d 21 c3 + const EXPECTED: [u8; 8] = [249, 48, 95, 140, 134, 45, 33, 195]; + assert_eq!( + LightFormatAccount::LIGHT_DISCRIMINATOR, + EXPECTED, + "LightDiscriminator should use SHA256(name) format" + ); +} + +#[test] +fn test_anchor_discriminator_format() { + // SHA256("account:AnchorFormatAccount")[0..8] = f2 3b 7f 36 38 66 b8 c7 + const EXPECTED: [u8; 8] = [242, 59, 127, 54, 56, 102, 184, 199]; + assert_eq!( + AnchorFormatAccount::LIGHT_DISCRIMINATOR, + EXPECTED, + "AnchorDiscriminator should use SHA256(account:name) format" + ); +} + +#[test] +fn test_discriminators_are_different() { + // Light format: SHA256("TestAccount")[0..8] + let light_discriminator = TestAccount::LIGHT_DISCRIMINATOR; + + // Anchor format: SHA256("account:TestAccountAnchor")[0..8] + // Note: We can't derive both on the same struct, so we use a different struct name + // The key is that even if we manually computed SHA256("account:TestAccount"), + // it would differ from SHA256("TestAccount") + let anchor_discriminator = TestAccountAnchor::LIGHT_DISCRIMINATOR; + + // Verify they're different (even though they have similar names) + assert_ne!( + light_discriminator, anchor_discriminator, + "Light and Anchor discriminators should produce different values" + ); +} + +#[test] +fn test_discriminator_trait_methods() { + // 
Test that the discriminator() method returns the same value as the constant + assert_eq!( + LightFormatAccount::discriminator(), + LightFormatAccount::LIGHT_DISCRIMINATOR, + "discriminator() method should return LIGHT_DISCRIMINATOR constant" + ); + + assert_eq!( + AnchorFormatAccount::discriminator(), + AnchorFormatAccount::LIGHT_DISCRIMINATOR, + "discriminator() method should return LIGHT_DISCRIMINATOR constant" + ); +} + +#[test] +fn test_discriminator_slice() { + // Test that LIGHT_DISCRIMINATOR_SLICE matches LIGHT_DISCRIMINATOR + assert_eq!( + LightFormatAccount::LIGHT_DISCRIMINATOR_SLICE, + &LightFormatAccount::LIGHT_DISCRIMINATOR, + "LIGHT_DISCRIMINATOR_SLICE should be a slice of LIGHT_DISCRIMINATOR" + ); + + assert_eq!( + AnchorFormatAccount::LIGHT_DISCRIMINATOR_SLICE, + &AnchorFormatAccount::LIGHT_DISCRIMINATOR, + "LIGHT_DISCRIMINATOR_SLICE should be a slice of LIGHT_DISCRIMINATOR" + ); +} diff --git a/sdk-libs/sdk/tests/light_account_poseidon.rs b/sdk-libs/sdk/tests/light_account_poseidon.rs new file mode 100644 index 0000000000..3bc8e812c6 --- /dev/null +++ b/sdk-libs/sdk/tests/light_account_poseidon.rs @@ -0,0 +1,584 @@ +//! Tests for Poseidon-based LightAccount (HASH_FLAT = false). +//! +//! Each test uses a single assert_eq against a complete expected struct. +//! Tests cover: new_init, new_mut, new_empty, new_close, new_burn +//! +//! Requires the `poseidon` feature flag. + +#![cfg(feature = "poseidon")] + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_compressed_account::{ + compressed_account::PackedMerkleContext, + instruction_data::with_account_info::{CompressedAccountInfo, InAccountInfo, OutAccountInfo}, +}; +use light_sdk::{ + account::poseidon::LightAccount, + instruction::{ + account_meta::{CompressedAccountMeta, CompressedAccountMetaBurn}, + PackedStateTreeInfo, + }, + AnchorDiscriminator, LightDiscriminator, LightHasher, +}; +use solana_pubkey::Pubkey; + +/// Test struct for Poseidon hashing tests. 
+/// Fields > 31 bytes (like Pubkey) need the `#[hash]` attribute to hash them to field size. +/// Uses AnchorDiscriminator for Anchor compatibility (SHA256("account:TestPoseidonAccount")[0..8]). +#[derive( + Clone, + Debug, + Default, + LightHasher, + AnchorDiscriminator, + BorshSerialize, + BorshDeserialize, + PartialEq, +)] +pub struct TestPoseidonAccount { + #[hash] + pub owner: Pubkey, + pub counter: u64, +} + +// Hardcoded discriminator for TestPoseidonAccount (derived from AnchorDiscriminator) +// SHA256("account:TestPoseidonAccount")[0..8] +const TEST_POSEIDON_DISCRIMINATOR: [u8; 8] = [250, 202, 237, 234, 244, 147, 165, 166]; + +// Hardcoded Poseidon data hash for TestPoseidonAccount { owner: [1u8; 32], counter: 42 } +// Poseidon(hash_to_field(owner), counter) +const TEST_POSEIDON_DATA_HASH: [u8; 32] = [ + 30, 49, 141, 11, 21, 190, 7, 27, 48, 25, 227, 164, 36, 37, 140, 76, 209, 159, 198, 111, 102, + 73, 56, 44, 165, 20, 220, 53, 47, 237, 64, 203, +]; + +// ============================================================================ +// Hash Regression Test +// ============================================================================ + +/// Regression test ensuring Poseidon hashing remains stable. 
+#[test] +fn test_poseidon_hash_regression() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address: [3u8; 32], + output_state_tree_index: 0, + }; + let account_data = TestPoseidonAccount { owner, counter }; + + let account = + LightAccount::::new_mut(&program_id, &account_meta, account_data) + .expect("Failed to create LightAccount"); + + let input_info = account + .in_account_info() + .as_ref() + .expect("Should have input"); + + assert_eq!( + input_info.data_hash, TEST_POSEIDON_DATA_HASH, + "Poseidon data hash must match hardcoded value" + ); +} + +// ============================================================================ +// new_init Tests +// ============================================================================ + +/// Test new_init: creates account with output only (no input). 
+#[test] +fn test_new_init() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + let output_tree_index = 5u8; + + let mut account = LightAccount::<TestPoseidonAccount>::new_init( + &program_id, + Some(address), + output_tree_index, + ); + + // Verify no input (init accounts have no input) + assert!( + account.in_account_info().is_none(), + "Init account should have no input" + ); + + // Verify output + let expected_out = OutAccountInfo { + discriminator: TEST_POSEIDON_DISCRIMINATOR, + data_hash: [0u8; 32], // Default, will be computed on to_account_info + output_merkle_tree_index: 5, + lamports: 0, + data: vec![], + }; + assert_eq!( + *account.out_account_info().as_ref().unwrap(), + expected_out, + "OutAccountInfo should match expected" + ); +} + +// ============================================================================ +// new_mut Tests +// ============================================================================ + +/// Test new_mut: creates account with both input and output. 
+#[test] +fn test_new_mut() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 10, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 500, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address, + output_state_tree_index: 2, + }; + let account_data = TestPoseidonAccount { owner, counter }; + + let mut account = LightAccount::::new_mut( + &program_id, + &account_meta, + account_data.clone(), + ) + .expect("Failed to create LightAccount"); + + // Expected InAccountInfo + let expected_in = InAccountInfo { + discriminator: TEST_POSEIDON_DISCRIMINATOR, + data_hash: TEST_POSEIDON_DATA_HASH, + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 500, + prove_by_index: false, + }, + root_index: 10, + lamports: 0, + }; + assert_eq!( + *account.in_account_info().as_ref().unwrap(), + expected_in, + "InAccountInfo should match expected" + ); + + // Expected OutAccountInfo + let expected_out = OutAccountInfo { + discriminator: TEST_POSEIDON_DISCRIMINATOR, + data_hash: [0u8; 32], // Default, will be computed on to_account_info + output_merkle_tree_index: 2, + lamports: 0, + data: vec![], + }; + assert_eq!( + *account.out_account_info().as_ref().unwrap(), + expected_out, + "OutAccountInfo should match expected" + ); +} + +// ============================================================================ +// new_empty Tests +// ============================================================================ + +/// Test new_empty: creates account with zeroed input hash (for address-only accounts). 
+#[test] +fn test_new_empty() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 5, + prove_by_index: true, + merkle_tree_pubkey_index: 1, + queue_pubkey_index: 2, + leaf_index: 200, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address, + output_state_tree_index: 3, + }; + + let mut account = LightAccount::::new_empty(&program_id, &account_meta) + .expect("Failed to create empty LightAccount"); + + // Expected InAccountInfo with zeroed data_hash and discriminator + // Note: root_index=0 because prove_by_index=true -> get_root_index returns None -> defaults to 0 + let expected_in = InAccountInfo { + discriminator: [0u8; 8], // Zero for empty accounts + data_hash: [0u8; 32], // Zero for empty accounts + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 1, + queue_pubkey_index: 2, + leaf_index: 200, + prove_by_index: true, + }, + root_index: 0, // 0 because prove_by_index=true -> root_index ignored + lamports: 0, + }; + assert_eq!( + *account.in_account_info().as_ref().unwrap(), + expected_in, + "InAccountInfo for empty account should have zeroed data_hash and discriminator" + ); + + // Expected OutAccountInfo (discriminator is set for output) + let expected_out = OutAccountInfo { + discriminator: TEST_POSEIDON_DISCRIMINATOR, // Output has discriminator set + data_hash: [0u8; 32], + output_merkle_tree_index: 3, + lamports: 0, + data: vec![], + }; + assert_eq!( + *account.out_account_info().as_ref().unwrap(), + expected_out, + "OutAccountInfo for empty account should have discriminator set" + ); +} + +// ============================================================================ +// new_close Tests +// ============================================================================ + +/// Test new_close: creates account that will be closed (output with zeroed data). 
+#[test] +fn test_new_close() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address, + output_state_tree_index: 0, + }; + let account_data = TestPoseidonAccount { owner, counter }; + + let account = + LightAccount::::new_close(&program_id, &account_meta, account_data) + .expect("Failed to create close LightAccount"); + + // Verify to_account_info produces zeroed output + let account_info = account + .to_account_info() + .expect("Should convert to account info"); + + // Expected CompressedAccountInfo for closed account + let expected = CompressedAccountInfo { + address: Some(address), + input: Some(InAccountInfo { + discriminator: TEST_POSEIDON_DISCRIMINATOR, + data_hash: TEST_POSEIDON_DATA_HASH, + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + prove_by_index: false, + }, + root_index: 0, + lamports: 0, + }), + output: Some(OutAccountInfo { + discriminator: [0u8; 8], // Zeroed for close + data_hash: [0u8; 32], // Zeroed for close + output_merkle_tree_index: 0, + lamports: 0, + data: vec![], // Empty for close + }), + }; + assert_eq!( + account_info, expected, + "Closed account should have zeroed output data_hash and discriminator" + ); +} + +// ============================================================================ +// new_burn Tests +// ============================================================================ + +/// Test new_burn: creates account with input only (no output). 
+#[test] +fn test_new_burn() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 7, + prove_by_index: true, + merkle_tree_pubkey_index: 2, + queue_pubkey_index: 3, + leaf_index: 999, + }; + let account_meta = CompressedAccountMetaBurn { tree_info, address }; + let account_data = TestPoseidonAccount { owner, counter }; + + let mut account = + LightAccount::::new_burn(&program_id, &account_meta, account_data) + .expect("Failed to create burn LightAccount"); + + // Expected InAccountInfo + // Note: root_index=0 because prove_by_index=true -> get_root_index returns None -> defaults to 0 + let expected_in = InAccountInfo { + discriminator: TEST_POSEIDON_DISCRIMINATOR, + data_hash: TEST_POSEIDON_DATA_HASH, + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 2, + queue_pubkey_index: 3, + leaf_index: 999, + prove_by_index: true, + }, + root_index: 0, // 0 because prove_by_index=true -> root_index ignored + lamports: 0, + }; + assert_eq!( + *account.in_account_info().as_ref().unwrap(), + expected_in, + "InAccountInfo for burn should match expected" + ); + + // Verify no output (burn accounts have no output) + assert!( + account.out_account_info().is_none(), + "Burn account should have no output" + ); +} + +// ============================================================================ +// to_account_info Tests +// ============================================================================ + +/// Test to_account_info for normal mutable account. 
+#[test] +fn test_to_account_info_mut() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address, + output_state_tree_index: 5, + }; + let account_data = TestPoseidonAccount { owner, counter }; + + let account = LightAccount::::new_mut( + &program_id, + &account_meta, + account_data.clone(), + ) + .expect("Failed to create LightAccount"); + + let account_info = account + .to_account_info() + .expect("Should convert to account info"); + + // Expected serialized data + let expected_data = account_data.try_to_vec().expect("Should serialize"); + + // Expected CompressedAccountInfo + let expected = CompressedAccountInfo { + address: Some(address), + input: Some(InAccountInfo { + discriminator: TEST_POSEIDON_DISCRIMINATOR, + data_hash: TEST_POSEIDON_DATA_HASH, + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + prove_by_index: false, + }, + root_index: 0, + lamports: 0, + }), + output: Some(OutAccountInfo { + discriminator: TEST_POSEIDON_DISCRIMINATOR, + data_hash: TEST_POSEIDON_DATA_HASH, // Same hash for unchanged data + output_merkle_tree_index: 5, + lamports: 0, + data: expected_data, + }), + }; + assert_eq!( + account_info, expected, + "to_account_info should produce expected CompressedAccountInfo" + ); +} + +// ============================================================================ +// Helper Method Tests +// ============================================================================ + +/// Test discriminator() method returns correct hardcoded value. 
+#[test] +fn test_discriminator_method() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let account = LightAccount::::new_init(&program_id, None, 0); + + assert_eq!( + *account.discriminator(), + TEST_POSEIDON_DISCRIMINATOR, + "discriminator() should return hardcoded discriminator" + ); +} + +/// Test lamports() and lamports_mut() methods. +#[test] +fn test_lamports_methods() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = light_sdk::instruction::account_meta::CompressedAccountMetaWithLamports { + tree_info, + lamports: 1000, + address: [3u8; 32], + output_state_tree_index: 0, + }; + + let mut account = LightAccount::::new_mut( + &program_id, + &account_meta, + TestPoseidonAccount::default(), + ) + .expect("Failed to create LightAccount"); + + assert_eq!(account.lamports(), 1000, "Initial lamports should be 1000"); + + *account.lamports_mut() = 2000; + assert_eq!( + account.lamports(), + 2000, + "Lamports should be updated to 2000" + ); +} + +/// Test Deref and DerefMut to access inner account data. 
+#[test] +fn test_deref() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address: [3u8; 32], + output_state_tree_index: 0, + }; + + let mut account = LightAccount::::new_mut( + &program_id, + &account_meta, + TestPoseidonAccount { owner, counter }, + ) + .expect("Failed to create LightAccount"); + + // Test Deref - access inner fields + assert_eq!(account.owner, owner, "Deref should give access to owner"); + assert_eq!( + account.counter, counter, + "Deref should give access to counter" + ); + + // Test DerefMut - modify inner fields + account.counter = 100; + assert_eq!( + account.counter, 100, + "DerefMut should allow modifying counter" + ); +} + +/// Test remove_data functionality. +#[test] +fn test_remove_data() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address: [3u8; 32], + output_state_tree_index: 0, + }; + + let mut account = LightAccount::::new_mut( + &program_id, + &account_meta, + TestPoseidonAccount::default(), + ) + .expect("Failed to create LightAccount"); + + account.remove_data(); + + let account_info = account + .to_account_info() + .expect("Should convert to account info"); + + let output = account_info.output.expect("Should have output"); + + // After remove_data, output should have zeroed hash and discriminator + let expected_output = OutAccountInfo { + discriminator: [0u8; 8], + data_hash: [0u8; 32], + output_merkle_tree_index: 0, + lamports: 0, + data: vec![], + }; + assert_eq!( + output, expected_output, + "Output 
after remove_data should have zeroed data_hash and discriminator" + ); +} diff --git a/sdk-libs/sdk/tests/light_account_sha.rs b/sdk-libs/sdk/tests/light_account_sha.rs new file mode 100644 index 0000000000..8029580fd0 --- /dev/null +++ b/sdk-libs/sdk/tests/light_account_sha.rs @@ -0,0 +1,515 @@ +//! Tests for SHA256-based LightAccount (HASH_FLAT = true). +//! +//! Each test uses a single assert_eq against a complete expected struct. +//! Tests cover: new_init, new_mut, new_empty, new_close, new_burn + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_compressed_account::{ + compressed_account::PackedMerkleContext, + instruction_data::with_account_info::{CompressedAccountInfo, InAccountInfo, OutAccountInfo}, +}; +use light_sdk::{ + account::sha::LightAccount, + instruction::{ + account_meta::{CompressedAccountMeta, CompressedAccountMetaBurn}, + PackedStateTreeInfo, + }, + AnchorDiscriminator, LightDiscriminator, +}; +use solana_pubkey::Pubkey; + +/// Test struct for SHA256 hashing tests. +/// Uses AnchorDiscriminator for Anchor compatibility (SHA256("account:TestShaAccount")[0..8]). 
+#[derive( + Clone, Debug, Default, AnchorDiscriminator, BorshSerialize, BorshDeserialize, PartialEq, +)] +pub struct TestShaAccount { + pub owner: Pubkey, + pub counter: u64, +} + +// Hardcoded discriminator for TestShaAccount (derived from AnchorDiscriminator) +// SHA256("account:TestShaAccount")[0..8] +const TEST_SHA_DISCRIMINATOR: [u8; 8] = [133, 206, 241, 201, 32, 17, 165, 67]; + +// Hardcoded SHA256 data hash for TestShaAccount { owner: [1u8; 32], counter: 42 } +// SHA256(borsh_serialize(account)) with byte[0] = 0 for BN254 compatibility +const TEST_SHA_DATA_HASH: [u8; 32] = [ + 0, 9, 197, 147, 16, 241, 193, 207, 83, 232, 26, 162, 175, 208, 179, 15, 121, 149, 42, 178, 122, + 153, 187, 124, 40, 143, 206, 74, 247, 35, 196, 181, +]; + +// ============================================================================ +// Hash Regression Test +// ============================================================================ + +/// Regression test ensuring SHA256 hashing remains stable. +#[test] +fn test_sha_hash_regression() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address: [3u8; 32], + output_state_tree_index: 0, + }; + let account_data = TestShaAccount { owner, counter }; + + let account = LightAccount::::new_mut(&program_id, &account_meta, account_data) + .expect("Failed to create LightAccount"); + + let input_info = account + .in_account_info() + .as_ref() + .expect("Should have input"); + + assert_eq!( + input_info.data_hash, TEST_SHA_DATA_HASH, + "SHA256 data hash must match hardcoded value" + ); +} + +// ============================================================================ +// new_init Tests +// 
============================================================================ + +/// Test new_init: creates account with output only (no input). +#[test] +fn test_new_init() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + let output_tree_index = 5u8; + + let mut account = + LightAccount::::new_init(&program_id, Some(address), output_tree_index); + + // Verify no input (init accounts have no input) + assert!( + account.in_account_info().is_none(), + "Init account should have no input" + ); + + // Verify output + let expected_out = OutAccountInfo { + discriminator: TEST_SHA_DISCRIMINATOR, + data_hash: [0u8; 32], // Default, will be computed on to_account_info + output_merkle_tree_index: 5, + lamports: 0, + data: vec![], + }; + assert_eq!( + *account.out_account_info().as_ref().unwrap(), + expected_out, + "OutAccountInfo should match expected" + ); +} + +// ============================================================================ +// new_mut Tests +// ============================================================================ + +/// Test new_mut: creates account with both input and output. 
+#[test] +fn test_new_mut() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 10, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 500, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address, + output_state_tree_index: 2, + }; + let account_data = TestShaAccount { owner, counter }; + + let mut account = + LightAccount::::new_mut(&program_id, &account_meta, account_data.clone()) + .expect("Failed to create LightAccount"); + + // Expected InAccountInfo + let expected_in = InAccountInfo { + discriminator: TEST_SHA_DISCRIMINATOR, + data_hash: TEST_SHA_DATA_HASH, + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 500, + prove_by_index: false, + }, + root_index: 10, + lamports: 0, + }; + assert_eq!( + *account.in_account_info().as_ref().unwrap(), + expected_in, + "InAccountInfo should match expected" + ); + + // Expected OutAccountInfo + let expected_out = OutAccountInfo { + discriminator: TEST_SHA_DISCRIMINATOR, + data_hash: [0u8; 32], // Default, will be computed on to_account_info + output_merkle_tree_index: 2, + lamports: 0, + data: vec![], + }; + assert_eq!( + *account.out_account_info().as_ref().unwrap(), + expected_out, + "OutAccountInfo should match expected" + ); +} + +// ============================================================================ +// new_empty Tests +// ============================================================================ + +/// Test new_empty: creates account with zeroed input hash (for address-only accounts). 
+#[test] +fn test_new_empty() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 5, + prove_by_index: true, + merkle_tree_pubkey_index: 1, + queue_pubkey_index: 2, + leaf_index: 200, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address, + output_state_tree_index: 3, + }; + + let mut account = LightAccount::::new_empty(&program_id, &account_meta) + .expect("Failed to create empty LightAccount"); + + // Expected InAccountInfo with zeroed data_hash and discriminator + // Note: root_index=0 because prove_by_index=true -> get_root_index returns None -> defaults to 0 + let expected_in = InAccountInfo { + discriminator: [0u8; 8], // Zero for empty accounts + data_hash: [0u8; 32], // Zero for empty accounts + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 1, + queue_pubkey_index: 2, + leaf_index: 200, + prove_by_index: true, + }, + root_index: 0, // 0 because prove_by_index=true -> root_index ignored + lamports: 0, + }; + assert_eq!( + *account.in_account_info().as_ref().unwrap(), + expected_in, + "InAccountInfo for empty account should have zeroed data_hash and discriminator" + ); + + // Expected OutAccountInfo (discriminator is set for output) + let expected_out = OutAccountInfo { + discriminator: TEST_SHA_DISCRIMINATOR, // Output has discriminator set + data_hash: [0u8; 32], + output_merkle_tree_index: 3, + lamports: 0, + data: vec![], + }; + assert_eq!( + *account.out_account_info().as_ref().unwrap(), + expected_out, + "OutAccountInfo for empty account should have discriminator set" + ); +} + +// ============================================================================ +// new_close Tests +// ============================================================================ + +/// Test new_close: creates account that will be closed (output with zeroed data). 
+#[test] +fn test_new_close() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address, + output_state_tree_index: 0, + }; + let account_data = TestShaAccount { owner, counter }; + + let account = + LightAccount::::new_close(&program_id, &account_meta, account_data) + .expect("Failed to create close LightAccount"); + + // Verify to_account_info produces zeroed output + let account_info = account + .to_account_info() + .expect("Should convert to account info"); + + // Expected CompressedAccountInfo for closed account + let expected = CompressedAccountInfo { + address: Some(address), + input: Some(InAccountInfo { + discriminator: TEST_SHA_DISCRIMINATOR, + data_hash: TEST_SHA_DATA_HASH, + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + prove_by_index: false, + }, + root_index: 0, + lamports: 0, + }), + output: Some(OutAccountInfo { + discriminator: [0u8; 8], // Zeroed for close + data_hash: [0u8; 32], // Zeroed for close + output_merkle_tree_index: 0, + lamports: 0, + data: vec![], // Empty for close + }), + }; + assert_eq!( + account_info, expected, + "Closed account should have zeroed output data_hash and discriminator" + ); +} + +// ============================================================================ +// new_burn Tests +// ============================================================================ + +/// Test new_burn: creates account with input only (no output). 
+#[test] +fn test_new_burn() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 7, + prove_by_index: true, + merkle_tree_pubkey_index: 2, + queue_pubkey_index: 3, + leaf_index: 999, + }; + let account_meta = CompressedAccountMetaBurn { tree_info, address }; + let account_data = TestShaAccount { owner, counter }; + + let mut account = + LightAccount::::new_burn(&program_id, &account_meta, account_data) + .expect("Failed to create burn LightAccount"); + + // Expected InAccountInfo + // Note: root_index=0 because prove_by_index=true -> get_root_index returns None -> defaults to 0 + let expected_in = InAccountInfo { + discriminator: TEST_SHA_DISCRIMINATOR, + data_hash: TEST_SHA_DATA_HASH, + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 2, + queue_pubkey_index: 3, + leaf_index: 999, + prove_by_index: true, + }, + root_index: 0, // 0 because prove_by_index=true -> root_index ignored + lamports: 0, + }; + assert_eq!( + *account.in_account_info().as_ref().unwrap(), + expected_in, + "InAccountInfo for burn should match expected" + ); + + // Verify no output (burn accounts have no output) + assert!( + account.out_account_info().is_none(), + "Burn account should have no output" + ); +} + +// ============================================================================ +// to_account_info Tests +// ============================================================================ + +/// Test to_account_info for normal mutable account. 
+#[test] +fn test_to_account_info_mut() { + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let program_id = Pubkey::new_from_array([2u8; 32]); + let address = [3u8; 32]; + + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address, + output_state_tree_index: 5, + }; + let account_data = TestShaAccount { owner, counter }; + + let account = + LightAccount::::new_mut(&program_id, &account_meta, account_data.clone()) + .expect("Failed to create LightAccount"); + + let account_info = account + .to_account_info() + .expect("Should convert to account info"); + + // Expected serialized data + let expected_data = account_data.try_to_vec().expect("Should serialize"); + + // Expected CompressedAccountInfo + let expected = CompressedAccountInfo { + address: Some(address), + input: Some(InAccountInfo { + discriminator: TEST_SHA_DISCRIMINATOR, + data_hash: TEST_SHA_DATA_HASH, + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + prove_by_index: false, + }, + root_index: 0, + lamports: 0, + }), + output: Some(OutAccountInfo { + discriminator: TEST_SHA_DISCRIMINATOR, + data_hash: TEST_SHA_DATA_HASH, // Same hash for unchanged data + output_merkle_tree_index: 5, + lamports: 0, + data: expected_data, + }), + }; + assert_eq!( + account_info, expected, + "to_account_info should produce expected CompressedAccountInfo" + ); +} + +// ============================================================================ +// Helper Method Tests +// ============================================================================ + +/// Test discriminator() method returns correct hardcoded value. 
+#[test] +fn test_discriminator_method() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let account = LightAccount::::new_init(&program_id, None, 0); + + assert_eq!( + *account.discriminator(), + TEST_SHA_DISCRIMINATOR, + "discriminator() should return hardcoded discriminator" + ); +} + +/// Test lamports() and lamports_mut() methods. +#[test] +fn test_lamports_methods() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = light_sdk::instruction::account_meta::CompressedAccountMetaWithLamports { + tree_info, + lamports: 1000, + address: [3u8; 32], + output_state_tree_index: 0, + }; + + let mut account = LightAccount::::new_mut( + &program_id, + &account_meta, + TestShaAccount::default(), + ) + .expect("Failed to create LightAccount"); + + assert_eq!(account.lamports(), 1000, "Initial lamports should be 1000"); + + *account.lamports_mut() = 2000; + assert_eq!( + account.lamports(), + 2000, + "Lamports should be updated to 2000" + ); +} + +/// Test Deref and DerefMut to access inner account data. 
+#[test] +fn test_deref() { + let program_id = Pubkey::new_from_array([2u8; 32]); + let owner = Pubkey::new_from_array([1u8; 32]); + let counter = 42u64; + let tree_info = PackedStateTreeInfo { + root_index: 0, + prove_by_index: false, + merkle_tree_pubkey_index: 0, + queue_pubkey_index: 1, + leaf_index: 100, + }; + let account_meta = CompressedAccountMeta { + tree_info, + address: [3u8; 32], + output_state_tree_index: 0, + }; + + let mut account = LightAccount::::new_mut( + &program_id, + &account_meta, + TestShaAccount { owner, counter }, + ) + .expect("Failed to create LightAccount"); + + // Test Deref - access inner fields + assert_eq!(account.owner, owner, "Deref should give access to owner"); + assert_eq!( + account.counter, counter, + "Deref should give access to counter" + ); + + // Test DerefMut - modify inner fields + account.counter = 100; + assert_eq!( + account.counter, 100, + "DerefMut should allow modifying counter" + ); +} diff --git a/sdk-libs/token-sdk/Cargo.toml b/sdk-libs/token-sdk/Cargo.toml index 1d673711ee..a62ac7ca21 100644 --- a/sdk-libs/token-sdk/Cargo.toml +++ b/sdk-libs/token-sdk/Cargo.toml @@ -7,9 +7,8 @@ license = "Apache-2.0" repository = "https://github.com/Lightprotocol/light-protocol" [features] -default = ["anchor-discriminator"] +default = [] v1 = ["light-compressed-token-sdk/v1"] -anchor-discriminator = ["light-sdk/anchor-discriminator", "light-compressed-token-sdk/anchor-discriminator"] anchor = ["anchor-lang", "light-token-types/anchor", "light-token-interface/anchor", "light-compressed-token-sdk/anchor", "dep:light-sdk-macros"] # idl-build feature enables IDL generation for client-side tooling and SDK generation idl-build = ["anchor", "anchor-lang/idl-build", "light-sdk/idl-build", "light-compressed-token-sdk/idl-build"] diff --git a/sdk-libs/token-sdk/src/anchor.rs b/sdk-libs/token-sdk/src/anchor.rs index d067a5a80c..9f1d968010 100644 --- a/sdk-libs/token-sdk/src/anchor.rs +++ b/sdk-libs/token-sdk/src/anchor.rs @@ -25,6 
+25,7 @@ pub use light_sdk_macros::{ // Attribute macros light_program, // Derive macros + AnchorDiscriminator, CompressAs, Compressible, CompressiblePack, diff --git a/sdk-libs/token-sdk/tests/address_derivation.rs b/sdk-libs/token-sdk/tests/address_derivation.rs new file mode 100644 index 0000000000..183befcb3e --- /dev/null +++ b/sdk-libs/token-sdk/tests/address_derivation.rs @@ -0,0 +1,182 @@ +//! Tests for ATA and SPL interface PDA derivation functions. + +use light_token::instruction::{ + derive_associated_token_account, get_associated_token_address, + get_associated_token_address_and_bump, get_spl_interface_pda_and_bump, LIGHT_TOKEN_PROGRAM_ID, +}; +use solana_pubkey::Pubkey; + +/// Verify ATA derivation produces a valid PDA for a single owner/mint pair. +#[test] +fn test_derive_ata_single_owner_mint() { + let owner = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + + let (ata, bump) = derive_associated_token_account(&owner, &mint); + + // Verify the PDA is valid by checking we can recreate it + let (recreated_ata, recreated_bump) = Pubkey::find_program_address( + &[ + owner.as_ref(), + LIGHT_TOKEN_PROGRAM_ID.as_ref(), + mint.as_ref(), + ], + &LIGHT_TOKEN_PROGRAM_ID, + ); + + assert_eq!(ata, recreated_ata); + assert_eq!(bump, recreated_bump); + + // ATA should not equal owner, mint, or program ID + assert_ne!(ata, owner); + assert_ne!(ata, mint); + assert_ne!(ata, LIGHT_TOKEN_PROGRAM_ID); +} + +/// Verify different owners produce different ATAs for the same mint. 
+#[test] +fn test_derive_ata_different_owners_different_result() { + let owner1 = Pubkey::new_unique(); + let owner2 = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + + let (ata1, _bump1) = derive_associated_token_account(&owner1, &mint); + let (ata2, _bump2) = derive_associated_token_account(&owner2, &mint); + + // Different owners should produce different ATAs + assert_ne!( + ata1, ata2, + "Different owners should produce different ATAs for the same mint" + ); +} + +/// Verify different mints produce different ATAs for the same owner. +#[test] +fn test_derive_ata_different_mints_different_result() { + let owner = Pubkey::new_unique(); + let mint1 = Pubkey::new_unique(); + let mint2 = Pubkey::new_unique(); + + let (ata1, _bump1) = derive_associated_token_account(&owner, &mint1); + let (ata2, _bump2) = derive_associated_token_account(&owner, &mint2); + + // Different mints should produce different ATAs + assert_ne!( + ata1, ata2, + "Different mints should produce different ATAs for the same owner" + ); +} + +/// Verify same inputs always produce same bump (deterministic derivation). +#[test] +fn test_derive_ata_bump_consistency() { + let owner = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + + // Derive multiple times + let (ata1, bump1) = derive_associated_token_account(&owner, &mint); + let (ata2, bump2) = derive_associated_token_account(&owner, &mint); + let (ata3, bump3) = derive_associated_token_account(&owner, &mint); + + // All derivations should match + assert_eq!(ata1, ata2); + assert_eq!(ata2, ata3); + assert_eq!(bump1, bump2); + assert_eq!(bump2, bump3); +} + +/// Verify SPL interface PDA derivation works correctly. 
+#[test] +fn test_spl_interface_pda_derivation() { + let mint = Pubkey::new_unique(); + + let (pda, bump) = get_spl_interface_pda_and_bump(&mint); + + // Verify the PDA is valid by checking we can recreate it + let pool_seed: &[u8] = b"pool"; + let (recreated_pda, recreated_bump) = + Pubkey::find_program_address(&[pool_seed, mint.as_ref()], &LIGHT_TOKEN_PROGRAM_ID); + + assert_eq!(pda, recreated_pda); + assert_eq!(bump, recreated_bump); + + // PDA should not equal the mint or program ID + assert_ne!(pda, mint); + assert_ne!(pda, LIGHT_TOKEN_PROGRAM_ID); +} + +/// Verify different mints produce different SPL interface PDAs. +#[test] +fn test_spl_interface_pda_different_mints() { + let mint1 = Pubkey::new_unique(); + let mint2 = Pubkey::new_unique(); + + let (pda1, _bump1) = get_spl_interface_pda_and_bump(&mint1); + let (pda2, _bump2) = get_spl_interface_pda_and_bump(&mint2); + + assert_ne!( + pda1, pda2, + "Different mints should produce different SPL interface PDAs" + ); +} + +/// Verify SPL interface PDA derivation is deterministic. +#[test] +fn test_spl_interface_pda_consistency() { + let mint = Pubkey::new_unique(); + + let (pda1, bump1) = get_spl_interface_pda_and_bump(&mint); + let (pda2, bump2) = get_spl_interface_pda_and_bump(&mint); + + assert_eq!(pda1, pda2, "Same mint should always produce same PDA"); + assert_eq!(bump1, bump2, "Same mint should always produce same bump"); +} + +/// Verify get_associated_token_address matches derive_associated_token_account. 
+#[test] +fn test_get_associated_token_address_matches_with_bump() { + let owner = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + + // Get address without bump + let ata = get_associated_token_address(&owner, &mint); + + // Get address with bump + let (ata_with_bump, bump) = get_associated_token_address_and_bump(&owner, &mint); + + // Both should match the derive function + let (derived_ata, derived_bump) = derive_associated_token_account(&owner, &mint); + + assert_eq!( + ata, ata_with_bump, + "get_associated_token_address should match get_associated_token_address_and_bump" + ); + assert_eq!( + ata, derived_ata, + "get_associated_token_address should match derive_associated_token_account" + ); + assert_eq!(bump, derived_bump, "Bump from get_associated_token_address_and_bump should match derive_associated_token_account"); +} + +/// Verify that known fixed pubkeys produce deterministic ATAs. +/// This tests that the derivation uses the correct seeds in the correct order. +#[test] +fn test_derive_ata_seed_order() { + // Use fixed pubkeys to ensure deterministic testing + let owner = Pubkey::new_from_array([1u8; 32]); + let mint = Pubkey::new_from_array([2u8; 32]); + + let (ata, _bump) = derive_associated_token_account(&owner, &mint); + + // Verify with swapped order produces different result (confirms seed order matters) + let (ata_swapped, _) = Pubkey::find_program_address( + &[ + mint.as_ref(), + LIGHT_TOKEN_PROGRAM_ID.as_ref(), + owner.as_ref(), + ], + &LIGHT_TOKEN_PROGRAM_ID, + ); + + assert_ne!(ata, ata_swapped, "Seed order should affect the derived PDA"); +} diff --git a/sdk-libs/token-sdk/tests/constants_regression.rs b/sdk-libs/token-sdk/tests/constants_regression.rs new file mode 100644 index 0000000000..d9a9fa9488 --- /dev/null +++ b/sdk-libs/token-sdk/tests/constants_regression.rs @@ -0,0 +1,92 @@ +//! Hardcoded regression tests for protocol constants. +//! +//! These tests ensure that critical protocol constants remain stable across versions. 
+//! Any changes to these values would break compatibility with existing deployments. + +use std::str::FromStr; + +use light_token::constants::{ + config_pda, id, rent_sponsor_pda, COMPRESSIBLE_CONFIG_V1, LIGHT_TOKEN_CPI_AUTHORITY, + LIGHT_TOKEN_PROGRAM_ID, RENT_SPONSOR_V1, SPL_TOKEN_2022_PROGRAM_ID, SPL_TOKEN_PROGRAM_ID, +}; +use solana_pubkey::Pubkey; + +#[test] +fn test_light_token_cpi_authority_hardcoded() { + let expected = Pubkey::from_str("GXtd2izAiMJPwMEjfgTRH3d7k9mjn4Jq3JrWFv9gySYy").unwrap(); + assert_eq!( + LIGHT_TOKEN_CPI_AUTHORITY, expected, + "LIGHT_TOKEN_CPI_AUTHORITY must match expected value" + ); +} + +#[test] +fn test_compressible_config_v1_hardcoded() { + let expected = Pubkey::from_str("ACXg8a7VaqecBWrSbdu73W4Pg9gsqXJ3EXAqkHyhvVXg").unwrap(); + assert_eq!( + COMPRESSIBLE_CONFIG_V1, expected, + "COMPRESSIBLE_CONFIG_V1 must match expected value" + ); +} + +#[test] +fn test_rent_sponsor_v1_hardcoded() { + let expected = Pubkey::from_str("r18WwUxfG8kQ69bQPAB2jV6zGNKy3GosFGctjQoV4ti").unwrap(); + assert_eq!( + RENT_SPONSOR_V1, expected, + "RENT_SPONSOR_V1 must match expected value" + ); +} + +#[test] +fn test_light_token_program_id_hardcoded() { + let expected = Pubkey::from_str("cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m").unwrap(); + assert_eq!( + LIGHT_TOKEN_PROGRAM_ID, expected, + "LIGHT_TOKEN_PROGRAM_ID must match expected value" + ); +} + +#[test] +fn test_config_pda_returns_expected_value() { + assert_eq!( + config_pda(), + COMPRESSIBLE_CONFIG_V1, + "config_pda() must return COMPRESSIBLE_CONFIG_V1" + ); +} + +#[test] +fn test_rent_sponsor_pda_returns_expected_value() { + assert_eq!( + rent_sponsor_pda(), + RENT_SPONSOR_V1, + "rent_sponsor_pda() must return RENT_SPONSOR_V1" + ); +} + +#[test] +fn test_spl_token_program_ids_hardcoded() { + let expected_spl_token = + Pubkey::from_str("TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA").unwrap(); + let expected_spl_token_2022 = + 
Pubkey::from_str("TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb").unwrap(); + + assert_eq!( + SPL_TOKEN_PROGRAM_ID, expected_spl_token, + "SPL_TOKEN_PROGRAM_ID must match expected value" + ); + assert_eq!( + SPL_TOKEN_2022_PROGRAM_ID, expected_spl_token_2022, + "SPL_TOKEN_2022_PROGRAM_ID must match expected value" + ); +} + +#[test] +fn test_id_function_returns_program_id() { + assert_eq!( + id(), + LIGHT_TOKEN_PROGRAM_ID, + "id() must return LIGHT_TOKEN_PROGRAM_ID" + ); +} diff --git a/sdk-libs/token-sdk/tests/error_codes.rs b/sdk-libs/token-sdk/tests/error_codes.rs new file mode 100644 index 0000000000..708fe91a1d --- /dev/null +++ b/sdk-libs/token-sdk/tests/error_codes.rs @@ -0,0 +1,111 @@ +//! Tests for error code stability. +//! +//! These tests ensure that error codes remain stable across versions. +//! Changing error codes would break client-side error handling. + +use std::collections::HashSet; + +use light_token::error::LightTokenError; + +#[test] +fn test_error_codes_start_at_17500() { + let first_error: u32 = LightTokenError::SplInterfaceRequired.into(); + assert_eq!( + first_error, 17500, + "First error code must be 17500 to avoid conflicts with TokenSdkError" + ); +} + +#[test] +fn test_error_codes_unique() { + let codes: Vec = vec![ + LightTokenError::SplInterfaceRequired.into(), + LightTokenError::IncompleteSplInterface.into(), + LightTokenError::UseRegularSplTransfer.into(), + LightTokenError::CannotDetermineAccountType.into(), + LightTokenError::MissingMintAccount.into(), + LightTokenError::MissingSplTokenProgram.into(), + LightTokenError::MissingSplInterfacePda.into(), + LightTokenError::MissingSplInterfacePdaBump.into(), + LightTokenError::SplTokenProgramMismatch.into(), + LightTokenError::InvalidAccountData.into(), + LightTokenError::SerializationError.into(), + ]; + + let unique_codes: HashSet = codes.iter().copied().collect(); + + assert_eq!( + codes.len(), + unique_codes.len(), + "All error codes must be unique" + ); +} + +#[test] +fn 
test_spl_interface_required_is_17500() { + let code: u32 = LightTokenError::SplInterfaceRequired.into(); + assert_eq!(code, 17500, "SplInterfaceRequired must be 17500"); +} + +#[test] +fn test_error_display_messages() { + // Test each error's display message is non-empty + assert!( + !LightTokenError::SplInterfaceRequired.to_string().is_empty(), + "SplInterfaceRequired must have a non-empty display message" + ); + assert!( + !LightTokenError::IncompleteSplInterface + .to_string() + .is_empty(), + "IncompleteSplInterface must have a non-empty display message" + ); + assert!( + !LightTokenError::UseRegularSplTransfer + .to_string() + .is_empty(), + "UseRegularSplTransfer must have a non-empty display message" + ); + assert!( + !LightTokenError::CannotDetermineAccountType + .to_string() + .is_empty(), + "CannotDetermineAccountType must have a non-empty display message" + ); + assert!( + !LightTokenError::MissingMintAccount.to_string().is_empty(), + "MissingMintAccount must have a non-empty display message" + ); + assert!( + !LightTokenError::MissingSplTokenProgram + .to_string() + .is_empty(), + "MissingSplTokenProgram must have a non-empty display message" + ); + assert!( + !LightTokenError::MissingSplInterfacePda + .to_string() + .is_empty(), + "MissingSplInterfacePda must have a non-empty display message" + ); + assert!( + !LightTokenError::MissingSplInterfacePdaBump + .to_string() + .is_empty(), + "MissingSplInterfacePdaBump must have a non-empty display message" + ); + assert!( + !LightTokenError::SplTokenProgramMismatch + .to_string() + .is_empty(), + "SplTokenProgramMismatch must have a non-empty display message" + ); + assert!( + !LightTokenError::InvalidAccountData.to_string().is_empty(), + "InvalidAccountData must have a non-empty display message" + ); + assert!( + !LightTokenError::SerializationError.to_string().is_empty(), + "SerializationError must have a non-empty display message" + ); +} diff --git a/sdk-libs/token-sdk/tests/instruction_close.rs 
b/sdk-libs/token-sdk/tests/instruction_close.rs new file mode 100644 index 0000000000..537333c45a --- /dev/null +++ b/sdk-libs/token-sdk/tests/instruction_close.rs @@ -0,0 +1,67 @@ +use light_token::instruction::{CloseAccount, LIGHT_TOKEN_PROGRAM_ID, RENT_SPONSOR}; +use solana_instruction::{AccountMeta, Instruction}; +use solana_pubkey::Pubkey; + +/// Test CloseAccount instruction with default RENT_SPONSOR. +/// Verifies: program_id, all 4 accounts (pubkeys + writeability + signedness), and data (discriminator 9). +#[test] +fn test_close_account_instruction() { + // Use deterministic pubkeys for regression testing + let account = Pubkey::new_from_array([1u8; 32]); + let destination = Pubkey::new_from_array([2u8; 32]); + let owner = Pubkey::new_from_array([3u8; 32]); + + let instruction = CloseAccount::new(LIGHT_TOKEN_PROGRAM_ID, account, destination, owner) + .instruction() + .expect("Failed to create instruction"); + + // Hardcoded expected instruction + let expected = Instruction { + program_id: LIGHT_TOKEN_PROGRAM_ID, + accounts: vec![ + AccountMeta::new(account, false), // account: writable, not signer + AccountMeta::new(destination, false), // destination: writable, not signer + AccountMeta::new(owner, true), // owner: writable, signer + AccountMeta::new(RENT_SPONSOR, false), // rent_sponsor: writable, not signer + ], + data: vec![9u8], // CloseAccount discriminator + }; + + assert_eq!( + instruction, expected, + "CloseAccount instruction should match expected" + ); +} + +/// Test CloseAccount instruction with custom rent sponsor. +/// Verifies the rent_sponsor account is replaced with the custom one. 
+#[test] +fn test_close_account_custom_rent_sponsor() { + // Use deterministic pubkeys for regression testing + let account = Pubkey::new_from_array([1u8; 32]); + let destination = Pubkey::new_from_array([2u8; 32]); + let owner = Pubkey::new_from_array([3u8; 32]); + let custom_sponsor = Pubkey::new_from_array([4u8; 32]); + + let instruction = CloseAccount::new(LIGHT_TOKEN_PROGRAM_ID, account, destination, owner) + .custom_rent_sponsor(custom_sponsor) + .instruction() + .expect("Failed to create instruction"); + + // Hardcoded expected instruction with custom rent sponsor + let expected = Instruction { + program_id: LIGHT_TOKEN_PROGRAM_ID, + accounts: vec![ + AccountMeta::new(account, false), // account: writable, not signer + AccountMeta::new(destination, false), // destination: writable, not signer + AccountMeta::new(owner, true), // owner: writable, signer + AccountMeta::new(custom_sponsor, false), // custom_sponsor: writable, not signer + ], + data: vec![9u8], // CloseAccount discriminator + }; + + assert_eq!( + instruction, expected, + "CloseAccount instruction with custom rent sponsor should match expected" + ); +} diff --git a/sdk-libs/token-sdk/tests/instruction_transfer.rs b/sdk-libs/token-sdk/tests/instruction_transfer.rs new file mode 100644 index 0000000000..7752bb14fe --- /dev/null +++ b/sdk-libs/token-sdk/tests/instruction_transfer.rs @@ -0,0 +1,181 @@ +use light_token::instruction::{Transfer, LIGHT_TOKEN_PROGRAM_ID}; +use solana_instruction::{AccountMeta, Instruction}; +use solana_pubkey::Pubkey; + +/// Test Transfer instruction with no max_top_up or fee_payer. +/// Authority is readonly signer since it doesn't need to pay for top-ups. 
+#[test] +fn test_transfer_basic() { + let source = Pubkey::new_from_array([1u8; 32]); + let destination = Pubkey::new_from_array([2u8; 32]); + let authority = Pubkey::new_from_array([3u8; 32]); + + let instruction = Transfer { + source, + destination, + amount: 100, + authority, + max_top_up: None, + fee_payer: None, + } + .instruction() + .expect("Failed to create instruction"); + + // Hardcoded expected instruction + // - authority is readonly (no max_top_up) + // - data: discriminator (3) + amount (100 as le u64) = 9 bytes + let expected = Instruction { + program_id: LIGHT_TOKEN_PROGRAM_ID, + accounts: vec![ + AccountMeta::new(source, false), // source: writable, not signer + AccountMeta::new(destination, false), // destination: writable, not signer + AccountMeta::new_readonly(authority, true), // authority: readonly, signer + AccountMeta::new_readonly(Pubkey::default(), false), // system_program: readonly, not signer + ], + data: vec![ + 3u8, // Transfer discriminator + 100, 0, 0, 0, 0, 0, 0, 0, // amount: 100 as little-endian u64 + ], + }; + + assert_eq!( + instruction, expected, + "Transfer instruction should match expected" + ); +} + +/// Test Transfer instruction with max_top_up set (no fee_payer). +/// Authority becomes writable to pay for potential top-ups. +/// Data includes max_top_up as 2 extra bytes. 
+#[test] +fn test_transfer_with_max_top_up() { + let source = Pubkey::new_from_array([1u8; 32]); + let destination = Pubkey::new_from_array([2u8; 32]); + let authority = Pubkey::new_from_array([3u8; 32]); + + let instruction = Transfer { + source, + destination, + amount: 100, + authority, + max_top_up: Some(500), + fee_payer: None, + } + .instruction() + .expect("Failed to create instruction"); + + // Hardcoded expected instruction + // - authority is writable (max_top_up set, no fee_payer -> authority pays) + // - data: discriminator (3) + amount (8 bytes) + max_top_up (2 bytes) = 11 bytes + let expected = Instruction { + program_id: LIGHT_TOKEN_PROGRAM_ID, + accounts: vec![ + AccountMeta::new(source, false), // source: writable, not signer + AccountMeta::new(destination, false), // destination: writable, not signer + AccountMeta::new(authority, true), // authority: writable, signer (pays for top-ups) + AccountMeta::new_readonly(Pubkey::default(), false), // system_program: readonly, not signer + ], + data: vec![ + 3u8, // Transfer discriminator + 100, 0, 0, 0, 0, 0, 0, 0, // amount: 100 as little-endian u64 + 244, 1, // max_top_up: 500 as little-endian u16 + ], + }; + + assert_eq!( + instruction, expected, + "Transfer instruction with max_top_up should match expected" + ); +} + +/// Test Transfer instruction with fee_payer set (no max_top_up). +/// Fee_payer is added as 5th account. Authority remains readonly. 
+#[test] +fn test_transfer_with_fee_payer() { + let source = Pubkey::new_from_array([1u8; 32]); + let destination = Pubkey::new_from_array([2u8; 32]); + let authority = Pubkey::new_from_array([3u8; 32]); + let fee_payer = Pubkey::new_from_array([4u8; 32]); + + let instruction = Transfer { + source, + destination, + amount: 100, + authority, + max_top_up: None, + fee_payer: Some(fee_payer), + } + .instruction() + .expect("Failed to create instruction"); + + // Hardcoded expected instruction + // - authority is readonly (fee_payer pays instead) + // - fee_payer is 5th account: writable, signer + // - data: discriminator (3) + amount (8 bytes) = 9 bytes (no max_top_up) + let expected = Instruction { + program_id: LIGHT_TOKEN_PROGRAM_ID, + accounts: vec![ + AccountMeta::new(source, false), // source: writable, not signer + AccountMeta::new(destination, false), // destination: writable, not signer + AccountMeta::new_readonly(authority, true), // authority: readonly, signer + AccountMeta::new_readonly(Pubkey::default(), false), // system_program: readonly, not signer + AccountMeta::new(fee_payer, true), // fee_payer: writable, signer + ], + data: vec![ + 3u8, // Transfer discriminator + 100, 0, 0, 0, 0, 0, 0, 0, // amount: 100 as little-endian u64 + ], + }; + + assert_eq!( + instruction, expected, + "Transfer instruction with fee_payer should match expected" + ); +} + +/// Test Transfer instruction with both max_top_up and fee_payer set. +/// Authority is readonly (fee_payer pays for top-ups). +/// Data includes max_top_up. Fee_payer is 5th account. 
+#[test] +fn test_transfer_with_max_top_up_and_fee_payer() { + let source = Pubkey::new_from_array([1u8; 32]); + let destination = Pubkey::new_from_array([2u8; 32]); + let authority = Pubkey::new_from_array([3u8; 32]); + let fee_payer = Pubkey::new_from_array([4u8; 32]); + + let instruction = Transfer { + source, + destination, + amount: 100, + authority, + max_top_up: Some(500), + fee_payer: Some(fee_payer), + } + .instruction() + .expect("Failed to create instruction"); + + // Hardcoded expected instruction + // - authority is readonly (fee_payer pays instead, even with max_top_up) + // - fee_payer is 5th account: writable, signer + // - data: discriminator (3) + amount (8 bytes) + max_top_up (2 bytes) = 11 bytes + let expected = Instruction { + program_id: LIGHT_TOKEN_PROGRAM_ID, + accounts: vec![ + AccountMeta::new(source, false), // source: writable, not signer + AccountMeta::new(destination, false), // destination: writable, not signer + AccountMeta::new_readonly(authority, true), // authority: readonly, signer + AccountMeta::new_readonly(Pubkey::default(), false), // system_program: readonly, not signer + AccountMeta::new(fee_payer, true), // fee_payer: writable, signer + ], + data: vec![ + 3u8, // Transfer discriminator + 100, 0, 0, 0, 0, 0, 0, 0, // amount: 100 as little-endian u64 + 244, 1, // max_top_up: 500 as little-endian u16 + ], + }; + + assert_eq!( + instruction, expected, + "Transfer instruction with max_top_up and fee_payer should match expected" + ); +} diff --git a/sdk-libs/token-sdk/tests/transfer_type.rs b/sdk-libs/token-sdk/tests/transfer_type.rs new file mode 100644 index 0000000000..d38238f773 --- /dev/null +++ b/sdk-libs/token-sdk/tests/transfer_type.rs @@ -0,0 +1,316 @@ +//! Tests for transfer type determination based on account owners. 
+ +use light_token::{ + constants::LIGHT_TOKEN_PROGRAM_ID, + error::TokenSdkError, + instruction::{SplInterface, TransferInterface}, + utils::is_light_token_owner, +}; +use solana_pubkey::Pubkey; + +// SPL Token Program IDs (from light_token_types) +const SPL_TOKEN_PROGRAM_ID: Pubkey = + Pubkey::new_from_array(light_token_types::SPL_TOKEN_PROGRAM_ID); +const SPL_TOKEN_2022_PROGRAM_ID: Pubkey = + Pubkey::new_from_array(light_token_types::SPL_TOKEN_2022_PROGRAM_ID); + +/// Verify is_light_token_owner returns Ok(true) for LIGHT_TOKEN_PROGRAM_ID. +#[test] +fn test_is_light_token_owner_light_program() { + let result = is_light_token_owner(&LIGHT_TOKEN_PROGRAM_ID); + + assert!( + result.is_ok(), + "Should successfully identify Light token program" + ); + assert!(result.unwrap(), "LIGHT_TOKEN_PROGRAM_ID should return true"); +} + +/// Verify is_light_token_owner returns Ok(false) for SPL_TOKEN_PROGRAM_ID. +#[test] +fn test_is_light_token_owner_spl_token() { + let result = is_light_token_owner(&SPL_TOKEN_PROGRAM_ID); + + assert!( + result.is_ok(), + "Should successfully identify SPL token program" + ); + assert!(!result.unwrap(), "SPL_TOKEN_PROGRAM_ID should return false"); +} + +/// Verify is_light_token_owner returns Ok(false) for SPL_TOKEN_2022_PROGRAM_ID. +#[test] +fn test_is_light_token_owner_spl_token_2022() { + let result = is_light_token_owner(&SPL_TOKEN_2022_PROGRAM_ID); + + assert!( + result.is_ok(), + "Should successfully identify SPL Token 2022 program" + ); + assert!( + !result.unwrap(), + "SPL_TOKEN_2022_PROGRAM_ID should return false" + ); +} + +/// Verify is_light_token_owner returns Err for random/unknown program. 
+#[test] +fn test_is_light_token_owner_unknown_program() { + let unknown_program = Pubkey::new_unique(); + let result = is_light_token_owner(&unknown_program); + + assert!(result.is_err(), "Unknown program should return error"); + match result { + Err(TokenSdkError::CannotDetermineAccountType) => { + // Expected error + } + Err(other) => { + panic!("Expected CannotDetermineAccountType, got {:?}", other); + } + Ok(_) => { + panic!("Expected error for unknown program"); + } + } +} + +/// Verify is_light_token_owner returns Err for system program. +#[test] +fn test_is_light_token_owner_system_program() { + // System program ID (all zeros) + let system_program = Pubkey::default(); + let result = is_light_token_owner(&system_program); + + assert!(result.is_err(), "System program should return error"); + match result { + Err(TokenSdkError::CannotDetermineAccountType) => { + // Expected error + } + Err(other) => { + panic!("Expected CannotDetermineAccountType, got {:?}", other); + } + Ok(_) => { + panic!("Expected error for system program"); + } + } +} + +/// Verify TransferInterface with both owners as LIGHT_TOKEN_PROGRAM_ID +/// does not require spl_interface (light-to-light transfer). 
+#[test] +fn test_transfer_interface_light_to_light_no_spl_interface() { + let source = Pubkey::new_unique(); + let destination = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let payer = Pubkey::new_unique(); + + // Create TransferInterface for light-to-light transfer + let transfer = TransferInterface { + source, + destination, + amount: 1000, + decimals: 9, + authority, + payer, + spl_interface: None, // No SPL interface needed + max_top_up: None, + source_owner: LIGHT_TOKEN_PROGRAM_ID, + destination_owner: LIGHT_TOKEN_PROGRAM_ID, + }; + + // Should succeed without spl_interface + let result = transfer.instruction(); + assert!( + result.is_ok(), + "Light-to-light transfer should not require spl_interface: {:?}", + result.err() + ); + + let instruction = result.unwrap(); + // Verify it's directed to the Light Token program + assert_eq!(instruction.program_id, LIGHT_TOKEN_PROGRAM_ID); +} + +/// Verify TransferInterface light-to-SPL requires spl_interface. +#[test] +fn test_transfer_interface_light_to_spl_requires_interface() { + let source = Pubkey::new_unique(); + let destination = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let payer = Pubkey::new_unique(); + + // Create TransferInterface for light-to-SPL transfer without interface + let transfer = TransferInterface { + source, + destination, + amount: 1000, + decimals: 9, + authority, + payer, + spl_interface: None, // Missing required interface + max_top_up: None, + source_owner: LIGHT_TOKEN_PROGRAM_ID, + destination_owner: SPL_TOKEN_PROGRAM_ID, + }; + + // Should fail without spl_interface + let result = transfer.instruction(); + assert!( + result.is_err(), + "Light-to-SPL transfer should require spl_interface" + ); +} + +/// Verify TransferInterface SPL-to-light requires spl_interface. 
+#[test] +fn test_transfer_interface_spl_to_light_requires_interface() { + let source = Pubkey::new_unique(); + let destination = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let payer = Pubkey::new_unique(); + + // Create TransferInterface for SPL-to-light transfer without interface + let transfer = TransferInterface { + source, + destination, + amount: 1000, + decimals: 9, + authority, + payer, + spl_interface: None, // Missing required interface + max_top_up: None, + source_owner: SPL_TOKEN_PROGRAM_ID, + destination_owner: LIGHT_TOKEN_PROGRAM_ID, + }; + + // Should fail without spl_interface + let result = transfer.instruction(); + assert!( + result.is_err(), + "SPL-to-light transfer should require spl_interface" + ); +} + +/// Verify TransferInterface light-to-SPL succeeds with spl_interface. +#[test] +fn test_transfer_interface_light_to_spl_with_interface() { + let source = Pubkey::new_unique(); + let destination = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let payer = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + let spl_interface_pda = Pubkey::new_unique(); + + let transfer = TransferInterface { + source, + destination, + amount: 1000, + decimals: 9, + authority, + payer, + spl_interface: Some(SplInterface { + mint, + spl_token_program: SPL_TOKEN_PROGRAM_ID, + spl_interface_pda, + spl_interface_pda_bump: 255, + }), + max_top_up: None, + source_owner: LIGHT_TOKEN_PROGRAM_ID, + destination_owner: SPL_TOKEN_PROGRAM_ID, + }; + + let result = transfer.instruction(); + assert!( + result.is_ok(), + "Light-to-SPL transfer with spl_interface should succeed: {:?}", + result.err() + ); +} + +/// Verify TransferInterface SPL-to-SPL also requires spl_interface. 
+#[test] +fn test_transfer_interface_spl_to_spl_requires_interface() { + let source = Pubkey::new_unique(); + let destination = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let payer = Pubkey::new_unique(); + + // Both owners are the same SPL token program + let transfer = TransferInterface { + source, + destination, + amount: 1000, + decimals: 9, + authority, + payer, + spl_interface: None, // Missing interface + max_top_up: None, + source_owner: SPL_TOKEN_PROGRAM_ID, + destination_owner: SPL_TOKEN_PROGRAM_ID, + }; + + // SPL-to-SPL still goes through TransferInterface and needs mint info + let result = transfer.instruction(); + assert!( + result.is_err(), + "SPL-to-SPL transfer through TransferInterface should require spl_interface for mint" + ); +} + +/// Verify TransferInterface fails when source and destination have different SPL programs. +#[test] +fn test_transfer_interface_spl_program_mismatch() { + let source = Pubkey::new_unique(); + let destination = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let payer = Pubkey::new_unique(); + let mint = Pubkey::new_unique(); + let spl_interface_pda = Pubkey::new_unique(); + + // Source is SPL Token, destination is SPL Token 2022 + let transfer = TransferInterface { + source, + destination, + amount: 1000, + decimals: 9, + authority, + payer, + spl_interface: Some(SplInterface { + mint, + spl_token_program: SPL_TOKEN_PROGRAM_ID, + spl_interface_pda, + spl_interface_pda_bump: 255, + }), + max_top_up: None, + source_owner: SPL_TOKEN_PROGRAM_ID, + destination_owner: SPL_TOKEN_2022_PROGRAM_ID, + }; + + // Should fail due to program mismatch + let result = transfer.instruction(); + assert!( + result.is_err(), + "Transfer between different SPL programs should fail" + ); +} + +/// Verify known program ID values match expected strings. 
+#[test] +fn test_program_id_values() { + // LIGHT_TOKEN_PROGRAM_ID = "cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m" + assert_eq!( + LIGHT_TOKEN_PROGRAM_ID.to_string(), + "cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m" + ); + + // SPL_TOKEN_PROGRAM_ID = "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" + assert_eq!( + SPL_TOKEN_PROGRAM_ID.to_string(), + "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" + ); + + // SPL_TOKEN_2022_PROGRAM_ID = "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb" + assert_eq!( + SPL_TOKEN_2022_PROGRAM_ID.to_string(), + "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb" + ); +} diff --git a/sdk-tests/csdk-anchor-full-derived-test/Cargo.toml b/sdk-tests/csdk-anchor-full-derived-test/Cargo.toml index a0752c4360..6295a5350d 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/Cargo.toml +++ b/sdk-tests/csdk-anchor-full-derived-test/Cargo.toml @@ -20,7 +20,7 @@ test-sbf = [] [dependencies] light-heap = { workspace = true, optional = true } -light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "anchor-discriminator", "cpi-context"] } +light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "cpi-context"] } light-sdk-types = { workspace = true, features = ["v2", "cpi-context"] } light-hasher = { workspace = true, features = ["solana"] } solana-program = { workspace = true } diff --git a/sdk-tests/single-ata-test/Cargo.toml b/sdk-tests/single-ata-test/Cargo.toml index 3769f189c6..e2c8cf5446 100644 --- a/sdk-tests/single-ata-test/Cargo.toml +++ b/sdk-tests/single-ata-test/Cargo.toml @@ -20,7 +20,7 @@ test-sbf = [] [dependencies] light-heap = { workspace = true, optional = true } -light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "anchor-discriminator", "cpi-context"] } +light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "cpi-context"] } light-sdk-types = { workspace = true, features = ["v2", "cpi-context"] } light-macros = { workspace = true, features = ["solana"] } light-sdk-macros = 
{ workspace = true } diff --git a/sdk-tests/single-mint-test/Cargo.toml b/sdk-tests/single-mint-test/Cargo.toml index 618df66e5b..2992547530 100644 --- a/sdk-tests/single-mint-test/Cargo.toml +++ b/sdk-tests/single-mint-test/Cargo.toml @@ -20,7 +20,7 @@ test-sbf = [] [dependencies] light-heap = { workspace = true, optional = true } -light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "anchor-discriminator", "cpi-context"] } +light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "cpi-context"] } light-sdk-types = { workspace = true, features = ["v2", "cpi-context"] } light-macros = { workspace = true, features = ["solana"] } light-sdk-macros = { workspace = true } diff --git a/sdk-tests/single-pda-test/Cargo.toml b/sdk-tests/single-pda-test/Cargo.toml index 4d8d9a40b2..2642eda072 100644 --- a/sdk-tests/single-pda-test/Cargo.toml +++ b/sdk-tests/single-pda-test/Cargo.toml @@ -20,7 +20,7 @@ test-sbf = [] [dependencies] light-heap = { workspace = true, optional = true } -light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "anchor-discriminator", "cpi-context"] } +light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "cpi-context"] } light-sdk-types = { workspace = true, features = ["v2", "cpi-context"] } light-macros = { workspace = true, features = ["solana"] } light-sdk-macros = { workspace = true } diff --git a/sdk-tests/single-token-test/Cargo.toml b/sdk-tests/single-token-test/Cargo.toml index 211a65303e..d5edd41e1f 100644 --- a/sdk-tests/single-token-test/Cargo.toml +++ b/sdk-tests/single-token-test/Cargo.toml @@ -20,7 +20,7 @@ test-sbf = [] [dependencies] light-heap = { workspace = true, optional = true } -light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "anchor-discriminator", "cpi-context"] } +light-sdk = { workspace = true, features = ["anchor", "idl-build", "v2", "cpi-context"] } light-sdk-types = { workspace = true, features = ["v2", "cpi-context"] } 
light-macros = { workspace = true, features = ["solana"] } light-sdk-macros = { workspace = true }