diff --git a/.gitignore b/.gitignore
index d01bd1a..0bc71ef 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,4 +18,7 @@ Cargo.lock
 # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
\ No newline at end of file
+#.idea/
+.DS_Store
+# Private key files are now properly ignored
+rollup_client/*.json
\ No newline at end of file
diff --git a/rollup_client/src/main.rs b/rollup_client/src/main.rs
index cd819bc..e590748 100644
--- a/rollup_client/src/main.rs
+++ b/rollup_client/src/main.rs
@@ -12,7 +12,8 @@ use solana_sdk::{
     transaction::Transaction,
 };
 use solana_transaction_status::UiTransactionEncoding::{self, Binary};
-use std::{collections::HashMap, str::FromStr};
+use core::hash;
+use std::{collections::HashMap, ops::Div, str::FromStr};
 // use serde_json;
 #[derive(Serialize, Deserialize, Debug)]
@@ -28,12 +29,15 @@ pub struct GetTransaction {
 #[tokio::main]
 async fn main() -> Result<()> {
-    let keypair = signer::keypair::read_keypair_file("/home/dev/.solana/testkey.json").unwrap();
-    let keypair2 = signer::keypair::read_keypair_file("/home/dev/.solana/mykey_1.json").unwrap();
+    let path = "/home/izomana/adv-svm/Basic_Rollup_fork/rollup_client/mykey_1.json";
+    let path2 = "/home/izomana/adv-svm/Basic_Rollup_fork/rollup_client/testkey.json";
+    let path3 = "/home/izomana/adv-svm/Basic_Rollup_fork/rollup_client/owner.json";
+    let keypair = signer::keypair::read_keypair_file(path.to_string()).unwrap();
+    let keypair2 = signer::keypair::read_keypair_file(path2.to_string()).unwrap();
     let rpc_client = RpcClient::new("https://api.devnet.solana.com".into());
     let ix =
-        system_instruction::transfer(&keypair2.pubkey(), &keypair.pubkey(), 1 * LAMPORTS_PER_SOL);
+        system_instruction::transfer(&keypair2.pubkey(), &keypair.pubkey(), 1 * (LAMPORTS_PER_SOL/2));
     let tx = Transaction::new_signed_with_payer(
         &[ix],
         Some(&keypair2.pubkey()),
@@ -63,29 +67,86 @@ async fn main() -> Result<()> {
     // let serialized_rollup_transaction = serde_json::to_string(&rtx)?;
-    let submit_transaction = client
-        .post("http://127.0.0.1:8080/submit_transaction")
-        .json(&rtx)
-        .send()
-        .await?;
-    // .json()
-    // .await?;
-
-    println!("{submit_transaction:#?}");
-    let mut hasher = Hasher::default();
-    hasher.hash(bincode::serialize(&rtx.sol_transaction).unwrap().as_slice());
-
-    println!("{:#?}", hasher.clone().result());
-
-    let tx_resp = client
-        .post("http://127.0.0.1:8080/get_transaction")
-        .json(&HashMap::from([("get_tx", hasher.result().to_string())]))
-        .send()
-        .await?
-        .json::>()
-        .await?;
-
-    println!("{tx_resp:#?}");
+    //UNCOMMENT
+    // let submit_transaction = client
+    //     .post("http://127.0.0.1:8080/submit_transaction")
+    //     .json(&rtx)
+    //     .send()
+    //     .await?;
+    // // .json()
+    // // .await?;
+
+    // println!("{submit_transaction:#?}");
+    // let mut hasher = Hasher::default();
+    // hasher.hash(bincode::serialize(&rtx.sol_transaction).unwrap().as_slice());
+
+    // println!("{:#?}", hasher.clone().result());
+
+    // let tx_resp = client
+    //     .post("http://127.0.0.1:8080/get_transaction")
+    //     .json(&GetTransaction{get_tx: rtx.sol_transaction.message.hash().to_string()})
+    //     .send()
+    //     .await?;
+    // // .json::>()
+    // // .await?;
+
+    // println!("{tx_resp:#?}");
+
+    // let amounts: Vec<i32> = vec![4, -2, 3, -5, 1, -4, 2, -1, 3, -1];
+    let amounts: Vec<(String, String, i32)> = vec![
+        (path.to_string(), path2.to_string(), 5),
+        (path3.to_string(), path.to_string(), -3),
+        (path2.to_string(), path3.to_string(), 8),
+        (path.to_string(), path3.to_string(), -7),
+        (path2.to_string(), path.to_string(), 4),
+        (path3.to_string(), path2.to_string(), -6),
+        (path.to_string(), path2.to_string(), 9),
+        (path2.to_string(), path3.to_string(), -2),
+        (path3.to_string(), path.to_string(), 1),
+        (path.to_string(), path3.to_string(), -4),
+    ];
+    let mut txs: Vec<Transaction> = vec![];
+    for amt in amounts {
+        if amt.2 > 0 {
+            txs.push(gen_transfer_tx(amt.0, amt.1, amt.2 as u64).await);
+        } else {
+            txs.push(gen_transfer_tx(amt.1, amt.0, amt.2.abs() as u64).await);
+        }
+    }
+
+    for tx in txs {
+        let rtx = RollupTransaction {
+            sender: "Me".into(),
+            sol_transaction: tx
+        };
+
+        let submission = client
+            .post("http://127.0.0.1:8080/submit_transaction")
+            .json(&rtx)
+            .send()
+            .await?;
+
+        println!("Submission {submission:#?}");
+    }
+
+    println!("KP: {}", keypair.pubkey());
+    println!("KP2: {}", keypair2.pubkey());
     Ok(())
 }
+
+// Transfers `amount` tenths of SOL from the key at `path2` to the key at `path1`.
+async fn gen_transfer_tx(path1: String, path2: String, amount: u64) -> Transaction {
+    println!("Amount: {amount}");
+    let keypair = signer::keypair::read_keypair_file(path1.to_string()).unwrap();
+    let keypair2 = signer::keypair::read_keypair_file(path2.to_string()).unwrap();
+    let rpc_client = RpcClient::new("https://api.devnet.solana.com".into());
+
+    let ix =
+        system_instruction::transfer(&keypair2.pubkey(), &keypair.pubkey(), amount * (LAMPORTS_PER_SOL / 10));
+    Transaction::new_signed_with_payer(
+        &[ix],
+        Some(&keypair2.pubkey()),
+        &[&keypair2],
+        rpc_client.get_latest_blockhash().await.unwrap(),
+    )
+}
\ No newline at end of file
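// Editorial aside (not part of the diff): the ten (from, to, amount) entries above are
// exactly the kind of traffic the TransferBundler added in this PR is meant to collapse.
// A minimal, illustrative sanity check of the expected netting, done client-side: positive
// results mean the lexically smaller path ends up receiving on net (in tenths of SOL),
// matching gen_transfer_tx's "second path pays the first" convention. Names here are
// hypothetical helpers, not part of the client.
use std::collections::HashMap;

fn expected_net(amounts: &[(String, String, i32)]) -> HashMap<(String, String), i64> {
    let mut net: HashMap<(String, String), i64> = HashMap::new();
    for (a, b, amt) in amounts {
        // Normalise the pair ordering so opposing transfers cancel, mirroring
        // TBundlerKey's sorted-key convention in bundler.rs.
        let (lo, hi) = if a <= b { (a, b) } else { (b, a) };
        let toward_lo = if a <= b { *amt as i64 } else { -(*amt as i64) };
        *net.entry((lo.clone(), hi.clone())).or_insert(0) += toward_lo;
    }
    net
}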
diff --git a/rollup_core/Cargo.toml b/rollup_core/Cargo.toml
index 53a9440..abd6209 100644
--- a/rollup_core/Cargo.toml
+++ b/rollup_core/Cargo.toml
@@ -15,8 +15,11 @@ solana-sdk = "2.0.7"
 solana-client = "2.0.7"
 solana-compute-budget = "2.0.7"
 solana-bpf-loader-program = "2.0.7"
+solana-timings = "2.0.7"
+solana-system-program = "2.0.7"
 env_logger = "0.11.5"
 log = "0.4.22"
 anyhow = "1.0.86"
 crossbeam = "0.8.4"
-async-channel = "2.3.1"
\ No newline at end of file
+async-channel = "2.3.1"
+bincode = "1.3.3"
diff --git a/rollup_core/src/bundler.rs b/rollup_core/src/bundler.rs
new file mode 100644
index 0000000..e46239a
--- /dev/null
+++ b/rollup_core/src/bundler.rs
@@ -0,0 +1,112 @@
+use std::collections::HashMap;
+
+use solana_sdk::{instruction::{CompiledInstruction, Instruction}, pubkey::Pubkey, system_instruction::{self, SystemInstruction}, system_program, transaction::Transaction};
+use bincode::deserialize;
+
+pub fn get_transaction_instructions(tx: &Transaction) -> Vec<CompiledInstruction> {
+    tx.message.instructions.clone()
+}
+
+pub fn is_transfer_ix(cix: &CompiledInstruction, account_keys: &[Pubkey]) -> bool {
+    if cix.program_id_index as usize >= account_keys.len() {
+        return false;
+    }
+    let program_id = account_keys[cix.program_id_index as usize];
+    if program_id != system_program::ID {
+        return false;
+    }
+
+    matches!(
+        deserialize::<SystemInstruction>(&cix.data),
+        Ok(SystemInstruction::Transfer { .. })
+    )
+}
+
+#[derive(PartialEq, Eq, Hash, Clone, Copy)]
+struct TBundlerKey {
+    keys: [Pubkey; 2],
+}
+
+pub struct TransferBundler {
+    transfers: HashMap<TBundlerKey, i128>,
+}
+
+impl TransferBundler {
+    pub fn new() -> Self {
+        Self {
+            transfers: HashMap::new()
+        }
+    }
+
+    pub fn parse_compiled_instruction(ix: &CompiledInstruction, account_keys: &[Pubkey]) -> Option<(Pubkey, Pubkey, i128)> {
+        // Ensure the instruction is from the System Program (where transfer is from)
+        if ix.program_id_index as usize >= account_keys.len() || account_keys[ix.program_id_index as usize] != system_program::ID {
+            return None;
+        }
+        // Ensure we have at least 2 accounts for the transfer and the full 12-byte payload
+        // (4-byte discriminant + u64 lamport amount) read below
+        if ix.accounts.len() < 2 || ix.data.len() < 12 {
+            return None;
+        }
+
+        // Get accounts involved in the transfer and the amount to be transferred
+        let from = account_keys[ix.accounts[0] as usize];
+        let to = account_keys[ix.accounts[1] as usize];
+
+        log::info!("FROM: {:?}", from.to_string());
+        log::info!("TO: {:?}", to.to_string());
+        log::info!("IX DATA: {:?}", ix.data);
+
+        let amount = u64::from_le_bytes(ix.data[4..12].try_into().ok()?);
+        Some((from, to, amount as i128))
+    }
+
+    pub fn parse_instruction(ix: &Instruction) -> Option<(Pubkey, Pubkey, i128)> {
+        // Ensure ix is owned by the system program
+        if ix.program_id != system_program::ID {
+            return None;
+        }
+
+        // Ensure we have enough accounts
+        if ix.accounts.len() < 2 {
+            return None;
+        }
+        let from = ix.accounts[0].pubkey;
+        let to = ix.accounts[1].pubkey;
+        let amount = u64::from_le_bytes(ix.data[4..].try_into().ok()?);
+
+        log::info!("FROM: {:?}", from.to_string());
+        log::info!("TO: {:?}", to.to_string());
+        log::info!("AMOUNT: {amount}");
+        log::info!("IX DATA: {:?}", ix.data);
+
+        Some((from, to, amount as i128))
+    }
+
+    // Parses a transaction and adds its transfer ixs to the TransferBundler
+    pub fn bundle(&mut self, transaction: Transaction) {
+        let ixs = get_transaction_instructions(&transaction);
+        let account_keys: &[Pubkey] = &transaction.message.account_keys;
+        for ix in ixs {
+            if is_transfer_ix(&ix, account_keys) {
+                let (from, to, amount) = Self::parse_compiled_instruction(&ix, account_keys).unwrap();
+                let mut keys = [from, to];
+                keys.sort();
+
+                *self.transfers.entry(TBundlerKey { keys }).or_default() += if from == keys[0] { amount } else { -amount };
+            }
+        }
+    }
+
+    pub fn generate_final(self) -> Vec<Instruction> {
+        self.transfers.into_iter().filter_map(|(map_key, val)| {
+            if val < 0 {
+                Some(system_instruction::transfer(&map_key.keys[1], &map_key.keys[0], val.unsigned_abs() as u64))
+            } else if val > 0 {
+                Some(system_instruction::transfer(&map_key.keys[0], &map_key.keys[1], val as u64))
+            } else {
+                None
+            }
+        }).collect()
+    }
+}
\ No newline at end of file
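// Editorial aside (not part of the diff): a quick illustration of the netting behaviour
// `TransferBundler` implements — opposing transfers between the same pair of keys collapse
// into a single net instruction. No network access is needed; it assumes the module above
// is reachable as `crate::bundler` inside rollup_core.
use solana_sdk::{signature::Keypair, signer::Signer, system_instruction, transaction::Transaction};
use crate::bundler::TransferBundler;

fn netting_demo() {
    let a = Keypair::new();
    let b = Keypair::new();

    // A sends 5 lamports to B, then B sends 2 back to A.
    let tx1 = Transaction::new_with_payer(
        &[system_instruction::transfer(&a.pubkey(), &b.pubkey(), 5)],
        Some(&a.pubkey()),
    );
    let tx2 = Transaction::new_with_payer(
        &[system_instruction::transfer(&b.pubkey(), &a.pubkey(), 2)],
        Some(&b.pubkey()),
    );

    let mut bundler = TransferBundler::new();
    bundler.bundle(tx1);
    bundler.bundle(tx2);

    // Expect exactly one instruction, moving the net 3 lamports.
    let net = bundler.generate_final();
    assert_eq!(net.len(), 1);
}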
diff --git a/rollup_core/src/frontend.rs b/rollup_core/src/frontend.rs
index c1d0d8d..7e2a270 100644
--- a/rollup_core/src/frontend.rs
+++ b/rollup_core/src/frontend.rs
@@ -7,7 +7,7 @@ use actix_web::{error, web, HttpResponse};
 use async_channel::{Receiver, Send, Sender};
 use crossbeam::channel::{Sender as CBSender, Receiver as CBReceiver};
 use serde::{Deserialize, Serialize};
-use solana_sdk::keccak::Hash;
+use solana_sdk::hash::Hash; // keccak::Hash
 use solana_sdk::transaction::Transaction;
 use crate::rollupdb::RollupDBMessage;
@@ -53,11 +53,12 @@ pub async fn submit_transaction(
 pub async fn get_transaction(
     body: web::Json<GetTransaction>,
-    sequencer_sender: web::Data<CBSender<Transaction>>,
+    // sequencer_sender: web::Data<CBSender<Transaction>>,
     rollupdb_sender: web::Data<Sender<RollupDBMessage>>,
     frontend_receiver: web::Data<Receiver<FrontendMessage>>,
 ) -> actix_web::Result<HttpResponse> {
     // Validate transaction structure with serialization in function signature
+    println!("Getting tx...");
     log::info!("Requested transaction");
     log::info!("{body:?}");
@@ -67,16 +68,18 @@ pub async fn get_transaction(
             add_processed_transaction: None,
             frontend_get_tx: Some(Hash::new(body.get_tx.as_bytes())),
             add_settle_proof: None,
+            bundle_tx: false
         })
         .await
         .unwrap();
     if let Ok(frontend_message) = frontend_receiver.recv().await {
-        return Ok(HttpResponse::Ok().json(RollupTransaction {
-            sender: "Rollup RPC".into(),
-            sol_transaction: frontend_message.transaction.unwrap(),
-        }));
-        // Ok(HttpResponse::Ok().json(HashMap::from([("Transaction status", "requested")])))
+        // return Ok(HttpResponse::Ok().json(RollupTransaction {
+        //     sender: "Rollup RPC".into(),
+        //     sol_transaction: frontend_message.transaction.unwrap(),
+        // }));
+        log::info!("Requested TX:\n {:?}", frontend_message.transaction.unwrap());
+        return Ok(HttpResponse::Ok().json(HashMap::from([("Requested transaction status", "gotten successfully")])));
     }
     Ok(HttpResponse::Ok().json(HashMap::from([("Transaction status", "requested")])))
diff --git a/rollup_core/src/loader.rs b/rollup_core/src/loader.rs
new file mode 100644
index 0000000..d21e719
--- /dev/null
+++ b/rollup_core/src/loader.rs
@@ -0,0 +1,62 @@
+//! The rollup's "account loader" component, which provides the SVM API with the
+//! ability to load accounts for the rollup.
+//!
+//! The account loader is a simple example of an RPC client that can first load
+//! an account from the base chain, then cache it locally within the protocol
+//! for the duration of the session.
+
+use {
+    solana_client::rpc_client::RpcClient,
+    solana_sdk::{
+        account::{AccountSharedData, ReadableAccount},
+        pubkey::Pubkey,
+    },
+    solana_svm::transaction_processing_callback::TransactionProcessingCallback,
+    std::{collections::HashMap, sync::RwLock},
+};
+
+/// An account loading mechanism to hoist accounts from the base chain up to
+/// the active rollup.
+///
+/// Employs a simple cache mechanism to ensure accounts are only loaded once.
+pub struct RollupAccountLoader<'a> {
+    pub cache: RwLock<HashMap<Pubkey, AccountSharedData>>,
+    pub rpc_client: &'a RpcClient,
+}
+
+impl<'a> RollupAccountLoader<'a> {
+    pub fn new(rpc_client: &'a RpcClient) -> Self {
+        Self {
+            cache: RwLock::new(HashMap::new()),
+            rpc_client,
+        }
+    }
+
+    pub fn add_account(&mut self, pubkey: Pubkey, account: AccountSharedData) {
+        self.cache.write().unwrap().insert(pubkey, account);
+    }
+}
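// Editorial aside (not part of the diff): how the cache is expected to behave in practice.
// The first lookup for a pubkey falls through to devnet RPC, the second is served from the
// RwLock-protected HashMap. Assumes the `loader` module is reachable as `crate::loader`.
use solana_client::rpc_client::RpcClient;
use solana_sdk::{pubkey::Pubkey, sysvar};
use solana_svm::transaction_processing_callback::TransactionProcessingCallback;
use crate::loader::RollupAccountLoader;

fn loader_cache_demo() {
    let rpc = RpcClient::new("https://api.devnet.solana.com".to_string());
    let loader = RollupAccountLoader::new(&rpc);

    let key: Pubkey = sysvar::clock::id(); // any account that exists on devnet

    // First call: RPC round trip, result inserted into the cache.
    let first = loader.get_account_shared_data(&key);
    // Second call: no RPC, served straight from `loader.cache`.
    let second = loader.get_account_shared_data(&key);
    assert_eq!(first, second);
}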
+
+/// Implementation of the SVM API's `TransactionProcessingCallback` interface.
+///
+/// The SVM API requires this plugin be provided to provide the SVM with the
+/// ability to load accounts.
+///
+/// In the Agave validator, this implementation is Bank, powered by AccountsDB.
+impl TransactionProcessingCallback for RollupAccountLoader<'_> {
+    fn get_account_shared_data(&self, pubkey: &Pubkey) -> Option<AccountSharedData> {
+        if let Some(account) = self.cache.read().unwrap().get(pubkey) {
+            return Some(account.clone());
+        }
+
+        let account: AccountSharedData = self.rpc_client.get_account(pubkey).ok()?.into();
+        self.cache.write().unwrap().insert(*pubkey, account.clone());
+
+        Some(account)
+    }
+
+    fn account_matches_owners(&self, account: &Pubkey, owners: &[Pubkey]) -> Option<usize> {
+        self.get_account_shared_data(account)
+            .and_then(|account| owners.iter().position(|key| account.owner().eq(key)))
+    }
+}
diff --git a/rollup_core/src/main.rs b/rollup_core/src/main.rs
index 8089972..a8381a1 100644
--- a/rollup_core/src/main.rs
+++ b/rollup_core/src/main.rs
@@ -11,6 +11,9 @@ mod frontend;
 mod rollupdb;
 mod sequencer;
 mod settle;
+mod processor;
+mod loader;
+mod bundler;
 // #[actix_web::main]
 fn main() {
@@ -44,8 +47,8 @@
-        rt.block_on(async {sequencer::run(sequencer_receiver, db_sender2).unwrap()});
         rt.spawn(RollupDB::run(rollupdb_receiver, fe_2));
+        rt.block_on(async {sequencer::run(sequencer_receiver, db_sender2).unwrap()});
     });
     // Create sequencer task
     // tokio::spawn(sequencer::run(sequencer_receiver, rollupdb_sender.clone()));
diff --git a/rollup_core/src/processor.rs b/rollup_core/src/processor.rs
new file mode 100644
index 0000000..90e2264
--- /dev/null
+++ b/rollup_core/src/processor.rs
@@ -0,0 +1,112 @@
+//! A helper to initialize Solana SVM API's `TransactionBatchProcessor`.
+
+use {
+    solana_bpf_loader_program::syscalls::create_program_runtime_environment_v1,
+    solana_compute_budget::compute_budget::ComputeBudget,
+    solana_program_runtime::loaded_programs::{BlockRelation, ForkGraph, ProgramCacheEntry},
+    solana_sdk::{clock::Slot, feature_set::FeatureSet, pubkey::Pubkey, transaction},
+    solana_svm::{
+        account_loader::CheckedTransactionDetails,
+        transaction_processing_callback::TransactionProcessingCallback,
+        transaction_processor::TransactionBatchProcessor,
+    },
+    solana_system_program::system_processor,
+    std::sync::{Arc, RwLock},
+};
+
+/// In order to use the `TransactionBatchProcessor`, another trait - Solana
+/// Program Runtime's `ForkGraph` - must be implemented, to tell the batch
+/// processor how to work across forks.
+///
+/// Since the rollup doesn't use slots or forks, this implementation is mocked.
pub(crate) struct RollupForkGraph {}
+
+impl ForkGraph for RollupForkGraph {
+    fn relationship(&self, _a: Slot, _b: Slot) -> BlockRelation {
+        BlockRelation::Unknown
+    }
+}
+
+/// This function encapsulates some initial setup required to tweak the
+/// `TransactionBatchProcessor` for use within the rollup.
+///
+/// We're simply configuring the mocked fork graph on the SVM API's program
+/// cache, then adding the System program to the processor's builtins.
+pub(crate) fn create_transaction_batch_processor<CB: TransactionProcessingCallback, FG: ForkGraph>(
+    callbacks: &CB,
+    feature_set: &FeatureSet,
+    compute_budget: &ComputeBudget,
+    fork_graph: Arc<RwLock<FG>>,
+    // needed_programs: Vec,
+) -> TransactionBatchProcessor<FG> {
+    // Create a new transaction batch processor.
+    //
+    // We're going to use slot 1 specifically because any programs we add will
+    // be deployed in slot 0, and they are delayed visibility until the next
+    // slot (1).
+    // This includes programs owned by BPF Loader v2, which are automatically
+    // marked as "deployed" in slot 0.
+    // See `solana_svm::program_loader::program_with_pubkey` for more
+    // details.
+    let processor = TransactionBatchProcessor::<FG>::new_uninitialized(
+        /* slot */ 1,
+        /* epoch */ 1,
+        // Arc::downgrade(&fork_graph),
+        // Some(Arc::new(
+        //     create_program_runtime_environment_v1(feature_set, compute_budget, false, false)
+        //         .unwrap(),
+        // )),
+        // None,
+    );
+
+    processor.program_cache.write().unwrap().set_fork_graph(Arc::downgrade(&fork_graph));
+
+    processor.prepare_program_cache_for_upcoming_feature_set(callbacks, feature_set, compute_budget, 1, 50);
+
+    // processor.prepare_program_cache_for_upcoming_feature_set(callbacks, upcoming_feature_set, compute_budget, slot_index, slots_in_epoch);
+
+    // Add the system program builtin.
+    processor.add_builtin(
+        callbacks,
+        solana_system_program::id(),
+        "system_program",
+        ProgramCacheEntry::new_builtin(
+            0,
+            b"system_program".len(),
+            system_processor::Entrypoint::vm,
+        ),
+    );
+
+    // Add the BPF Loader v2 builtin, for the SPL Token program.
+    processor.add_builtin(
+        callbacks,
+        solana_sdk::bpf_loader::id(),
+        "solana_bpf_loader_program",
+        ProgramCacheEntry::new_builtin(
+            0,
+            b"solana_bpf_loader_program".len(),
+            solana_bpf_loader_program::Entrypoint::vm,
+        ),
+    );
+
+    // Adding any needed programs to the processor.
+
+    processor
+}
+
+/// This function is also a mock. In the Agave validator, the bank pre-checks
+/// transactions before providing them to the SVM API. We mock this step in
+/// the rollup, since we don't need to perform such pre-checks.
+pub(crate) fn get_transaction_check_results(
+    len: usize,
+    lamports_per_signature: u64,
+) -> Vec<transaction::Result<CheckedTransactionDetails>> {
+    vec![
+        transaction::Result::Ok(CheckedTransactionDetails {
+            nonce: None,
+            lamports_per_signature,
+        });
+        len
+    ]
+}
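// Editorial aside (not part of the diff): `get_transaction_check_results` fakes the
// bank-side pre-checks, so the returned vector must line up one-to-one with the sanitized
// batch later handed to `load_and_execute_sanitized_transactions` — one
// Ok(CheckedTransactionDetails) per transaction. A minimal sketch of that invariant:
use crate::processor::get_transaction_check_results;

fn checks_match_batch_len() {
    let lamports_per_signature = 5_000u64; // illustrative value only
    let batch_len = 3usize;

    let checks = get_transaction_check_results(batch_len, lamports_per_signature);
    assert_eq!(checks.len(), batch_len);
    assert!(checks.iter().all(|c| c.is_ok()));
}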
diff --git a/rollup_core/src/rollupdb.rs b/rollup_core/src/rollupdb.rs
index b5995dc..fb46535 100644
--- a/rollup_core/src/rollupdb.rs
+++ b/rollup_core/src/rollupdb.rs
@@ -1,7 +1,8 @@
 use async_channel::{Receiver, Sender};
+use log::log;
 use serde::{Deserialize, Serialize};
 use solana_sdk::{
-    account::AccountSharedData, keccak::Hash, pubkey::Pubkey, transaction::Transaction,
+    account::AccountSharedData, hash::Hash, pubkey::Pubkey, transaction::Transaction, // keccak::Hash -> hash::Hash
 };
 use crossbeam::channel::{Receiver as CBReceiver, Sender as CBSender};
@@ -11,6 +12,7 @@ use std::{
 };
 use crate::frontend::FrontendMessage;
+use crate::bundler::*;
 #[derive(Serialize, Deserialize)]
 pub struct RollupDBMessage {
@@ -18,6 +20,8 @@ pub struct RollupDBMessage {
     pub add_processed_transaction: Option<Transaction>,
     pub frontend_get_tx: Option<Hash>,
     pub add_settle_proof: Option<Hash>,
+    // Testing purposes
+    pub bundle_tx: bool
 }
 #[derive(Serialize, Debug, Default)]
@@ -39,6 +43,7 @@ impl RollupDB {
         };
         while let Ok(message) = rollup_db_receiver.recv() {
+            log::info!("Received RollupDBMessage");
             if let Some(accounts_to_lock) = message.lock_accounts {
                 // Lock accounts, by removing them from the accounts_db hashmap, and adding them to locked accounts
                 let _ = accounts_to_lock.iter().map(|pubkey| {
                     db.locked_accounts
                         .insert(pubkey.clone(), db.accounts_db.remove(pubkey).unwrap())
                 });
             } else if let Some(get_this_hash_tx) = message.frontend_get_tx {
+                log::info!("Getting tx for frontend");
                 let req_tx = db.transactions.get(&get_this_hash_tx).unwrap();
                 frontend_sender
@@ -56,6 +62,42 @@
                     .await
                     .unwrap();
             } else if let Some(tx) = message.add_processed_transaction {
+                log::info!("Adding processed tx");
+                // unlocking accounts
+                let locked_keys = tx.message.account_keys.clone(); // get the keys
+
+                // locked_keys.iter().for_each(
+                //     |pubkey| if db.locked_accounts.contains_key(&pubkey) {
+                //         db.locked_accounts.remove(&pubkey);
+                //     }
+                // );
+
+                for pubkey in locked_keys {
+                    if let Some(account) = db.locked_accounts.remove(&pubkey) {
+                        db.accounts_db.insert(pubkey, account); // Unlock and restore
+                    }
+                }
+                // send transaction to the db.transactions
+
+                db.transactions.insert(tx.message.hash(), tx.clone());
+                log::info!("PROCESSED TX: {}", db.transactions.len());
+
+                // communication channel with database
+                // communication with the frontend
+            } else if message.bundle_tx {
+                log::info!("BUNDLING TX");
+                let mut tx_bundler = TransferBundler::new();
+                for (_, tx) in db.transactions.clone() {
+                    tx_bundler.bundle(tx);
+                }
+                let final_ixs = tx_bundler.generate_final();
+                log::info!("\nFinal Transfer Ixs:");
+                for ix in final_ixs {
+                    if let Some((from, to, amount)) = TransferBundler::parse_instruction(&ix) {
+                        log::info!("{from} -> {to}: {amount}");
+                    }
+                }
+                log::info!("BUNDLING DONE");
+                db.transactions.clear();
             }
         }
     }
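// Editorial aside (not part of the diff): the pre-existing locking branch above builds an
// iterator with `accounts_to_lock.iter().map(...)` and immediately discards it. Rust
// iterators are lazy, so that closure never runs and nothing is actually moved into
// `locked_accounts`. A plain loop performs the side effect, mirroring the unlock loop this
// diff adds; sketch assuming both RollupDB maps are HashMap<Pubkey, AccountSharedData>:
use std::collections::HashMap;
use solana_sdk::{account::AccountSharedData, pubkey::Pubkey};

fn lock_accounts(
    accounts_to_lock: &[Pubkey],
    accounts_db: &mut HashMap<Pubkey, AccountSharedData>,
    locked_accounts: &mut HashMap<Pubkey, AccountSharedData>,
) {
    for pubkey in accounts_to_lock {
        if let Some(account) = accounts_db.remove(pubkey) {
            locked_accounts.insert(*pubkey, account);
        }
    }
}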
diff --git a/rollup_core/src/sequencer.rs b/rollup_core/src/sequencer.rs
index e4f0a1f..a07b88a 100644
--- a/rollup_core/src/sequencer.rs
+++ b/rollup_core/src/sequencer.rs
@@ -1,3 +1,4 @@
+use core::panic;
 use std::{
     collections::{HashMap, HashSet},
     sync::{Arc, RwLock},
 };
@@ -10,20 +11,25 @@
 use solana_client::{nonblocking::rpc_client as nonblocking_rpc_client, rpc_client::RpcClient};
 use solana_compute_budget::compute_budget::ComputeBudget;
 use solana_program_runtime::{
     invoke_context::{self, EnvironmentConfig, InvokeContext},
-    loaded_programs::{BlockRelation, ForkGraph, LoadProgramMetrics, ProgramCacheEntry, ProgramCacheForTxBatch, ProgramRuntimeEnvironments}, sysvar_cache, timings::ExecuteTimings,
+    loaded_programs::{BlockRelation, ForkGraph, LoadProgramMetrics, ProgramCacheEntry, ProgramCacheForTxBatch, ProgramRuntimeEnvironments}, sysvar_cache,
 };
 use solana_bpf_loader_program::syscalls::create_program_runtime_environment_v1;
 use solana_sdk::{
-    account::{AccountSharedData, ReadableAccount}, clock::{Epoch, Slot}, feature_set::FeatureSet, fee::FeeStructure, hash::Hash, pubkey::Pubkey, rent::Rent, rent_collector::RentCollector, transaction::{SanitizedTransaction, Transaction}, transaction_context::TransactionContext
+    account::{AccountSharedData, ReadableAccount}, clock::{Epoch, Slot}, feature_set::FeatureSet, fee::FeeStructure, hash::Hash, instruction, pubkey::Pubkey, rent::Rent, rent_collector::RentCollector, sysvar::instructions, transaction::{SanitizedTransaction, Transaction}, transaction_context::{IndexOfAccount, TransactionContext},
 };
+use solana_timings::ExecuteTimings;
 use solana_svm::{
     message_processor::MessageProcessor,
+    program_loader::load_program_with_pubkey,
     transaction_processing_callback::TransactionProcessingCallback,
-    transaction_processor::{TransactionBatchProcessor, TransactionProcessingEnvironment},
+    transaction_processor::{TransactionBatchProcessor, TransactionProcessingConfig, TransactionProcessingEnvironment},
 };
 use crate::{rollupdb::RollupDBMessage, settle::settle_state};
+use crate::loader::RollupAccountLoader;
+use crate::processor::*;
+use crate::bundler::*;
 pub fn run(
     sequencer_receiver_channel: CBReceiver<Transaction>,
@@ -40,6 +46,7 @@
                 frontend_get_tx: None,
                 add_settle_proof: None,
                 add_processed_transaction: None,
+                bundle_tx: false
             })
             .map_err(|_| anyhow!("failed to send message to rollupdb"))?;
@@ -52,6 +59,12 @@
         let feature_set = FeatureSet::all_enabled();
         let fee_structure = FeeStructure::default();
         let lamports_per_signature = fee_structure.lamports_per_signature;
+        let rent_collector = RentCollector::default();
+        let mut timings = ExecuteTimings::default();
+        let fork_graph = Arc::new(RwLock::new(RollupForkGraph {}));
+
+
+        // let rent_collector = RentCollector::default();
         // Solana runtime.
@@ -70,7 +83,7 @@
         let rpc_client_temp = RpcClient::new("https://api.devnet.solana.com".to_string());
-        let accounts_data = transaction
+        let accounts_data = transaction // adding reference
             .message
             .account_keys
             .iter()
@@ -82,86 +95,104 @@
             })
            .collect::<Vec<(Pubkey, AccountSharedData)>>();
-        let mut transaction_context = TransactionContext::new(accounts_data, Rent::default(), 0, 0);
+        let mut used_cu = 0u64;
+        let sanitized = SanitizedTransaction::try_from_legacy_transaction( // to check here for the problem
+            Transaction::from(transaction.clone()),
+            &HashSet::new(),
+        );
+        log::info!("{:?}", sanitized.clone());
-        let runtime_env = Arc::new(
-            create_program_runtime_environment_v1(&feature_set, &compute_budget, false, false)
-                .unwrap(),
-        );
+        let needed_programs: Vec<(Pubkey, AccountSharedData)> =
+            accounts_data
+                .iter()
+                .filter(|(pubkey, account)| account.executable())
+                .map(|(pubkey, account)| (pubkey.clone(), account.clone()))
+                .collect();
-        let mut prog_cache = ProgramCacheForTxBatch::new(
-            Slot::default(),
-            ProgramRuntimeEnvironments {
-                program_runtime_v1: runtime_env.clone(),
-                program_runtime_v2: runtime_env,
-            },
-            None,
-            Epoch::default(),
-        );
+        log::info!("accounts_data: {needed_programs:?}");
-        let sysvar_c = sysvar_cache::SysvarCache::default();
-        let env = EnvironmentConfig::new(
-            Hash::default(),
-            None,
-            None,
-            Arc::new(feature_set),
-            lamports_per_signature,
-            &sysvar_c,
+        let mut rollup_account_loader = RollupAccountLoader::new(
+            &rpc_client_temp,
         );
-        // let default_env = EnvironmentConfig::new(blockhash, epoch_total_stake, epoch_vote_accounts, feature_set, lamports_per_signature, sysvar_cache)
-
-        // let processing_environment = TransactionProcessingEnvironment {
-        //     blockhash: Hash::default(),
-        //     epoch_total_stake: None,
-        //     epoch_vote_accounts: None,
-        //     feature_set: Arc::new(feature_set),
-        //     fee_structure: Some(&fee_structure),
-        //     lamports_per_signature,
-        //     rent_collector: Some(&rent_collector),
-        // };
-
-        let mut invoke_context = InvokeContext::new(
-            &mut transaction_context,
-            &mut prog_cache,
-            env,
-            None,
-            compute_budget.to_owned()
+
+        for (pubkey, account) in needed_programs.iter() {
+            rollup_account_loader.add_account(*pubkey, account.clone());
+        }
+
+
+        let processor = create_transaction_batch_processor(
+            &rollup_account_loader,
+            &feature_set,
+            &compute_budget,
+            Arc::clone(&fork_graph),
         );
-        let mut used_cu = 0u64;
-        let sanitized = SanitizedTransaction::try_from_legacy_transaction(
-            Transaction::from(transaction.clone()),
-            &HashSet::new(),
-        )
-        ;
-        log::info!("{:?}", sanitized.clone());
+        let checks = get_transaction_check_results(1, fee_structure.lamports_per_signature);
+        let sanitized_transaction = &[sanitized.unwrap()];
+        let processing_environment = TransactionProcessingEnvironment {
+            blockhash: Hash::default(),
+            epoch_total_stake: None,
+            epoch_vote_accounts: None,
+            feature_set: Arc::new(feature_set),
+            fee_structure: Some(&fee_structure),
+            lamports_per_signature: fee_structure.lamports_per_signature,
+            rent_collector: Some(&rent_collector),
+        };
+
+        let processing_config = TransactionProcessingConfig {
+            compute_budget: Some(compute_budget),
+            ..Default::default()
+        };
-        let mut timings = ExecuteTimings::default();
-
-        let result_msg = MessageProcessor::process_message(
-            &sanitized.unwrap().message(),
-            &vec![],
-            &mut invoke_context,
-            &mut timings,
-            &mut used_cu,
-        );
-        // Send processed transaction to db for storage and availability
+        let status = processor.load_and_execute_sanitized_transactions(
+            &rollup_account_loader,
+            sanitized_transaction,
+            checks,
+            &processing_environment,
+            &processing_config
+        );
+        log::info!("{:#?}", status.processing_results);
+
+        // Send processed transaction to db for storage and availability
         rollupdb_sender
             .send(RollupDBMessage {
                 lock_accounts: None,
-                add_processed_transaction: Some(transaction),
+                add_processed_transaction: Some(transaction.clone()),
                 frontend_get_tx: None,
                 add_settle_proof: None,
+                bundle_tx: false
             })
             .unwrap();
+        // View sent processed tx details
+        let ixs = get_transaction_instructions(&transaction);
+        let acc_keys: &[Pubkey] = &transaction.message.account_keys;
+        if let Some((from, to, amount)) = TransferBundler::parse_compiled_instruction(&ixs[0], acc_keys) {
+            log::info!("
+            Transaction Info\n
+            From: {from:?}\n
+            To: {to:?}\n
+            Amount: {amount}
+
+            ")
+        }
+
         // Call settle if transaction amount since last settle hits 10
         if tx_counter >= 10 {
+            // bundle transfer tx test
+            rollupdb_sender.send(RollupDBMessage {
+                lock_accounts: None,
+                add_processed_transaction: None,
+                add_settle_proof: None,
+                frontend_get_tx: None,
+                bundle_tx: true
+            }).unwrap();
+
             // Lock db to avoid state changes during settlement
             // Prepare root hash, or your own proof to send to chain
             // let _settle_tx_hash = settle_state("proof".into()).await?;
             tx_counter = 0u32;
+
+
+            // CREATE A PROOF FOR THE CHANGES STATE
         }
     }
     Ok(())
 }
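// Editorial aside (not part of the diff): a small helper sketch for summarising the batch
// outcome logged above via `status.processing_results` before the rollup trusts it (e.g.
// before building the settlement proof hinted at in the comment). It only assumes each
// entry behaves like a `Result`, which is how the diff treats them; the exact SVM output
// type is an assumption of this sketch.
fn count_successes<T, E>(processing_results: &[Result<T, E>]) -> usize {
    processing_results.iter().filter(|res| res.is_ok()).count()
}
// e.g. log::info!("{} of {} executed ok", count_successes(&status.processing_results), sanitized_transaction.len());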
+
+
+
+
+    // //****************************************************************************************************/
+    // // let instructions = &transaction.message.instructions;
+    // // // let index_array_of_program_pubkeys = Vec::with_capacity(instructions.len());
+    // // let program_ids = &transaction.message.account_keys;
+    // // let needed_programs: Vec<&Pubkey> = instructions
+    // //     .iter()
+    // //     .map(
+    // //         |instruction|
+    // //         instruction.program_id(program_ids)).collect();
+    // //****************************************************************************************************/
+    // let mut transaction_context = TransactionContext::new(
+    //     accounts_data,
+    //     Rent::default(),
+    //     compute_budget.max_instruction_stack_depth,
+    //     compute_budget.max_instruction_trace_length,
+    // );
+    // // transaction_context.get_current_instruction_context().unwrap().get_index_of_program_account_in_transaction(2).unwrap();
+    // // transaction_context.push();
+
+
+    // // here we have to load them somehow
+
+    // let runtime_env = Arc::new(
+    //     create_program_runtime_environment_v1(&feature_set, &compute_budget, false, false)
+    //         .unwrap(),
+    // );
+
+    // let mut prog_cache = ProgramCacheForTxBatch::new(
+    //     Slot::default(),
+    //     ProgramRuntimeEnvironments {
+    //         program_runtime_v1: runtime_env.clone(),
+    //         program_runtime_v2: runtime_env,
+    //     },
+    //     None,
+    //     Epoch::default(),
+    // );
+
+
+    // // prog_cache.replenish(accounts_data., entry)
+
+    // let sysvar_c = sysvar_cache::SysvarCache::default();
+    // let env = EnvironmentConfig::new(
+    //     Hash::default(),
+    //     None,
+    //     None,
+    //     Arc::new(feature_set),
+    //     lamports_per_signature,
+    //     &sysvar_c,
+    // );
+    // // let default_env = EnvironmentConfig::new(blockhash, epoch_total_stake, epoch_vote_accounts, feature_set, lamports_per_signature, sysvar_cache)
+
+    // // let processing_environment = TransactionProcessingEnvironment {
+    // //     blockhash: Hash::default(),
+    // //     epoch_total_stake: None,
+    // //     epoch_vote_accounts: None,
+    // //     feature_set: Arc::new(feature_set),
+    // //     fee_structure: Some(&fee_structure),
+    // //     lamports_per_signature,
+    // //     rent_collector: Some(&rent_collector),
+    // // };
+
+
+    // // for (pubkey, account) in rollup_account_loader.cache.read().unwrap().iter() {
+    // //     let _p = rollup_account_loader.get_account_shared_data(pubkey);
+    // //     log::info!("account: {_p:?}");
+    // // }
+    // // let cache = &rollup_account_loader.cache.read().unwrap();
+    // // let pew = cache.keys().next().cloned().unwrap();
+    // // let owner = cache.get(&pew).unwrap().owner();
+    // // log::debug!("pubkey: {owner:?}");
+
+
+    // let program_cache_entry = load_program_with_pubkey(
+    //     &rollup_account_loader,
+    //     &prog_cache.environments,
+    //     &rollup_account_loader.cache.read().unwrap().keys().next().cloned().unwrap(), //&needed_programs[0].0,
+    //     0,
+    //     &mut ExecuteTimings::default(),
+    //     false
+    // );
+
+    // log::info!("program_cache_entry: {program_cache_entry:?}");
+
+    // prog_cache.replenish(
+    //     needed_programs[0].0,
+    //     program_cache_entry.unwrap(),
+    // );
+    // // {
+    // //     let instruction_ctx = transaction_context.get_current_instruction_context();
+    // //     log::debug!("instruction_ctx: {instruction_ctx:?}");
+    // // }
+    // // let instruction_ctx_height = transaction_context.get_instruction_context_stack_height();
+
+    // // log::debug!("instruction_ctx_height: {instruction_ctx_height}");
+
+    // // let instruction_ctx_next = transaction_context.get_next_instruction_context();
+    // // // let instruction_ctx = transaction_context.get_next_instruction_context();
+
+    // // log::debug!("instruction_ctx: {instruction_ctx_next:?}");
+
+
+
+    // let mut invoke_context = InvokeContext::new(
+    //     &mut transaction_context,
+    //     &mut prog_cache,
+    //     env,
+    //     None,
+    //     compute_budget.to_owned()
+    // );
+
+
+    // // let instruction_ctx_2 = invoke_context.transaction_context.get_current_instruction_context();
+    // // log::debug!("instruction_ctx_2: {instruction_ctx_2:?}");
+    // // let instruction_ctx_height = invoke_context.transaction_context.get_instruction_context_stack_height();
+    // // log::debug!("instruction_ctx_height: {instruction_ctx_height}");
+    // // let instruction_ctx_height = invoke_context.transaction_context.get_instruction_context_at_index_in_trace(0);
+    // // log::debug!("instruction_ctx_height: {instruction_ctx_height:?}");
+
+
+
+
+    // // HAS TO BE AN ADDRESS OF THE PROGRAM
+
+    // // invoke_context.program_cache_for_tx_batch.replenish(key, program_cache_entry.unwrap());
+
+
+
+
+
+
+    // // let account_index = invoke_context
+    // //     .transaction_context
+    // //     .find_index_of_account(&instructions::id());
+
+    // // if account_index.is_none() {
+    // //     panic!("Could not find instructions account");
+    // // }
+
+    // let program_indices: Vec<IndexOfAccount> = vec![0];
+    // let result_msg = MessageProcessor::process_message(
+    //     &sanitized.unwrap().message().to_owned(), // ERROR WITH SOLANA_SVM VERSION
+    //     // ? should be fixed with help of changing versions of solana-svm ?
+    //     // &sanitized.unwrap().message().to_owned(),
+    //     &[program_indices], // TODO: automate this process
+    //     &mut invoke_context,
+    //     &mut timings,
+    //     &mut used_cu,
+    // );
+
+    // log::info!("{:?}", &result_msg);
+    // log::info!("The message was done successfully");
+
+
+
+
+
+// TWO WAYS -> TRANSACTIONBATCHPROCESSOR OR MESSAGEPROCESSOR
+
+// PAYTUBE in SVM FOLDER
+
+// The question of how often to pull/push the state out of mainnet state
+
+// PDA as a *treasury*, to solve the problem of SOL that could disappear from an account
+
+// to create a kind of program that will lock funds on mainnet
+
+// MagicBlock relying on their infrastructure
+
+// To make a buffer between sending two transactions
+
+
+
 // / In order to use the `TransactionBatchProcessor`, another trait - Solana
 // / Program Runtime's `ForkGraph` - must be implemented, to tell the batch
 // / processor how to work across forks.
diff --git a/rollup_core/src/settle.rs b/rollup_core/src/settle.rs
index bf5dab3..e16d7d8 100644
--- a/rollup_core/src/settle.rs
+++ b/rollup_core/src/settle.rs
@@ -1,6 +1,6 @@
 use anyhow::Result;
 use solana_client::nonblocking::rpc_client::RpcClient;
-use solana_sdk::{blake3::Hash, transaction::Transaction};
+use solana_sdk::{hash::Hash, transaction::Transaction};
 // Settle the state on solana, called by sequencer
 pub async fn settle_state(proof: Hash) -> Result {