From 62af797af7a0461440ca810f527581d6fcfaaede Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Mon, 8 Jul 2024 21:36:25 -0600 Subject: [PATCH 1/8] ci: fix codecov (#26) * test: new structure * fix: codecov rust --- .github/workflows/ci.yml | 9 +- src/db/cache/index_cache.rs | 16 +- src/db/cache/input_rune_balance.rs | 29 ++++ src/db/cache/mod.rs | 1 + src/db/cache/transaction_cache.rs | 13 +- src/db/cache/transaction_location.rs | 2 +- src/db/cache/utils.rs | 245 +++++++++++---------------- src/db/mod.rs | 2 +- 8 files changed, 148 insertions(+), 169 deletions(-) create mode 100644 src/db/cache/input_rune_balance.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 428bcda..615a00d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -142,10 +142,15 @@ jobs: docker-compose -f docker/docker-compose.dev.postgres.yml up -d docker-compose -f docker/docker-compose.dev.postgres.yml logs -t -f --no-color &> docker-compose-logs.txt & - - name: Cargo test + - name: Update Rust run: | rustup update - RUST_BACKTRACE=1 cargo test --all -- --test-threads=1 + + - name: Install and run cargo-tarpaulin + run: | + cargo install cargo-tarpaulin + cargo --version + cargo tarpaulin --out lcov -- --test-threads=1 - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v4 diff --git a/src/db/cache/index_cache.rs b/src/db/cache/index_cache.rs index f81456d..a0d727b 100644 --- a/src/db/cache/index_cache.rs +++ b/src/db/cache/index_cache.rs @@ -21,10 +21,8 @@ use crate::{ }; use super::{ - db_cache::DbCache, - transaction_cache::{InputRuneBalance, TransactionCache}, - transaction_location::TransactionLocation, - utils::move_block_output_cache_to_output_cache, + db_cache::DbCache, input_rune_balance::InputRuneBalance, transaction_cache::TransactionCache, + transaction_location::TransactionLocation, utils::move_block_output_cache_to_output_cache, }; /// Holds rune data across multiple blocks for faster 
computations. Processes rune events as they happen during transactions and @@ -106,7 +104,10 @@ impl IndexCache { try_debug!(ctx, "INPUT {rune_id} {balances:?} {location}"); } if input_runes.len() > 0 { - try_debug!(ctx, "First output: {first_eligible_output:?}, total_outputs: {total_outputs}"); + try_debug!( + ctx, + "First output: {first_eligible_output:?}, total_outputs: {total_outputs}" + ); } } self.tx_cache = TransactionCache::new( @@ -125,7 +126,10 @@ impl IndexCache { } pub fn end_block(&mut self) { - move_block_output_cache_to_output_cache(&mut self.block_output_cache, &mut self.output_cache); + move_block_output_cache_to_output_cache( + &mut self.block_output_cache, + &mut self.output_cache, + ); } pub async fn apply_runestone( diff --git a/src/db/cache/input_rune_balance.rs b/src/db/cache/input_rune_balance.rs new file mode 100644 index 0000000..be65068 --- /dev/null +++ b/src/db/cache/input_rune_balance.rs @@ -0,0 +1,29 @@ +#[derive(Debug, Clone)] +pub struct InputRuneBalance { + /// Previous owner of this balance. If this is `None`, it means the balance was just minted or premined. + pub address: Option, + /// How much balance was input to this transaction. 
+ pub amount: u128, +} + +#[cfg(test)] +impl InputRuneBalance { + pub fn dummy() -> Self { + InputRuneBalance { + address: Some( + "bc1p8zxlhgdsq6dmkzk4ammzcx55c3hfrg69ftx0gzlnfwq0wh38prds0nzqwf".to_string(), + ), + amount: 1000, + } + } + + pub fn amount(&mut self, amount: u128) -> &mut Self { + self.amount = amount; + return self; + } + + pub fn address(&mut self, address: Option) -> &mut Self { + self.address = address; + return self; + } +} diff --git a/src/db/cache/mod.rs b/src/db/cache/mod.rs index d09a35a..c558589 100644 --- a/src/db/cache/mod.rs +++ b/src/db/cache/mod.rs @@ -1,5 +1,6 @@ pub mod db_cache; pub mod index_cache; +pub mod input_rune_balance; pub mod transaction_cache; pub mod transaction_location; pub mod utils; diff --git a/src/db/cache/transaction_cache.rs b/src/db/cache/transaction_cache.rs index c8d3d76..3e4770c 100644 --- a/src/db/cache/transaction_cache.rs +++ b/src/db/cache/transaction_cache.rs @@ -17,15 +17,10 @@ use crate::{ try_debug, try_info, try_warn, }; -use super::{transaction_location::TransactionLocation, utils::move_rune_balance_to_output}; - -#[derive(Debug, Clone)] -pub struct InputRuneBalance { - /// Previous owner of this balance. If this is `None`, it means the balance was just minted or premined. - pub address: Option, - /// How much balance was input to this transaction. - pub amount: u128, -} +use super::{ + input_rune_balance::InputRuneBalance, transaction_location::TransactionLocation, + utils::move_rune_balance_to_output, +}; /// Holds cached data relevant to a single transaction during indexing. 
pub struct TransactionCache { diff --git a/src/db/cache/transaction_location.rs b/src/db/cache/transaction_location.rs index 4eaf127..71dad84 100644 --- a/src/db/cache/transaction_location.rs +++ b/src/db/cache/transaction_location.rs @@ -34,7 +34,7 @@ impl fmt::Display for TransactionLocation { #[cfg(test)] impl TransactionLocation { - pub fn factory() -> Self { + pub fn dummy() -> Self { TransactionLocation { network: Network::Bitcoin, block_hash: "0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5" diff --git a/src/db/cache/utils.rs b/src/db/cache/utils.rs index 009ebed..fc13ed3 100644 --- a/src/db/cache/utils.rs +++ b/src/db/cache/utils.rs @@ -16,7 +16,7 @@ use crate::{ try_info, try_warn, }; -use super::{transaction_cache::InputRuneBalance, transaction_location::TransactionLocation}; +use super::{input_rune_balance::InputRuneBalance, transaction_location::TransactionLocation}; /// Takes all transaction inputs and transforms them into rune balances to be allocated for operations. Looks inside an output LRU /// cache and the DB when there are cache misses. @@ -88,7 +88,7 @@ pub fn move_block_output_cache_to_output_cache( block_output_cache.clear(); } -/// Creates a new ledger entry. +/// Creates a new ledger entry while incrementing the `next_event_index`. 
pub fn new_ledger_entry( location: &TransactionLocation, amount: Option, @@ -286,166 +286,111 @@ pub fn is_rune_mintable( #[cfg(test)] mod test { - use std::collections::{HashMap, VecDeque}; - use test_case::test_case; + mod move_balance { + use std::collections::{HashMap, VecDeque}; - use bitcoin::ScriptBuf; - use chainhook_sdk::utils::Context; - use ordinals::RuneId; + use bitcoin::ScriptBuf; + use chainhook_sdk::utils::Context; + use ordinals::RuneId; - use crate::db::{ - cache::{ - transaction_cache::InputRuneBalance, transaction_location::TransactionLocation, - utils::move_rune_balance_to_output, - }, - models::{db_ledger_operation::DbLedgerOperation, db_rune::DbRune}, - types::{pg_numeric_u128::PgNumericU128, pg_numeric_u64::PgNumericU64}, - }; - - use super::is_rune_mintable; - - #[test] - fn receives_are_registered_first() { - let ctx = Context::empty(); - let location = TransactionLocation { - network: bitcoin::Network::Bitcoin, - block_hash: "00000000000000000002c0cc73626b56fb3ee1ce605b0ce125cc4fb58775a0a9" - .to_string(), - block_height: 840002, - timestamp: 0, - tx_id: "37cd29676d626492cd9f20c60bc4f20347af9c0d91b5689ed75c05bb3e2f73ef".to_string(), - tx_index: 2936, + use crate::db::{ + cache::{ + input_rune_balance::InputRuneBalance, transaction_location::TransactionLocation, + utils::move_rune_balance_to_output, + }, + models::db_ledger_operation::DbLedgerOperation, }; - let mut available_inputs = VecDeque::new(); - // An input from a previous tx - available_inputs.push_back(InputRuneBalance { - address: Some( - "bc1p8zxlhgdsq6dmkzk4ammzcx55c3hfrg69ftx0gzlnfwq0wh38prds0nzqwf".to_string(), - ), - amount: 1000, - }); - // A mint - available_inputs.push_back(InputRuneBalance { - address: None, - amount: 1000, - }); - let mut eligible_outputs = HashMap::new(); - eligible_outputs.insert( - 0u32, - ScriptBuf::from_hex( - "5120388dfba1b0069bbb0ad5eef62c1a94c46e91a3454accf40bf34b80f75e2708db", - ) - .unwrap(), - ); - let mut next_event_index = 0; - let results 
= move_rune_balance_to_output( - &location, - Some(0), - &RuneId::new(840000, 25).unwrap(), - &mut available_inputs, - &eligible_outputs, - 0, - &mut next_event_index, - &ctx, - ); - let receive = results.get(0).unwrap(); - assert_eq!(receive.event_index.0, 0u32); - assert_eq!(receive.operation, DbLedgerOperation::Receive); - assert_eq!(receive.amount.unwrap().0, 2000u128); + #[test] + fn ledger_writes_receive_before_send() { + let address = + Some("bc1p8zxlhgdsq6dmkzk4ammzcx55c3hfrg69ftx0gzlnfwq0wh38prds0nzqwf".to_string()); + let mut available_inputs = VecDeque::new(); + let mut input1 = InputRuneBalance::dummy(); + input1.address(address.clone()).amount(1000); + available_inputs.push_back(input1); + let mut input2 = InputRuneBalance::dummy(); + input2.address(None).amount(1000); + available_inputs.push_back(input2); + let mut eligible_outputs = HashMap::new(); + eligible_outputs.insert( + 0u32, + ScriptBuf::from_hex( + "5120388dfba1b0069bbb0ad5eef62c1a94c46e91a3454accf40bf34b80f75e2708db", + ) + .unwrap(), + ); + let mut next_event_index = 0; - let send = results.get(1).unwrap(); - assert_eq!(send.event_index.0, 1u32); - assert_eq!(send.operation, DbLedgerOperation::Send); - assert_eq!(send.amount.unwrap().0, 1000u128); + let results = move_rune_balance_to_output( + &TransactionLocation::dummy(), + Some(0), + &RuneId::new(840000, 25).unwrap(), + &mut available_inputs, + &eligible_outputs, + 0, + &mut next_event_index, + &Context::empty(), + ); - assert_eq!(results.len(), 2); - } + let receive = results.get(0).unwrap(); + assert_eq!(receive.event_index.0, 0u32); + assert_eq!(receive.operation, DbLedgerOperation::Receive); + assert_eq!(receive.amount.unwrap().0, 2000u128); - #[test_case(840000 => false; "early block")] - #[test_case(840500 => false; "late block")] - #[test_case(840150 => true; "block in window")] - #[test_case(840100 => true; "first block")] - #[test_case(840200 => true; "last block")] - fn mint_block_height_terms_are_validated(block_height: u64) 
-> bool { - let mut rune = DbRune::factory(); - rune.terms_height_start(Some(PgNumericU64(840100))); - rune.terms_height_end(Some(PgNumericU64(840200))); - let mut location = TransactionLocation::factory(); - location.block_height(block_height); - is_rune_mintable(&rune, 0, &location) - } + let send = results.get(1).unwrap(); + assert_eq!(send.event_index.0, 1u32); + assert_eq!(send.operation, DbLedgerOperation::Send); + assert_eq!(send.amount.unwrap().0, 1000u128); - #[test_case(840000 => false; "early block")] - #[test_case(840500 => false; "late block")] - #[test_case(840150 => true; "block in window")] - #[test_case(840100 => true; "first block")] - #[test_case(840200 => true; "last block")] - fn mint_block_offset_terms_are_validated(block_height: u64) -> bool { - let mut rune = DbRune::factory(); - rune.terms_offset_start(Some(PgNumericU64(100))); - rune.terms_offset_end(Some(PgNumericU64(200))); - let mut location = TransactionLocation::factory(); - location.block_height(block_height); - is_rune_mintable(&rune, 0, &location) + assert_eq!(results.len(), 2); + } } - #[test_case(0 => true; "first mint")] - #[test_case(49 => true; "last mint")] - #[test_case(50 => false; "out of range")] - fn mint_cap_is_validated(cap: u128) -> bool { - let mut rune = DbRune::factory(); - rune.terms_cap(Some(PgNumericU128(50))); - is_rune_mintable(&rune, cap, &TransactionLocation::factory()) - } + mod mint_validation { + use test_case::test_case; - // use std::{collections::HashMap, num::NonZeroUsize, str::FromStr}; + use crate::db::{ + cache::{transaction_location::TransactionLocation, utils::is_rune_mintable}, + models::db_rune::DbRune, + types::{pg_numeric_u128::PgNumericU128, pg_numeric_u64::PgNumericU64}, + }; - // use chainhook_sdk::{ - // types::{ - // bitcoin::{OutPoint, TxIn}, - // TransactionIdentifier, - // }, - // utils::Context, - // }; - // use lru::LruCache; - // use ordinals::RuneId; + #[test_case(840000 => false; "early block")] + #[test_case(840500 => false; 
"late block")] + #[test_case(840150 => true; "block in window")] + #[test_case(840100 => true; "first block")] + #[test_case(840200 => true; "last block")] + fn mint_block_height_terms_are_validated(block_height: u64) -> bool { + let mut rune = DbRune::factory(); + rune.terms_height_start(Some(PgNumericU64(840100))); + rune.terms_height_end(Some(PgNumericU64(840200))); + let mut location = TransactionLocation::dummy(); + location.block_height(block_height); + is_rune_mintable(&rune, 0, &location) + } - // use crate::db::cache::transaction_cache::InputRuneBalance; + #[test_case(840000 => false; "early block")] + #[test_case(840500 => false; "late block")] + #[test_case(840150 => true; "block in window")] + #[test_case(840100 => true; "first block")] + #[test_case(840200 => true; "last block")] + fn mint_block_offset_terms_are_validated(block_height: u64) -> bool { + let mut rune = DbRune::factory(); + rune.terms_offset_start(Some(PgNumericU64(100))); + rune.terms_offset_end(Some(PgNumericU64(200))); + let mut location = TransactionLocation::dummy(); + location.block_height(block_height); + is_rune_mintable(&rune, 0, &location) + } - // #[test] - // fn from_output_cache() { - // let tx_inputs = vec![TxIn { - // previous_output: OutPoint { - // txid: TransactionIdentifier { - // hash: "aea76e5ef8135851d0387074cf7672013779e4506e56122e0e698e12ede62681" - // .to_string(), - // }, - // vout: 2, - // value: 100, - // block_height: 848300, - // }, - // script_sig: "".to_string(), - // sequence: 1, - // witness: vec![], - // }]; - // let mut value = HashMap::new(); - // value.insert( - // RuneId::from_str("840000:1").unwrap(), - // vec![InputRuneBalance { - // address: Some("1EDYZPvGqKzZYp6DoTtcgXwvSAkA9d9UKU".to_string()), - // amount: 10000, - // }], - // ); - // let mut output_cache: LruCache<(String, u32), HashMap>> = - // LruCache::new(NonZeroUsize::new(2).unwrap()); - // output_cache.put( - // ( - // 
"aea76e5ef8135851d0387074cf7672013779e4506e56122e0e698e12ede62681".to_string(), - // 2, - // ), - // value, - // ); - // let ctx = Context::empty(); - // } + #[test_case(0 => true; "first mint")] + #[test_case(49 => true; "last mint")] + #[test_case(50 => false; "out of range")] + fn mint_cap_is_validated(cap: u128) -> bool { + let mut rune = DbRune::factory(); + rune.terms_cap(Some(PgNumericU128(50))); + is_rune_mintable(&rune, cap, &TransactionLocation::dummy()) + } + } } diff --git a/src/db/mod.rs b/src/db/mod.rs index fbca077..3e68746 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -1,6 +1,6 @@ use std::{collections::HashMap, process, str::FromStr}; -use cache::transaction_cache::InputRuneBalance; +use cache::input_rune_balance::InputRuneBalance; use chainhook_sdk::utils::Context; use models::{ db_balance_change::DbBalanceChange, db_ledger_entry::DbLedgerEntry, db_rune::DbRune, From cad4124aa81c83cd0fc1830c1f9ffd4241812774 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Tue, 9 Jul 2024 13:55:18 -0600 Subject: [PATCH 2/8] test: balance moves to outputs and sequential ledger entries (#27) * test: balance moves * test: sequential ledger --- src/db/cache/transaction_cache.rs | 12 +- src/db/cache/utils.rs | 310 ++++++++++++++++++++++++++++-- 2 files changed, 297 insertions(+), 25 deletions(-) diff --git a/src/db/cache/transaction_cache.rs b/src/db/cache/transaction_cache.rs index 3e4770c..f2fbfc1 100644 --- a/src/db/cache/transaction_cache.rs +++ b/src/db/cache/transaction_cache.rs @@ -9,7 +9,7 @@ use ordinals::{Cenotaph, Edict, Etching, Rune, RuneId}; use crate::{ db::{ - cache::utils::{is_rune_mintable, new_ledger_entry}, + cache::utils::{is_rune_mintable, new_sequential_ledger_entry}, models::{ db_ledger_entry::DbLedgerEntry, db_ledger_operation::DbLedgerOperation, db_rune::DbRune, }, @@ -66,7 +66,7 @@ impl TransactionCache { let mut results = vec![]; for (rune_id, unallocated) in self.input_runes.iter() { for balance in unallocated { - 
results.push(new_ledger_entry( + results.push(new_sequential_ledger_entry( &self.location, Some(balance.amount), *rune_id, @@ -132,7 +132,7 @@ impl TransactionCache { }, ); } - let entry = new_ledger_entry( + let entry = new_sequential_ledger_entry( &self.location, None, rune_id, @@ -154,7 +154,7 @@ impl TransactionCache { // If the runestone that produced the cenotaph contained an etching, the etched rune has supply zero and is unmintable. let db_rune = DbRune::from_cenotaph_etching(rune, number, &self.location); self.etching = Some(db_rune.clone()); - let entry = new_ledger_entry( + let entry = new_sequential_ledger_entry( &self.location, None, rune_id, @@ -194,7 +194,7 @@ impl TransactionCache { amount: terms_amount.0, }, ); - Some(new_ledger_entry( + Some(new_sequential_ledger_entry( &self.location, Some(terms_amount.0), rune_id.clone(), @@ -226,7 +226,7 @@ impl TransactionCache { self.location ); // This entry does not go in the input runes, it gets burned immediately. - Some(new_ledger_entry( + Some(new_sequential_ledger_entry( &self.location, Some(terms_amount.0), rune_id.clone(), diff --git a/src/db/cache/utils.rs b/src/db/cache/utils.rs index fc13ed3..cb1145d 100644 --- a/src/db/cache/utils.rs +++ b/src/db/cache/utils.rs @@ -89,7 +89,7 @@ pub fn move_block_output_cache_to_output_cache( } /// Creates a new ledger entry while incrementing the `next_event_index`. -pub fn new_ledger_entry( +pub fn new_sequential_ledger_entry( location: &TransactionLocation, amount: Option, rune_id: RuneId, @@ -117,15 +117,25 @@ pub fn new_ledger_entry( entry } -/// Takes `amount` rune balance from `available_inputs` and moves it to `output` by generating the correct ledger entries. -/// Modifies `available_inputs` to consume balance that is already moved. If `amount` is zero, all remaining balances will be -/// transferred. If `output` is `None`, the runes will be burnt. +/// Moves rune balance from transaction inputs into a transaction output. 
+/// +/// # Arguments +/// +/// * `location` - Transaction location. +/// * `output` - Output where runes will be moved to. If `None`, runes are burned. +/// * `rune_id` - Rune that is being moved. +/// * `input_balances` - Balances input to this transaction for this rune. This value will be modified by the moves happening in +/// this function. +/// * `outputs` - Transaction outputs eligible to receive runes. +/// * `amount` - Amount of balance to move. If value is zero, all inputs will be moved to the output. +/// * `next_event_index` - Next sequential event index to create. This value will be modified. +/// * `ctx` - Context. pub fn move_rune_balance_to_output( location: &TransactionLocation, output: Option, rune_id: &RuneId, - available_inputs: &mut VecDeque, - eligible_outputs: &HashMap, + input_balances: &mut VecDeque, + outputs: &HashMap, amount: u128, next_event_index: &mut u32, ctx: &Context, @@ -133,7 +143,7 @@ pub fn move_rune_balance_to_output( let mut results = vec![]; // Who is this balance going to? let receiver_address = if let Some(output) = output { - match eligible_outputs.get(&output) { + match outputs.get(&output) { Some(script) => match Address::from_script(script, location.network) { Ok(address) => Some(address.to_string()), Err(e) => { @@ -171,7 +181,7 @@ pub fn move_rune_balance_to_output( let mut senders = vec![]; loop { // Do we still have input balance left to move? - let Some(input_bal) = available_inputs.pop_front() else { + let Some(input_bal) = input_balances.pop_front() else { break; }; // Select the correct move amount. @@ -188,7 +198,7 @@ pub fn move_rune_balance_to_output( // Is there still some balance left on this input? If so, keep it for later but break the loop because we've satisfied the // move amount. 
if balance_taken < input_bal.amount { - available_inputs.push_front(InputRuneBalance { + input_balances.push_front(InputRuneBalance { address: input_bal.address, amount: input_bal.amount - balance_taken, }); @@ -201,7 +211,7 @@ pub fn move_rune_balance_to_output( } // Add the "receive" entry, if applicable. if receiver_address.is_some() && total_sent > 0 { - results.push(new_ledger_entry( + results.push(new_sequential_ledger_entry( location, Some(total_sent), *rune_id, @@ -223,7 +233,7 @@ pub fn move_rune_balance_to_output( } // Add the "send"/"burn" entries. for (balance_taken, sender_address) in senders.iter() { - results.push(new_ledger_entry( + results.push(new_sequential_ledger_entry( location, Some(*balance_taken), *rune_id, @@ -301,6 +311,18 @@ mod test { models::db_ledger_operation::DbLedgerOperation, }; + fn dummy_eligible_output() -> HashMap { + let mut eligible_outputs = HashMap::new(); + eligible_outputs.insert( + 0u32, + ScriptBuf::from_hex( + "5120388dfba1b0069bbb0ad5eef62c1a94c46e91a3454accf40bf34b80f75e2708db", + ) + .unwrap(), + ); + eligible_outputs + } + #[test] fn ledger_writes_receive_before_send() { let address = @@ -312,14 +334,7 @@ mod test { let mut input2 = InputRuneBalance::dummy(); input2.address(None).amount(1000); available_inputs.push_back(input2); - let mut eligible_outputs = HashMap::new(); - eligible_outputs.insert( - 0u32, - ScriptBuf::from_hex( - "5120388dfba1b0069bbb0ad5eef62c1a94c46e91a3454accf40bf34b80f75e2708db", - ) - .unwrap(), - ); + let eligible_outputs = dummy_eligible_output(); let mut next_event_index = 0; let results = move_rune_balance_to_output( @@ -344,6 +359,216 @@ mod test { assert_eq!(send.amount.unwrap().0, 1000u128); assert_eq!(results.len(), 2); + assert_eq!(available_inputs.len(), 0); + } + + #[test] + fn move_to_empty_output_is_burned() { + let address = + Some("bc1p8zxlhgdsq6dmkzk4ammzcx55c3hfrg69ftx0gzlnfwq0wh38prds0nzqwf".to_string()); + let mut available_inputs = VecDeque::new(); + let mut input1 = 
InputRuneBalance::dummy(); + input1.address(address.clone()).amount(1000); + available_inputs.push_back(input1); + + let results = move_rune_balance_to_output( + &TransactionLocation::dummy(), + None, // Burn + &RuneId::new(840000, 25).unwrap(), + &mut available_inputs, + &HashMap::new(), + 0, + &mut 0, + &Context::empty(), + ); + + assert_eq!(results.len(), 1); + let entry1 = results.get(0).unwrap(); + assert_eq!(entry1.operation, DbLedgerOperation::Burn); + assert_eq!(entry1.address, address); + assert_eq!(entry1.amount.unwrap().0, 1000); + assert_eq!(available_inputs.len(), 0); + } + + #[test] + fn moves_partial_input_balance() { + let mut available_inputs = VecDeque::new(); + let mut input1 = InputRuneBalance::dummy(); + input1.amount(5000); // More than required in this move. + available_inputs.push_back(input1); + let eligible_outputs = dummy_eligible_output(); + + let results = move_rune_balance_to_output( + &TransactionLocation::dummy(), + Some(0), + &RuneId::new(840000, 25).unwrap(), + &mut available_inputs, + &eligible_outputs, + 1000, // Less than total available in first input. + &mut 0, + &Context::empty(), + ); + + assert_eq!(results.len(), 2); + let entry1 = results.get(0).unwrap(); + assert_eq!(entry1.operation, DbLedgerOperation::Receive); + assert_eq!(entry1.amount.unwrap().0, 1000); + let entry2 = results.get(1).unwrap(); + assert_eq!(entry2.operation, DbLedgerOperation::Send); + assert_eq!(entry2.amount.unwrap().0, 1000); + // Remainder is still in available inputs. + let remaining = available_inputs.get(0).unwrap(); + assert_eq!(remaining.amount, 4000); + } + + #[test] + fn moves_insufficient_input_balance() { + let mut available_inputs = VecDeque::new(); + let mut input1 = InputRuneBalance::dummy(); + input1.amount(1000); // Insufficient. 
+ available_inputs.push_back(input1); + let eligible_outputs = dummy_eligible_output(); + + let results = move_rune_balance_to_output( + &TransactionLocation::dummy(), + Some(0), + &RuneId::new(840000, 25).unwrap(), + &mut available_inputs, + &eligible_outputs, + 3000, // More than total available in input. + &mut 0, + &Context::empty(), + ); + + assert_eq!(results.len(), 2); + let entry1 = results.get(0).unwrap(); + assert_eq!(entry1.operation, DbLedgerOperation::Receive); + assert_eq!(entry1.amount.unwrap().0, 1000); + let entry2 = results.get(1).unwrap(); + assert_eq!(entry2.operation, DbLedgerOperation::Send); + assert_eq!(entry2.amount.unwrap().0, 1000); + assert_eq!(available_inputs.len(), 0); + } + + #[test] + fn moves_all_remaining_balance() { + let mut available_inputs = VecDeque::new(); + let mut input1 = InputRuneBalance::dummy(); + input1.amount(6000); + available_inputs.push_back(input1); + let mut input2 = InputRuneBalance::dummy(); + input2.amount(2000); + available_inputs.push_back(input2); + let mut input3 = InputRuneBalance::dummy(); + input3.amount(2000); + available_inputs.push_back(input3); + let eligible_outputs = dummy_eligible_output(); + + let results = move_rune_balance_to_output( + &TransactionLocation::dummy(), + Some(0), + &RuneId::new(840000, 25).unwrap(), + &mut available_inputs, + &eligible_outputs, + 0, // Move all. 
+ &mut 0, + &Context::empty(), + ); + + assert_eq!(results.len(), 4); + let entry1 = results.get(0).unwrap(); + assert_eq!(entry1.operation, DbLedgerOperation::Receive); + assert_eq!(entry1.amount.unwrap().0, 10000); + let entry2 = results.get(1).unwrap(); + assert_eq!(entry2.operation, DbLedgerOperation::Send); + assert_eq!(entry2.amount.unwrap().0, 6000); + let entry3 = results.get(2).unwrap(); + assert_eq!(entry3.operation, DbLedgerOperation::Send); + assert_eq!(entry3.amount.unwrap().0, 2000); + let entry4 = results.get(3).unwrap(); + assert_eq!(entry4.operation, DbLedgerOperation::Send); + assert_eq!(entry4.amount.unwrap().0, 2000); + assert_eq!(available_inputs.len(), 0); + } + + #[test] + fn move_to_output_with_address_failure_is_burned() { + let mut available_inputs = VecDeque::new(); + let mut input1 = InputRuneBalance::dummy(); + input1.amount(1000); + available_inputs.push_back(input1); + let mut eligible_outputs = HashMap::new(); + // Broken script buf that yields no address. + eligible_outputs.insert(0u32, ScriptBuf::from_hex("0101010101").unwrap()); + + let results = move_rune_balance_to_output( + &TransactionLocation::dummy(), + Some(0), + &RuneId::new(840000, 25).unwrap(), + &mut available_inputs, + &eligible_outputs, + 1000, + &mut 0, + &Context::empty(), + ); + + assert_eq!(results.len(), 1); + let entry1 = results.get(0).unwrap(); + assert_eq!(entry1.operation, DbLedgerOperation::Burn); + assert_eq!(entry1.amount.unwrap().0, 1000); + assert_eq!(available_inputs.len(), 0); + } + + #[test] + fn move_to_nonexistent_output_is_burned() { + let mut available_inputs = VecDeque::new(); + let mut input1 = InputRuneBalance::dummy(); + input1.amount(1000); + available_inputs.push_back(input1); + let eligible_outputs = dummy_eligible_output(); + + let results = move_rune_balance_to_output( + &TransactionLocation::dummy(), + Some(5), // Output does not exist. 
+ &RuneId::new(840000, 25).unwrap(), + &mut available_inputs, + &eligible_outputs, + 1000, + &mut 0, + &Context::empty(), + ); + + assert_eq!(results.len(), 1); + let entry1 = results.get(0).unwrap(); + assert_eq!(entry1.operation, DbLedgerOperation::Burn); + assert_eq!(entry1.amount.unwrap().0, 1000); + assert_eq!(available_inputs.len(), 0); + } + + #[test] + fn send_not_generated_on_minted_balance() { + let mut available_inputs = VecDeque::new(); + let mut input1 = InputRuneBalance::dummy(); + input1.amount(1000).address(None); // No address because it's a mint. + available_inputs.push_back(input1); + let eligible_outputs = dummy_eligible_output(); + + let results = move_rune_balance_to_output( + &TransactionLocation::dummy(), + Some(0), + &RuneId::new(840000, 25).unwrap(), + &mut available_inputs, + &eligible_outputs, + 1000, + &mut 0, + &Context::empty(), + ); + + assert_eq!(results.len(), 1); + let entry1 = results.get(0).unwrap(); + assert_eq!(entry1.operation, DbLedgerOperation::Receive); + assert_eq!(entry1.amount.unwrap().0, 1000); + assert_eq!(available_inputs.len(), 0); } } @@ -393,4 +618,51 @@ mod test { is_rune_mintable(&rune, cap, &TransactionLocation::dummy()) } } + + mod sequential_ledger_entry { + use ordinals::RuneId; + + use crate::db::{cache::{ + transaction_location::TransactionLocation, utils::new_sequential_ledger_entry, + }, models::db_ledger_operation::DbLedgerOperation}; + + #[test] + fn increments_event_index() { + let location = TransactionLocation::dummy(); + let rune_id = RuneId::new(840000, 25).unwrap(); + let address = + Some("bc1p8zxlhgdsq6dmkzk4ammzcx55c3hfrg69ftx0gzlnfwq0wh38prds0nzqwf".to_string()); + let mut event_index = 0u32; + + let event0 = new_sequential_ledger_entry( + &location, + Some(100), + rune_id, + Some(0), + address.as_ref(), + None, + DbLedgerOperation::Receive, + &mut event_index, + ); + assert_eq!(event0.event_index.0, 0); + assert_eq!(event0.amount.unwrap().0, 100); + assert_eq!(event0.address, address); + + 
let event1 = new_sequential_ledger_entry( + &location, + Some(300), + rune_id, + Some(0), + None, + None, + DbLedgerOperation::Receive, + &mut event_index, + ); + assert_eq!(event1.event_index.0, 1); + assert_eq!(event1.amount.unwrap().0, 300); + assert_eq!(event1.address, None); + + assert_eq!(event_index, 2); + } + } } From f019623dbe703826bf2b5ccb3cf0dedb1b81e908 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Wed, 10 Jul 2024 16:10:38 -0600 Subject: [PATCH 3/8] test: input rune balances (#28) * test: input rune balances * chore: comment --- .cargo/config | 4 + Cargo.lock | 7 + Cargo.toml | 1 + src/db/cache/utils.rs | 247 ++++++++++++++++++++++++++++++-- src/db/mod.rs | 96 +++++++++---- src/db/types/pg_bigint_u32.rs | 3 +- src/db/types/pg_numeric_u128.rs | 3 +- src/db/types/pg_numeric_u64.rs | 3 +- src/db/types/pg_smallint_u8.rs | 3 +- 9 files changed, 322 insertions(+), 45 deletions(-) diff --git a/.cargo/config b/.cargo/config index ae65800..63f19d0 100644 --- a/.cargo/config +++ b/.cargo/config @@ -1,2 +1,6 @@ [alias] runehook-install = "install --path . --locked --force" + +[env] +# Run tests in one thread so we can support postgres migrations correctly. 
+RUST_TEST_THREADS = "1" diff --git a/Cargo.lock b/Cargo.lock index ab80907..fdfaa3d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2086,6 +2086,12 @@ dependencies = [ "libc", ] +[[package]] +name = "maplit" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" + [[package]] name = "matchers" version = "0.1.0" @@ -3062,6 +3068,7 @@ dependencies = [ "hex", "hiro-system-kit", "lru", + "maplit", "num-traits", "ordinals", "rand", diff --git a/Cargo.toml b/Cargo.toml index 7e87d68..33d0179 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,6 +32,7 @@ tokio-postgres = "0.7.10" tokio = { version = "1.38.0", features = ["rt-multi-thread", "macros"] } refinery = { version = "0.8", features = ["tokio-postgres"] } num-traits = "0.2.14" +maplit = "1.0.2" [dev-dependencies] test-case = "3.1.0" diff --git a/src/db/cache/utils.rs b/src/db/cache/utils.rs index cb1145d..7bf2771 100644 --- a/src/db/cache/utils.rs +++ b/src/db/cache/utils.rs @@ -20,8 +20,16 @@ use super::{input_rune_balance::InputRuneBalance, transaction_location::Transact /// Takes all transaction inputs and transforms them into rune balances to be allocated for operations. Looks inside an output LRU /// cache and the DB when there are cache misses. +/// +/// # Arguments +/// +/// * `inputs` - Raw transaction inputs +/// * `block_output_cache` - Cache with output balances produced by the current block +/// * `output_cache` - LRU cache with output balances +/// * `db_tx` - DB transaction +/// * `ctx` - Context pub async fn input_rune_balances_from_tx_inputs( - tx_inputs: &Vec, + inputs: &Vec, block_output_cache: &HashMap<(String, u32), HashMap>>, output_cache: &mut LruCache<(String, u32), HashMap>>, db_tx: &mut Transaction<'_>, @@ -32,7 +40,7 @@ pub async fn input_rune_balances_from_tx_inputs( let mut cache_misses = vec![]; // Look in both current block output cache and in long term LRU cache. 
- for (i, input) in tx_inputs.iter().enumerate() { + for (i, input) in inputs.iter().enumerate() { let tx_id = input.previous_output.txid.hash[2..].to_string(); let vout = input.previous_output.vout; let k = (tx_id.clone(), vout); @@ -301,6 +309,7 @@ mod test { use bitcoin::ScriptBuf; use chainhook_sdk::utils::Context; + use maplit::hashmap; use ordinals::RuneId; use crate::db::{ @@ -312,15 +321,12 @@ mod test { }; fn dummy_eligible_output() -> HashMap { - let mut eligible_outputs = HashMap::new(); - eligible_outputs.insert( - 0u32, - ScriptBuf::from_hex( + hashmap! { + 0u32 => ScriptBuf::from_hex( "5120388dfba1b0069bbb0ad5eef62c1a94c46e91a3454accf40bf34b80f75e2708db", ) - .unwrap(), - ); - eligible_outputs + .unwrap() + } } #[test] @@ -622,9 +628,12 @@ mod test { mod sequential_ledger_entry { use ordinals::RuneId; - use crate::db::{cache::{ - transaction_location::TransactionLocation, utils::new_sequential_ledger_entry, - }, models::db_ledger_operation::DbLedgerOperation}; + use crate::db::{ + cache::{ + transaction_location::TransactionLocation, utils::new_sequential_ledger_entry, + }, + models::db_ledger_operation::DbLedgerOperation, + }; #[test] fn increments_event_index() { @@ -665,4 +674,218 @@ mod test { assert_eq!(event_index, 2); } } + + mod input_balances { + use std::num::NonZeroUsize; + + use chainhook_sdk::{ + types::{ + bitcoin::{OutPoint, TxIn}, + TransactionIdentifier, + }, + utils::Context, + }; + use lru::LruCache; + use maplit::hashmap; + use ordinals::RuneId; + + use crate::db::{ + cache::{ + input_rune_balance::InputRuneBalance, utils::input_rune_balances_from_tx_inputs, + }, models::{db_ledger_entry::DbLedgerEntry, db_ledger_operation::DbLedgerOperation}, pg_insert_ledger_entries, pg_test_client, pg_test_roll_back_migrations + }; + + #[tokio::test] + async fn from_block_cache() { + let inputs = vec![TxIn { + previous_output: OutPoint { + txid: TransactionIdentifier { + hash: "0x045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b" 
+ .to_string(), + }, + vout: 1, + value: 100, + block_height: 840000, + }, + script_sig: "".to_string(), + sequence: 0, + witness: vec![], + }]; + let rune_id = RuneId::new(840000, 25).unwrap(); + let block_output_cache = hashmap! { + ("045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b" + .to_string(), 1) => hashmap! { + rune_id => vec![InputRuneBalance { address: None, amount: 2000 }] + } + }; + let mut output_cache = LruCache::new(NonZeroUsize::new(1).unwrap()); + let ctx = Context::empty(); + + let mut pg_client = pg_test_client(true, &ctx).await; + let mut db_tx = pg_client.transaction().await.unwrap(); + let results = input_rune_balances_from_tx_inputs( + &inputs, + &block_output_cache, + &mut output_cache, + &mut db_tx, + &ctx, + ) + .await; + let _ = db_tx.rollback().await; + pg_test_roll_back_migrations(&mut pg_client, &ctx).await; + + assert_eq!(results.len(), 1); + let rune_results = results.get(&rune_id).unwrap(); + assert_eq!(rune_results.len(), 1); + let input_bal = rune_results.get(0).unwrap(); + assert_eq!(input_bal.address, None); + assert_eq!(input_bal.amount, 2000); + } + + #[tokio::test] + async fn from_lru_cache() { + let inputs = vec![TxIn { + previous_output: OutPoint { + txid: TransactionIdentifier { + hash: "0x045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b" + .to_string(), + }, + vout: 1, + value: 100, + block_height: 840000, + }, + script_sig: "".to_string(), + sequence: 0, + witness: vec![], + }]; + let rune_id = RuneId::new(840000, 25).unwrap(); + let block_output_cache = hashmap! {}; + let mut output_cache = LruCache::new(NonZeroUsize::new(1).unwrap()); + output_cache.put( + ( + "045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b".to_string(), + 1, + ), + hashmap! 
{ + rune_id => vec![InputRuneBalance { address: None, amount: 2000 }] + }, + ); + let ctx = Context::empty(); + + let mut pg_client = pg_test_client(true, &ctx).await; + let mut db_tx = pg_client.transaction().await.unwrap(); + let results = input_rune_balances_from_tx_inputs( + &inputs, + &block_output_cache, + &mut output_cache, + &mut db_tx, + &ctx, + ) + .await; + let _ = db_tx.rollback().await; + pg_test_roll_back_migrations(&mut pg_client, &ctx).await; + + assert_eq!(results.len(), 1); + let rune_results = results.get(&rune_id).unwrap(); + assert_eq!(rune_results.len(), 1); + let input_bal = rune_results.get(0).unwrap(); + assert_eq!(input_bal.address, None); + assert_eq!(input_bal.amount, 2000); + } + + #[tokio::test] + async fn from_db() { + let inputs = vec![TxIn { + previous_output: OutPoint { + txid: TransactionIdentifier { + hash: "0x045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b" + .to_string(), + }, + vout: 1, + value: 100, + block_height: 840000, + }, + script_sig: "".to_string(), + sequence: 0, + witness: vec![], + }]; + let rune_id = RuneId::new(840000, 25).unwrap(); + let block_output_cache = hashmap! 
{}; + let mut output_cache = LruCache::new(NonZeroUsize::new(1).unwrap()); + let ctx = Context::empty(); + + let mut pg_client = pg_test_client(true, &ctx).await; + let mut db_tx = pg_client.transaction().await.unwrap(); + + let entry = DbLedgerEntry::from_values( + Some(2000), + rune_id, + &"0x0000000000000000000044642cc1f64c22579d46a2a149ef2a51f9c98cb622e1".to_string(), + 840000, + 0, + 0, + &"0x045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b".to_string(), + Some(1), + None, + None, + DbLedgerOperation::Receive, + 0, + ); + let _ = pg_insert_ledger_entries(&vec![entry], &mut db_tx, &ctx).await; + + let results = input_rune_balances_from_tx_inputs( + &inputs, + &block_output_cache, + &mut output_cache, + &mut db_tx, + &ctx, + ) + .await; + let _ = db_tx.rollback().await; + pg_test_roll_back_migrations(&mut pg_client, &ctx).await; + + assert_eq!(results.len(), 1); + let rune_results = results.get(&rune_id).unwrap(); + assert_eq!(rune_results.len(), 1); + let input_bal = rune_results.get(0).unwrap(); + assert_eq!(input_bal.address, None); + assert_eq!(input_bal.amount, 2000); + } + + #[tokio::test] + async fn inputs_without_balances() { + let inputs = vec![TxIn { + previous_output: OutPoint { + txid: TransactionIdentifier { + hash: "0x045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b" + .to_string(), + }, + vout: 1, + value: 100, + block_height: 840000, + }, + script_sig: "".to_string(), + sequence: 0, + witness: vec![], + }]; + let block_output_cache = hashmap! 
{}; + let mut output_cache = LruCache::new(NonZeroUsize::new(1).unwrap()); + let ctx = Context::empty(); + + let mut pg_client = pg_test_client(true, &ctx).await; + let mut db_tx = pg_client.transaction().await.unwrap(); + let results = input_rune_balances_from_tx_inputs( + &inputs, + &block_output_cache, + &mut output_cache, + &mut db_tx, + &ctx, + ) + .await; + let _ = db_tx.rollback().await; + pg_test_roll_back_migrations(&mut pg_client, &ctx).await; + + assert_eq!(results.len(), 0); + } + } } diff --git a/src/db/mod.rs b/src/db/mod.rs index 3e68746..7db31ce 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -22,6 +22,22 @@ pub mod types; embed_migrations!("migrations"); +async fn pg_run_migrations(pg_client: &mut Client, ctx: &Context) { + try_info!(ctx, "Running postgres migrations"); + match migrations::runner() + .set_migration_table_name("pgmigrations") + .run_async(pg_client) + .await + { + Ok(_) => {} + Err(e) => { + try_error!(ctx, "Error running pg migrations: {}", e.to_string()); + process::exit(1); + } + }; + try_info!(ctx, "Postgres migrations complete"); +} + pub async fn pg_connect(config: &Config, run_migrations: bool, ctx: &Context) -> Client { let mut pg_config = tokio_postgres::Config::new(); pg_config @@ -58,40 +74,12 @@ pub async fn pg_connect(config: &Config, run_migrations: bool, ctx: &Context) -> } } } - if run_migrations { - try_info!(ctx, "Running postgres migrations"); - match migrations::runner() - .set_migration_table_name("pgmigrations") - .run_async(&mut pg_client) - .await - { - Ok(_) => {} - Err(e) => { - try_error!(ctx, "Error running pg migrations: {}", e.to_string()); - process::exit(1); - } - }; - try_info!(ctx, "Postgres migrations complete"); + pg_run_migrations(&mut pg_client, ctx).await; } - pg_client } -#[cfg(test)] -pub async fn pg_test_client() -> Client { - let (client, connection) = - tokio_postgres::connect("host=localhost user=postgres password=postgres", NoTls) - .await - .unwrap(); - tokio::spawn(async move { - if 
let Err(e) = connection.await { - eprintln!("test connection error: {}", e); - } - }); - client -} - pub async fn pg_insert_runes( rows: &Vec, db_tx: &mut Transaction<'_>, @@ -532,3 +520,53 @@ pub async fn pg_get_input_rune_balances( } results } + +#[cfg(test)] +pub async fn pg_test_client(run_migrations: bool, ctx: &Context) -> Client { + let (mut client, connection) = + tokio_postgres::connect("host=localhost user=postgres password=postgres", NoTls) + .await + .unwrap(); + tokio::spawn(async move { + if let Err(e) = connection.await { + eprintln!("test connection error: {}", e); + } + }); + if run_migrations { + pg_run_migrations(&mut client, ctx).await; + } + client +} + +#[cfg(test)] +pub async fn pg_test_roll_back_migrations(pg_client: &mut Client, ctx: &Context) { + match pg_client + .batch_execute( + " + DO $$ DECLARE + r RECORD; + BEGIN + FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP + EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE'; + END LOOP; + END $$; + DO $$ DECLARE + r RECORD; + BEGIN + FOR r IN (SELECT typname FROM pg_type WHERE typtype = 'e' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = current_schema())) LOOP + EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE'; + END LOOP; + END $$;", + ) + .await { + Ok(rows) => rows, + Err(e) => { + try_error!( + ctx, + "error rolling back test migrations: {}", + e.to_string() + ); + process::exit(1); + } + }; +} diff --git a/src/db/types/pg_bigint_u32.rs b/src/db/types/pg_bigint_u32.rs index bd0cb3a..a0540f5 100644 --- a/src/db/types/pg_bigint_u32.rs +++ b/src/db/types/pg_bigint_u32.rs @@ -43,6 +43,7 @@ impl AddAssign for PgBigIntU32 { #[cfg(test)] mod test { + use chainhook_sdk::utils::Context; use test_case::test_case; use crate::db::pg_test_client; @@ -53,7 +54,7 @@ mod test { #[test_case(0; "zero")] #[tokio::test] async fn test_u32_to_postgres(val: u32) { - let mut client = pg_test_client().await; + let 
mut client = pg_test_client(false, &Context::empty()).await; let value = PgBigIntU32(val); let tx = client.transaction().await.unwrap(); let _ = tx.query("CREATE TABLE test (value BIGINT)", &[]).await; diff --git a/src/db/types/pg_numeric_u128.rs b/src/db/types/pg_numeric_u128.rs index 8a9b200..8aaeaf1 100644 --- a/src/db/types/pg_numeric_u128.rs +++ b/src/db/types/pg_numeric_u128.rs @@ -109,6 +109,7 @@ impl AddAssign for PgNumericU128 { #[cfg(test)] mod test { + use chainhook_sdk::utils::Context; use test_case::test_case; use crate::db::pg_test_client; @@ -120,7 +121,7 @@ mod test { #[test_case(0; "zero")] #[tokio::test] async fn test_u128_to_postgres(val: u128) { - let mut client = pg_test_client().await; + let mut client = pg_test_client(false, &Context::empty()).await; let value = PgNumericU128(val); let tx = client.transaction().await.unwrap(); let _ = tx.query("CREATE TABLE test (value NUMERIC)", &[]).await; diff --git a/src/db/types/pg_numeric_u64.rs b/src/db/types/pg_numeric_u64.rs index 78ddfd6..0e66b8b 100644 --- a/src/db/types/pg_numeric_u64.rs +++ b/src/db/types/pg_numeric_u64.rs @@ -39,6 +39,7 @@ impl<'a> FromSql<'a> for PgNumericU64 { #[cfg(test)] mod test { + use chainhook_sdk::utils::Context; use test_case::test_case; use crate::db::pg_test_client; @@ -50,7 +51,7 @@ mod test { #[test_case(0; "zero")] #[tokio::test] async fn test_u64_to_postgres(val: u64) { - let mut client = pg_test_client().await; + let mut client = pg_test_client(false, &Context::empty()).await; let value = PgNumericU64(val); let tx = client.transaction().await.unwrap(); let _ = tx.query("CREATE TABLE test (value NUMERIC)", &[]).await; diff --git a/src/db/types/pg_smallint_u8.rs b/src/db/types/pg_smallint_u8.rs index 6d7f4ee..e119608 100644 --- a/src/db/types/pg_smallint_u8.rs +++ b/src/db/types/pg_smallint_u8.rs @@ -37,6 +37,7 @@ impl<'a> FromSql<'a> for PgSmallIntU8 { #[cfg(test)] mod test { + use chainhook_sdk::utils::Context; use test_case::test_case; use 
crate::db::pg_test_client; @@ -47,7 +48,7 @@ mod test { #[test_case(0; "zero")] #[tokio::test] async fn test_u8_to_postgres(val: u8) { - let mut client = pg_test_client().await; + let mut client = pg_test_client(false, &Context::empty()).await; let value = PgSmallIntU8(val); let tx = client.transaction().await.unwrap(); let _ = tx.query("CREATE TABLE test (value SMALLINT)", &[]).await; From 2c24a91276aced277c8794302a2548d8e6f0fefe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Thu, 11 Jul 2024 11:23:02 -0600 Subject: [PATCH 4/8] test: move from block output cache to lru cache (#29) * test: move from block output cache to lru cache * chore: rename config toml * ci: fix tarpaulin * ci: install tarpaulin force --- .cargo/{config => config.toml} | 0 .github/workflows/ci.yml | 5 ++-- src/db/cache/utils.rs | 49 +++++++++++++++++++++++++++++++++- 3 files changed, 50 insertions(+), 4 deletions(-) rename .cargo/{config => config.toml} (100%) diff --git a/.cargo/config b/.cargo/config.toml similarity index 100% rename from .cargo/config rename to .cargo/config.toml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 615a00d..a28e40f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -146,10 +146,9 @@ jobs: run: | rustup update - - name: Install and run cargo-tarpaulin + - name: Run tests run: | - cargo install cargo-tarpaulin - cargo --version + cargo install --force cargo-tarpaulin cargo tarpaulin --out lcov -- --test-threads=1 - name: Upload coverage reports to Codecov diff --git a/src/db/cache/utils.rs b/src/db/cache/utils.rs index 7bf2771..e4cdaec 100644 --- a/src/db/cache/utils.rs +++ b/src/db/cache/utils.rs @@ -76,6 +76,11 @@ pub async fn input_rune_balances_from_tx_inputs( } /// Moves data from the current block's output cache to the long-term LRU output cache. Clears the block output cache when done. 
+/// +/// # Arguments +/// +/// * `block_output_cache` - Block output cache +/// * `output_cache` - Output LRU cache pub fn move_block_output_cache_to_output_cache( block_output_cache: &mut HashMap<(String, u32), HashMap>>, output_cache: &mut LruCache<(String, u32), HashMap>>, @@ -692,7 +697,9 @@ mod test { use crate::db::{ cache::{ input_rune_balance::InputRuneBalance, utils::input_rune_balances_from_tx_inputs, - }, models::{db_ledger_entry::DbLedgerEntry, db_ledger_operation::DbLedgerOperation}, pg_insert_ledger_entries, pg_test_client, pg_test_roll_back_migrations + }, + models::{db_ledger_entry::DbLedgerEntry, db_ledger_operation::DbLedgerOperation}, + pg_insert_ledger_entries, pg_test_client, pg_test_roll_back_migrations, }; #[tokio::test] @@ -888,4 +895,44 @@ mod test { assert_eq!(results.len(), 0); } } + + mod cache_move { + use std::num::NonZeroUsize; + + use lru::LruCache; + use maplit::hashmap; + use ordinals::RuneId; + + use crate::db::cache::{ + input_rune_balance::InputRuneBalance, utils::move_block_output_cache_to_output_cache, + }; + + #[test] + fn moves_to_lru_output_cache_and_clears() { + let rune_id = RuneId::new(840000, 25).unwrap(); + let mut block_output_cache = hashmap! { + ("045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b" + .to_string(), 1) => hashmap! 
{ + rune_id => vec![InputRuneBalance { address: None, amount: 2000 }] + } + }; + let mut output_cache = LruCache::new(NonZeroUsize::new(1).unwrap()); + + move_block_output_cache_to_output_cache(&mut block_output_cache, &mut output_cache); + + let moved_val = output_cache + .get(&( + "045fe33f1174d6a72084e751735a89746a259c6d3e418b65c03ec0740f924c7b".to_string(), + 1, + )) + .unwrap(); + assert_eq!(moved_val.len(), 1); + let balances = moved_val.get(&rune_id).unwrap(); + assert_eq!(balances.len(), 1); + let balance = balances.get(0).unwrap(); + assert_eq!(balance.address, None); + assert_eq!(balance.amount, 2000); + assert_eq!(block_output_cache.len(), 0); + } + } } From 14c69f8b87de981bdacd31c37c8fad4a568b7b46 Mon Sep 17 00:00:00 2001 From: Brady Ouren Date: Mon, 15 Jul 2024 14:58:20 -0700 Subject: [PATCH 5/8] test: TransactionCache test cases (#31) * add some etching and minting tests * add back partial eq --------- Co-authored-by: Rafael Cardenas Co-authored-by: brady.ouren --- src/db/cache/transaction_cache.rs | 250 +++++++++++++++++++++++++++++- src/db/types/pg_numeric_u128.rs | 2 +- 2 files changed, 246 insertions(+), 6 deletions(-) diff --git a/src/db/cache/transaction_cache.rs b/src/db/cache/transaction_cache.rs index f2fbfc1..8ab1074 100644 --- a/src/db/cache/transaction_cache.rs +++ b/src/db/cache/transaction_cache.rs @@ -1,12 +1,12 @@ +use bitcoin::ScriptBuf; +use chainhook_sdk::utils::Context; +use maplit::hashmap; +use ordinals::{Cenotaph, Edict, Etching, Rune, RuneId}; use std::{ collections::{HashMap, VecDeque}, vec, }; -use bitcoin::ScriptBuf; -use chainhook_sdk::utils::Context; -use ordinals::{Cenotaph, Edict, Etching, Rune, RuneId}; - use crate::{ db::{ cache::utils::{is_rune_mintable, new_sequential_ledger_entry}, @@ -35,7 +35,7 @@ pub struct TransactionCache { pub output_pointer: Option, /// Holds input runes for the current transaction (input to this tx, premined or minted). 
Balances in the vector are in the /// order in which they were input to this transaction. - input_runes: HashMap>, + pub input_runes: HashMap>, /// Non-OP_RETURN outputs in this transaction eligible_outputs: HashMap, /// Total outputs contained in this transaction, including non-eligible outputs. @@ -61,6 +61,19 @@ impl TransactionCache { } } + #[cfg(test)] + pub fn empty(location: TransactionLocation) -> Self { + TransactionCache { + location, + next_event_index: 0, + etching: None, + output_pointer: None, + input_runes: hashmap! {}, + eligible_outputs: hashmap! {}, + total_outputs: 0, + } + } + /// Burns the rune balances input to this transaction. pub fn apply_cenotaph_input_burn(&mut self, _cenotaph: &Cenotaph) -> Vec { let mut results = vec![]; @@ -388,3 +401,230 @@ impl TransactionCache { } } } + +#[cfg(test)] +mod test { + use std::collections::VecDeque; + + use bitcoin::ScriptBuf; + use chainhook_sdk::utils::Context; + use maplit::hashmap; + use ordinals::{Edict, Etching, Rune, Terms}; + + use crate::db::{ + cache::{input_rune_balance::InputRuneBalance, transaction_location::TransactionLocation}, + models::{db_ledger_operation::DbLedgerOperation, db_rune::DbRune}, + }; + + use super::TransactionCache; + + #[test] + fn etches_rune() { + let location = TransactionLocation::dummy(); + let mut cache = TransactionCache::empty(location.clone()); + let etching = Etching { + divisibility: Some(2), + premine: Some(1000), + rune: Some(Rune::reserved(location.block_height, location.tx_index)), + spacers: None, + symbol: Some('x'), + terms: Some(Terms { + amount: Some(1000), + cap: None, + height: (None, None), + offset: (None, None), + }), + turbo: true, + }; + let (rune_id, db_rune, db_ledger_entry) = cache.apply_etching(&etching, 1); + + assert_eq!(rune_id.block, 840000); + assert_eq!(rune_id.tx, 0); + assert_eq!(db_rune.id, "840000:0"); + assert_eq!(db_rune.name, "AAAAAAAAAAAAAAAAZOMJMODBYFG"); + assert_eq!(db_rune.number.0, 1); + 
assert_eq!(db_ledger_entry.operation, DbLedgerOperation::Etching); + assert_eq!(db_ledger_entry.rune_id, "840000:0"); + } + + #[test] + // TODO add cenotaph field to DbRune before filling this in + fn etches_cenotaph_rune() { + // let location = TransactionLocation::dummy(); + // let mut cache = TransactionCache::empty(location.clone()); + + // // Create a cenotaph rune + // let rune = Rune::reserved(location.block_height, location.tx_index); + // let number = 2; + + // let (_rune_id, db_rune, db_ledger_entry) = cache.apply_cenotaph_etching(&rune, number); + + // // the etched rune has supply zero and is unmintable. + // // is_rune_mintable should have a cenotaph indicator column check + // assert_eq!(is_rune_mintable(&db_rune, 0, &location), false); + // assert_eq!(db_ledger_entry.amount, None); + // assert_eq!(db_rune.id, "840000:0"); + // assert_eq!(db_ledger_entry.operation, DbLedgerOperation::Etching); + // assert_eq!(db_ledger_entry.rune_id, "840000:0"); + } + + #[test] + fn mints_rune() { + let location = TransactionLocation::dummy(); + let mut cache = TransactionCache::empty(location.clone()); + let db_rune = &DbRune::factory(); + let rune_id = &db_rune.rune_id(); + + let ledger_entry = cache.apply_mint(&rune_id, 0, &db_rune, &Context::empty()); + + assert!(ledger_entry.is_some()); + let ledger_entry = ledger_entry.unwrap(); + assert_eq!(ledger_entry.operation, DbLedgerOperation::Mint); + assert_eq!(ledger_entry.rune_id, rune_id.to_string()); + // ledger entry is minted with the correct amount + assert_eq!(ledger_entry.amount, Some(db_rune.terms_amount.unwrap())); + + // minted amount is added to the input runes (`cache.input_runes`) + assert!(cache.input_runes.contains_key(&rune_id)); + } + + #[test] + fn does_not_mint_fully_minted_rune() { + let location = TransactionLocation::dummy(); + let mut cache = TransactionCache::empty(location.clone()); + let etching = Etching { + divisibility: Some(2), + premine: Some(1000), + rune: 
Some(Rune::reserved(location.block_height, location.tx_index)), + spacers: None, + symbol: Some('x'), + terms: Some(Terms { + amount: Some(1000), + cap: Some(1000), + height: (None, None), + offset: (None, None), + }), + turbo: true, + }; + let (rune_id, db_rune, _db_ledger_entry) = cache.apply_etching(&etching, 1); + let ledger_entry = cache.apply_mint(&rune_id, 1000, &db_rune, &Context::empty()); + assert!(ledger_entry.is_none()); + } + + #[test] + fn burns_cenotaph_mint() { + let location = TransactionLocation::dummy(); + let mut cache = TransactionCache::empty(location.clone()); + + let db_rune = DbRune::factory(); + let rune_id = db_rune.rune_id(); + let ledger_entry = cache.apply_cenotaph_mint(&rune_id, 0, &db_rune, &Context::empty()); + assert!(ledger_entry.is_some()); + let ledger_entry = ledger_entry.unwrap(); + assert_eq!(ledger_entry.operation, DbLedgerOperation::Burn); + assert_eq!( + ledger_entry.amount.unwrap().0, + db_rune.terms_amount.unwrap().0 + ); + } + + #[test] + fn moves_runes_with_edict() { + let location = TransactionLocation::dummy(); + let db_rune = &DbRune::factory(); + let rune_id = &db_rune.rune_id(); + let mut balances = VecDeque::new(); + let sender_address = + "bc1p3v7r3n4hv63z4s7jkhdzxsay9xem98hxul057w2mwur406zhw8xqrpwp9w".to_string(); + let receiver_address = + "bc1p8zxlhgdsq6dmkzk4ammzcx55c3hfrg69ftx0gzlnfwq0wh38prds0nzqwf".to_string(); + balances.push_back(InputRuneBalance { + address: Some(sender_address.clone()), + amount: 1000, + }); + let input_runes = hashmap! { + rune_id.clone() => balances + }; + let eligible_outputs = hashmap! 
{0=> ScriptBuf::from_hex("5120388dfba1b0069bbb0ad5eef62c1a94c46e91a3454accf40bf34b80f75e2708db").unwrap()}; + let mut cache = TransactionCache::new(location, input_runes, eligible_outputs, Some(0), 1); + + let edict = Edict { + id: rune_id.clone(), + amount: 1000, + output: 0, + }; + + let ledger_entry = cache.apply_edict(&edict, &Context::empty()); + assert_eq!(ledger_entry.len(), 2); + let receive = ledger_entry.first().unwrap(); + assert_eq!(receive.operation, DbLedgerOperation::Receive); + assert_eq!(receive.address, Some(receiver_address.clone())); + let send = ledger_entry.last().unwrap(); + assert_eq!(send.operation, DbLedgerOperation::Send); + assert_eq!(send.address, Some(sender_address.clone())); + assert_eq!(send.receiver_address, Some(receiver_address.clone())); + } + + #[test] + fn allocates_remaining_runes_to_first_eligible_output() { + let location = TransactionLocation::dummy(); + let db_rune = &DbRune::factory(); + let rune_id = &db_rune.rune_id(); + let mut balances = VecDeque::new(); + let sender_address = + "bc1p3v7r3n4hv63z4s7jkhdzxsay9xem98hxul057w2mwur406zhw8xqrpwp9w".to_string(); + let receiver_address = + "bc1p8zxlhgdsq6dmkzk4ammzcx55c3hfrg69ftx0gzlnfwq0wh38prds0nzqwf".to_string(); + balances.push_back(InputRuneBalance { + address: Some(sender_address.clone()), + amount: 1000, + }); + let input_runes = hashmap! { + rune_id.clone() => balances + }; + let eligible_outputs = hashmap! 
{0=> ScriptBuf::from_hex("5120388dfba1b0069bbb0ad5eef62c1a94c46e91a3454accf40bf34b80f75e2708db").unwrap()}; + let mut cache = TransactionCache::new(location, input_runes, eligible_outputs, Some(0), 1); + let ledger_entry = cache.allocate_remaining_balances(&Context::empty()); + + assert_eq!(ledger_entry.len(), 2); + let receive = ledger_entry.first().unwrap(); + assert_eq!(receive.operation, DbLedgerOperation::Receive); + assert_eq!(receive.address, Some(receiver_address.clone())); + let send = ledger_entry.last().unwrap(); + assert_eq!(send.operation, DbLedgerOperation::Send); + assert_eq!(send.address, Some(sender_address.clone())); + assert_eq!(send.receiver_address, Some(receiver_address.clone())); + } + + #[test] + fn allocates_remaining_runes_to_runestone_pointer_output() { + let location = TransactionLocation::dummy(); + let db_rune = &DbRune::factory(); + let rune_id = &db_rune.rune_id(); + let mut balances = VecDeque::new(); + let sender_address = + "bc1p3v7r3n4hv63z4s7jkhdzxsay9xem98hxul057w2mwur406zhw8xqrpwp9w".to_string(); + let receiver_address = + "bc1p8zxlhgdsq6dmkzk4ammzcx55c3hfrg69ftx0gzlnfwq0wh38prds0nzqwf".to_string(); + balances.push_back(InputRuneBalance { + address: Some(sender_address.clone()), + amount: 1000, + }); + let input_runes = hashmap! { + rune_id.clone() => balances + }; + let eligible_outputs = hashmap! 
{1=> ScriptBuf::from_hex("5120388dfba1b0069bbb0ad5eef62c1a94c46e91a3454accf40bf34b80f75e2708db").unwrap()}; + let mut cache = TransactionCache::new(location, input_runes, eligible_outputs, Some(0), 2); + cache.output_pointer = Some(1); + let ledger_entry = cache.allocate_remaining_balances(&Context::empty()); + + assert_eq!(ledger_entry.len(), 2); + let receive = ledger_entry.first().unwrap(); + assert_eq!(receive.operation, DbLedgerOperation::Receive); + assert_eq!(receive.address, Some(receiver_address.clone())); + let send = ledger_entry.last().unwrap(); + assert_eq!(send.operation, DbLedgerOperation::Send); + assert_eq!(send.address, Some(sender_address.clone())); + assert_eq!(send.receiver_address, Some(receiver_address.clone())); + } +} diff --git a/src/db/types/pg_numeric_u128.rs b/src/db/types/pg_numeric_u128.rs index 8aaeaf1..b0f271c 100644 --- a/src/db/types/pg_numeric_u128.rs +++ b/src/db/types/pg_numeric_u128.rs @@ -64,7 +64,7 @@ pub fn pg_numeric_bytes_to_u128(raw: &[u8]) -> u128 { result } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq)] pub struct PgNumericU128(pub u128); impl ToSql for PgNumericU128 { From 3d8d21a44ff118cf0669513c779feb7aefc7480f Mon Sep 17 00:00:00 2001 From: Brady Ouren Date: Wed, 17 Jul 2024 18:01:36 -0700 Subject: [PATCH 6/8] Add cenotaph column to runes (#33) * add cenotaph column to runes table * add cenotaph to existing mig * add cenotaph to api --------- Co-authored-by: brady.ouren --- api/src/api/util/helpers.ts | 1 + api/src/pg/types.ts | 1 + migrations/V1__runes.sql | 1 + src/db/cache/transaction_cache.rs | 28 +++++++++++++++------------- src/db/cache/utils.rs | 3 +++ src/db/models/db_rune.rs | 5 +++++ 6 files changed, 26 insertions(+), 13 deletions(-) diff --git a/api/src/api/util/helpers.ts b/api/src/api/util/helpers.ts index a73351e..547e453 100644 --- a/api/src/api/util/helpers.ts +++ b/api/src/api/util/helpers.ts @@ -14,6 +14,7 @@ export function parseEtchingResponse(rune: DbRuneWithChainTip): 
EtchingResponse const total_burns = rune.total_burns == null ? '0' : rune.total_burns; if ( rune.terms_amount == null || + rune.cenotaph || (rune.terms_cap && BigNumber(total_mints).gte(rune.terms_cap)) || (rune.terms_height_start && BigNumber(rune.chain_tip).lt(rune.terms_height_start)) || (rune.terms_height_end && BigNumber(rune.chain_tip).gt(rune.terms_height_end)) || diff --git a/api/src/pg/types.ts b/api/src/pg/types.ts index bf7a678..ce058c4 100644 --- a/api/src/pg/types.ts +++ b/api/src/pg/types.ts @@ -17,6 +17,7 @@ type DbRune = { divisibility: number; premine: string; symbol: string; + cenotaph: boolean; terms_amount: string | null; terms_cap: string | null; terms_height_start: string | null; diff --git a/migrations/V1__runes.sql b/migrations/V1__runes.sql index e965e6a..92e70c4 100644 --- a/migrations/V1__runes.sql +++ b/migrations/V1__runes.sql @@ -17,6 +17,7 @@ CREATE TABLE IF NOT EXISTS runes ( terms_offset_start NUMERIC, terms_offset_end NUMERIC, turbo BOOLEAN NOT NULL DEFAULT FALSE, + cenotaph BOOLEAN NOT NULL DEFAULT FALSE, timestamp BIGINT NOT NULL ); diff --git a/src/db/cache/transaction_cache.rs b/src/db/cache/transaction_cache.rs index 8ab1074..74c34d9 100644 --- a/src/db/cache/transaction_cache.rs +++ b/src/db/cache/transaction_cache.rs @@ -412,7 +412,10 @@ mod test { use ordinals::{Edict, Etching, Rune, Terms}; use crate::db::{ - cache::{input_rune_balance::InputRuneBalance, transaction_location::TransactionLocation}, + cache::{ + input_rune_balance::InputRuneBalance, transaction_location::TransactionLocation, + utils::is_rune_mintable, + }, models::{db_ledger_operation::DbLedgerOperation, db_rune::DbRune}, }; @@ -450,22 +453,21 @@ mod test { #[test] // TODO add cenotaph field to DbRune before filling this in fn etches_cenotaph_rune() { - // let location = TransactionLocation::dummy(); - // let mut cache = TransactionCache::empty(location.clone()); + let location = TransactionLocation::dummy(); + let mut cache = 
TransactionCache::empty(location.clone()); - // // Create a cenotaph rune - // let rune = Rune::reserved(location.block_height, location.tx_index); - // let number = 2; + // Create a cenotaph rune + let rune = Rune::reserved(location.block_height, location.tx_index); + let number = 2; - // let (_rune_id, db_rune, db_ledger_entry) = cache.apply_cenotaph_etching(&rune, number); + let (_rune_id, db_rune, db_ledger_entry) = cache.apply_cenotaph_etching(&rune, number); // // the etched rune has supply zero and is unmintable. - // // is_rune_mintable should have a cenotaph indicator column check - // assert_eq!(is_rune_mintable(&db_rune, 0, &location), false); - // assert_eq!(db_ledger_entry.amount, None); - // assert_eq!(db_rune.id, "840000:0"); - // assert_eq!(db_ledger_entry.operation, DbLedgerOperation::Etching); - // assert_eq!(db_ledger_entry.rune_id, "840000:0"); + assert_eq!(is_rune_mintable(&db_rune, 0, &location), false); + assert_eq!(db_ledger_entry.amount, None); + assert_eq!(db_rune.id, "840000:0"); + assert_eq!(db_ledger_entry.operation, DbLedgerOperation::Etching); + assert_eq!(db_ledger_entry.rune_id, "840000:0"); } #[test] diff --git a/src/db/cache/utils.rs b/src/db/cache/utils.rs index e4cdaec..fe13d0d 100644 --- a/src/db/cache/utils.rs +++ b/src/db/cache/utils.rs @@ -276,6 +276,9 @@ pub fn is_rune_mintable( total_mints: u128, location: &TransactionLocation, ) -> bool { + if db_rune.cenotaph { + return false; + } if db_rune.terms_amount.is_none() { return false; } diff --git a/src/db/models/db_rune.rs b/src/db/models/db_rune.rs index 84bd3be..561ed05 100644 --- a/src/db/models/db_rune.rs +++ b/src/db/models/db_rune.rs @@ -30,6 +30,7 @@ pub struct DbRune { pub terms_offset_start: Option, pub terms_offset_end: Option, pub turbo: bool, + pub cenotaph: bool, pub timestamp: PgBigIntU32, } @@ -87,6 +88,7 @@ impl DbRune { terms_offset_start, terms_offset_end, turbo: etching.turbo, + cenotaph: false, timestamp: PgBigIntU32(location.timestamp), } } @@ -111,6 
+113,7 @@ impl DbRune { terms_offset_start: None, terms_offset_end: None, turbo: false, + cenotaph: true, timestamp: PgBigIntU32(location.timestamp), } } @@ -135,6 +138,7 @@ impl DbRune { terms_offset_start: row.get("terms_offset_start"), terms_offset_end: row.get("terms_offset_end"), turbo: row.get("turbo"), + cenotaph: row.get("cenotaph"), timestamp: row.get("timestamp"), } } @@ -170,6 +174,7 @@ impl DbRune { terms_offset_start: None, terms_offset_end: None, turbo: true, + cenotaph: false, timestamp: PgBigIntU32(1713571767), } } From ada15c755f3b0fbf0589f14da6ebc9a0b7e20b62 Mon Sep 17 00:00:00 2001 From: Brady Ouren Date: Tue, 23 Jul 2024 12:11:43 -0700 Subject: [PATCH 7/8] test: etching api tests (#36) * add cenotaph column to runes table * add cenotaph to existing mig * chore: add api code to run test migrations * fix migration setup and attempt some inserts * fix some types and setup functions * fix: testing setup and response asserts * fix the spaced name regex and add test * better expectation and review changes --------- Co-authored-by: brady.ouren Co-authored-by: Rafael Cardenas --- .vscode/launch.json | 23 ++++- .vscode/tasks.json | 43 ++++++++ api/package.json | 5 +- api/src/api/schemas.ts | 4 +- api/src/pg/types.ts | 2 +- api/tests/api/api.test.ts | 111 ++++++++++++++++++++- api/tests/helpers.ts | 200 ++++++++++++++++++++++++++++++++++++++ 7 files changed, 382 insertions(+), 6 deletions(-) create mode 100644 .vscode/tasks.json create mode 100644 api/tests/helpers.ts diff --git a/.vscode/launch.json b/.vscode/launch.json index f2b8f38..def10c4 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -16,6 +16,27 @@ "TS_NODE_SKIP_IGNORE": "true" }, "killBehavior": "polite" - } + }, + { + "type": "node", + "request": "launch", + "name": "Jest", + "program": "${workspaceFolder}/api/node_modules/jest/bin/jest", + "cwd": "${workspaceFolder}/api/", + "args": [ + "--testTimeout=3600000", + "--runInBand", + "--no-cache", + ], + "outputCapture": "std", + 
"console": "integratedTerminal", + "preLaunchTask": "npm: testenv:run", + "postDebugTask": "npm: testenv:stop", + "env": { + "PGHOST": "localhost", + "PGUSER": "postgres", + "PGPASSWORD": "postgres", + }, + }, ] } diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..3d03aae --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,43 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "npm: testenv:run", + "type": "shell", + "command": "npm run testenv:run -- -d", + "isBackground": true, + "options": { + "cwd": "${workspaceFolder}/api/", + }, + "problemMatcher": { + "pattern": { + "regexp": ".", + "file": 1, + "location": 2, + "message": 3 + }, + "background": { + "activeOnStart": true, + "beginsPattern": ".", + "endsPattern": "." + } + } + }, + { + "label": "npm: testenv:stop", + "type": "shell", + "command": "npm run testenv:stop", + "options": { + "cwd": "${workspaceFolder}/api/", + }, + "presentation": { + "echo": true, + "reveal": "silent", + "focus": false, + "panel": "shared", + "showReuseMessage": true, + "clear": false + } + } + ] +} diff --git a/api/package.json b/api/package.json index f629b42..21320c9 100644 --- a/api/package.json +++ b/api/package.json @@ -14,7 +14,10 @@ "generate:vercel": "npm run generate:git-info && npm run generate:openapi && npm run generate:docs", "lint:eslint": "eslint . 
--ext .ts,.tsx -f unix", "lint:prettier": "prettier --check src/**/*.ts tests/**/*.ts", - "lint:unused-exports": "ts-unused-exports tsconfig.json --showLineNumber --excludePathsFromReport=util/*" + "lint:unused-exports": "ts-unused-exports tsconfig.json --showLineNumber --excludePathsFromReport=util/*", + "testenv:run": "docker-compose -f ../docker/docker-compose.dev.postgres.yml up", + "testenv:stop": "docker-compose -f ../docker/docker-compose.dev.postgres.yml down -v -t 0", + "testenv:logs": "docker-compose -f ../docker/docker-compose.dev.postgres.yml logs -t -f" }, "author": "Hiro Systems PBC (https://hiro.so)", "license": "Apache 2.0", diff --git a/api/src/api/schemas.ts b/api/src/api/schemas.ts index 12cf13a..d873e82 100644 --- a/api/src/api/schemas.ts +++ b/api/src/api/schemas.ts @@ -61,7 +61,9 @@ const RuneNumberSchema = Type.RegEx(/^[0-9]+$/, { title: 'Rune number' }); export const RuneNumberSchemaCType = TypeCompiler.Compile(RuneNumberSchema); const RuneNameSchema = Type.RegEx(/^[A-Z]+$/, { title: 'Rune name' }); export const RuneNameSchemaCType = TypeCompiler.Compile(RuneNameSchema); -const RuneSpacedNameSchema = Type.RegEx(/^[A-Z](•[A-Z]+)+$/, { title: 'Rune name with spacers' }); +const RuneSpacedNameSchema = Type.RegEx(/^[A-Za-z]+(•[A-Za-z]+)+$/, { + title: 'Rune name with spacers', +}); export const RuneSpacedNameSchemaCType = TypeCompiler.Compile(RuneSpacedNameSchema); export const RuneSchema = Type.Union([ diff --git a/api/src/pg/types.ts b/api/src/pg/types.ts index ce058c4..99e5c28 100644 --- a/api/src/pg/types.ts +++ b/api/src/pg/types.ts @@ -5,7 +5,7 @@ export type DbPaginatedResult = { export type DbCountedQueryResult = T & { total: number }; -type DbRune = { +export type DbRune = { id: string; number: number; name: string; diff --git a/api/tests/api/api.test.ts b/api/tests/api/api.test.ts index 4773567..345450c 100644 --- a/api/tests/api/api.test.ts +++ b/api/tests/api/api.test.ts @@ -1,3 +1,110 @@ -test('sample', () => { - expect(true); 
+import { ENV } from '../../src/env'; +import { PgStore } from '../../src/pg/pg-store'; +import { + dropDatabase, + insertDbLedgerEntry, + insertRune, + sampleRune, + runMigrations, + startTestApiServer, + TestFastifyServer, + insertSupplyChange, + sampleLedgerEntry, +} from '../helpers'; + +describe('Etchings', () => { + let db: PgStore; + let fastify: TestFastifyServer; + + const rune = sampleRune('1:1', 'Sample Rune'); + const ledgerEntry = sampleLedgerEntry(rune.id); + + beforeEach(async () => { + ENV.PGDATABASE = 'postgres'; + db = await PgStore.connect(); + fastify = await startTestApiServer(db); + await runMigrations(db); + await insertRune(db, rune); + const event_index = 0; + await insertDbLedgerEntry(db, ledgerEntry, event_index); + await insertSupplyChange(db, rune.id, 1); + }); + + afterEach(async () => { + if (fastify) { + await fastify.close(); + } + + await dropDatabase(db); + await db.close(); + }); + + test('lists runes', async () => { + const expected = { + divisibility: 0, + id: '1:1', + location: { + block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5', + block_height: 840000, + timestamp: 0, + tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e', + tx_index: 1, + }, + mint_terms: { + amount: '100', + cap: '5000000', + height_end: null, + height_start: null, + offset_end: null, + offset_start: null, + }, + name: 'Sample Rune', + number: 1, + spaced_name: 'Sample•Rune', + supply: { + burned: '0', + current: '0', + mint_percentage: '0.0000', + mintable: false, + minted: '0', + premine: '0', + total_burns: '0', + total_mints: '0', + }, + symbol: 'ᚠ', + turbo: false, + }; + const runesResponse = await fastify.inject({ + method: 'GET', + url: '/runes/v1/etchings', + }); + expect(runesResponse.statusCode).toBe(200); + expect(runesResponse.json().results).not.toHaveLength(0); + + const response = await fastify.inject({ + method: 'GET', + url: '/runes/v1/etchings/' + ledgerEntry.rune_id, + }); + 
expect(response.statusCode).toBe(200); + expect(response.json()).toStrictEqual(expected); + }); + + test('can fetch by spaced name', async () => { + const url = '/runes/v1/etchings/' + rune.spaced_name; + const response = await fastify.inject({ + method: 'GET', + url: url, + }); + expect(response.statusCode).toBe(200); + expect(response.json().spaced_name).toEqual(rune.spaced_name); + }); + + test('can not fetch by spaced name if lacking bullets', async () => { + const url = '/runes/v1/etchings/' + rune.spaced_name.replaceAll('•', '-'); + const response = await fastify.inject({ + method: 'GET', + url: url, + }); + expect(response.statusCode).toBe(400); + }); }); diff --git a/api/tests/helpers.ts b/api/tests/helpers.ts new file mode 100644 index 0000000..baf74ce --- /dev/null +++ b/api/tests/helpers.ts @@ -0,0 +1,200 @@ +import { readdirSync } from 'fs'; +import { PgStore } from '../src/pg/pg-store'; +import { FastifyBaseLogger, FastifyInstance } from 'fastify'; +import { IncomingMessage, Server, ServerResponse } from 'http'; +import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; +import { buildApiServer } from '../src/api/init'; +import { Rune } from '../src/api/schemas'; +import { DbLedgerEntry, DbRune } from '../src/pg/types'; + +export type TestFastifyServer = FastifyInstance< + Server, + IncomingMessage, + ServerResponse, + FastifyBaseLogger, + TypeBoxTypeProvider +>; + +export async function startTestApiServer(db: PgStore): Promise { + return await buildApiServer({ db }); +} + +export async function runMigrations(db: PgStore) { + const contents = readdirSync('../migrations'); + await db.sqlWriteTransaction(async sql => { + for (const fileName of contents) { + if (!fileName.endsWith('.sql')) continue; + await db.sql.file('../migrations/' + fileName); + } + }); +} + +export async function dropDatabase(db: PgStore) { + await db.sqlWriteTransaction(async sql => { + // Drop all tables. 
+ await sql` + DO $$ DECLARE + r RECORD; + BEGIN + FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP + EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE'; + END LOOP; + END $$ + `; + // Drop all types. + await sql` + DO $$ DECLARE + r RECORD; + BEGIN + FOR r IN (SELECT typname FROM pg_type WHERE typtype = 'e' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = current_schema())) LOOP + EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE'; + END LOOP; + END $$; + `; + }); +} +export function sampleLedgerEntry(rune_id: string, block_height?: string): DbLedgerEntry { + return { + rune_id: '1:1', + block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5', + block_height: block_height || '840000', + tx_index: 0, + tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e', + output: 0, + address: '0', + receiver_address: '0', + amount: '0', + operation: 'etching', + timestamp: 0, + }; +} + +function toSpacedName(name: string | null): string | null { + if (name === null) { + return null; + } + // should take "Some name" and make it "Some•name" + const words = name.split(' '); + return words.join('•'); +} +export function sampleRune(id: string, name?: string): DbRune { + return { + id: '1:1', + name: name || 'SAMPLERUNENAME', + spaced_name: (name && toSpacedName(name)) || 'SAMPLE•RUNE•NAME', + number: 1, + block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5', + block_height: '840000', + tx_index: 1, + tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e', + divisibility: 2, + premine: '1000', + symbol: 'ᚠ', + cenotaph: true, + terms_amount: '100', + terms_cap: '5000000', + terms_height_start: null, + terms_height_end: null, + terms_offset_start: null, + terms_offset_end: null, + turbo: false, + minted: '1000', + total_mints: '1500', + burned: '500', + total_burns: '750', + total_operations: '1', + 
timestamp: 1713571767, + }; +} + +export async function insertDbLedgerEntry( + db: PgStore, + payload: DbLedgerEntry, + event_index: number +): Promise { + await db.sqlWriteTransaction(async sql => { + const { + rune_id, + block_hash, + block_height, + tx_index, + tx_id, + output, + address, + receiver_address, + amount, + operation, + } = payload; + + await sql` + INSERT INTO ledger ( + rune_id, block_hash, block_height, tx_index, tx_id, output, + address, receiver_address, amount, operation, timestamp, event_index + ) + VALUES ( + + ${rune_id}, ${block_hash}, ${block_height}, ${tx_index}, ${tx_id}, ${output}, ${address}, ${receiver_address}, ${amount}, ${operation}, 0, ${event_index} + ) + `; + }); +} + +export async function insertSupplyChange( + db: PgStore, + rune_id: string, + block_height: number, + minted?: number, + total_mints?: number, + total_operations?: number +): Promise { + await db.sqlWriteTransaction(async sql => { + const burned = 0; + const total_burned = 0; + + await sql` + INSERT INTO supply_changes ( + rune_id, block_height, minted, total_mints, burned, total_burns, total_operations + ) + VALUES ( + + ${rune_id}, ${block_height}, ${minted || 0}, ${ + total_mints || 0 + }, ${burned}, ${total_burned}, ${total_operations || 0} + ) + `; + }); +} + +export async function insertRune(db: PgStore, payload: DbRune): Promise { + await db.sqlWriteTransaction(async sql => { + const { + id, + name, + spaced_name, + number, + block_hash, + block_height, + tx_index, + tx_id, + symbol, + cenotaph, + terms_amount, + terms_cap, + terms_height_start, + terms_height_end, + } = payload; + + await sql` + INSERT INTO runes ( + id, number, name, spaced_name, block_hash, block_height, tx_index, tx_id, symbol, cenotaph, + terms_amount, terms_cap, terms_height_start, terms_height_end, timestamp + ) + VALUES ( + + ${id}, ${number}, ${name}, ${spaced_name}, ${block_hash}, ${block_height}, ${tx_index}, ${tx_id}, ${symbol}, ${cenotaph}, ${ + terms_amount || '' + }, 
${terms_cap || ''}, ${terms_height_start}, ${terms_height_end}, 0 + ) + `; + }); +} From 4485057825dcf22bd4bb4bd667da0d9d2886e1ab Mon Sep 17 00:00:00 2001 From: Brady Ouren Date: Mon, 29 Jul 2024 09:05:16 -0700 Subject: [PATCH 8/8] feat: number field in rune response (#35) * add number field to Rune response * add number fetch to the sql * add test which actually exercises the rune response with number --------- Co-authored-by: brady.ouren --- api/src/api/schemas.ts | 1 + api/src/api/util/helpers.ts | 2 + api/src/pg/pg-store.ts | 8 +- api/src/pg/types.ts | 1 + api/tests/api/api.test.ts | 163 ++++++++++++++++++++++-------------- 5 files changed, 110 insertions(+), 65 deletions(-) diff --git a/api/src/api/schemas.ts b/api/src/api/schemas.ts index d873e82..6a26048 100644 --- a/api/src/api/schemas.ts +++ b/api/src/api/schemas.ts @@ -308,6 +308,7 @@ const RuneDetailResponseSchema = Type.Object({ rune: Type.Object( { id: RuneIdResponseSchema, + number: RuneNumberResponseSchema, name: RuneNameResponseSchema, spaced_name: RuneSpacedNameResponseSchema, }, diff --git a/api/src/api/util/helpers.ts b/api/src/api/util/helpers.ts index 547e453..1292a43 100644 --- a/api/src/api/util/helpers.ts +++ b/api/src/api/util/helpers.ts @@ -68,6 +68,7 @@ export function parseActivityResponse(entry: DbItemWithRune): Act return { rune: { id: entry.rune_id, + number: entry.number, name: entry.name, spaced_name: entry.spaced_name, }, @@ -91,6 +92,7 @@ export function parseBalanceResponse(item: DbItemWithRune): BalanceRe return { rune: { id: item.rune_id, + number: item.number, name: item.name, spaced_name: item.spaced_name, }, diff --git a/api/src/pg/pg-store.ts b/api/src/pg/pg-store.ts index abcfe1c..bb039f5 100644 --- a/api/src/pg/pg-store.ts +++ b/api/src/pg/pg-store.ts @@ -146,7 +146,7 @@ export class PgStore extends BasePgStore { const results = await this.sql>[]>` WITH ${cte ? 
cte : this.sql`none AS (SELECT NULL)`}, results AS ( - SELECT l.*, r.name, r.spaced_name, r.divisibility, ${count} AS total + SELECT l.*, r.name, r.number, r.spaced_name, r.divisibility, ${count} AS total FROM ledger AS l INNER JOIN runes AS r ON r.id = l.rune_id WHERE ${filter} @@ -222,7 +222,7 @@ export class PgStore extends BasePgStore { ): Promise>> { const results = await this.sql>[]>` WITH grouped AS ( - SELECT DISTINCT ON (b.address) b.address, b.balance, b.total_operations, b.rune_id, r.name, + SELECT DISTINCT ON (b.address) b.address, b.balance, b.total_operations, b.rune_id, r.name, r.number, r.spaced_name, r.divisibility, COUNT(*) OVER() AS total FROM balance_changes AS b INNER JOIN runes AS r ON r.id = b.rune_id @@ -245,7 +245,7 @@ ): Promise | undefined> { const results = await this.sql[]>` SELECT b.rune_id, b.address, b.balance, b.total_operations, r.name, - r.spaced_name, r.divisibility, COUNT(*) OVER() AS total + r.number, r.spaced_name, r.divisibility, COUNT(*) OVER() AS total FROM balance_changes AS b INNER JOIN runes AS r ON r.id = b.rune_id WHERE ${runeFilter(this.sql, id, 'r')} AND address = ${address} @@ -263,7 +263,7 @@ const results = await this.sql>[]>` WITH grouped AS ( SELECT DISTINCT ON (b.rune_id) b.address, b.balance, b.total_operations, b.rune_id, r.name, - r.spaced_name, r.divisibility, COUNT(*) OVER() AS total + r.number, r.spaced_name, r.divisibility, COUNT(*) OVER() AS total FROM balance_changes AS b INNER JOIN runes AS r ON r.id = b.rune_id WHERE address = ${address} diff --git a/api/src/pg/types.ts b/api/src/pg/types.ts index 99e5c28..9e08004 100644 --- a/api/src/pg/types.ts +++ b/api/src/pg/types.ts @@ -53,6 +53,7 @@ export type DbLedgerEntry = { export type DbItemWithRune = T & { name: string; + number: number; spaced_name: string; divisibility: number; total_operations: number; diff --git a/api/tests/api/api.test.ts 
b/api/tests/api/api.test.ts index 345450c..ee6d957 100644 --- a/api/tests/api/api.test.ts +++ b/api/tests/api/api.test.ts @@ -12,7 +12,7 @@ import { sampleLedgerEntry, } from '../helpers'; -describe('Etchings', () => { +describe('Endpoints', () => { let db: PgStore; let fastify: TestFastifyServer; @@ -39,72 +39,113 @@ describe('Etchings', () => { await db.close(); }); - test('lists runes', async () => { - const expected = { - divisibility: 0, - id: '1:1', - location: { - block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5', - block_height: 840000, - timestamp: 0, - tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e', - tx_index: 1, - }, - mint_terms: { - amount: '100', - cap: '5000000', - height_end: null, - height_start: null, - offset_end: null, - offset_start: null, - }, - name: 'Sample Rune', - number: 1, - spaced_name: 'Sample•Rune', - supply: { - burned: '0', - current: '0', - mint_percentage: '0.0000', - mintable: false, - minted: '0', - premine: '0', - total_burns: '0', - total_mints: '0', - }, - symbol: 'ᚠ', - turbo: false, - }; - const runesResponse = await fastify.inject({ - method: 'GET', - url: '/runes/v1/etchings', + describe('Etchings', () => { + test('lists runes', async () => { + const expected = { + divisibility: 0, + id: '1:1', + location: { + block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5', + block_height: 840000, + timestamp: 0, + tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e', + tx_index: 1, + }, + mint_terms: { + amount: '100', + cap: '5000000', + height_end: null, + height_start: null, + offset_end: null, + offset_start: null, + }, + name: 'Sample Rune', + number: 1, + spaced_name: 'Sample•Rune', + supply: { + burned: '0', + current: '0', + mint_percentage: '0.0000', + mintable: false, + minted: '0', + premine: '0', + total_burns: '0', + total_mints: '0', + }, + symbol: 'ᚠ', + turbo: false, + }; + const runesResponse = await 
fastify.inject({ + method: 'GET', + url: '/runes/v1/etchings', + }); + expect(runesResponse.statusCode).toBe(200); + expect(runesResponse.json().results).not.toHaveLength(0); + + const response = await fastify.inject({ + method: 'GET', + url: '/runes/v1/etchings/' + ledgerEntry.rune_id, + }); + expect(response.statusCode).toBe(200); + expect(response.json()).toStrictEqual(expected); }); - expect(runesResponse.statusCode).toBe(200); - expect(runesResponse.json().results).not.toHaveLength(0); - const response = await fastify.inject({ - method: 'GET', - url: '/runes/v1/etchings/' + ledgerEntry.rune_id, + test('can fetch by spaced name', async () => { + const url = '/runes/v1/etchings/' + rune.spaced_name; + const response = await fastify.inject({ + method: 'GET', + url: url, + }); + expect(response.statusCode).toBe(200); + expect(response.json().spaced_name).toEqual(rune.spaced_name); }); - expect(response.statusCode).toBe(200); - expect(response.json()).toStrictEqual(expected); - }); - test('can fetch by spaced name', async () => { - const url = '/runes/v1/etchings/' + rune.spaced_name; - const response = await fastify.inject({ - method: 'GET', - url: url, + test('can not fetch by spaced name if lacking bullets', async () => { + const url = '/runes/v1/etchings/' + rune.spaced_name.replaceAll('•', '-'); + const response = await fastify.inject({ + method: 'GET', + url: url, + }); + expect(response.statusCode).toBe(400); }); - expect(response.statusCode).toBe(200); - expect(response.json().spaced_name).toEqual(rune.spaced_name); }); - - test('can not fetch by spaced name if lacking bullets', async () => { - const url = '/runes/v1/etchings/' + rune.spaced_name.replaceAll('•', '-'); - const response = await fastify.inject({ - method: 'GET', - url: url, + describe('Transactions', () => { + test('shows details', async () => { + const expected = { + limit: 20, + offset: 0, + results: [ + { + address: '0', + amount: '0', + location: { + block_hash: 
'0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5', + block_height: 840000, + output: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e:0', + timestamp: 0, + tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e', + tx_index: 0, + vout: 0, + }, + operation: 'etching', + receiver_address: '0', + rune: { + id: '1:1', + name: 'Sample Rune', + number: 1, + spaced_name: 'Sample•Rune', + }, + }, + ], + total: 1, + }; + const txid = ledgerEntry.tx_id; + const response = await fastify.inject({ + method: 'GET', + url: '/runes/v1/transactions/' + txid + '/activity', + }); + expect(response.statusCode).toBe(200); + expect(response.json()).toStrictEqual(expected); }); - expect(response.statusCode).toBe(400); }); });