From 3dd76708081bc86514ddee35e30f17f94c5803a9 Mon Sep 17 00:00:00 2001
From: jlest01 <174762002+jlest01@users.noreply.github.com>
Date: Thu, 22 Aug 2024 08:10:55 -0300
Subject: [PATCH] Scan for SP tweaks after initial sync

---
 src/db.rs       |  34 ++++++-
 src/electrum.rs |  13 ++-
 src/index.rs    | 253 +++++++++++++++++++++++++++++++++++++++++++++---
 src/lib.rs      |   1 +
 src/sp.rs       | 215 ++++++++++++++++++++++++++++++++++++++++
 src/tracker.rs  |  14 ++-
 6 files changed, 514 insertions(+), 16 deletions(-)
 create mode 100644 src/sp.rs

diff --git a/src/db.rs b/src/db.rs
index 07e9f9d33..1e4768815 100644
--- a/src/db.rs
+++ b/src/db.rs
@@ -9,10 +9,12 @@ use crate::types::{HashPrefix, SerializedHashPrefixRow, SerializedHeaderRow};
 #[derive(Default)]
 pub(crate) struct WriteBatch {
     pub(crate) tip_row: [u8; 32],
+    pub(crate) sp_tip_row: [u8; 32],
     pub(crate) header_rows: Vec<SerializedHeaderRow>,
     pub(crate) funding_rows: Vec<SerializedHashPrefixRow>,
     pub(crate) spending_rows: Vec<SerializedHashPrefixRow>,
     pub(crate) txid_rows: Vec<SerializedHashPrefixRow>,
+    pub(crate) tweak_rows: Vec<Vec<u8>>,
 }
 
 impl WriteBatch {
@@ -21,6 +23,7 @@ impl WriteBatch {
         self.funding_rows.sort_unstable();
         self.spending_rows.sort_unstable();
         self.txid_rows.sort_unstable();
+        self.tweak_rows.sort_unstable();
     }
 }
 
@@ -35,11 +38,13 @@ const HEADERS_CF: &str = "headers";
 const TXID_CF: &str = "txid";
 const FUNDING_CF: &str = "funding";
 const SPENDING_CF: &str = "spending";
+const TWEAK_CF: &str = "tweak";
 
-const COLUMN_FAMILIES: &[&str] = &[CONFIG_CF, HEADERS_CF, TXID_CF, FUNDING_CF, SPENDING_CF];
+const COLUMN_FAMILIES: &[&str] = &[CONFIG_CF, HEADERS_CF, TXID_CF, FUNDING_CF, SPENDING_CF, TWEAK_CF];
 
 const CONFIG_KEY: &str = "C";
 const TIP_KEY: &[u8] = b"T";
+const SP_KEY: &[u8] = b"SP";
 
 // Taken from https://github.com/facebook/rocksdb/blob/master/include/rocksdb/db.h#L654-L689
 const DB_PROPERTIES: &[&str] = &[
@@ -218,6 +223,10 @@ impl DBStore {
         self.db.cf_handle(HEADERS_CF).expect("missing HEADERS_CF")
     }
 
+    fn tweak_cf(&self) -> &rocksdb::ColumnFamily {
+        self.db.cf_handle(TWEAK_CF).expect("missing TWEAK_CF")
+    }
+
     pub(crate) fn iter_funding(
         &self,
         prefix: HashPrefix,
@@ -270,6 +279,12 @@ impl DBStore {
             .expect("get_tip failed")
     }
 
+    pub(crate) fn last_sp(&self) -> Option<Vec<u8>> {
+        self.db
+            .get_cf(self.headers_cf(), SP_KEY)
+            .expect("last_sp failed")
+    }
+
     pub(crate) fn write(&self, batch: &WriteBatch) {
         let mut db_batch = rocksdb::WriteBatch::default();
         for key in &batch.funding_rows {
@@ -293,6 +308,23 @@ impl DBStore {
         self.db.write_opt(db_batch, &opts).unwrap();
     }
 
+    pub(crate) fn write_sp(&self, batch: &WriteBatch) {
+        let mut db_batch = rocksdb::WriteBatch::default();
+
+        for key in &batch.tweak_rows {
+            if key.len() > 8 {
+                db_batch.put_cf(self.tweak_cf(), &key[..8], &key[8..]);
+            }
+        }
+        db_batch.put_cf(self.headers_cf(), SP_KEY, batch.sp_tip_row);
+
+        let mut opts = rocksdb::WriteOptions::new();
+        let bulk_import = self.bulk_import.load(Ordering::Relaxed);
+        opts.set_sync(!bulk_import);
+        opts.disable_wal(bulk_import);
+        self.db.write_opt(db_batch, &opts).unwrap();
+    }
+
     pub(crate) fn flush(&self) {
         debug!("flushing DB column families");
         let mut config = self.get_config().unwrap_or_default();
diff --git a/src/electrum.rs b/src/electrum.rs
index 46e0fa968..3c23759c3 100644
--- a/src/electrum.rs
+++ b/src/electrum.rs
@@ -529,7 +529,18 @@ impl Rpc {
             Err(response) => return response, // params parsing may fail - the response contains request id
         };
         self.rpc_duration.observe_duration(&call.method, || {
-            if self.tracker.status().is_err() {
+            let is_sp_indexing = self.tracker.silent_payments_index && self.tracker.sp_status().is_err();
+
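+            // While the silent payments tweak index is still being built, reject
+            // tweak queries early; every other RPC falls through to the usual
+            // readiness checks below.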
+            if is_sp_indexing {
+                match &call.params {
+                    Params::SpTweaks(_) => {
+                        return error_msg(&call.id, RpcError::UnavailableIndex)
+                    }
+                    _ => (),
+                };
+            }
+
+            if is_sp_indexing || self.tracker.status().is_err() {
                 // Allow only a few RPC (for sync status notification) not requiring index DB being compacted.
                 match &call.params {
                     Params::BlockHeader(_)
diff --git a/src/index.rs b/src/index.rs
index 5a22dc669..fec90b485 100644
--- a/src/index.rs
+++ b/src/index.rs
@@ -1,8 +1,10 @@
 use anyhow::{Context, Result};
 use bitcoin::consensus::{deserialize, Decodable, Encodable};
+use bitcoin::secp256k1::PublicKey;
 use bitcoin::hashes::Hash;
-use bitcoin::{BlockHash, OutPoint, Txid};
+use bitcoin::{BlockHash, OutPoint, Txid, XOnlyPublicKey};
 use bitcoin_slices::{bsl, Visit, Visitor};
+use std::collections::HashMap;
 use std::ops::ControlFlow;
 
 use crate::{
@@ -87,6 +89,7 @@ pub struct Index {
     chain: Chain,
     stats: Stats,
     is_ready: bool,
+    is_sp_ready: bool,
     flush_needed: bool,
 }
 
@@ -117,6 +120,7 @@ impl Index {
             chain,
             stats,
             is_ready: false,
+            is_sp_ready: false,
             flush_needed: false,
         })
     }
@@ -163,6 +167,65 @@ impl Index {
         .map(|row| HashPrefixRow::from_db_row(row).height())
         .filter_map(move |height| self.chain.get_block_hash(height))
     }
+
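+    /// Scan for silent payments tweak data in chunks of up to 2000 blocks per
+    /// call, resuming from the block hash stored under `SP_KEY` (or from the
+    /// hard-coded start height on the first run). Returns `Ok(true)` once there
+    /// are no more blocks left to scan.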
+    pub(crate) fn silent_payments_sync(
+        &mut self,
+        daemon: &Daemon,
+        exit_flag: &ExitFlag,
+    ) -> Result<bool> {
+        let mut new_headers: Vec<NewHeader> = Vec::with_capacity(2000);
+        let start: usize;
+        if let Some(row) = self.store.last_sp() {
+            let blockhash: BlockHash = deserialize(&row).expect("invalid block_hash");
+            start = self.chain.get_block_height(&blockhash).expect("Can't find block_hash") + 1;
+        } else {
+            start = 70_000;
+        }
+        let end = if start + 2000 < self.chain.height() {
+            start + 2000
+        } else {
+            self.chain.height()
+        };
+        for block_height in start..end {
+            new_headers.push(NewHeader::from((
+                *self
+                    .chain
+                    .get_block_header(block_height)
+                    .expect("Unexpected missing block header"),
+                block_height,
+            )));
+        }
+        match (new_headers.first(), new_headers.last()) {
+            (Some(first), Some(last)) => {
+                let count = new_headers.len();
+                info!(
+                    "Looking for sp tweaks in {} blocks: [{}..{}]",
+                    count,
+                    first.height(),
+                    last.height()
+                );
+            }
+            _ => {
+                if self.flush_needed {
+                    self.store.flush(); // full compaction is performed on the first flush call
+                    self.flush_needed = false;
+                }
+                self.is_sp_ready = true;
+                return Ok(true); // no more blocks to index (done for now)
+            }
+        }
+        for chunk in new_headers.chunks(self.batch_size) {
+            exit_flag.poll().with_context(|| {
+                format!(
+                    "indexing sp interrupted at height: {}",
+                    chunk.first().unwrap().height()
+                )
+            })?;
+            self.sync_blocks(daemon, chunk, true)?;
+        }
+        self.flush_needed = true;
+        Ok(false) // sync is not done
+    }
 
     // Return `Ok(true)` when the chain is fully synced and the index is compacted.
     pub(crate) fn sync(&mut self, daemon: &Daemon, exit_flag: &ExitFlag) -> Result<bool> {
@@ -195,7 +258,7 @@ impl Index {
                     chunk.first().unwrap().height()
                 )
             })?;
-            self.sync_blocks(daemon, chunk)?;
+            self.sync_blocks(daemon, chunk, false)?;
         }
         self.chain.update(new_headers);
         self.stats.observe_chain(&self.chain);
@@ -203,19 +266,39 @@
         Ok(false) // sync is not done
     }
 
-    fn sync_blocks(&mut self, daemon: &Daemon, chunk: &[NewHeader]) -> Result<()> {
+    fn sync_blocks(&mut self, daemon: &Daemon, chunk: &[NewHeader], sp: bool) -> Result<()> {
         let blockhashes: Vec<BlockHash> = chunk.iter().map(|h| h.hash()).collect();
         let mut heights = chunk.iter().map(|h| h.height());
 
         let mut batch = WriteBatch::default();
 
+        if !sp {
+            let scan_block = |blockhash, block| {
+                let height = heights.next().expect("unexpected block");
+                self.stats.observe_duration("block", || {
+                    index_single_block(blockhash, block, height, &mut batch);
+                });
+                self.stats.height.set("tip", height as f64);
+            };
+
+            daemon.for_blocks(blockhashes, scan_block)?;
+        } else {
+            let scan_block_for_sp = |blockhash, block| {
+                let height = heights.next().expect("unexpected block");
+                self.stats.observe_duration("block_sp", || {
+                    scan_single_block_for_silent_payments(
+                        self,
+                        daemon,
+                        blockhash,
+                        block,
+                        &mut batch,
+                    );
+                });
+                self.stats.height.set("sp", height as f64);
+            };
+
+            daemon.for_blocks(blockhashes, scan_block_for_sp)?;
+        }
+
-        daemon.for_blocks(blockhashes, |blockhash, block| {
-            let height = heights.next().expect("unexpected block");
-            self.stats.observe_duration("block", || {
-                index_single_block(blockhash, block, height, &mut batch);
-            });
-            self.stats.height.set("tip", height as f64);
-        })?;
         let heights: Vec<_> = heights.collect();
         assert!(
             heights.is_empty(),
@@ -224,8 +307,14 @@
         );
         batch.sort();
         self.stats.observe_batch(&batch);
-        self.stats
-            .observe_duration("write", || self.store.write(&batch));
+        if !sp {
+            self.stats
+                .observe_duration("write", || self.store.write(&batch));
+        } else {
+            self.stats
+                .observe_duration("write_sp", || self.store.write_sp(&batch));
+        }
+
         self.stats.observe_db(&self.store);
         Ok(())
     }
@@ -233,6 +322,10 @@ impl Index {
     pub(crate) fn is_ready(&self) -> bool {
         self.is_ready
     }
+
+    pub(crate) fn is_sp_ready(&self) -> bool {
+        self.is_sp_ready
+    }
 }
 
 fn index_single_block(
@@ -293,3 +386,139 @@
         .expect("in-memory writers don't error");
     debug_assert_eq!(len, BlockHash::LEN);
 }
+
+fn scan_single_block_for_silent_payments(
+    index: &Index,
+    daemon: &Daemon,
+    block_hash: BlockHash,
+    block: SerBlock,
+    batch: &mut WriteBatch,
+) {
+    struct IndexBlockVisitor<'a> {
+        daemon: &'a Daemon,
+        index: &'a Index,
+        map: &'a mut HashMap<BlockHash, Vec<u8>>,
+    }
+
+    impl<'a> Visitor for IndexBlockVisitor<'a> {
+        fn visit_transaction(&mut self, tx: &bsl::Transaction) -> core::ops::ControlFlow<()> {
+            let parsed_tx: bitcoin::Transaction = match deserialize(tx.as_ref()) {
+                Ok(tx) => tx,
+                Err(_) => panic!("Unexpected invalid transaction"),
+            };
+
+            if parsed_tx.is_coinbase() { return ControlFlow::Continue(()) };
+
+            let txid = bsl_txid(tx);
+
+            let mut to_scan = false;
+            for (i, o) in parsed_tx.output.iter().enumerate() {
+                if o.script_pubkey.is_p2tr() {
+                    let outpoint = OutPoint {
+                        txid,
+                        vout: i.try_into().expect("Unexpectedly high vout"),
+                    };
+                    if self
+                        .index
+                        .store
+                        .iter_spending(SpendingPrefixRow::scan_prefix(outpoint))
+                        .next()
+                        .is_none()
+                    {
+                        to_scan = true;
+                        break; // Stop iterating once a relevant P2TR output is found
+                    }
+                }
+            }
+
+            if !to_scan {
+                return ControlFlow::Continue(());
+            }
+
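+            // Collect the eligible input public keys and all outpoints: BIP352
+            // derives the recipient tweak from the sum of the input keys and the
+            // lexicographically smallest outpoint, so every input's prevout is
+            // fetched from the daemon and run through get_pubkey_from_input.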
+            // Iterate over inputs
+            let mut pubkeys: Vec<PublicKey> = Vec::new();
+            let mut xonly_pubkeys: Vec<XOnlyPublicKey> = Vec::new();
+            let mut outpoints: Vec<(Txid, u32)> = Vec::with_capacity(parsed_tx.input.len());
+            for i in parsed_tx.input.iter() {
+                outpoints.push((i.previous_output.txid, i.previous_output.vout));
+                let prev_tx: bitcoin::Transaction = self
+                    .daemon
+                    .get_transaction(&i.previous_output.txid, None)
+                    .expect("Spending non-existent UTXO");
+                let index: usize = i
+                    .previous_output
+                    .vout
+                    .try_into()
+                    .expect("Unexpectedly high vout");
+                let prevout: &bitcoin::TxOut = prev_tx
+                    .output
+                    .get(index)
+                    .expect("Spending a non-existent UTXO");
+                match crate::sp::get_pubkey_from_input(&crate::sp::VinData {
+                    script_sig: i.script_sig.to_bytes(),
+                    txinwitness: i.witness.to_vec(),
+                    script_pub_key: prevout.script_pubkey.to_bytes(),
+                }) {
+                    Ok(Some(pubkey_from_input)) => match pubkey_from_input {
+                        crate::sp::PubKeyFromInput::XOnlyPublicKey(xonly_pubkey) => {
+                            xonly_pubkeys.push(xonly_pubkey)
+                        }
+                        crate::sp::PubKeyFromInput::PublicKey(pubkey) => pubkeys.push(pubkey),
+                    },
+                    Ok(None) => (),
+                    Err(_) => panic!("Scanning for public keys failed for tx: {}", txid),
+                }
+            }
+            let pubkeys_ref: Vec<&PublicKey> = pubkeys.iter().collect();
+            let pubkeys_ref = pubkeys_ref.as_slice();
+
+            // if the summed input pubkeys add up to the point at infinity, combine_keys returns Err
+            let combine_pubkey_result = PublicKey::combine_keys(pubkeys_ref);
+
+            if combine_pubkey_result.is_ok() && (!pubkeys.is_empty() || !xonly_pubkeys.is_empty()) {
+                let input_pub_keys = if pubkeys.is_empty() {
+                    None
+                } else {
+                    Some(pubkeys.as_slice())
+                };
+
+                let input_xpub_keys = if xonly_pubkeys.is_empty() {
+                    None
+                } else {
+                    Some(xonly_pubkeys.as_slice())
+                };
+
+                let tweak = crate::sp::recipient_calculate_tweak_data(
+                    input_xpub_keys,
+                    input_pub_keys,
+                    &outpoints,
+                )
+                .unwrap();
+
+                if let Some(block_hash) = self.index.filter_by_txid(txid).next() {
+                    if let Some(value) = self.map.get_mut(&block_hash) {
+                        value.extend(tweak.iter());
+                    } else {
+                        self.map.insert(block_hash, Vec::from_iter(tweak));
+                    }
+                } else {
+                    panic!("Unexpected unknown transaction");
+                }
+            }
+
+            ControlFlow::Continue(())
+        }
+    }
+
+    let mut map: HashMap<BlockHash, Vec<u8>> = HashMap::with_capacity(index.batch_size);
+    let mut index_block = IndexBlockVisitor {
+        daemon,
+        index,
+        map: &mut map,
+    };
+    bsl::Block::visit(&block, &mut index_block).expect("core returned invalid block");
+    for (hash, tweaks) in map {
+        let height = index
+            .chain
+            .get_block_height(&hash)
+            .expect("Unexpected non-existing blockhash");
+        let mut value: Vec<u8> = u64::try_from(height)
+            .expect("Unexpected invalid usize")
+            .to_be_bytes()
+            .to_vec();
+        value.extend(tweaks.iter());
+        batch.tweak_rows.push(value);
+    }
+    let len = block_hash
+        .consensus_encode(&mut (&mut batch.sp_tip_row as &mut [u8]))
+        .expect("in-memory writers don't error");
+    debug_assert_eq!(len, BlockHash::LEN);
+}
diff --git a/src/lib.rs b/src/lib.rs
index d1df9c062..ac44dc5ea 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -22,6 +22,7 @@ mod metrics;
 mod p2p;
 mod server;
 mod signals;
+mod sp;
 mod status;
 mod thread;
 mod tracker;
diff --git a/src/sp.rs b/src/sp.rs
new file mode 100644
index 000000000..c63ae2bc4
--- /dev/null
+++ b/src/sp.rs
@@ -0,0 +1,215 @@
+use bitcoin::{key::Secp256k1, secp256k1::{silentpayments::SilentpaymentsPublicData, PublicKey, XOnlyPublicKey}, Txid};
+use bitcoin_slices::bitcoin_hashes::{hash160, Hash};
+
+use anyhow::Error;
+
+// NOTE: all the pubkey extraction logic for silent payments inputs lives in this module for now.
+
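+// Inputs contribute a key to the BIP352 shared secret only if they spend P2TR
+// (key path), P2WPKH, P2SH-wrapped P2WPKH or P2PKH outputs; any other script
+// type makes get_pubkey_from_input return Ok(None).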
+
+// NUMS_H (defined in BIP341)
+const NUMS_H: [u8; 32] = [
+    0x50, 0x92, 0x9b, 0x74, 0xc1, 0xa0, 0x49, 0x54, 0xb7, 0x8b, 0x4b, 0x60, 0x35, 0xe9, 0x7a, 0x5e,
+    0x07, 0x8a, 0x5a, 0x0f, 0x28, 0xec, 0x96, 0xd5, 0x47, 0xbf, 0xee, 0x9a, 0xce, 0x80, 0x3a, 0xc0,
+];
+
+// Define OP_CODES used in script template matching for readability
+const OP_1: u8 = 0x51;
+const OP_0: u8 = 0x00;
+const OP_PUSHBYTES_20: u8 = 0x14;
+const OP_PUSHBYTES_32: u8 = 0x20;
+const OP_HASH160: u8 = 0xA9;
+const OP_EQUAL: u8 = 0x87;
+const OP_DUP: u8 = 0x76;
+const OP_EQUALVERIFY: u8 = 0x88;
+const OP_CHECKSIG: u8 = 0xAC;
+
+// Only compressed pubkeys are supported for silent payments
+const COMPRESSED_PUBKEY_SIZE: usize = 33;
+
+pub struct VinData {
+    pub script_sig: Vec<u8>,
+    pub txinwitness: Vec<Vec<u8>>,
+    pub script_pub_key: Vec<u8>,
+}
+
+// script templates for inputs allowed in BIP352 shared secret derivation
+pub fn is_p2tr(spk: &[u8]) -> bool {
+    matches!(spk, [OP_1, OP_PUSHBYTES_32, ..] if spk.len() == 34)
+}
+
+fn is_p2wpkh(spk: &[u8]) -> bool {
+    matches!(spk, [OP_0, OP_PUSHBYTES_20, ..] if spk.len() == 22)
+}
+
+fn is_p2sh(spk: &[u8]) -> bool {
+    matches!(spk, [OP_HASH160, OP_PUSHBYTES_20, .., OP_EQUAL] if spk.len() == 23)
+}
+
+fn is_p2pkh(spk: &[u8]) -> bool {
+    matches!(spk, [OP_DUP, OP_HASH160, OP_PUSHBYTES_20, .., OP_EQUALVERIFY, OP_CHECKSIG] if spk.len() == 25)
+}
+
+pub enum PubKeyFromInput {
+    XOnlyPublicKey(XOnlyPublicKey),
+    PublicKey(PublicKey),
+}
+
+pub fn get_pubkey_from_input(vin: &VinData) -> Result<Option<PubKeyFromInput>, Error> {
+    if is_p2pkh(&vin.script_pub_key) {
+        match (&vin.txinwitness.is_empty(), &vin.script_sig.is_empty()) {
+            (true, false) => {
+                let spk_hash = &vin.script_pub_key[3..23];
+                for i in (COMPRESSED_PUBKEY_SIZE..=vin.script_sig.len()).rev() {
+                    if let Some(pubkey_bytes) = &vin.script_sig.get(i - COMPRESSED_PUBKEY_SIZE..i) {
+                        let pubkey_hash = hash160::Hash::hash(pubkey_bytes);
+                        if pubkey_hash.to_byte_array() == spk_hash {
+                            let pubkey = PublicKey::from_slice(pubkey_bytes)?;
+                            let result = PubKeyFromInput::PublicKey(pubkey);
+                            return Ok(Some(result));
+                        }
+                    } else {
+                        return Ok(None);
+                    }
+                }
+            }
+            (_, true) => return Err(Error::msg("Empty script_sig for spending a p2pkh")),
+            (false, _) => return Err(Error::msg("Non-empty witness for spending a p2pkh")),
+        }
+    } else if is_p2sh(&vin.script_pub_key) {
+        match (&vin.txinwitness.is_empty(), &vin.script_sig.is_empty()) {
+            (false, false) => {
+                let redeem_script = &vin.script_sig[1..];
+                if is_p2wpkh(redeem_script) {
+                    if let Some(value) = vin.txinwitness.last() {
+                        if let Ok(pubkey) = PublicKey::from_slice(value) {
+                            let result = PubKeyFromInput::PublicKey(pubkey);
+                            return Ok(Some(result));
+                        } else {
+                            return Ok(None);
+                        }
+                    }
+                }
+            }
+            (_, true) => {
+                return Err(Error::msg(
+                    "Empty script_sig for spending a p2sh".to_owned(),
+                ))
+            }
+            (true, false) => {
+                return Ok(None);
+            }
+        }
+    } else if is_p2wpkh(&vin.script_pub_key) {
+        match (&vin.txinwitness.is_empty(), &vin.script_sig.is_empty()) {
+            (false, true) => {
+                if let Some(value) = vin.txinwitness.last() {
+                    if let Ok(pubkey) = PublicKey::from_slice(value) {
+                        let result = PubKeyFromInput::PublicKey(pubkey);
+                        return Ok(Some(result));
+                    } else {
+                        return Ok(None);
+                    }
+                } else {
+                    return Err(Error::msg("Empty witness".to_owned()));
+                }
+            }
+            (_, false) => {
+                return Err(Error::msg(
+                    "Non-empty script sig for spending a segwit output".to_owned(),
+                ))
+            }
+            (true, _) => {
+                return Err(Error::msg(
+                    "Empty witness for spending a segwit output".to_owned(),
+                ))
+            }
+        }
+    } else if is_p2tr(&vin.script_pub_key) {
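+        // Taproot: only key-path spends contribute a key. The witness may carry
+        // an optional annex (a final element starting with 0x50); after skipping
+        // it, a control block whose internal key is NUMS_H marks a script-path-only
+        // spend, which BIP352 excludes, so no key is returned for it.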
+        match (&vin.txinwitness.is_empty(), &vin.script_sig.is_empty()) {
+            (false, true) => {
+                // check for the optional annex
+                let annex = match vin.txinwitness.last().and_then(|value| value.get(0)) {
+                    Some(&0x50) => 1,
+                    Some(_) => 0,
+                    None => return Err(Error::msg("Empty or invalid witness".to_owned())),
+                };
+
+                // Check for script path
+                let stack_size = vin.txinwitness.len();
+                if stack_size > annex && vin.txinwitness[stack_size - annex - 1][1..33] == NUMS_H {
+                    return Ok(None);
+                }
+
+                // Return the pubkey from the script pubkey
+                return XOnlyPublicKey::from_slice(&vin.script_pub_key[2..34])
+                    .map_err(|e| Error::new(e))
+                    .map(|x_only_public_key| {
+                        let result = PubKeyFromInput::XOnlyPublicKey(x_only_public_key);
+                        Some(result)
+                    });
+            }
+            (_, false) => {
+                return Err(Error::msg(
+                    "Non-empty script sig for spending a segwit output".to_owned(),
+                ))
+            }
+            (true, _) => {
+                return Err(Error::msg(
+                    "Empty witness for spending a segwit output".to_owned(),
+                ))
+            }
+        }
+    }
+    return Ok(None);
+}
+
+pub fn get_smallest_outpoint(outpoints_data: &[(Txid, u32)]) -> Result<[u8; 36], Error> {
+    if outpoints_data.is_empty() {
+        return Err(Error::msg("No outpoints provided"));
+    }
+
+    let mut outpoints: Vec<[u8; 36]> = Vec::with_capacity(outpoints_data.len());
+
+    for (txid, vout) in outpoints_data {
+        let bytes = bitcoin::consensus::encode::serialize(&txid);
+
+        if bytes.len() != 32 {
+            return Err(Error::msg(format!(
+                "Invalid outpoint txid representation: {}",
+                txid
+            )));
+        }
+
+        let mut buffer = [0u8; 36];
+
+        buffer[..32].copy_from_slice(&bytes);
+        buffer[32..].copy_from_slice(&vout.to_le_bytes());
+        outpoints.push(buffer);
+    }
+
+    // sort outpoints
+    outpoints.sort_unstable();
+
+    if let Some(smallest_outpoint) = outpoints.first() {
+        Ok(*smallest_outpoint)
+    } else {
+        // This should never happen
+        Err(Error::msg("Unexpected empty outpoints vector"))
+    }
+}
+
+pub fn recipient_calculate_tweak_data(
+    input_xpub_keys: Option<&[XOnlyPublicKey]>,
+    input_pub_keys: Option<&[PublicKey]>,
+    outpoints_data: &[(Txid, u32)],
+) -> Result<[u8; 33], Error> {
+    let smallest_outpoint = get_smallest_outpoint(outpoints_data)?;
+    let secp = Secp256k1::new();
+    let public_data = SilentpaymentsPublicData::create(
+        &secp,
+        &smallest_outpoint,
+        input_xpub_keys,
+        input_pub_keys,
+    )
+    .unwrap();
+    let light_client_data33 = public_data.serialize(&secp).unwrap();
+    Ok(light_client_data33)
+}
diff --git a/src/tracker.rs b/src/tracker.rs
index ec3c6506a..f47787daf 100644
--- a/src/tracker.rs
+++ b/src/tracker.rs
@@ -25,7 +25,7 @@ pub struct Tracker {
     mempool: Mempool,
     metrics: Metrics,
     ignore_mempool: bool,
-    silent_payments_index: bool,
+    pub silent_payments_index: bool,
 }
 
 pub(crate) enum Error {
@@ -74,7 +74,10 @@ impl Tracker {
     }
 
     pub(crate) fn sync(&mut self, daemon: &Daemon, exit_flag: &ExitFlag) -> Result<bool> {
-        let done = self.index.sync(daemon, exit_flag)?;
+        let mut done = self.index.sync(daemon, exit_flag)?;
+        if done {
+            done = self.index.silent_payments_sync(daemon, exit_flag)?;
+        }
 
         if done && !self.ignore_mempool {
             self.mempool.sync(daemon, exit_flag); // TODO: double check tip - and retry on diff
@@ -89,6 +92,13 @@ impl Tracker {
         Err(Error::NotReady)
     }
 
+    pub(crate) fn sp_status(&self) -> Result<(), Error> {
+        if self.index.is_sp_ready() {
+            return Ok(());
+        }
+        Err(Error::NotReady)
+    }
+
     pub(crate) fn update_scripthash_status(
         &self,
         status: &mut ScriptHashStatus,