From 841cb92acfdd84ae150fb011921cb7fe18e3651d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Thu, 20 Apr 2023 23:02:02 +0800 Subject: [PATCH 01/15] [persist_redesign] Introduce `Persist` and `PersistBackend` This is similar to `keychain::Persist`, however, we allow for a generic tracker and changeset. --- crates/chain/src/lib.rs | 2 + crates/chain/src/persist.rs | 85 +++++++++++++++++++++++++++++++++++++ 2 files changed, 87 insertions(+) create mode 100644 crates/chain/src/persist.rs diff --git a/crates/chain/src/lib.rs b/crates/chain/src/lib.rs index 265276234..ee518f4cd 100644 --- a/crates/chain/src/lib.rs +++ b/crates/chain/src/lib.rs @@ -33,6 +33,8 @@ pub mod tx_graph; pub use tx_data_traits::*; mod chain_oracle; pub use chain_oracle::*; +mod persist; +pub use persist::*; #[doc(hidden)] pub mod example_utils; diff --git a/crates/chain/src/persist.rs b/crates/chain/src/persist.rs new file mode 100644 index 000000000..045149841 --- /dev/null +++ b/crates/chain/src/persist.rs @@ -0,0 +1,85 @@ +use core::marker::PhantomData; + +use crate::Append; + +/// `Persist` wraps a [`PersistBackend`] (`B`) to create a convenient staging area for changes (`C`) +/// before they are persisted. +/// +/// Not all changes to the tracker (`T`), which is an in-memory representation of wallet/blockchain +/// data, needs to be written to disk right away, so [`Persist::stage`] can be used to *stage* +/// changes first and then [`Persist::commit`] can be used to write changes to disk. +pub struct Persist { + backend: B, + stage: C, + tracker: PhantomData, +} + +impl, C: Append + Default> Persist { + /// Create a new [`Persist`] from [`PersistBackend`]. + pub fn new(backend: B) -> Self { + Self { + backend, + stage: Default::default(), + tracker: Default::default(), + } + } + + /// Stage a `changeset` to be commited later with [`commit`]. 
+ /// + /// [`commit`]: Self::commit + pub fn stage(&mut self, changeset: C) { + self.stage.append(changeset) + } + + /// Get the changes that have not been commited yet. + pub fn staged(&self) -> &C { + &self.stage + } + + /// Commit the staged changes to the underlying persistance backend. + /// + /// Returns a backend-defined error if this fails. + pub fn commit(&mut self) -> Result<(), B::WriteError> { + let mut temp = C::default(); + core::mem::swap(&mut temp, &mut self.stage); + self.backend.write_changes(temp) + } +} + +/// A persistence backend for [`Persist`]. +/// +/// * `T` represents the tracker. +/// * `C` represents the changeset. +pub trait PersistBackend { + /// The error the backend returns when it fails to write. + type WriteError: core::fmt::Debug; + + /// The error the backend returns when it fails to load. + type LoadError: core::fmt::Debug; + + /// Writes a changeset to the persistence backend. + /// + /// It is up to the backend what it does with this. It could store every changeset in a list or + /// it inserts the actual changes into a more structured database. All it needs to guarantee is + /// that [`load_into_tracker`] restores a keychain tracker to what it should be if all + /// changesets had been applied sequentially. + /// + /// [`load_into_tracker`]: Self::load_into_tracker + fn write_changes(&mut self, changeset: C) -> Result<(), Self::WriteError>; + + /// Loads all data from the persistence backend into `tracker`. 
+ fn load_into_tracker(&mut self, tracker: &mut T) -> Result<(), Self::LoadError>; +} + +impl PersistBackend for () { + type WriteError = (); + type LoadError = (); + + fn write_changes(&mut self, _changeset: C) -> Result<(), Self::WriteError> { + Ok(()) + } + + fn load_into_tracker(&mut self, _tracker: &mut T) -> Result<(), Self::LoadError> { + Ok(()) + } +} From 382f36522defb803366ee0c04e44076e51a1914c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Thu, 20 Apr 2023 23:30:44 +0800 Subject: [PATCH 02/15] [persist_redesign] Move `EntryIter` into it's own file --- crates/file_store/src/entry_iter.rs | 94 ++++++++++++++++++ .../src/{file_store.rs => keychain_store.rs} | 98 +------------------ crates/file_store/src/lib.rs | 11 ++- 3 files changed, 108 insertions(+), 95 deletions(-) create mode 100644 crates/file_store/src/entry_iter.rs rename crates/file_store/src/{file_store.rs => keychain_store.rs} (80%) diff --git a/crates/file_store/src/entry_iter.rs b/crates/file_store/src/entry_iter.rs new file mode 100644 index 000000000..200ad7813 --- /dev/null +++ b/crates/file_store/src/entry_iter.rs @@ -0,0 +1,94 @@ +use bincode::Options; +use std::{ + fs::File, + io::{self, Seek}, + marker::PhantomData, +}; + +use crate::bincode_options; + +/// Iterator over entries in a file store. +/// +/// Reads and returns an entry each time [`next`] is called. If an error occurs while reading the +/// iterator will yield a `Result::Err(_)` instead and then `None` for the next call to `next`. 
+/// +/// [`next`]: Self::next +pub struct EntryIter<'a, V> { + db_file: &'a mut File, + types: PhantomData, + error_exit: bool, +} + +impl<'a, V> EntryIter<'a, V> { + pub fn new(db_file: &'a mut File) -> Self { + Self { + db_file, + types: PhantomData, + error_exit: false, + } + } +} + +impl<'a, V> Iterator for EntryIter<'a, V> +where + V: serde::de::DeserializeOwned, +{ + type Item = Result; + + fn next(&mut self) -> Option { + let result = (|| { + let pos = self.db_file.stream_position()?; + + match bincode_options().deserialize_from(&mut self.db_file) { + Ok(changeset) => Ok(Some(changeset)), + Err(e) => { + if let bincode::ErrorKind::Io(inner) = &*e { + if inner.kind() == io::ErrorKind::UnexpectedEof { + let eof = self.db_file.seek(io::SeekFrom::End(0))?; + if pos == eof { + return Ok(None); + } + } + } + + self.db_file.seek(io::SeekFrom::Start(pos))?; + Err(IterError::Bincode(*e)) + } + } + })(); + + let result = result.transpose(); + + if let Some(Err(_)) = &result { + self.error_exit = true; + } + + result + } +} + +impl From for IterError { + fn from(value: io::Error) -> Self { + IterError::Io(value) + } +} + +/// Error type for [`EntryIter`]. +#[derive(Debug)] +pub enum IterError { + /// Failure to read from the file. + Io(io::Error), + /// Failure to decode data from the file. 
+ Bincode(bincode::ErrorKind), +} + +impl core::fmt::Display for IterError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + IterError::Io(e) => write!(f, "io error trying to read entry {}", e), + IterError::Bincode(e) => write!(f, "bincode error while reading entry {}", e), + } + } +} + +impl std::error::Error for IterError {} diff --git a/crates/file_store/src/file_store.rs b/crates/file_store/src/keychain_store.rs similarity index 80% rename from crates/file_store/src/file_store.rs rename to crates/file_store/src/keychain_store.rs index 824e3ccc5..d60b12737 100644 --- a/crates/file_store/src/file_store.rs +++ b/crates/file_store/src/keychain_store.rs @@ -6,14 +6,15 @@ use bdk_chain::{ keychain::{KeychainChangeSet, KeychainTracker}, sparse_chain, }; -use bincode::{DefaultOptions, Options}; -use core::marker::PhantomData; +use bincode::Options; use std::{ fs::{File, OpenOptions}, io::{self, Read, Seek, Write}, path::Path, }; +use crate::{bincode_options, EntryIter, IterError}; + /// BDK File Store magic bytes length. const MAGIC_BYTES_LEN: usize = 12; @@ -28,10 +29,6 @@ pub struct KeychainStore { changeset_type_params: core::marker::PhantomData<(K, P)>, } -fn bincode() -> impl bincode::Options { - DefaultOptions::new().with_varint_encoding() -} - impl KeychainStore where K: Ord + Clone + core::fmt::Debug, @@ -144,7 +141,7 @@ where return Ok(()); } - bincode() + bincode_options() .serialize_into(&mut self.db_file, changeset) .map_err(|e| match *e { bincode::ErrorKind::Io(inner) => inner, @@ -197,92 +194,6 @@ impl From for FileError { impl std::error::Error for FileError {} -/// Error type for [`EntryIter`]. -#[derive(Debug)] -pub enum IterError { - /// Failure to read from the file. - Io(io::Error), - /// Failure to decode data from the file. 
- Bincode(bincode::ErrorKind), -} - -impl core::fmt::Display for IterError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - IterError::Io(e) => write!(f, "io error trying to read entry {}", e), - IterError::Bincode(e) => write!(f, "bincode error while reading entry {}", e), - } - } -} - -impl std::error::Error for IterError {} - -/// Iterator over entries in a file store. -/// -/// Reads and returns an entry each time [`next`] is called. If an error occurs while reading the -/// iterator will yield a `Result::Err(_)` instead and then `None` for the next call to `next`. -/// -/// [`next`]: Self::next -pub struct EntryIter<'a, V> { - db_file: &'a mut File, - types: PhantomData, - error_exit: bool, -} - -impl<'a, V> EntryIter<'a, V> { - pub fn new(db_file: &'a mut File) -> Self { - Self { - db_file, - types: PhantomData, - error_exit: false, - } - } -} - -impl<'a, V> Iterator for EntryIter<'a, V> -where - V: serde::de::DeserializeOwned, -{ - type Item = Result; - - fn next(&mut self) -> Option { - let result = (|| { - let pos = self.db_file.stream_position()?; - - match bincode().deserialize_from(&mut self.db_file) { - Ok(changeset) => Ok(Some(changeset)), - Err(e) => { - if let bincode::ErrorKind::Io(inner) = &*e { - if inner.kind() == io::ErrorKind::UnexpectedEof { - let eof = self.db_file.seek(io::SeekFrom::End(0))?; - if pos == eof { - return Ok(None); - } - } - } - - self.db_file.seek(io::SeekFrom::Start(pos))?; - Err(IterError::Bincode(*e)) - } - } - })(); - - let result = result.transpose(); - - if let Some(Err(_)) = &result { - self.error_exit = true; - } - - result - } -} - -impl From for IterError { - fn from(value: io::Error) -> Self { - IterError::Io(value) - } -} - #[cfg(test)] mod test { use super::*; @@ -290,6 +201,7 @@ mod test { keychain::{DerivationAdditions, KeychainChangeSet}, TxHeight, }; + use bincode::DefaultOptions; use std::{ io::{Read, Write}, vec::Vec, diff --git a/crates/file_store/src/lib.rs 
b/crates/file_store/src/lib.rs index e33474194..8e140f76b 100644 --- a/crates/file_store/src/lib.rs +++ b/crates/file_store/src/lib.rs @@ -1,10 +1,17 @@ #![doc = include_str!("../README.md")] -mod file_store; +mod entry_iter; +mod keychain_store; use bdk_chain::{ keychain::{KeychainChangeSet, KeychainTracker, PersistBackend}, sparse_chain::ChainPosition, }; -pub use file_store::*; +use bincode::{DefaultOptions, Options}; +pub use entry_iter::*; +pub use keychain_store::*; + +pub(crate) fn bincode_options() -> impl bincode::Options { + DefaultOptions::new().with_varint_encoding() +} impl PersistBackend for KeychainStore where From 07f74b2c26b09516a937793f3fc0a0b210f25c49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Fri, 21 Apr 2023 02:12:55 +0800 Subject: [PATCH 03/15] [persist_redesign] Introduce `bdk_file_store::Store` Similar to `KeychainStore`, this is intended to be a `PersistBackend`. The difference is that `Store` is more generic, allowing support for different changeset and tracker types. `Store` does not implement `PersistBackend` directly (it cannot), but can the logic can be easily reused to implement it. --- crates/file_store/src/keychain_store.rs | 42 +--- crates/file_store/src/lib.rs | 37 ++++ crates/file_store/src/store.rs | 265 ++++++++++++++++++++++++ 3 files changed, 310 insertions(+), 34 deletions(-) create mode 100644 crates/file_store/src/store.rs diff --git a/crates/file_store/src/keychain_store.rs b/crates/file_store/src/keychain_store.rs index d60b12737..83eb3eae2 100644 --- a/crates/file_store/src/keychain_store.rs +++ b/crates/file_store/src/keychain_store.rs @@ -13,7 +13,7 @@ use std::{ path::Path, }; -use crate::{bincode_options, EntryIter, IterError}; +use crate::{bincode_options, EntryIter, FileError, IterError}; /// BDK File Store magic bytes length. 
const MAGIC_BYTES_LEN: usize = 12; @@ -47,7 +47,10 @@ where file.read_exact(&mut magic_bytes)?; if magic_bytes != MAGIC_BYTES { - return Err(FileError::InvalidMagicBytes(magic_bytes)); + return Err(FileError::InvalidMagicBytes { + got: magic_bytes.into(), + expected: &MAGIC_BYTES, + }); } Ok(Self { @@ -164,36 +167,6 @@ where } } -/// Error that occurs due to problems encountered with the file. -#[derive(Debug)] -pub enum FileError { - /// IO error, this may mean that the file is too short. - Io(io::Error), - /// Magic bytes do not match what is expected. - InvalidMagicBytes([u8; MAGIC_BYTES_LEN]), -} - -impl core::fmt::Display for FileError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::Io(e) => write!(f, "io error trying to read file: {}", e), - Self::InvalidMagicBytes(b) => write!( - f, - "file has invalid magic bytes: expected={:?} got={:?}", - MAGIC_BYTES, b - ), - } - } -} - -impl From for FileError { - fn from(value: io::Error) -> Self { - Self::Io(value) - } -} - -impl std::error::Error for FileError {} - #[cfg(test)] mod test { use super::*; @@ -207,6 +180,7 @@ mod test { vec::Vec, }; use tempfile::NamedTempFile; + #[derive( Debug, Clone, @@ -259,8 +233,8 @@ mod test { .expect("should write"); match KeychainStore::::new(file.reopen().unwrap()) { - Err(FileError::InvalidMagicBytes(b)) => { - assert_eq!(b, invalid_magic_bytes.as_bytes()) + Err(FileError::InvalidMagicBytes { got, .. 
}) => { + assert_eq!(got, invalid_magic_bytes.as_bytes()) } unexpected => panic!("unexpected result: {:?}", unexpected), }; diff --git a/crates/file_store/src/lib.rs b/crates/file_store/src/lib.rs index 8e140f76b..987e9d6f5 100644 --- a/crates/file_store/src/lib.rs +++ b/crates/file_store/src/lib.rs @@ -1,6 +1,9 @@ #![doc = include_str!("../README.md")] mod entry_iter; mod keychain_store; +mod store; +use std::io; + use bdk_chain::{ keychain::{KeychainChangeSet, KeychainTracker, PersistBackend}, sparse_chain::ChainPosition, @@ -8,6 +11,7 @@ use bdk_chain::{ use bincode::{DefaultOptions, Options}; pub use entry_iter::*; pub use keychain_store::*; +pub use store::*; pub(crate) fn bincode_options() -> impl bincode::Options { DefaultOptions::new().with_varint_encoding() @@ -37,3 +41,36 @@ where KeychainStore::load_into_keychain_tracker(self, tracker) } } + +/// Error that occurs due to problems encountered with the file. +#[derive(Debug)] +pub enum FileError { + /// IO error, this may mean that the file is too short. + Io(io::Error), + /// Magic bytes do not match what is expected. 
+ InvalidMagicBytes { + got: Vec, + expected: &'static [u8], + }, +} + +impl core::fmt::Display for FileError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Io(e) => write!(f, "io error trying to read file: {}", e), + Self::InvalidMagicBytes { got, expected } => write!( + f, + "file has invalid magic bytes: expected={:?} got={:?}", + expected, got, + ), + } + } +} + +impl From for FileError { + fn from(value: io::Error) -> Self { + Self::Io(value) + } +} + +impl std::error::Error for FileError {} diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs new file mode 100644 index 000000000..2691c3d68 --- /dev/null +++ b/crates/file_store/src/store.rs @@ -0,0 +1,265 @@ +use std::{ + fs::{File, OpenOptions}, + io::{self, Read, Seek, Write}, + marker::PhantomData, + path::Path, +}; + +use bdk_chain::Append; +use bincode::Options; + +use crate::{bincode_options, EntryIter, FileError, IterError}; + +/// Persists an append-only list of changesets (`C`) to a single file. +/// +/// The changesets are the results of altering a tracker implementation (`T`). +#[derive(Debug)] +pub struct Store { + magic: &'static [u8], + db_file: File, + marker: PhantomData<(T, C)>, +} + +impl Store +where + C: Append + Default + serde::Serialize + serde::de::DeserializeOwned, +{ + /// Creates a new store from a [`File`]. + /// + /// The file must have been opened with read and write permissions. + /// + /// [`File`]: std::fs::File + pub fn new(magic: &'static [u8], mut db_file: File) -> Result { + db_file.rewind()?; + + let mut magic_buf = Vec::from_iter((0..).take(magic.len())); + db_file.read_exact(magic_buf.as_mut())?; + + if magic_buf != magic { + return Err(FileError::InvalidMagicBytes { + got: magic_buf, + expected: magic, + }); + } + + Ok(Self { + magic, + db_file, + marker: Default::default(), + }) + } + + /// Creates or loads a store from `db_path`. + /// + /// If no file exists there, it will be created. 
+ pub fn new_from_path

(magic: &'static [u8], db_path: P) -> Result + where + P: AsRef, + { + let already_exists = db_path.as_ref().exists(); + + let mut db_file = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(db_path)?; + + if !already_exists { + db_file.write_all(magic)?; + } + + Self::new(magic, db_file) + } + + /// Iterates over the stored changeset from first to last, changing the seek position at each + /// iteration. + /// + /// The iterator may fail to read an entry and therefore return an error. However, the first time + /// it returns an error will be the last. After doing so, the iterator will always yield `None`. + /// + /// **WARNING**: This method changes the write position in the underlying file. You should + /// always iterate over all entries until `None` is returned if you want your next write to go + /// at the end; otherwise, you will write over existing entries. + pub fn iter_changesets(&mut self) -> Result, io::Error> { + self.db_file + .seek(io::SeekFrom::Start(self.magic.len() as _))?; + + Ok(EntryIter::new(&mut self.db_file)) + } + + /// Loads all the changesets that have been stored as one giant changeset. + /// + /// This function returns a tuple of the aggregate changeset and a result that indicates + /// whether an error occurred while reading or deserializing one of the entries. If so the + /// changeset will consist of all of those it was able to read. + /// + /// You should usually check the error. In many applications, it may make sense to do a full + /// wallet scan with a stop-gap after getting an error, since it is likely that one of the + /// changesets it was unable to read changed the derivation indices of the tracker. + /// + /// **WARNING**: This method changes the write position of the underlying file. The next + /// changeset will be written over the erroring entry (or the end of the file if none existed). 
+ pub fn aggregate_changesets(&mut self) -> (C, Result<(), IterError>) { + let mut changeset = C::default(); + let result = (|| { + let iter_changeset = self.iter_changesets()?; + for next_changeset in iter_changeset { + changeset.append(next_changeset?); + } + Ok(()) + })(); + + (changeset, result) + } + + /// Append a new changeset to the file and truncate the file to the end of the appended + /// changeset. + /// + /// The truncation is to avoid the possibility of having a valid but inconsistent changeset + /// directly after the appended changeset. + /// + /// **WARNING**: This method does not detect whether the changeset is empty or not, and will + /// append an empty changeset to the file (not catastrophic, just a waste of space). + pub fn append_changeset(&mut self, changeset: &C) -> Result<(), io::Error> { + bincode_options() + .serialize_into(&mut self.db_file, changeset) + .map_err(|e| match *e { + bincode::ErrorKind::Io(inner) => inner, + unexpected_err => panic!("unexpected bincode error: {}", unexpected_err), + })?; + + // truncate file after this changeset addition + // if this is not done, data after this changeset may represent valid changesets, however + // applying those changesets on top of this one may result in an inconsistent state + let pos = self.db_file.stream_position()?; + self.db_file.set_len(pos)?; + + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::*; + + use bincode::DefaultOptions; + use std::{ + io::{Read, Write}, + vec::Vec, + }; + use tempfile::NamedTempFile; + + const TEST_MAGIC_BYTES_LEN: usize = 12; + const TEST_MAGIC_BYTES: [u8; TEST_MAGIC_BYTES_LEN] = + [98, 100, 107, 102, 115, 49, 49, 49, 49, 49, 49, 49]; + + #[derive( + Debug, + Clone, + Copy, + PartialOrd, + Ord, + PartialEq, + Eq, + Hash, + serde::Serialize, + serde::Deserialize, + )] + enum TestKeychain { + External, + Internal, + } + + impl core::fmt::Display for TestKeychain { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { 
+ Self::External => write!(f, "external"), + Self::Internal => write!(f, "internal"), + } + } + } + + #[derive(Debug, Default, serde::Serialize, serde::Deserialize)] + struct TestChangeSet { + pub changes: Vec, + } + + impl Append for TestChangeSet { + fn append(&mut self, mut other: Self) { + self.changes.append(&mut other.changes) + } + } + + #[test] + fn new_fails_if_file_is_too_short() { + let mut file = NamedTempFile::new().unwrap(); + file.write_all(&TEST_MAGIC_BYTES[..TEST_MAGIC_BYTES_LEN - 1]) + .expect("should write"); + + match Store::<(), TestChangeSet>::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { + Err(FileError::Io(e)) => assert_eq!(e.kind(), std::io::ErrorKind::UnexpectedEof), + unexpected => panic!("unexpected result: {:?}", unexpected), + }; + } + + #[test] + fn new_fails_if_magic_bytes_are_invalid() { + let invalid_magic_bytes = "ldkfs0000000"; + + let mut file = NamedTempFile::new().unwrap(); + file.write_all(invalid_magic_bytes.as_bytes()) + .expect("should write"); + + match Store::<(), TestChangeSet>::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { + Err(FileError::InvalidMagicBytes { got, .. 
}) => { + assert_eq!(got, invalid_magic_bytes.as_bytes()) + } + unexpected => panic!("unexpected result: {:?}", unexpected), + }; + } + + #[test] + fn append_changeset_truncates_invalid_bytes() { + // initial data to write to file (magic bytes + invalid data) + let mut data = [255_u8; 2000]; + data[..TEST_MAGIC_BYTES_LEN].copy_from_slice(&TEST_MAGIC_BYTES); + + let changeset = TestChangeSet { + changes: vec!["one".into(), "two".into(), "three!".into()], + }; + + let mut file = NamedTempFile::new().unwrap(); + file.write_all(&data).expect("should write"); + + let mut store = Store::<(), TestChangeSet>::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) + .expect("should open"); + match store.iter_changesets().expect("seek should succeed").next() { + Some(Err(IterError::Bincode(_))) => {} + unexpected_res => panic!("unexpected result: {:?}", unexpected_res), + } + + store.append_changeset(&changeset).expect("should append"); + + drop(store); + + let got_bytes = { + let mut buf = Vec::new(); + file.reopen() + .unwrap() + .read_to_end(&mut buf) + .expect("should read"); + buf + }; + + let expected_bytes = { + let mut buf = TEST_MAGIC_BYTES.to_vec(); + DefaultOptions::new() + .with_varint_encoding() + .serialize_into(&mut buf, &changeset) + .expect("should encode"); + buf + }; + + assert_eq!(got_bytes, expected_bytes); + } +} From 7acedb631d8faaab959d30aac6278a358cdafc9c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Fri, 21 Apr 2023 02:45:13 +0800 Subject: [PATCH 04/15] [persist_redesign] Split `PersistBackend` into two traits `PersistBackend` is now two traits: * `PersistBackend` has method `write_changes()`. * `LoadablePersistBackend` has method `load_into_tracker()`. This is because `LoadablePersistBackend` requires a second generic `T` (for the tracker) which `Store` cannot implement for without introducing a new tracker trait, or having a concrete type for `T`. 
By having two traits, `Store` can still implement `PersistBackend` and the `LoadablePersistBackend` can be implemented individually with each concrete tracker type. --- crates/chain/src/persist.rs | 43 ++++++++++++++----------- crates/file_store/src/keychain_store.rs | 29 +++++++++++++++-- crates/file_store/src/lib.rs | 29 ----------------- crates/file_store/src/store.rs | 13 +++++++- 4 files changed, 63 insertions(+), 51 deletions(-) diff --git a/crates/chain/src/persist.rs b/crates/chain/src/persist.rs index 045149841..2cd69c9fa 100644 --- a/crates/chain/src/persist.rs +++ b/crates/chain/src/persist.rs @@ -1,26 +1,26 @@ -use core::marker::PhantomData; - use crate::Append; /// `Persist` wraps a [`PersistBackend`] (`B`) to create a convenient staging area for changes (`C`) /// before they are persisted. /// -/// Not all changes to the tracker (`T`), which is an in-memory representation of wallet/blockchain +/// Not all changes to the tracker, which is an in-memory representation of wallet/blockchain /// data, needs to be written to disk right away, so [`Persist::stage`] can be used to *stage* /// changes first and then [`Persist::commit`] can be used to write changes to disk. -pub struct Persist { +pub struct Persist { backend: B, stage: C, - tracker: PhantomData, } -impl, C: Append + Default> Persist { +impl Persist +where + B: PersistBackend, + C: Append + Default, +{ /// Create a new [`Persist`] from [`PersistBackend`]. pub fn new(backend: B) -> Self { Self { backend, stage: Default::default(), - tracker: Default::default(), } } @@ -42,21 +42,17 @@ impl, C: Append + Default> Persist { pub fn commit(&mut self) -> Result<(), B::WriteError> { let mut temp = C::default(); core::mem::swap(&mut temp, &mut self.stage); - self.backend.write_changes(temp) + self.backend.write_changes(&temp) } } /// A persistence backend for [`Persist`]. /// -/// * `T` represents the tracker. -/// * `C` represents the changeset. 
-pub trait PersistBackend { +/// `C` represents the changeset. +pub trait PersistBackend { /// The error the backend returns when it fails to write. type WriteError: core::fmt::Debug; - /// The error the backend returns when it fails to load. - type LoadError: core::fmt::Debug; - /// Writes a changeset to the persistence backend. /// /// It is up to the backend what it does with this. It could store every changeset in a list or @@ -64,20 +60,29 @@ pub trait PersistBackend { /// that [`load_into_tracker`] restores a keychain tracker to what it should be if all /// changesets had been applied sequentially. /// - /// [`load_into_tracker`]: Self::load_into_tracker - fn write_changes(&mut self, changeset: C) -> Result<(), Self::WriteError>; + /// [`load_into_tracker`]: LoadablePersistBackend::load_into_tracker + fn write_changes(&mut self, changeset: &C) -> Result<(), Self::WriteError>; +} + +/// A trait that extends [`PersistBackend`] to be able to load into a tracker implementation (`T`). +pub trait LoadablePersistBackend: PersistBackend { + /// The error the backend returns when it fails to load. + type LoadError: core::fmt::Debug; /// Loads all data from the persistence backend into `tracker`. fn load_into_tracker(&mut self, tracker: &mut T) -> Result<(), Self::LoadError>; } -impl PersistBackend for () { +impl PersistBackend for () { type WriteError = (); - type LoadError = (); - fn write_changes(&mut self, _changeset: C) -> Result<(), Self::WriteError> { + fn write_changes(&mut self, _changeset: &C) -> Result<(), Self::WriteError> { Ok(()) } +} + +impl LoadablePersistBackend for () { + type LoadError = (); fn load_into_tracker(&mut self, _tracker: &mut T) -> Result<(), Self::LoadError> { Ok(()) diff --git a/crates/file_store/src/keychain_store.rs b/crates/file_store/src/keychain_store.rs index 83eb3eae2..4da7f7045 100644 --- a/crates/file_store/src/keychain_store.rs +++ b/crates/file_store/src/keychain_store.rs @@ -3,8 +3,8 @@ //! 
The star of the show is [`KeychainStore`], which maintains an append-only file of //! [`KeychainChangeSet`]s which can be used to restore a [`KeychainTracker`]. use bdk_chain::{ - keychain::{KeychainChangeSet, KeychainTracker}, - sparse_chain, + keychain::{KeychainChangeSet, KeychainTracker, PersistBackend}, + sparse_chain::{self, ChainPosition}, }; use bincode::Options; use std::{ @@ -167,6 +167,31 @@ where } } +impl PersistBackend for KeychainStore +where + K: Ord + Clone + core::fmt::Debug, + P: ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + type WriteError = std::io::Error; + + type LoadError = IterError; + + fn append_changeset( + &mut self, + changeset: &KeychainChangeSet, + ) -> Result<(), Self::WriteError> { + KeychainStore::append_changeset(self, changeset) + } + + fn load_into_keychain_tracker( + &mut self, + tracker: &mut KeychainTracker, + ) -> Result<(), Self::LoadError> { + KeychainStore::load_into_keychain_tracker(self, tracker) + } +} + #[cfg(test)] mod test { use super::*; diff --git a/crates/file_store/src/lib.rs b/crates/file_store/src/lib.rs index 987e9d6f5..e8cd9d147 100644 --- a/crates/file_store/src/lib.rs +++ b/crates/file_store/src/lib.rs @@ -4,10 +4,6 @@ mod keychain_store; mod store; use std::io; -use bdk_chain::{ - keychain::{KeychainChangeSet, KeychainTracker, PersistBackend}, - sparse_chain::ChainPosition, -}; use bincode::{DefaultOptions, Options}; pub use entry_iter::*; pub use keychain_store::*; @@ -17,31 +13,6 @@ pub(crate) fn bincode_options() -> impl bincode::Options { DefaultOptions::new().with_varint_encoding() } -impl PersistBackend for KeychainStore -where - K: Ord + Clone + core::fmt::Debug, - P: ChainPosition, - KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, -{ - type WriteError = std::io::Error; - - type LoadError = IterError; - - fn append_changeset( - &mut self, - changeset: &KeychainChangeSet, - ) -> Result<(), Self::WriteError> { - 
KeychainStore::append_changeset(self, changeset) - } - - fn load_into_keychain_tracker( - &mut self, - tracker: &mut KeychainTracker, - ) -> Result<(), Self::LoadError> { - KeychainStore::load_into_keychain_tracker(self, tracker) - } -} - /// Error that occurs due to problems encountered with the file. #[derive(Debug)] pub enum FileError { diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs index 2691c3d68..2d577f53a 100644 --- a/crates/file_store/src/store.rs +++ b/crates/file_store/src/store.rs @@ -5,7 +5,7 @@ use std::{ path::Path, }; -use bdk_chain::Append; +use bdk_chain::{Append, PersistBackend}; use bincode::Options; use crate::{bincode_options, EntryIter, FileError, IterError}; @@ -138,6 +138,17 @@ where } } +impl PersistBackend for Store +where + C: Default + Append + serde::Serialize + serde::de::DeserializeOwned, +{ + type WriteError = std::io::Error; + + fn write_changes(&mut self, changeset: &C) -> Result<(), Self::WriteError> { + Store::append_changeset(self, changeset) + } +} + #[cfg(test)] mod test { use super::*; From 7b022bb12d07a38df902ddcdb886fd1fdbb59ee5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Fri, 21 Apr 2023 03:03:22 +0800 Subject: [PATCH 05/15] [persist_redesign] Implement `ConfirmationHeightAnchor` This is an `Anchor` implementation that also informs of the exact confirmation height of the transaction. --- crates/chain/src/chain_data.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index a360b304e..39e121c44 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -187,6 +187,31 @@ impl From<(&u32, &BlockHash)> for BlockId { } } +/// An [`Anchor`] implementation that also records the exact confirmation height of the transaction. 
+#[derive(Debug, Default, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate") +)] +pub struct ConfirmationHeightAnchor { + /// The anchor block. + pub anchor_block: BlockId, + + /// The exact confirmation height of the transaction (if any). + pub confirmation_height: Option, +} + +impl Anchor for ConfirmationHeightAnchor { + fn anchor_block(&self) -> BlockId { + self.anchor_block + } + + fn confirmation_height_upper_bound(&self) -> u32 { + self.confirmation_height.unwrap_or(self.anchor_block.height) + } +} + /// A `TxOut` with as much data as we can retrieve about it #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct FullTxOut

{ From b91116c29663bbc2c05a954844edbc6d8aa5e545 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Sun, 23 Apr 2023 04:14:35 +0800 Subject: [PATCH 06/15] [examples_redesign] Introduce `tracker_example_cli` example crate Essentially a copy of `keychain_tracker_example_cli` but uses the new data structures. This also requires us to create an example `Tracker`. --- Cargo.toml | 1 + crates/chain/src/chain_data.rs | 9 + crates/chain/src/keychain/txout_index.rs | 6 +- example-crates/tracker_example_cli/Cargo.toml | 17 + example-crates/tracker_example_cli/src/lib.rs | 735 ++++++++++++++++++ .../tracker_example_cli/src/tracker.rs | 133 ++++ 6 files changed, 898 insertions(+), 3 deletions(-) create mode 100644 example-crates/tracker_example_cli/Cargo.toml create mode 100644 example-crates/tracker_example_cli/src/lib.rs create mode 100644 example-crates/tracker_example_cli/src/tracker.rs diff --git a/Cargo.toml b/Cargo.toml index 2104196be..a24ed72ec 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,6 +7,7 @@ members = [ "example-crates/keychain_tracker_electrum", "example-crates/keychain_tracker_esplora", "example-crates/keychain_tracker_example_cli", + "example-crates/tracker_example_cli", "example-crates/wallet_electrum", "example-crates/wallet_esplora", "example-crates/wallet_esplora_async", diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index 39e121c44..0ce74a841 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -16,6 +16,15 @@ pub enum ObservedAs { Unconfirmed(u64), } +impl ObservedAs { + pub fn is_confirmed(&self) -> bool { + match self { + ObservedAs::Confirmed(_) => true, + ObservedAs::Unconfirmed(_) => false, + } + } +} + impl ObservedAs<&A> { pub fn cloned(self) -> ObservedAs { match self { diff --git a/crates/chain/src/keychain/txout_index.rs b/crates/chain/src/keychain/txout_index.rs index fbe67d1fc..e2dec2354 100644 --- a/crates/chain/src/keychain/txout_index.rs +++ 
b/crates/chain/src/keychain/txout_index.rs @@ -91,7 +91,7 @@ impl Deref for KeychainTxOutIndex { } } -impl Indexer for KeychainTxOutIndex { +impl Indexer for KeychainTxOutIndex { type Additions = DerivationAdditions; fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::Additions { @@ -111,9 +111,9 @@ impl Indexer for KeychainTxOutIndex { } } -impl OwnedIndexer for KeychainTxOutIndex { +impl OwnedIndexer for KeychainTxOutIndex { fn is_spk_owned(&self, spk: &Script) -> bool { - self.inner().is_spk_owned(spk) + self.index_of_spk(spk).is_some() } } diff --git a/example-crates/tracker_example_cli/Cargo.toml b/example-crates/tracker_example_cli/Cargo.toml new file mode 100644 index 000000000..29f60c0b3 --- /dev/null +++ b/example-crates/tracker_example_cli/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "tracker_example_cli" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../../crates/chain", features = ["serde", "miniscript"]} +bdk_file_store = { path = "../../crates/file_store" } +bdk_tmp_plan = { path = "../../nursery/tmp_plan" } +bdk_coin_select = { path = "../../nursery/coin_select" } + +clap = { version = "3.2.23", features = ["derive", "env"] } +anyhow = "1" +serde = { version = "1", features = ["derive"] } +serde_json = { version = "^1.0" } diff --git a/example-crates/tracker_example_cli/src/lib.rs b/example-crates/tracker_example_cli/src/lib.rs new file mode 100644 index 000000000..2d50be30f --- /dev/null +++ b/example-crates/tracker_example_cli/src/lib.rs @@ -0,0 +1,735 @@ +mod tracker; +use anyhow::anyhow; +use bdk_chain::{ + bitcoin::{ + psbt::Prevouts, secp256k1::Secp256k1, util::sighash::SighashCache, Address, LockTime, + Network, Sequence, Transaction, TxIn, TxOut, + }, + keychain::DerivationAdditions, + miniscript::{ + descriptor::{DescriptorSecretKey, KeyMap}, + Descriptor, DescriptorPublicKey, + }, + Anchor, 
Append, BlockId, ChainOracle, DescriptorExt, FullTxOut, ObservedAs, +}; +use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue}; +use clap::{Parser, Subcommand}; +use std::{cmp::Reverse, collections::HashMap, path::PathBuf, sync::Mutex, time::Duration}; + +pub use bdk_file_store; +pub use clap; +pub use tracker::*; + +#[derive( + Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, serde::Deserialize, serde::Serialize, +)] +pub enum Keychain { + External, + Internal, +} + +impl core::fmt::Display for Keychain { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Keychain::External => write!(f, "external"), + Keychain::Internal => write!(f, "internal"), + } + } +} + +impl Default for Keychain { + fn default() -> Self { + Self::External + } +} + +#[derive(Parser)] +#[clap(author, version, about, long_about = None)] +#[clap(propagate_version = true)] +pub struct Args { + #[clap(env = "DESCRIPTOR")] + pub descriptor: String, + #[clap(env = "CHANGE_DESCRIPTOR")] + pub change_descriptor: Option, + + #[clap(env = "BITCOIN_NETWORK", long, default_value = "signet")] + pub network: Network, + + #[clap(env = "BDK_DB_PATH", long, default_value = ".bdk_example_db")] + pub db_path: PathBuf, + + #[clap(env = "BDK_CP_LIMIT", long, default_value = "20")] + pub cp_limit: usize, + + #[clap(subcommand)] + pub command: Commands, +} + +#[derive(Subcommand, Debug, Clone)] +pub enum Commands { + #[clap(flatten)] + ChainSpecific(C), + /// Address generation and inspection. + Address { + #[clap(subcommand)] + addr_cmd: AddressCmd, + }, + /// Get the wallet balance. + Balance, + /// TxOut related commands. + #[clap(name = "txout")] + TxOut { + #[clap(subcommand)] + txout_cmd: TxOutCmd, + }, + /// Send coins to an address. 
+ Send { + value: u64, + address: Address, + #[clap(short, default_value = "largest-first")] + coin_select: CoinSelectionAlgo, + }, +} + +#[derive(Clone, Debug)] +pub enum CoinSelectionAlgo { + LargestFirst, + SmallestFirst, + OldestFirst, + NewestFirst, + BranchAndBound, +} + +impl Default for CoinSelectionAlgo { + fn default() -> Self { + Self::LargestFirst + } +} + +impl core::str::FromStr for CoinSelectionAlgo { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + use CoinSelectionAlgo::*; + Ok(match s { + "largest-first" => LargestFirst, + "smallest-first" => SmallestFirst, + "oldest-first" => OldestFirst, + "newest-first" => NewestFirst, + "bnb" => BranchAndBound, + unknown => return Err(anyhow!("unknown coin selection algorithm '{}'", unknown)), + }) + } +} + +impl core::fmt::Display for CoinSelectionAlgo { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use CoinSelectionAlgo::*; + write!( + f, + "{}", + match self { + LargestFirst => "largest-first", + SmallestFirst => "smallest-first", + OldestFirst => "oldest-first", + NewestFirst => "newest-first", + BranchAndBound => "bnb", + } + ) + } +} + +#[derive(Subcommand, Debug, Clone)] +pub enum AddressCmd { + /// Get the next unused address. + Next, + /// Get a new address regardless of the existing unused addresses. + New, + /// List all addresses + List { + #[clap(long)] + change: bool, + }, + Index, +} + +#[derive(Subcommand, Debug, Clone)] +pub enum TxOutCmd { + List { + /// Return only spent outputs. + #[clap(short, long)] + spent: bool, + /// Return only unspent outputs. + #[clap(short, long)] + unspent: bool, + /// Return only confirmed outputs. + #[clap(long)] + confirmed: bool, + /// Return only unconfirmed outputs. + #[clap(long)] + unconfirmed: bool, + }, +} + +/// A structure defining the output of an [`AddressCmd`]` execution. 
+#[derive(serde::Serialize, serde::Deserialize)] +pub struct AddrsOutput { + keychain: String, + index: u32, + addrs: Address, + used: bool, +} + +pub fn run_address_cmd( + tracker: &Mutex>, + db: &Mutex>, + addr_cmd: AddressCmd, + network: Network, +) -> anyhow::Result<()> +where + A: Default + Anchor + serde::de::DeserializeOwned + serde::Serialize, +{ + let mut tracker = tracker.lock().unwrap(); + let txout_index = &mut tracker.inner.index; + + let addr_cmd_output = match addr_cmd { + AddressCmd::Next => Some(txout_index.next_unused_spk(&Keychain::External)), + AddressCmd::New => Some(txout_index.reveal_next_spk(&Keychain::External)), + _ => None, + }; + + if let Some(((index, spk), additions)) = addr_cmd_output { + let mut db = db.lock().unwrap(); + // update database since we're about to give out a new address + db.append_changeset(&additions.into())?; + + let spk = spk.clone(); + let address = + Address::from_script(&spk, network).expect("should always be able to derive address"); + eprintln!("This is the address at index {}", index); + println!("{}", address); + } + + match addr_cmd { + AddressCmd::Next | AddressCmd::New => { + /* covered */ + Ok(()) + } + AddressCmd::Index => { + for (keychain, derivation_index) in txout_index.last_revealed_indices() { + println!("{:?}: {}", keychain, derivation_index); + } + Ok(()) + } + AddressCmd::List { change } => { + let target_keychain = match change { + true => Keychain::Internal, + false => Keychain::External, + }; + for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) { + let address = Address::from_script(spk, network) + .expect("should always be able to derive address"); + println!( + "{:?} {} used:{}", + index, + address, + txout_index.is_used(&(target_keychain, index)) + ); + } + Ok(()) + } + } +} + +pub fn run_balance_cmd( + tracker: &Mutex>, + chain: &Mutex, + chain_tip: BlockId, +) -> anyhow::Result<()> +where + C::Error: std::error::Error + Send + Sync + 'static, +{ + let chain = 
&*chain.lock().unwrap(); + let tracker = tracker.lock().unwrap(); + let utxos = tracker + .inner + .try_list_owned_unspents(chain, chain_tip) + .collect::, C::Error>>()?; + + let (confirmed, unconfirmed) = + utxos + .into_iter() + .fold((0, 0), |(confirmed, unconfirmed), utxo| { + match utxo.chain_position { + bdk_chain::ObservedAs::Confirmed(_) => { + (confirmed + utxo.txout.value, unconfirmed) + } + bdk_chain::ObservedAs::Unconfirmed(_) => { + (confirmed, unconfirmed + utxo.txout.value) + } + } + }); + + println!("confirmed: {}", confirmed); + println!("unconfirmed: {}", unconfirmed); + Ok(()) +} + +pub fn run_txo_cmd( + txout_cmd: TxOutCmd, + tracker: &Mutex>, + chain: &Mutex, + chain_tip: BlockId, + network: Network, +) -> anyhow::Result<()> +where + C::Error: std::error::Error + Send + Sync + 'static, +{ + let chain = &*chain.lock().unwrap(); + + match txout_cmd { + TxOutCmd::List { + unspent, + spent, + confirmed, + unconfirmed, + } => { + let tracker = tracker.lock().unwrap(); + + let txouts = tracker + .try_list_owned_txouts(chain, chain_tip) + .filter(|r| match r { + Ok((_, full_txo)) => match (unspent, spent) { + (true, false) => full_txo.spent_by.is_none(), + (false, true) => full_txo.spent_by.is_some(), + _ => true, + }, + Err(_) => true, + }) + .filter(|r| match r { + Ok((_, full_txo)) => match (confirmed, unconfirmed) { + (true, false) => full_txo.chain_position.is_confirmed(), + (false, true) => !full_txo.chain_position.is_confirmed(), + _ => true, + }, + Err(_) => true, + }) + .collect::, _>>()?; + + for (spk_index, full_txout) in txouts { + let address = + Address::from_script(&full_txout.txout.script_pubkey, network).unwrap(); + + println!( + "{:?} {} {} {} spent:{:?}", + spk_index, + full_txout.txout.value, + full_txout.outpoint, + address, + full_txout.spent_by + ) + } + Ok(()) + } + } +} + +#[allow(clippy::type_complexity)] // FIXME +pub fn create_tx( + value: u64, + address: Address, + coin_select: CoinSelectionAlgo, + tracker: &mut Tracker, + 
chain: &C, + chain_tip: BlockId, + keymap: &HashMap, +) -> anyhow::Result<( + Transaction, + Option<(DerivationAdditions, (Keychain, u32))>, +)> +where + C::Error: std::error::Error + Send + Sync + 'static, +{ + let mut additions = DerivationAdditions::default(); + + let assets = bdk_tmp_plan::Assets { + keys: keymap.iter().map(|(pk, _)| pk.clone()).collect(), + ..Default::default() + }; + + // TODO use planning module + let mut candidates = + planned_utxos(tracker, chain, chain_tip, &assets).collect::, C::Error>>()?; + + // apply coin selection algorithm + match coin_select { + CoinSelectionAlgo::LargestFirst => { + candidates.sort_by_key(|(_, utxo)| Reverse(utxo.txout.value)) + } + CoinSelectionAlgo::SmallestFirst => candidates.sort_by_key(|(_, utxo)| utxo.txout.value), + CoinSelectionAlgo::OldestFirst => { + candidates.sort_by_key(|(_, utxo)| utxo.chain_position.clone()) + } + CoinSelectionAlgo::NewestFirst => { + candidates.sort_by_key(|(_, utxo)| Reverse(utxo.chain_position.clone())) + } + CoinSelectionAlgo::BranchAndBound => {} + } + + // turn the txos we chose into weight and value + let wv_candidates = candidates + .iter() + .map(|(plan, utxo)| { + WeightedValue::new( + utxo.txout.value, + plan.expected_weight() as _, + plan.witness_version().is_some(), + ) + }) + .collect(); + + let mut outputs = vec![TxOut { + value, + script_pubkey: address.script_pubkey(), + }]; + + let internal_keychain = if tracker + .inner + .index + .keychains() + .get(&Keychain::Internal) + .is_some() + { + Keychain::Internal + } else { + Keychain::External + }; + + let ((change_index, change_script), change_additions) = + tracker.inner.index.next_unused_spk(&internal_keychain); + additions.append(change_additions); + + // Clone to drop the immutable reference. 
+ let change_script = change_script.clone(); + + let change_plan = bdk_tmp_plan::plan_satisfaction( + &tracker + .inner + .index + .keychains() + .get(&internal_keychain) + .expect("must exist") + .at_derivation_index(change_index), + &assets, + ) + .expect("failed to obtain change plan"); + + let mut change_output = TxOut { + value: 0, + script_pubkey: change_script, + }; + + let cs_opts = CoinSelectorOpt { + target_feerate: 0.5, + min_drain_value: tracker + .inner + .index + .keychains() + .get(&internal_keychain) + .expect("must exist") + .dust_value(), + ..CoinSelectorOpt::fund_outputs( + &outputs, + &change_output, + change_plan.expected_weight() as u32, + ) + }; + + // TODO: How can we make it easy to shuffle in order of inputs and outputs here? + // apply coin selection by saying we need to fund these outputs + let mut coin_selector = CoinSelector::new(&wv_candidates, &cs_opts); + + // just select coins in the order provided until we have enough + // only use the first result (least waste) + let selection = match coin_select { + CoinSelectionAlgo::BranchAndBound => { + coin_select_bnb(Duration::from_secs(10), coin_selector.clone()) + .map_or_else(|| coin_selector.select_until_finished(), |cs| cs.finish())? 
+ } + _ => coin_selector.select_until_finished()?, + }; + let (_, selection_meta) = selection.best_strategy(); + + // get the selected utxos + let selected_txos = selection.apply_selection(&candidates).collect::>(); + + if let Some(drain_value) = selection_meta.drain_value { + change_output.value = drain_value; + // if the selection tells us to use change and the change value is sufficient, we add it as an output + outputs.push(change_output) + } + + let mut transaction = Transaction { + version: 0x02, + lock_time: LockTime::from_height(chain_tip.height) + .unwrap_or(LockTime::ZERO) + .into(), + input: selected_txos + .iter() + .map(|(_, utxo)| TxIn { + previous_output: utxo.outpoint, + sequence: Sequence::ENABLE_RBF_NO_LOCKTIME, + ..Default::default() + }) + .collect(), + output: outputs, + }; + + let prevouts = selected_txos + .iter() + .map(|(_, utxo)| utxo.txout.clone()) + .collect::>(); + let sighash_prevouts = Prevouts::All(&prevouts); + + // first, set tx values for the plan so that we don't change them while signing + for (i, (plan, _)) in selected_txos.iter().enumerate() { + if let Some(sequence) = plan.required_sequence() { + transaction.input[i].sequence = sequence + } + } + + // create a short lived transaction + let _sighash_tx = transaction.clone(); + let mut sighash_cache = SighashCache::new(&_sighash_tx); + + for (i, (plan, _)) in selected_txos.iter().enumerate() { + let requirements = plan.requirements(); + let mut auth_data = bdk_tmp_plan::SatisfactionMaterial::default(); + assert!( + !requirements.requires_hash_preimages(), + "can't have hash pre-images since we didn't provide any." + ); + assert!( + requirements.signatures.sign_with_keymap( + i, + keymap, + &sighash_prevouts, + None, + None, + &mut sighash_cache, + &mut auth_data, + &Secp256k1::default(), + )?, + "we should have signed with this input." 
+ ); + + match plan.try_complete(&auth_data) { + bdk_tmp_plan::PlanState::Complete { + final_script_sig, + final_script_witness, + } => { + if let Some(witness) = final_script_witness { + transaction.input[i].witness = witness; + } + + if let Some(script_sig) = final_script_sig { + transaction.input[i].script_sig = script_sig; + } + } + bdk_tmp_plan::PlanState::Incomplete(_) => { + return Err(anyhow!( + "we weren't able to complete the plan with our keys." + )); + } + } + } + + let change_info = if selection_meta.drain_value.is_some() { + Some((additions, (internal_keychain, change_index))) + } else { + None + }; + + Ok((transaction, change_info)) +} + +pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, A: Anchor, C: ChainOracle>( + tracker: &'a Tracker, + chain: &'a C, + chain_tip: BlockId, + assets: &'a bdk_tmp_plan::Assets, +) -> impl Iterator, FullTxOut>), C::Error>> + 'a +where + C::Error: std::error::Error + Send + Sync + 'static, +{ + tracker + .try_list_owned_unspents(chain, chain_tip) + .filter_map(|r| match r { + Ok(((keychain, derivation_index), full_txo)) => { + let desc = tracker + .inner + .index + .keychains() + .get(keychain) + .expect("must exist") + .at_derivation_index(*derivation_index); + let plan = bdk_tmp_plan::plan_satisfaction(&desc, assets)?; + Some(Ok((plan, full_txo))) + } + Err(err) => Some(Err(err)), + }) +} + +#[allow(clippy::too_many_arguments)] // FIXME +pub fn handle_commands( + command: Commands, + broadcast: impl FnOnce(&Transaction) -> anyhow::Result<()>, + // we Mutex around these not because we need them for a simple CLI app but to demonstrate how + // all the stuff we're doing can be made thread-safe and not keep locks up over an IO bound. 
+ tracker: &Mutex>, + store: &Mutex>, + chain: &Mutex, + chain_tip: BlockId, + network: Network, + keymap: &HashMap, +) -> anyhow::Result<()> +where + A: Default + Anchor + serde::de::DeserializeOwned + serde::Serialize, + C: ChainOracle, + C::Error: std::error::Error + Send + Sync + 'static, +{ + match command { + // TODO: Make these functions return stuffs + Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network), + Commands::Balance => run_balance_cmd(tracker, chain, chain_tip), + Commands::TxOut { txout_cmd } => run_txo_cmd(txout_cmd, tracker, chain, chain_tip, network), + Commands::Send { + value, + address, + coin_select, + } => { + let (transaction, change_index) = { + // take mutable ref to construct tx -- it is only open for a short time while building it. + let tracker = &mut *tracker.lock().unwrap(); + let chain = &*chain.lock().unwrap(); + + let (transaction, change_info) = create_tx( + value, + address, + coin_select, + tracker, + chain, + chain_tip, + keymap, + )?; + + if let Some((change_derivation_changes, (change_keychain, index))) = change_info { + // We must first persist to disk the fact that we've got a new address from the + // change keychain so future scans will find the tx we're about to broadcast. + // If we're unable to persist this, then we don't want to broadcast. + let store = &mut *store.lock().unwrap(); + store.append_changeset(&change_derivation_changes.into())?; + + // We don't want other callers/threads to use this address while we're using it + // but we also don't want to scan the tx we just created because it's not + // technically in the blockchain yet. 
+ tracker.inner.index.mark_used(&change_keychain, index); + (transaction, Some((change_keychain, index))) + } else { + (transaction, None) + } + }; + + match (broadcast)(&transaction) { + Ok(_) => { + println!("Broadcasted Tx : {}", transaction.txid()); + let now = std::time::SystemTime::elapsed(&std::time::UNIX_EPOCH).unwrap(); + + let mut tracker = tracker.lock().unwrap(); + let additions = + tracker + .inner + .insert_tx(&transaction, None, Some(now.as_secs())); + if !additions.graph_additions.is_empty() + || !additions.index_additions.is_empty() + { + let store = &mut *store.lock().unwrap(); + // We know the tx is at least unconfirmed now. Note if persisting here fails, + // it's not a big deal since we can always find it again form + // blockchain. + store.append_changeset(&additions.into())?; + } + Ok(()) + } + Err(e) => { + let tracker = &mut *tracker.lock().unwrap(); + if let Some((keychain, index)) = change_index { + // We failed to broadcast, so allow our change address to be used in the future + tracker.inner.index.unmark_used(&keychain, index); + } + Err(e) + } + } + } + Commands::ChainSpecific(_) => { + todo!("example code is meant to handle this!") + } + } +} + +#[allow(clippy::type_complexity)] // FIXME +pub fn init( + db_magic: &'static [u8], +) -> anyhow::Result<( + Args, + KeyMap, + // These don't need to have mutexes around them, but we want the cli example code to make it obvious how they + // are thread-safe, forcing the example developers to show where they would lock and unlock things. 
+ Mutex>, + Mutex>, +)> +where + A: Default + Anchor + serde::de::DeserializeOwned + serde::Serialize, + C: ChainOracle, + C::Error: std::error::Error + Send + Sync + 'static, +{ + use bdk_chain::LoadablePersistBackend; + + let args = Args::::parse(); + let secp = Secp256k1::default(); + let (descriptor, mut keymap) = + Descriptor::::parse_descriptor(&secp, &args.descriptor)?; + + let mut tracker = Tracker::default(); + + tracker + .inner + .index + .add_keychain(Keychain::External, descriptor); + + let internal = args + .change_descriptor + .clone() + .map(|descriptor| Descriptor::::parse_descriptor(&secp, &descriptor)) + .transpose()?; + if let Some((internal_descriptor, internal_keymap)) = internal { + keymap.extend(internal_keymap); + tracker + .inner + .index + .add_keychain(Keychain::Internal, internal_descriptor); + }; + + let mut db = TrackerStore::::new_from_path(db_magic, args.db_path.as_path())?; + + if let Err(e) = db.load_into_tracker(&mut tracker) { + match tracker.last_seen_height() { + Some(tip) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. 
Error: {}", args.db_path.display(), tip, e), + None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e), + + } + eprintln!("⚠ Consider running a rescan of chain data."); + } + + Ok((args, keymap, Mutex::new(tracker), Mutex::new(db))) +} diff --git a/example-crates/tracker_example_cli/src/tracker.rs b/example-crates/tracker_example_cli/src/tracker.rs new file mode 100644 index 000000000..1b709a17f --- /dev/null +++ b/example-crates/tracker_example_cli/src/tracker.rs @@ -0,0 +1,133 @@ +use std::fmt::Debug; + +use bdk_chain::{ + indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, + keychain::{DerivationAdditions, KeychainTxOutIndex}, + Anchor, Append, BlockId, ChainOracle, FullTxOut, LoadablePersistBackend, ObservedAs, +}; +use bdk_file_store::{IterError, Store}; + +pub type TrackerStore = Store, ChangeSet>; + +#[derive(Default)] +pub struct Tracker { + pub inner: IndexedTxGraph>, + last_seen_height: Option, +} + +impl Tracker { + pub fn last_seen_height(&self) -> Option { + self.last_seen_height + } + + pub fn update_last_seen_height(&mut self, last_seen_height: Option) -> ChangeSet { + if self.last_seen_height < last_seen_height { + self.last_seen_height = Ord::max(self.last_seen_height, last_seen_height); + ChangeSet { + last_seen_height, + ..Default::default() + } + } else { + Default::default() + } + } +} + +impl Tracker { + pub fn apply_changeset(&mut self, changeset: ChangeSet) { + self.inner.apply_additions(changeset.additions); + self.last_seen_height = Ord::max(self.last_seen_height, changeset.last_seen_height); + } + + pub fn try_list_owned_txouts<'a, C: ChainOracle + 'a>( + &'a self, + chain: &'a C, + chain_tip: BlockId, + ) -> impl Iterator>), C::Error>> + 'a { + self.inner + .graph() + .try_list_chain_txouts(chain, chain_tip) + .filter_map(|r| match r { + Err(err) => Some(Err(err)), + Ok(full_txo) => Some(Ok(( + self.inner + .index + .index_of_spk(&full_txo.txout.script_pubkey)?, + full_txo, + ))), + }) + } + + pub fn 
try_list_owned_unspents<'a, C: ChainOracle + 'a>( + &'a self, + chain: &'a C, + chain_tip: BlockId, + ) -> impl Iterator>), C::Error>> + 'a { + self.try_list_owned_txouts(chain, chain_tip).filter(|r| { + if let Ok((_, full_txo)) = r { + if full_txo.spent_by.is_some() { + return false; + } + } + true + }) + } +} + +#[derive(Debug, Default, PartialEq, serde::Deserialize, serde::Serialize)] +#[serde(bound( + deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>", + serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize", +))] +pub struct ChangeSet { + pub additions: IndexedAdditions>, + pub last_seen_height: Option, +} + +impl Append for ChangeSet { + fn append(&mut self, other: Self) { + Append::append(&mut self.additions, other.additions); + self.last_seen_height = Ord::max(self.last_seen_height, other.last_seen_height); + } +} + +impl From>> for ChangeSet { + fn from(additions: IndexedAdditions>) -> Self { + let last_seen_height = additions + .graph_additions + .anchors + .iter() + .last() + .map(|(a, _)| a.confirmation_height_upper_bound()); + ChangeSet { + additions, + last_seen_height, + } + } +} + +impl From> for ChangeSet { + fn from(index_additions: DerivationAdditions) -> Self { + ChangeSet { + additions: IndexedAdditions { + graph_additions: Default::default(), + index_additions, + }, + last_seen_height: None, + } + } +} + +impl LoadablePersistBackend, ChangeSet> for TrackerStore +where + A: Anchor + Default + serde::de::DeserializeOwned + serde::Serialize, + K: Default + Ord + Clone + Debug + serde::de::DeserializeOwned + serde::Serialize, +{ + type LoadError = IterError; + + fn load_into_tracker(&mut self, tracker: &mut Tracker) -> Result<(), Self::LoadError> { + let (changeset, res) = self.aggregate_changesets(); + tracker.apply_changeset(changeset); + res + } +} From 86f17473c7182b580446a3db4a83523d00c34f09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Mon, 24 Apr 2023 09:18:18 +0800 Subject: 
[PATCH 07/15] [persist_redesign] Modify persist traits Instead of splitting `PersistBackend` into two traits, we introduce `Loadable` which is the mimimum functionality for a tracker to be able to be "loaded into" by the `PersistBackend`. `Loadable` also makes it easier to make trakers "composable". Typically, trackers would wrap other tracker implementations internally, and so the associated changeset would also be wrapped. Having a `Loadable` trait would allow the new tracker implementation to also have a corresponding `PersistBackend`. --- crates/chain/src/persist.rs | 54 ++++++++++--------- crates/file_store/src/store.rs | 48 +++++++++++------ example-crates/tracker_example_cli/src/lib.rs | 2 +- .../tracker_example_cli/src/tracker.rs | 20 +++---- 4 files changed, 71 insertions(+), 53 deletions(-) diff --git a/crates/chain/src/persist.rs b/crates/chain/src/persist.rs index 2cd69c9fa..f12773ea5 100644 --- a/crates/chain/src/persist.rs +++ b/crates/chain/src/persist.rs @@ -1,38 +1,42 @@ +use core::marker::PhantomData; + use crate::Append; /// `Persist` wraps a [`PersistBackend`] (`B`) to create a convenient staging area for changes (`C`) -/// before they are persisted. +/// to the tracker (`T`) before they are persisted. /// /// Not all changes to the tracker, which is an in-memory representation of wallet/blockchain /// data, needs to be written to disk right away, so [`Persist::stage`] can be used to *stage* /// changes first and then [`Persist::commit`] can be used to write changes to disk. -pub struct Persist { +pub struct Persist { backend: B, stage: C, + marker: PhantomData, } -impl Persist +impl Persist where - B: PersistBackend, - C: Append + Default, + T: Loadable, + B: PersistBackend, { /// Create a new [`Persist`] from [`PersistBackend`]. pub fn new(backend: B) -> Self { Self { backend, stage: Default::default(), + marker: Default::default(), } } /// Stage a `changeset` to be commited later with [`commit`]. 
/// /// [`commit`]: Self::commit - pub fn stage(&mut self, changeset: C) { + pub fn stage(&mut self, changeset: T::ChangeSet) { self.stage.append(changeset) } /// Get the changes that have not been commited yet. - pub fn staged(&self) -> &C { + pub fn staged(&self) -> &T::ChangeSet { &self.stage } @@ -40,7 +44,7 @@ where /// /// Returns a backend-defined error if this fails. pub fn commit(&mut self) -> Result<(), B::WriteError> { - let mut temp = C::default(); + let mut temp = T::ChangeSet::default(); core::mem::swap(&mut temp, &mut self.stage); self.backend.write_changes(&temp) } @@ -48,11 +52,14 @@ where /// A persistence backend for [`Persist`]. /// -/// `C` represents the changeset. -pub trait PersistBackend { +/// `T` represents the tracker, the in-memory data structure which we wish to persist. +pub trait PersistBackend { /// The error the backend returns when it fails to write. type WriteError: core::fmt::Debug; + /// The error the backend returns when it fails to load. + type LoadError: core::fmt::Debug; + /// Writes a changeset to the persistence backend. /// /// It is up to the backend what it does with this. It could store every changeset in a list or @@ -60,31 +67,30 @@ pub trait PersistBackend { /// that [`load_into_tracker`] restores a keychain tracker to what it should be if all /// changesets had been applied sequentially. /// - /// [`load_into_tracker`]: LoadablePersistBackend::load_into_tracker - fn write_changes(&mut self, changeset: &C) -> Result<(), Self::WriteError>; -} - -/// A trait that extends [`PersistBackend`] to be able to load into a tracker implementation (`T`). -pub trait LoadablePersistBackend: PersistBackend { - /// The error the backend returns when it fails to load. - type LoadError: core::fmt::Debug; + /// [`load_into_tracker`]: Self::load_into_tracker + fn write_changes(&mut self, changeset: &T::ChangeSet) -> Result<(), Self::WriteError>; /// Loads all data from the persistence backend into `tracker`. 
fn load_into_tracker(&mut self, tracker: &mut T) -> Result<(), Self::LoadError>; } -impl PersistBackend for () { +impl PersistBackend for () { type WriteError = (); + type LoadError = (); - fn write_changes(&mut self, _changeset: &C) -> Result<(), Self::WriteError> { + fn write_changes(&mut self, _changeset: &T::ChangeSet) -> Result<(), Self::WriteError> { Ok(()) } -} - -impl LoadablePersistBackend for () { - type LoadError = (); fn load_into_tracker(&mut self, _tracker: &mut T) -> Result<(), Self::LoadError> { Ok(()) } } + +/// A trait that represents a structure which can be loaded with changesets. +pub trait Loadable { + /// The changeset to be loaded into `self`. + type ChangeSet: Default + Append; + /// Loads the `changeset` into `self`. + fn load_changeset(&mut self, changeset: Self::ChangeSet); +} diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs index 2d577f53a..d5f518ae7 100644 --- a/crates/file_store/src/store.rs +++ b/crates/file_store/src/store.rs @@ -5,7 +5,7 @@ use std::{ path::Path, }; -use bdk_chain::{Append, PersistBackend}; +use bdk_chain::{Append, Loadable, PersistBackend}; use bincode::Options; use crate::{bincode_options, EntryIter, FileError, IterError}; @@ -14,15 +14,16 @@ use crate::{bincode_options, EntryIter, FileError, IterError}; /// /// The changesets are the results of altering a tracker implementation (`T`). #[derive(Debug)] -pub struct Store { +pub struct Store { magic: &'static [u8], db_file: File, - marker: PhantomData<(T, C)>, + marker: PhantomData, } -impl Store +impl Store where - C: Append + Default + serde::Serialize + serde::de::DeserializeOwned, + T: Loadable, + T::ChangeSet: serde::Serialize + serde::de::DeserializeOwned, { /// Creates a new store from a [`File`]. /// @@ -80,7 +81,7 @@ where /// **WARNING**: This method changes the write position in the underlying file. 
You should /// always iterate over all entries until `None` is returned if you want your next write to go /// at the end; otherwise, you will write over existing entries. - pub fn iter_changesets(&mut self) -> Result, io::Error> { + pub fn iter_changesets(&mut self) -> Result, io::Error> { self.db_file .seek(io::SeekFrom::Start(self.magic.len() as _))?; @@ -99,8 +100,8 @@ where /// /// **WARNING**: This method changes the write position of the underlying file. The next /// changeset will be written over the erroring entry (or the end of the file if none existed). - pub fn aggregate_changesets(&mut self) -> (C, Result<(), IterError>) { - let mut changeset = C::default(); + pub fn aggregate_changesets(&mut self) -> (T::ChangeSet, Result<(), IterError>) { + let mut changeset = T::ChangeSet::default(); let result = (|| { let iter_changeset = self.iter_changesets()?; for next_changeset in iter_changeset { @@ -120,7 +121,7 @@ where /// /// **WARNING**: This method does not detect whether the changeset is empty or not, and will /// append an empty changeset to the file (not catastrophic, just a waste of space). 
- pub fn append_changeset(&mut self, changeset: &C) -> Result<(), io::Error> { + pub fn append_changeset(&mut self, changeset: &T::ChangeSet) -> Result<(), io::Error> { bincode_options() .serialize_into(&mut self.db_file, changeset) .map_err(|e| match *e { @@ -138,15 +139,23 @@ where } } -impl PersistBackend for Store +impl PersistBackend for Store where - C: Default + Append + serde::Serialize + serde::de::DeserializeOwned, + T: Loadable, + T::ChangeSet: serde::de::DeserializeOwned + serde::Serialize, { type WriteError = std::io::Error; + type LoadError = IterError; - fn write_changes(&mut self, changeset: &C) -> Result<(), Self::WriteError> { + fn write_changes(&mut self, changeset: &T::ChangeSet) -> Result<(), Self::WriteError> { Store::append_changeset(self, changeset) } + + fn load_into_tracker(&mut self, tracker: &mut T) -> Result<(), Self::LoadError> { + let (changeset, result) = self.aggregate_changesets(); + tracker.load_changeset(changeset); + result + } } #[cfg(test)] @@ -201,13 +210,22 @@ mod test { } } + #[derive(Debug)] + struct TestTracker; + + impl Loadable for TestTracker { + type ChangeSet = TestChangeSet; + + fn load_changeset(&mut self, _changeset: Self::ChangeSet) {} + } + #[test] fn new_fails_if_file_is_too_short() { let mut file = NamedTempFile::new().unwrap(); file.write_all(&TEST_MAGIC_BYTES[..TEST_MAGIC_BYTES_LEN - 1]) .expect("should write"); - match Store::<(), TestChangeSet>::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { + match Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { Err(FileError::Io(e)) => assert_eq!(e.kind(), std::io::ErrorKind::UnexpectedEof), unexpected => panic!("unexpected result: {:?}", unexpected), }; @@ -221,7 +239,7 @@ mod test { file.write_all(invalid_magic_bytes.as_bytes()) .expect("should write"); - match Store::<(), TestChangeSet>::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { + match Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { Err(FileError::InvalidMagicBytes { got, .. 
}) => { assert_eq!(got, invalid_magic_bytes.as_bytes()) } @@ -242,7 +260,7 @@ mod test { let mut file = NamedTempFile::new().unwrap(); file.write_all(&data).expect("should write"); - let mut store = Store::<(), TestChangeSet>::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) + let mut store = Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) .expect("should open"); match store.iter_changesets().expect("seek should succeed").next() { Some(Err(IterError::Bincode(_))) => {} diff --git a/example-crates/tracker_example_cli/src/lib.rs b/example-crates/tracker_example_cli/src/lib.rs index 2d50be30f..bcae5e0d2 100644 --- a/example-crates/tracker_example_cli/src/lib.rs +++ b/example-crates/tracker_example_cli/src/lib.rs @@ -693,7 +693,7 @@ where C: ChainOracle, C::Error: std::error::Error + Send + Sync + 'static, { - use bdk_chain::LoadablePersistBackend; + use bdk_chain::PersistBackend; let args = Args::::parse(); let secp = Secp256k1::default(); diff --git a/example-crates/tracker_example_cli/src/tracker.rs b/example-crates/tracker_example_cli/src/tracker.rs index 1b709a17f..dd3d59148 100644 --- a/example-crates/tracker_example_cli/src/tracker.rs +++ b/example-crates/tracker_example_cli/src/tracker.rs @@ -3,11 +3,11 @@ use std::fmt::Debug; use bdk_chain::{ indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, keychain::{DerivationAdditions, KeychainTxOutIndex}, - Anchor, Append, BlockId, ChainOracle, FullTxOut, LoadablePersistBackend, ObservedAs, + Anchor, Append, BlockId, ChainOracle, FullTxOut, Loadable, ObservedAs, }; -use bdk_file_store::{IterError, Store}; +use bdk_file_store::Store; -pub type TrackerStore = Store, ChangeSet>; +pub type TrackerStore = Store>; #[derive(Default)] pub struct Tracker { @@ -118,16 +118,10 @@ impl From> for ChangeSet { } } -impl LoadablePersistBackend, ChangeSet> for TrackerStore -where - A: Anchor + Default + serde::de::DeserializeOwned + serde::Serialize, - K: Default + Ord + Clone + Debug + serde::de::DeserializeOwned + 
serde::Serialize, -{ - type LoadError = IterError; +impl Loadable for Tracker { + type ChangeSet = ChangeSet; - fn load_into_tracker(&mut self, tracker: &mut Tracker) -> Result<(), Self::LoadError> { - let (changeset, res) = self.aggregate_changesets(); - tracker.apply_changeset(changeset); - res + fn load_changeset(&mut self, changeset: Self::ChangeSet) { + self.apply_changeset(changeset) } } From 9143b6d6fdce6b78548fb1e3cc3921ff0c63971c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Mon, 24 Apr 2023 15:22:24 +0800 Subject: [PATCH 08/15] [examples_redesign] Add best-chain representation to `Tracker` Introduce field `Tracker::chain` which represents the best-chain history. It can either be `LocalChain` or `RemoteChain`. `Tracker::chain` implements `Loadable`, thus allowing persistence no matter the implementation. For `RemoteChain`s, we would want to persist the last-seen height so we know which height to start syncing from for the next scan. --- crates/chain/src/local_chain.rs | 10 +- crates/chain/src/tx_data_traits.rs | 8 + example-crates/tracker_example_cli/src/lib.rs | 135 ++++++----- .../tracker_example_cli/src/tracker.rs | 215 ++++++++++++------ 4 files changed, 231 insertions(+), 137 deletions(-) diff --git a/crates/chain/src/local_chain.rs b/crates/chain/src/local_chain.rs index 30dfe80b8..534a88e21 100644 --- a/crates/chain/src/local_chain.rs +++ b/crates/chain/src/local_chain.rs @@ -3,7 +3,7 @@ use core::convert::Infallible; use alloc::collections::{BTreeMap, BTreeSet}; use bitcoin::BlockHash; -use crate::{BlockId, ChainOracle}; +use crate::{BlockId, ChainOracle, Loadable}; /// This is a local implementation of [`ChainOracle`].
/// @@ -43,6 +43,14 @@ impl ChainOracle for LocalChain { } } +impl Loadable for LocalChain { + type ChangeSet = ChangeSet; + + fn load_changeset(&mut self, changeset: Self::ChangeSet) { + self.apply_changeset(changeset) + } +} + impl AsRef> for LocalChain { fn as_ref(&self) -> &BTreeMap { &self.blocks diff --git a/crates/chain/src/tx_data_traits.rs b/crates/chain/src/tx_data_traits.rs index 8ec695add..7bf871cfd 100644 --- a/crates/chain/src/tx_data_traits.rs +++ b/crates/chain/src/tx_data_traits.rs @@ -81,3 +81,11 @@ impl Append for BTreeSet { BTreeSet::append(self, &mut other) } } + +impl Append for Option { + fn append(&mut self, other: Self) { + if *self < other { + *self = other; + } + } +} diff --git a/example-crates/tracker_example_cli/src/lib.rs b/example-crates/tracker_example_cli/src/lib.rs index bcae5e0d2..72123ba49 100644 --- a/example-crates/tracker_example_cli/src/lib.rs +++ b/example-crates/tracker_example_cli/src/lib.rs @@ -10,7 +10,7 @@ use bdk_chain::{ descriptor::{DescriptorSecretKey, KeyMap}, Descriptor, DescriptorPublicKey, }, - Anchor, Append, BlockId, ChainOracle, DescriptorExt, FullTxOut, ObservedAs, + Anchor, Append, BlockId, ChainOracle, DescriptorExt, FullTxOut, Loadable, ObservedAs, }; use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue}; use clap::{Parser, Subcommand}; @@ -180,17 +180,18 @@ pub struct AddrsOutput { used: bool, } -pub fn run_address_cmd( - tracker: &Mutex>, - db: &Mutex>, +pub fn run_address_cmd( + tracker: &Mutex>, + db: &Mutex>, addr_cmd: AddressCmd, network: Network, ) -> anyhow::Result<()> where - A: Default + Anchor + serde::de::DeserializeOwned + serde::Serialize, + as Loadable>::ChangeSet: + serde::de::DeserializeOwned + serde::Serialize, { let mut tracker = tracker.lock().unwrap(); - let txout_index = &mut tracker.inner.index; + let txout_index = &mut tracker.indexed_graph.index; let addr_cmd_output = match addr_cmd { AddressCmd::Next => 
Some(txout_index.next_unused_spk(&Keychain::External)), @@ -242,24 +243,21 @@ where } pub fn run_balance_cmd( - tracker: &Mutex>, - chain: &Mutex, + tracker: &Mutex>, chain_tip: BlockId, ) -> anyhow::Result<()> where - C::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, { - let chain = &*chain.lock().unwrap(); let tracker = tracker.lock().unwrap(); let utxos = tracker - .inner - .try_list_owned_unspents(chain, chain_tip) + .try_list_owned_unspents(chain_tip) .collect::, C::Error>>()?; let (confirmed, unconfirmed) = utxos .into_iter() - .fold((0, 0), |(confirmed, unconfirmed), utxo| { + .fold((0, 0), |(confirmed, unconfirmed), (_, utxo)| { match utxo.chain_position { bdk_chain::ObservedAs::Confirmed(_) => { (confirmed + utxo.txout.value, unconfirmed) @@ -277,16 +275,13 @@ where pub fn run_txo_cmd( txout_cmd: TxOutCmd, - tracker: &Mutex>, - chain: &Mutex, + tracker: &Mutex>, chain_tip: BlockId, network: Network, ) -> anyhow::Result<()> where - C::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, { - let chain = &*chain.lock().unwrap(); - match txout_cmd { TxOutCmd::List { unspent, @@ -297,7 +292,7 @@ where let tracker = tracker.lock().unwrap(); let txouts = tracker - .try_list_owned_txouts(chain, chain_tip) + .try_list_owned_txouts(chain_tip) .filter(|r| match r { Ok((_, full_txo)) => match (unspent, spent) { (true, false) => full_txo.spent_by.is_none(), @@ -339,8 +334,7 @@ pub fn create_tx( value: u64, address: Address, coin_select: CoinSelectionAlgo, - tracker: &mut Tracker, - chain: &C, + tracker: &mut Tracker, chain_tip: BlockId, keymap: &HashMap, ) -> anyhow::Result<( @@ -348,7 +342,7 @@ pub fn create_tx( Option<(DerivationAdditions, (Keychain, u32))>, )> where - C::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, { let mut additions = DerivationAdditions::default(); @@ -359,7 +353,7 @@ where // TODO 
use planning module let mut candidates = - planned_utxos(tracker, chain, chain_tip, &assets).collect::, C::Error>>()?; + planned_utxos(tracker, chain_tip, &assets).collect::, C::Error>>()?; // apply coin selection algorithm match coin_select { @@ -394,7 +388,7 @@ where }]; let internal_keychain = if tracker - .inner + .indexed_graph .index .keychains() .get(&Keychain::Internal) @@ -405,8 +399,10 @@ where Keychain::External }; - let ((change_index, change_script), change_additions) = - tracker.inner.index.next_unused_spk(&internal_keychain); + let ((change_index, change_script), change_additions) = tracker + .indexed_graph + .index + .next_unused_spk(&internal_keychain); additions.append(change_additions); // Clone to drop the immutable reference. @@ -414,7 +410,7 @@ where let change_plan = bdk_tmp_plan::plan_satisfaction( &tracker - .inner + .indexed_graph .index .keychains() .get(&internal_keychain) @@ -432,7 +428,7 @@ where let cs_opts = CoinSelectorOpt { target_feerate: 0.5, min_drain_value: tracker - .inner + .indexed_graph .index .keychains() .get(&internal_keychain) @@ -554,20 +550,19 @@ where } pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, A: Anchor, C: ChainOracle>( - tracker: &'a Tracker, - chain: &'a C, + tracker: &'a Tracker, chain_tip: BlockId, assets: &'a bdk_tmp_plan::Assets, ) -> impl Iterator, FullTxOut>), C::Error>> + 'a where - C::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, { tracker - .try_list_owned_unspents(chain, chain_tip) + .try_list_owned_unspents(chain_tip) .filter_map(|r| match r { Ok(((keychain, derivation_index), full_txo)) => { let desc = tracker - .inner + .indexed_graph .index .keychains() .get(keychain) @@ -581,28 +576,27 @@ where } #[allow(clippy::too_many_arguments)] // FIXME -pub fn handle_commands( +pub fn handle_commands( command: Commands, broadcast: impl FnOnce(&Transaction) -> anyhow::Result<()>, // we Mutex around these not because we need them for 
a simple CLI app but to demonstrate how // all the stuff we're doing can be made thread-safe and not keep locks up over an IO bound. - tracker: &Mutex>, - store: &Mutex>, - chain: &Mutex, + tracker: &Mutex>, + store: &Mutex>, chain_tip: BlockId, network: Network, keymap: &HashMap, ) -> anyhow::Result<()> where - A: Default + Anchor + serde::de::DeserializeOwned + serde::Serialize, - C: ChainOracle, - C::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, + as Loadable>::ChangeSet: + serde::de::DeserializeOwned + serde::Serialize, { match command { // TODO: Make these functions return stuffs Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network), - Commands::Balance => run_balance_cmd(tracker, chain, chain_tip), - Commands::TxOut { txout_cmd } => run_txo_cmd(txout_cmd, tracker, chain, chain_tip, network), + Commands::Balance => run_balance_cmd(tracker, chain_tip), + Commands::TxOut { txout_cmd } => run_txo_cmd(txout_cmd, tracker, chain_tip, network), Commands::Send { value, address, @@ -611,17 +605,9 @@ where let (transaction, change_index) = { // take mutable ref to construct tx -- it is only open for a short time while building it. let tracker = &mut *tracker.lock().unwrap(); - let chain = &*chain.lock().unwrap(); - let (transaction, change_info) = create_tx( - value, - address, - coin_select, - tracker, - chain, - chain_tip, - keymap, - )?; + let (transaction, change_info) = + create_tx(value, address, coin_select, tracker, chain_tip, keymap)?; if let Some((change_derivation_changes, (change_keychain, index))) = change_info { // We must first persist to disk the fact that we've got a new address from the @@ -633,7 +619,10 @@ where // We don't want other callers/threads to use this address while we're using it // but we also don't want to scan the tx we just created because it's not // technically in the blockchain yet. 
- tracker.inner.index.mark_used(&change_keychain, index); + tracker + .indexed_graph + .index + .mark_used(&change_keychain, index); (transaction, Some((change_keychain, index))) } else { (transaction, None) @@ -648,7 +637,7 @@ where let mut tracker = tracker.lock().unwrap(); let additions = tracker - .inner + .indexed_graph .insert_tx(&transaction, None, Some(now.as_secs())); if !additions.graph_additions.is_empty() || !additions.index_additions.is_empty() @@ -665,7 +654,7 @@ where let tracker = &mut *tracker.lock().unwrap(); if let Some((keychain, index)) = change_index { // We failed to broadcast, so allow our change address to be used in the future - tracker.inner.index.unmark_used(&keychain, index); + tracker.indexed_graph.index.unmark_used(&keychain, index); } Err(e) } @@ -678,20 +667,21 @@ where } #[allow(clippy::type_complexity)] // FIXME -pub fn init( +pub fn init( db_magic: &'static [u8], + mut tracker: Tracker, ) -> anyhow::Result<( Args, KeyMap, // These don't need to have mutexes around them, but we want the cli example code to make it obvious how they // are thread-safe, forcing the example developers to show where they would lock and unlock things. 
- Mutex>, - Mutex>, + Mutex>, + Mutex>, )> where - A: Default + Anchor + serde::de::DeserializeOwned + serde::Serialize, - C: ChainOracle, - C::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, + as Loadable>::ChangeSet: + serde::de::DeserializeOwned + serde::Serialize, { use bdk_chain::PersistBackend; @@ -700,10 +690,8 @@ where let (descriptor, mut keymap) = Descriptor::::parse_descriptor(&secp, &args.descriptor)?; - let mut tracker = Tracker::default(); - tracker - .inner + .indexed_graph .index .add_keychain(Keychain::External, descriptor); @@ -715,19 +703,24 @@ where if let Some((internal_descriptor, internal_keymap)) = internal { keymap.extend(internal_keymap); tracker - .inner + .indexed_graph .index .add_keychain(Keychain::Internal, internal_descriptor); }; - let mut db = TrackerStore::::new_from_path(db_magic, args.db_path.as_path())?; + let mut db = TrackerStore::::new_from_path(db_magic, args.db_path.as_path())?; if let Err(e) = db.load_into_tracker(&mut tracker) { - match tracker.last_seen_height() { - Some(tip) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. Error: {}", args.db_path.display(), tip, e), - None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e), - - } + // [TODO] Should we introduce a `TipChainOracle` trait? + // match tracker.last_seen_height() { + // Some(tip) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. 
Error: {}", args.db_path.display(), tip, e), + // None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e), + // } + eprintln!( + "Failed to load changesets from {}: {}", + args.db_path.display(), + e + ); eprintln!("⚠ Consider running a rescan of chain data."); } diff --git a/example-crates/tracker_example_cli/src/tracker.rs b/example-crates/tracker_example_cli/src/tracker.rs index dd3d59148..b33c8d211 100644 --- a/example-crates/tracker_example_cli/src/tracker.rs +++ b/example-crates/tracker_example_cli/src/tracker.rs @@ -3,54 +3,66 @@ use std::fmt::Debug; use bdk_chain::{ indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, keychain::{DerivationAdditions, KeychainTxOutIndex}, + local_chain::{self, LocalChain}, Anchor, Append, BlockId, ChainOracle, FullTxOut, Loadable, ObservedAs, }; use bdk_file_store::Store; -pub type TrackerStore = Store>; +/// Structure for persisting [`Tracker`] data. +pub type TrackerStore = Store>; -#[derive(Default)] -pub struct Tracker { - pub inner: IndexedTxGraph>, - last_seen_height: Option, +/// An in-memory representation of chain data that we are tracking. +/// +/// * `A` is the [`Anchor`] implementation. +/// * `K` is our keychain identifier. +/// * `C` is the representation of the best chain history. This can either be a [`LocalChain`] or a +/// remote [`ChainOracle`] implementation. +/// +/// [`Tracker`] can be constructed with [`new_local`] or [`new_remote`] (depending on the +/// chain-history type). 
+/// +/// [`new_local`]: Self::new_local +/// [`new_remote`]: Self::new_remote +pub struct Tracker { + pub indexed_graph: IndexedTxGraph>, + pub chain: C, } -impl Tracker { - pub fn last_seen_height(&self) -> Option { - self.last_seen_height - } - - pub fn update_last_seen_height(&mut self, last_seen_height: Option) -> ChangeSet { - if self.last_seen_height < last_seen_height { - self.last_seen_height = Ord::max(self.last_seen_height, last_seen_height); - ChangeSet { - last_seen_height, - ..Default::default() - } - } else { - Default::default() +impl Tracker { + /// New [`Tracker`] with a [`LocalChain`] as the best-chain representation. + pub fn new_local() -> Self { + Self { + indexed_graph: Default::default(), + chain: LocalChain::default(), } } } -impl Tracker { - pub fn apply_changeset(&mut self, changeset: ChangeSet) { - self.inner.apply_additions(changeset.additions); - self.last_seen_height = Ord::max(self.last_seen_height, changeset.last_seen_height); +impl Tracker> { + /// New [`Tracker`] with a remote [`ChainOracle`] as the best-chain representation. 
+ pub fn new_remote(oracle: O) -> Self { + Self { + indexed_graph: Default::default(), + chain: RemoteChain { + oracle, + last_seen_height: None, + }, + } } +} - pub fn try_list_owned_txouts<'a, C: ChainOracle + 'a>( - &'a self, - chain: &'a C, +impl Tracker { + pub fn try_list_owned_txouts( + &self, chain_tip: BlockId, - ) -> impl Iterator>), C::Error>> + 'a { - self.inner + ) -> impl Iterator>), C::Error>> { + self.indexed_graph .graph() - .try_list_chain_txouts(chain, chain_tip) + .try_list_chain_txouts(&self.chain, chain_tip) .filter_map(|r| match r { Err(err) => Some(Err(err)), Ok(full_txo) => Some(Ok(( - self.inner + self.indexed_graph .index .index_of_spk(&full_txo.txout.script_pubkey)?, full_txo, @@ -58,12 +70,11 @@ impl Tracker { }) } - pub fn try_list_owned_unspents<'a, C: ChainOracle + 'a>( - &'a self, - chain: &'a C, + pub fn try_list_owned_unspents( + &self, chain_tip: BlockId, - ) -> impl Iterator>), C::Error>> + 'a { - self.try_list_owned_txouts(chain, chain_tip).filter(|r| { + ) -> impl Iterator>), C::Error>> { + self.try_list_owned_txouts(chain_tip).filter(|r| { if let Ok((_, full_txo)) = r { if full_txo.spent_by.is_some() { return false; @@ -74,54 +85,128 @@ impl Tracker { } } -#[derive(Debug, Default, PartialEq, serde::Deserialize, serde::Serialize)] +impl Loadable for Tracker { + type ChangeSet = ChangeSet; + + fn load_changeset(&mut self, changeset: Self::ChangeSet) { + self.indexed_graph + .apply_additions(changeset.indexed_graph_additions); + self.chain.load_changeset(changeset.chain_changeset); + } +} + +#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)] #[serde(bound( - deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>", - serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize", + deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>, C: Ord + serde::Deserialize<'de>", + serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize, C: Ord + 
serde::Serialize", ))] -pub struct ChangeSet { - pub additions: IndexedAdditions>, - pub last_seen_height: Option, +pub struct ChangeSet { + pub indexed_graph_additions: IndexedAdditions>, + pub chain_changeset: C, +} + +impl Default for ChangeSet { + fn default() -> Self { + Self { + indexed_graph_additions: Default::default(), + chain_changeset: Default::default(), + } + } } -impl Append for ChangeSet { +impl Append for ChangeSet { fn append(&mut self, other: Self) { - Append::append(&mut self.additions, other.additions); - self.last_seen_height = Ord::max(self.last_seen_height, other.last_seen_height); - } -} - -impl From>> for ChangeSet { - fn from(additions: IndexedAdditions>) -> Self { - let last_seen_height = additions - .graph_additions - .anchors - .iter() - .last() - .map(|(a, _)| a.confirmation_height_upper_bound()); - ChangeSet { - additions, - last_seen_height, + Append::append( + &mut self.indexed_graph_additions, + other.indexed_graph_additions, + ); + Append::append(&mut self.chain_changeset, other.chain_changeset) + } +} + +impl From>> for ChangeSet { + fn from(inner_additions: IndexedAdditions>) -> Self { + Self { + indexed_graph_additions: inner_additions, + chain_changeset: Default::default(), } } } -impl From> for ChangeSet { +impl From> for ChangeSet { fn from(index_additions: DerivationAdditions) -> Self { - ChangeSet { - additions: IndexedAdditions { + Self { + indexed_graph_additions: IndexedAdditions { graph_additions: Default::default(), index_additions, }, - last_seen_height: None, + chain_changeset: Default::default(), + } + } +} + +impl From for ChangeSet { + fn from(chain_changeset: local_chain::ChangeSet) -> Self { + Self { + indexed_graph_additions: Default::default(), + chain_changeset, } } } -impl Loadable for Tracker { - type ChangeSet = ChangeSet; +impl From> for ChangeSet> { + fn from(chain_changeset: Option) -> Self { + Self { + indexed_graph_additions: Default::default(), + chain_changeset, + } + } +} + +/// Contains a remote 
best-chain representation alongside the last-seen block's height. +/// +/// The last-seen block height is persisted locally and can be used to determine which height to +/// start syncing from for block-by-block chain sources. +pub struct RemoteChain { + oracle: O, + last_seen_height: Option, +} + +impl RemoteChain { + pub fn inner(&self) -> &O { + &self.oracle + } + + pub fn last_seen_height(&self) -> Option { + self.last_seen_height + } + + pub fn update_last_seen_height(&mut self, last_seen_height: Option) -> Option { + if self.last_seen_height < last_seen_height { + self.last_seen_height = last_seen_height; + last_seen_height + } else { + None + } + } +} + +impl Loadable for RemoteChain { + type ChangeSet = Option; fn load_changeset(&mut self, changeset: Self::ChangeSet) { - self.apply_changeset(changeset) + self.last_seen_height.append(changeset) + } +} + +impl ChainOracle for RemoteChain { + type Error = O::Error; + + fn is_block_in_chain( + &self, + block: BlockId, + static_block: BlockId, + ) -> Result, Self::Error> { + self.oracle.is_block_in_chain(block, static_block) } } From 5d5735e7f2ee958303b159f04ab4a3fbd2aa26c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Tue, 25 Apr 2023 10:59:36 +0800 Subject: [PATCH 09/15] [bdk_chain_redesign] Add `LocalChain::insert_block` --- crates/chain/src/local_chain.rs | 45 +++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/crates/chain/src/local_chain.rs b/crates/chain/src/local_chain.rs index 534a88e21..9ee72154b 100644 --- a/crates/chain/src/local_chain.rs +++ b/crates/chain/src/local_chain.rs @@ -178,6 +178,30 @@ impl LocalChain { .collect() } + pub fn insert_block( + &mut self, + block_id: BlockId, + ) -> Result { + let mut update = Self::default(); + + if let Some(block_id) = self.tip() { + let _old_hash = update.blocks.insert(block_id.height, block_id.hash); + debug_assert!(_old_hash.is_none()); + } + + if let Some(original_hash) = 
update.blocks.insert(block_id.height, block_id.hash) { + if original_hash != block_id.hash { + return Err(InsertBlockNotMatchingError { + height: block_id.height, + original_hash, + update_hash: block_id.hash, + }); + } + } + + Ok(self.apply_update(update).expect("should always connect")) + } + pub fn heights(&self) -> BTreeSet { self.blocks.keys().cloned().collect() } @@ -209,3 +233,24 @@ impl core::fmt::Display for UpdateNotConnectedError { #[cfg(feature = "std")] impl std::error::Error for UpdateNotConnectedError {} + +/// Represents a failure when trying to insert a checkpoint into [`LocalChain`]. +#[derive(Clone, Debug, PartialEq)] +pub struct InsertBlockNotMatchingError { + pub height: u32, + pub original_hash: BlockHash, + pub update_hash: BlockHash, +} + +impl core::fmt::Display for InsertBlockNotMatchingError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!( + f, + "failed to insert block at height {} as blockhashes conflict: original={}, update={}", + self.height, self.original_hash, self.update_hash + ) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for InsertBlockNotMatchingError {} From 0f115569721593e77764fb0c0a27a4a2a150795c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Tue, 25 Apr 2023 11:03:05 +0800 Subject: [PATCH 10/15] [redesign] Reimplement `electrum` crate to support redesigned structures This creates a duplicate module of `electrum` as `electrum::v2`. --- crates/electrum/src/lib.rs | 15 +- crates/electrum/src/v2.rs | 441 +++++++++++++++++++++++++++++++++++++ 2 files changed, 449 insertions(+), 7 deletions(-) create mode 100644 crates/electrum/src/v2.rs diff --git a/crates/electrum/src/lib.rs b/crates/electrum/src/lib.rs index bddbd8f25..6312eb047 100644 --- a/crates/electrum/src/lib.rs +++ b/crates/electrum/src/lib.rs @@ -20,12 +20,6 @@ //! [`batch_transaction_get`]: ElectrumApi::batch_transaction_get //! 
[`bdk_electrum_example`]: https://github.com/LLFourn/bdk_core_staging/tree/master/bdk_electrum_example -use std::{ - collections::{BTreeMap, HashMap}, - fmt::Debug, -}; - -pub use bdk_chain; use bdk_chain::{ bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid}, chain_graph::{self, ChainGraph}, @@ -34,8 +28,15 @@ use bdk_chain::{ tx_graph::TxGraph, BlockId, ConfirmationTime, TxHeight, }; -pub use electrum_client; use electrum_client::{Client, ElectrumApi, Error}; +use std::{ + collections::{BTreeMap, HashMap}, + fmt::Debug, +}; + +pub mod v2; +pub use bdk_chain; +pub use electrum_client; /// Trait to extend [`electrum_client::Client`] functionality. /// diff --git a/crates/electrum/src/v2.rs b/crates/electrum/src/v2.rs new file mode 100644 index 000000000..41d1faa4e --- /dev/null +++ b/crates/electrum/src/v2.rs @@ -0,0 +1,441 @@ +use bdk_chain::{ + bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid}, + indexed_tx_graph::{IndexedAdditions, IndexedTxGraph, Indexer}, + local_chain::{self, LocalChain, UpdateNotConnectedError}, + tx_graph::TxGraph, + Anchor, Append, BlockId, ConfirmationHeightAnchor, +}; +use electrum_client::{Client, ElectrumApi, Error}; +use std::collections::{BTreeMap, BTreeSet, HashMap}; + +use crate::InternalError; + +pub struct ElectrumUpdate { + pub graph_update: G, + pub chain_update: LocalChain, + pub keychain_update: BTreeMap, +} + +impl Default for ElectrumUpdate { + fn default() -> Self { + Self { + graph_update: Default::default(), + chain_update: Default::default(), + keychain_update: Default::default(), + } + } +} + +pub type IntermediaryElectrumUpdate = ElectrumUpdate>, K>; + +impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { + pub fn missing_full_txs(&'a self, graph: G) -> impl Iterator + 'a + where + G: AsRef + 'a, + { + self.graph_update + .keys() + .filter(move |&&txid| graph.as_ref().get_tx(txid).is_none()) + } + + pub fn finalize(self, seen_at: Option, new_txs: T) -> 
FinalElectrumUpdate + where + T: IntoIterator, + { + let mut graph_update = TxGraph::::new(new_txs); + for (txid, anchors) in self.graph_update { + if let Some(seen_at) = seen_at { + let _ = graph_update.insert_seen_at(txid, seen_at); + } + for anchor in anchors { + let _ = graph_update.insert_anchor(txid, anchor); + } + } + FinalElectrumUpdate { + graph_update, + chain_update: self.chain_update, + keychain_update: self.keychain_update, + } + } +} + +pub type FinalElectrumUpdate = ElectrumUpdate, K>; + +impl FinalElectrumUpdate { + pub fn apply( + self, + indexed_graph: &mut IndexedTxGraph, + chain: &mut LocalChain, + ) -> Result<(IndexedAdditions, local_chain::ChangeSet), UpdateNotConnectedError> + where + I::Additions: Default + Append, + { + let additions = indexed_graph.apply_update(self.graph_update); + let changeset = chain.apply_update(self.chain_update)?; + Ok((additions, changeset)) + } +} + +pub trait ElectrumExt { + fn get_tip(&self) -> Result<(u32, BlockHash), Error>; + + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + batch_size: usize, + ) -> Result, Error>; + + fn scan_without_keychain( + &self, + local_chain: &BTreeMap, + misc_spks: impl IntoIterator, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + batch_size: usize, + ) -> Result, Error> { + let spk_iter = misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)); + + self.scan( + local_chain, + [((), spk_iter)].into(), + txids, + outpoints, + usize::MAX, + batch_size, + ) + } +} + +impl ElectrumExt for Client { + fn get_tip(&self) -> Result<(u32, BlockHash), Error> { + // TODO: unsubscribe when added to the client, or is there a better call to use here? 
+ self.block_headers_subscribe() + .map(|data| (data.height as u32, data.header.block_hash())) + } + + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + batch_size: usize, + ) -> Result, Error> { + let mut request_spks = keychain_spks + .into_iter() + .map(|(k, s)| (k, s.into_iter())) + .collect::>(); + let mut scanned_spks = BTreeMap::<(K, u32), (Script, bool)>::new(); + + let txids = txids.into_iter().collect::>(); + let outpoints = outpoints.into_iter().collect::>(); + + let update = loop { + let mut update = IntermediaryElectrumUpdate:: { + chain_update: prepare_chain_update(self, local_chain)?, + ..Default::default() + }; + let anchor_block = update + .chain_update + .tip() + .expect("must have atleast one block"); + + if !request_spks.is_empty() { + if !scanned_spks.is_empty() { + let mut scanned_spk_iter = scanned_spks + .iter() + .map(|(i, (spk, _))| (i.clone(), spk.clone())); + match populate_with_spks( + self, + anchor_block, + &mut update, + &mut scanned_spk_iter, + stop_gap, + batch_size, + ) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(mut spks) => scanned_spks.append(&mut spks), + }; + } + for (keychain, keychain_spks) in &mut request_spks { + match populate_with_spks( + self, + anchor_block, + &mut update, + keychain_spks, + stop_gap, + batch_size, + ) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(spks) => scanned_spks.extend( + spks.into_iter() + .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)), + ), + }; + } + } + + match populate_with_txids(self, anchor_block, &mut update, &mut txids.iter().cloned()) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(_) => {} + } + + match populate_with_outpoints( + self, + anchor_block, + &mut update, + &mut outpoints.iter().cloned(), + 
) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ } + } + + // check for reorgs during scan process + let server_blockhash = self + .block_header(anchor_block.height as usize)? + .block_hash(); + if anchor_block.hash != server_blockhash { + continue; // reorg + } + + update.keychain_update = request_spks + .into_keys() + .filter_map(|k| { + scanned_spks + .range((k.clone(), u32::MIN)..=(k.clone(), u32::MAX)) + .rev() + .find(|(_, (_, active))| *active) + .map(|((_, i), _)| (k, *i)) + }) + .collect::>(); + break update; + }; + + Ok(update) + } +} + +/// Prepare an update "template" based on the checkpoints of the `local_chain`. +fn prepare_chain_update( + client: &Client, + local_chain: &BTreeMap, +) -> Result { + let mut update = LocalChain::default(); + + // Find the local chain block that is still there so our update can connect to the local chain. + for (&existing_height, &existing_hash) in local_chain.iter().rev() { + // TODO: a batch request may be safer, as a reorg that happens when we are obtaining + // `block_header`s will result in inconsistencies + let current_hash = client.block_header(existing_height as usize)?.block_hash(); + let _ = update + .insert_block(BlockId { + height: existing_height, + hash: current_hash, + }) + .expect("This never errors because we are working with a fresh chain"); + + if current_hash == existing_hash { + break; + } + } + + // Insert the new tip so new transactions will be accepted into the sparsechain. + let tip = { + let (height, hash) = crate::get_tip(client)?; + BlockId { height, hash } + }; + if update.insert_block(tip).is_err() { + // There has been a re-org before we even begin scanning addresses. + // Just recursively call (this should never happen). 
+ return prepare_chain_update(client, local_chain); + } + + Ok(update) +} + +fn determine_tx_anchor( + anchor_block: BlockId, + raw_height: i32, + txid: Txid, +) -> Option { + if txid + == Txid::from_hex("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b") + .expect("must deserialize genesis coinbase txid") + { + return Some(ConfirmationHeightAnchor { + anchor_block, + confirmation_height: Some(0), + }); + } + match raw_height { + h if h <= 0 => { + debug_assert!(h == 0 || h == -1, "unexpected height ({}) from electrum", h); + None + } + h => { + let h = h as u32; + if h > anchor_block.height { + None + } else { + Some(ConfirmationHeightAnchor { + anchor_block, + confirmation_height: Some(h), + }) + } + } + } +} + +fn populate_with_outpoints( + client: &Client, + anchor_block: BlockId, + update: &mut IntermediaryElectrumUpdate, + outpoints: &mut impl Iterator, +) -> Result, InternalError> { + let mut full_txs = HashMap::new(); + for outpoint in outpoints { + let txid = outpoint.txid; + let tx = client.transaction_get(&txid)?; + debug_assert_eq!(tx.txid(), txid); + let txout = match tx.output.get(outpoint.vout as usize) { + Some(txout) => txout, + None => continue, + }; + // attempt to find the following transactions (alongside their chain positions), and + // add to our sparsechain `update`: + let mut has_residing = false; // tx in which the outpoint resides + let mut has_spending = false; // tx that spends the outpoint + for res in client.script_get_history(&txout.script_pubkey)? 
{ + if has_residing && has_spending { + break; + } + + if res.tx_hash == txid { + if has_residing { + continue; + } + has_residing = true; + full_txs.insert(res.tx_hash, tx.clone()); + } else { + if has_spending { + continue; + } + let res_tx = match full_txs.get(&res.tx_hash) { + Some(tx) => tx, + None => { + let res_tx = client.transaction_get(&res.tx_hash)?; + full_txs.insert(res.tx_hash, res_tx); + full_txs.get(&res.tx_hash).expect("just inserted") + } + }; + has_spending = res_tx + .input + .iter() + .any(|txin| txin.previous_output == outpoint); + if !has_spending { + continue; + } + }; + + let anchor = determine_tx_anchor(anchor_block, res.height, res.tx_hash); + + let tx_entry = update.graph_update.entry(res.tx_hash).or_default(); + if let Some(anchor) = anchor { + tx_entry.insert(anchor); + } + } + } + Ok(full_txs) +} + +fn populate_with_txids( + client: &Client, + anchor_block: BlockId, + update: &mut IntermediaryElectrumUpdate, + txids: &mut impl Iterator, +) -> Result<(), InternalError> { + for txid in txids { + let tx = match client.transaction_get(&txid) { + Ok(tx) => tx, + Err(electrum_client::Error::Protocol(_)) => continue, + Err(other_err) => return Err(other_err.into()), + }; + + let spk = tx + .output + .get(0) + .map(|txo| &txo.script_pubkey) + .expect("tx must have an output"); + + let anchor = match client + .script_get_history(spk)? 
+ .into_iter() + .find(|r| r.tx_hash == txid) + { + Some(r) => determine_tx_anchor(anchor_block, r.height, txid), + None => continue, + }; + + let tx_entry = update.graph_update.entry(txid).or_default(); + if let Some(anchor) = anchor { + tx_entry.insert(anchor); + } + } + Ok(()) +} + +fn populate_with_spks( + client: &Client, + anchor_block: BlockId, + update: &mut IntermediaryElectrumUpdate, + spks: &mut impl Iterator, + stop_gap: usize, + batch_size: usize, +) -> Result, InternalError> { + let mut unused_spk_count = 0_usize; + let mut scanned_spks = BTreeMap::new(); + + loop { + let spks = (0..batch_size) + .map_while(|_| spks.next()) + .collect::>(); + if spks.is_empty() { + return Ok(scanned_spks); + } + + let spk_histories = client.batch_script_get_history(spks.iter().map(|(_, s)| s))?; + + for ((spk_index, spk), spk_history) in spks.into_iter().zip(spk_histories) { + if spk_history.is_empty() { + scanned_spks.insert(spk_index, (spk, false)); + unused_spk_count += 1; + if unused_spk_count > stop_gap { + return Ok(scanned_spks); + } + continue; + } else { + scanned_spks.insert(spk_index, (spk, true)); + unused_spk_count = 0; + } + + for tx in spk_history { + let tx_entry = update.graph_update.entry(tx.tx_hash).or_default(); + if let Some(anchor) = determine_tx_anchor(anchor_block, tx.height, tx.tx_hash) { + tx_entry.insert(anchor); + } + } + } + } +} From c0a006f53adb1681a2955575150fdd52f2e40870 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Tue, 25 Apr 2023 13:23:42 +0800 Subject: [PATCH 11/15] [examples_redesign] Implement `tracker_electrum_example` This is `keychain_tracker_electrum_example` using the redesigned structures. 
--- Cargo.toml | 1 + crates/electrum/src/v2.rs | 46 ++- example-crates/tracker_electrum/Cargo.toml | 11 + example-crates/tracker_electrum/src/main.rs | 272 ++++++++++++++++++ example-crates/tracker_example_cli/src/lib.rs | 11 +- .../tracker_example_cli/src/tracker.rs | 39 ++- 6 files changed, 362 insertions(+), 18 deletions(-) create mode 100644 example-crates/tracker_electrum/Cargo.toml create mode 100644 example-crates/tracker_electrum/src/main.rs diff --git a/Cargo.toml b/Cargo.toml index a24ed72ec..63a8a039e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,6 +7,7 @@ members = [ "example-crates/keychain_tracker_electrum", "example-crates/keychain_tracker_esplora", "example-crates/keychain_tracker_example_cli", + "example-crates/tracker_electrum", "example-crates/tracker_example_cli", "example-crates/wallet_electrum", "example-crates/wallet_esplora", diff --git a/crates/electrum/src/v2.rs b/crates/electrum/src/v2.rs index 41d1faa4e..01c28d3a6 100644 --- a/crates/electrum/src/v2.rs +++ b/crates/electrum/src/v2.rs @@ -1,15 +1,20 @@ use bdk_chain::{ bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid}, - indexed_tx_graph::{IndexedAdditions, IndexedTxGraph, Indexer}, + indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, + keychain::{DerivationAdditions, KeychainTxOutIndex}, local_chain::{self, LocalChain, UpdateNotConnectedError}, tx_graph::TxGraph, Anchor, Append, BlockId, ConfirmationHeightAnchor, }; use electrum_client::{Client, ElectrumApi, Error}; -use std::collections::{BTreeMap, BTreeSet, HashMap}; +use std::{ + collections::{BTreeMap, BTreeSet, HashMap}, + fmt::Debug, +}; use crate::InternalError; +#[derive(Debug)] pub struct ElectrumUpdate { pub graph_update: G, pub chain_update: LocalChain, @@ -29,10 +34,10 @@ impl Default for ElectrumUpdate { pub type IntermediaryElectrumUpdate = ElectrumUpdate>, K>; impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { - pub fn missing_full_txs(&'a self, graph: G) -> impl Iterator + 'a - where - 
G: AsRef + 'a, - { + pub fn missing_full_txs( + &'a self, + graph: &'a TxGraph, + ) -> impl Iterator + 'a { self.graph_update .keys() .filter(move |&&txid| graph.as_ref().get_tx(txid).is_none()) @@ -61,17 +66,30 @@ impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { pub type FinalElectrumUpdate = ElectrumUpdate, K>; -impl FinalElectrumUpdate { - pub fn apply( +impl FinalElectrumUpdate { + pub fn apply( self, - indexed_graph: &mut IndexedTxGraph, + indexed_graph: &mut IndexedTxGraph>, chain: &mut LocalChain, - ) -> Result<(IndexedAdditions, local_chain::ChangeSet), UpdateNotConnectedError> - where - I::Additions: Default + Append, - { - let additions = indexed_graph.apply_update(self.graph_update); + ) -> Result< + ( + IndexedAdditions>, + local_chain::ChangeSet, + ), + UpdateNotConnectedError, + > { + let (_, derivation_additions) = indexed_graph + .index + .reveal_to_target_multi(&self.keychain_update); + + let additions = { + let mut additions = indexed_graph.apply_update(self.graph_update); + additions.index_additions.append(derivation_additions); + additions + }; + let changeset = chain.apply_update(self.chain_update)?; + Ok((additions, changeset)) } } diff --git a/example-crates/tracker_electrum/Cargo.toml b/example-crates/tracker_electrum/Cargo.toml new file mode 100644 index 000000000..5ab6922c3 --- /dev/null +++ b/example-crates/tracker_electrum/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "tracker_electrum_example" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../../crates/chain", features = ["serde"] } +bdk_electrum = { path = "../../crates/electrum" } +tracker_example_cli = { path = "../tracker_example_cli" } diff --git a/example-crates/tracker_electrum/src/main.rs b/example-crates/tracker_electrum/src/main.rs new file mode 100644 index 000000000..3a47fb5f2 --- /dev/null +++ b/example-crates/tracker_electrum/src/main.rs @@ 
-0,0 +1,272 @@ +use std::{ + collections::BTreeMap, + io::{self, Write}, + time::UNIX_EPOCH, +}; + +use bdk_chain::{ + bitcoin::{Address, BlockHash, Network, OutPoint, Txid}, + Append, ConfirmationHeightAnchor, +}; +use bdk_electrum::{ + electrum_client::{self, ElectrumApi}, + v2::{ElectrumExt, ElectrumUpdate}, +}; +use tracker_example_cli::{ + self as cli, + anyhow::{self, Context}, + clap::{self, Parser, Subcommand}, +}; + +const DB_MAGIC: &[u8] = b"bdk_example_electrum"; + +#[derive(Subcommand, Debug, Clone)] +enum ElectrumCommands { + /// Scans the addresses in the wallet using the esplora API. + Scan { + /// When a gap this large has been found for a keychain, it will stop. + #[clap(long, default_value = "5")] + stop_gap: usize, + #[clap(flatten)] + scan_options: ScanOptions, + }, + /// Scans particular addresses using the esplora API. + Sync { + /// Scan all the unused addresses. + #[clap(long)] + unused_spks: bool, + /// Scan every address that you have derived. + #[clap(long)] + all_spks: bool, + /// Scan unspent outpoints for spends or changes to confirmation status of residing tx. + #[clap(long)] + utxos: bool, + /// Scan unconfirmed transactions for updates. + #[clap(long)] + unconfirmed: bool, + #[clap(flatten)] + scan_options: ScanOptions, + }, +} + +#[derive(Parser, Debug, Clone, PartialEq)] +pub struct ScanOptions { + /// Set batch size for each script_history call to electrum client. 
+ #[clap(long, default_value = "25")] + pub batch_size: usize, +} + +fn main() -> anyhow::Result<()> { + let (args, keymap, tracker, db) = cli::init::( + DB_MAGIC, + cli::Tracker::new_local(), + )?; + + let electrum_url = match args.network { + Network::Bitcoin => "ssl://electrum.blockstream.info:50002", + Network::Testnet => "ssl://electrum.blockstream.info:60002", + Network::Regtest => "tcp://localhost:60401", + Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001", + }; + let config = electrum_client::Config::builder() + .validate_domain(matches!(args.network, Network::Bitcoin)) + .build(); + + let client = electrum_client::Client::from_config(electrum_url, config)?; + + // [TODO]: Use genesis block based on network! + let chain_tip = tracker.lock().unwrap().chain.tip().unwrap_or_default(); + + let electrum_cmd = match args.command.clone() { + cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd, + general_command => { + return cli::handle_commands( + general_command, + |transaction| { + let _txid = client.transaction_broadcast(transaction)?; + Ok(()) + }, + &tracker, + &db, + chain_tip, + args.network, + &keymap, + ) + } + }; + + let response = match electrum_cmd { + ElectrumCommands::Scan { + stop_gap, + scan_options, + } => { + let (spk_iters, local_chain) = { + let tracker = &*tracker.lock().unwrap(); + let spk_iters = tracker + .indexed_graph + .index + .spks_of_all_keychains() + .into_iter() + .map(|(keychain, iter)| { + let mut first = true; + let spk_iter = iter.inspect(move |(i, _)| { + if first { + eprint!("\nscanning {}: ", keychain); + first = false; + } + + eprint!("{} ", i); + let _ = io::stdout().flush(); + }); + (keychain, spk_iter) + }) + .collect::>(); + let local_chain: BTreeMap = tracker.chain.clone().into(); + (spk_iters, local_chain) + }; + + client.scan( + &local_chain, + spk_iters, + core::iter::empty(), + core::iter::empty(), + stop_gap, + scan_options.batch_size, + )? 
+ } + ElectrumCommands::Sync { + mut unused_spks, + all_spks, + mut utxos, + mut unconfirmed, + scan_options, + } => { + // Get a short lock on the tracker to get the spks we're interested in + let tracker = tracker.lock().unwrap(); + + if !(all_spks || unused_spks || utxos || unconfirmed) { + unused_spks = true; + unconfirmed = true; + utxos = true; + } else if all_spks { + unused_spks = false; + } + + let mut spks: Box> = + Box::new(core::iter::empty()); + if all_spks { + let index = &tracker.indexed_graph.index; + let all_spks = index + .all_spks() + .iter() + .map(|(k, v)| (*k, v.clone())) + .collect::>(); + spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { + eprintln!("scanning {:?}", index); + script + }))); + } + if unused_spks { + let index = &tracker.indexed_graph.index; + let unused_spks = index + .unused_spks(..) + .map(|(k, v)| (*k, v.clone())) + .collect::>(); + spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { + eprintln!( + "Checking if address {} {:?} has been used", + Address::from_script(&script, args.network).unwrap(), + index + ); + + script + }))); + } + + let mut outpoints: Box> = Box::new(core::iter::empty()); + + if utxos { + let utxos = tracker + .list_owned_unspents(chain_tip) + .map(|(_, utxo)| utxo) + .collect::>(); + outpoints = Box::new( + utxos + .into_iter() + .inspect(|utxo| { + eprintln!( + "Checking if outpoint {} (value: {}) has been spent", + utxo.outpoint, utxo.txout.value + ); + }) + .map(|utxo| utxo.outpoint), + ); + }; + + let mut txids: Box> = Box::new(core::iter::empty()); + + if unconfirmed { + let unconfirmed_txids = tracker + .list_txs(chain_tip) + .filter(|ctx| !ctx.observed_as.is_confirmed()) + .map(|ctx| ctx.node.txid) + .collect::>(); + + txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| { + eprintln!("Checking if {} is confirmed yet", txid); + })); + } + + let local_chain: BTreeMap = tracker.chain.clone().into(); + drop(tracker); + + let update = 
client.scan_without_keychain( + &local_chain, + spks, + txids, + outpoints, + scan_options.batch_size, + )?; + ElectrumUpdate { + graph_update: update.graph_update, + chain_update: update.chain_update, + keychain_update: BTreeMap::new(), + } + } + }; + println!(); + + let missing_txids = { + let tracker = &*tracker.lock().unwrap(); + response + .missing_full_txs(tracker.indexed_graph.graph()) + .cloned() + .collect::>() + }; + + let update = response.finalize( + UNIX_EPOCH.elapsed().map(|d| d.as_secs()).ok(), + client + .batch_transaction_get(&missing_txids) + .context("fetching full transactions")?, + ); + + { + use bdk_chain::PersistBackend; + let tracker = &mut *tracker.lock().unwrap(); + let db = &mut *db.lock().unwrap(); + + let (additions, changeset) = + update.apply(&mut tracker.indexed_graph, &mut tracker.chain)?; + + let mut tracker_changeset = cli::ChangeSet::default(); + tracker_changeset.append(additions.into()); + tracker_changeset.append(changeset.into()); + + // [TODO] How do we check if changeset is empty? + // [TODO] When should we flush? 
+ db.write_changes(&tracker_changeset)?; + } + + Ok(()) +} diff --git a/example-crates/tracker_example_cli/src/lib.rs b/example-crates/tracker_example_cli/src/lib.rs index 72123ba49..134814782 100644 --- a/example-crates/tracker_example_cli/src/lib.rs +++ b/example-crates/tracker_example_cli/src/lib.rs @@ -1,5 +1,4 @@ mod tracker; -use anyhow::anyhow; use bdk_chain::{ bitcoin::{ psbt::Prevouts, secp256k1::Secp256k1, util::sighash::SighashCache, Address, LockTime, @@ -16,6 +15,7 @@ use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedVa use clap::{Parser, Subcommand}; use std::{cmp::Reverse, collections::HashMap, path::PathBuf, sync::Mutex, time::Duration}; +pub use anyhow; pub use bdk_file_store; pub use clap; pub use tracker::*; @@ -117,7 +117,12 @@ impl core::str::FromStr for CoinSelectionAlgo { "oldest-first" => OldestFirst, "newest-first" => NewestFirst, "bnb" => BranchAndBound, - unknown => return Err(anyhow!("unknown coin selection algorithm '{}'", unknown)), + unknown => { + return Err(anyhow::anyhow!( + "unknown coin selection algorithm '{}'", + unknown + )) + } }) } } @@ -533,7 +538,7 @@ where } } bdk_tmp_plan::PlanState::Incomplete(_) => { - return Err(anyhow!( + return Err(anyhow::anyhow!( "we weren't able to complete the plan with our keys." 
)); } diff --git a/example-crates/tracker_example_cli/src/tracker.rs b/example-crates/tracker_example_cli/src/tracker.rs index b33c8d211..c03140ffa 100644 --- a/example-crates/tracker_example_cli/src/tracker.rs +++ b/example-crates/tracker_example_cli/src/tracker.rs @@ -1,9 +1,11 @@ -use std::fmt::Debug; +use std::{convert::Infallible, fmt::Debug}; use bdk_chain::{ + bitcoin::Transaction, indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, keychain::{DerivationAdditions, KeychainTxOutIndex}, local_chain::{self, LocalChain}, + tx_graph::CanonicalTx, Anchor, Append, BlockId, ChainOracle, FullTxOut, Loadable, ObservedAs, }; use bdk_file_store::Store; @@ -83,6 +85,41 @@ impl Tracker { true }) } + + pub fn try_list_txs( + &self, + chain_tip: BlockId, + ) -> impl Iterator, C::Error>> { + self.indexed_graph + .graph() + .try_list_chain_txs(&self.chain, chain_tip) + } +} + +impl> Tracker { + pub fn list_owned_txouts( + &self, + chain_tip: BlockId, + ) -> impl Iterator>)> { + self.try_list_owned_txouts(chain_tip) + .map(|r| r.expect("oracle is infallible")) + } + + pub fn list_owned_unspents( + &self, + chain_tip: BlockId, + ) -> impl Iterator>)> { + self.try_list_owned_unspents(chain_tip) + .map(|r| r.expect("oracle is infallible")) + } + + pub fn list_txs( + &self, + chain_tip: BlockId, + ) -> impl Iterator> { + self.try_list_txs(chain_tip) + .map(|r| r.expect("oracle is infallible")) + } } impl Loadable for Tracker { From b44e4c6af1a588c6a0fbb0feda9a8dd1512fcb2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Wed, 26 Apr 2023 11:39:34 +0800 Subject: [PATCH 12/15] [examples_redesign] Change `init` to have custom default db path --- example-crates/tracker_electrum/src/main.rs | 2 ++ example-crates/tracker_example_cli/src/lib.rs | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/example-crates/tracker_electrum/src/main.rs b/example-crates/tracker_electrum/src/main.rs index 3a47fb5f2..7034fae67 100644 --- 
a/example-crates/tracker_electrum/src/main.rs +++ b/example-crates/tracker_electrum/src/main.rs @@ -19,6 +19,7 @@ use tracker_example_cli::{ }; const DB_MAGIC: &[u8] = b"bdk_example_electrum"; +const DB_PATH: &str = ".bdk_electrum_example.db"; #[derive(Subcommand, Debug, Clone)] enum ElectrumCommands { @@ -59,6 +60,7 @@ pub struct ScanOptions { fn main() -> anyhow::Result<()> { let (args, keymap, tracker, db) = cli::init::( DB_MAGIC, + DB_PATH, cli::Tracker::new_local(), )?; diff --git a/example-crates/tracker_example_cli/src/lib.rs b/example-crates/tracker_example_cli/src/lib.rs index 134814782..4845ce720 100644 --- a/example-crates/tracker_example_cli/src/lib.rs +++ b/example-crates/tracker_example_cli/src/lib.rs @@ -674,6 +674,7 @@ where #[allow(clippy::type_complexity)] // FIXME pub fn init( db_magic: &'static [u8], + db_default_path: &str, mut tracker: Tracker, ) -> anyhow::Result<( Args, @@ -690,6 +691,10 @@ where { use bdk_chain::PersistBackend; + if std::env::var("BDK_DB_PATH").is_err() { + std::env::set_var("BDK_DB_PATH", db_default_path); + } + let args = Args::::parse(); let secp = Secp256k1::default(); let (descriptor, mut keymap) = From e4347b3a4a74ea1650eeaca04201f7b25ecc12de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Thu, 27 Apr 2023 14:03:25 +0800 Subject: [PATCH 13/15] [persist_redesign] Remove `Loadable` trait It is not needed. Also moved `tracker_example_cli::RemoteChain` into it's own file. 
--- crates/chain/src/local_chain.rs | 10 +- crates/chain/src/persist.rs | 30 ++- crates/file_store/src/store.rs | 53 ++---- example-crates/tracker_electrum/src/main.rs | 2 +- example-crates/tracker_example_cli/src/lib.rs | 76 ++++---- .../tracker_example_cli/src/remote_chain.rs | 57 ++++++ .../tracker_example_cli/src/tracker.rs | 173 ++++++++++-------- 7 files changed, 217 insertions(+), 184 deletions(-) create mode 100644 example-crates/tracker_example_cli/src/remote_chain.rs diff --git a/crates/chain/src/local_chain.rs b/crates/chain/src/local_chain.rs index 9ee72154b..7c74e74cf 100644 --- a/crates/chain/src/local_chain.rs +++ b/crates/chain/src/local_chain.rs @@ -3,7 +3,7 @@ use core::convert::Infallible; use alloc::collections::{BTreeMap, BTreeSet}; use bitcoin::BlockHash; -use crate::{BlockId, ChainOracle, Loadable}; +use crate::{BlockId, ChainOracle}; /// This is a local implementation of [`ChainOracle`]. /// @@ -43,14 +43,6 @@ impl ChainOracle for LocalChain { } } -impl Loadable for LocalChain { - type ChangeSet = ChangeSet; - - fn load_changeset(&mut self, changeset: Self::ChangeSet) { - self.apply_changeset(changeset) - } -} - impl AsRef> for LocalChain { fn as_ref(&self) -> &BTreeMap { &self.blocks diff --git a/crates/chain/src/persist.rs b/crates/chain/src/persist.rs index f12773ea5..35d4b44ff 100644 --- a/crates/chain/src/persist.rs +++ b/crates/chain/src/persist.rs @@ -8,16 +8,16 @@ use crate::Append; /// Not all changes to the tracker, which is an in-memory representation of wallet/blockchain /// data, needs to be written to disk right away, so [`Persist::stage`] can be used to *stage* /// changes first and then [`Persist::commit`] can be used to write changes to disk. -pub struct Persist { +pub struct Persist { backend: B, stage: C, marker: PhantomData, } -impl Persist +impl Persist where - T: Loadable, - B: PersistBackend, + B: PersistBackend, + C: Default + Append, { /// Create a new [`Persist`] from [`PersistBackend`]. 
pub fn new(backend: B) -> Self { @@ -31,12 +31,12 @@ where /// Stage a `changeset` to be commited later with [`commit`]. /// /// [`commit`]: Self::commit - pub fn stage(&mut self, changeset: T::ChangeSet) { + pub fn stage(&mut self, changeset: C) { self.stage.append(changeset) } /// Get the changes that have not been commited yet. - pub fn staged(&self) -> &T::ChangeSet { + pub fn staged(&self) -> &C { &self.stage } @@ -44,7 +44,7 @@ where /// /// Returns a backend-defined error if this fails. pub fn commit(&mut self) -> Result<(), B::WriteError> { - let mut temp = T::ChangeSet::default(); + let mut temp = C::default(); core::mem::swap(&mut temp, &mut self.stage); self.backend.write_changes(&temp) } @@ -53,7 +53,7 @@ where /// A persistence backend for [`Persist`]. /// /// `T` represents the tracker, the in-memory data structure which we wish to persist. -pub trait PersistBackend { +pub trait PersistBackend { /// The error the backend returns when it fails to write. type WriteError: core::fmt::Debug; @@ -68,17 +68,17 @@ pub trait PersistBackend { /// changesets had been applied sequentially. /// /// [`load_into_tracker`]: Self::load_into_tracker - fn write_changes(&mut self, changeset: &T::ChangeSet) -> Result<(), Self::WriteError>; + fn write_changes(&mut self, changeset: &C) -> Result<(), Self::WriteError>; /// Loads all data from the persistence backend into `tracker`. fn load_into_tracker(&mut self, tracker: &mut T) -> Result<(), Self::LoadError>; } -impl PersistBackend for () { +impl PersistBackend for () { type WriteError = (); type LoadError = (); - fn write_changes(&mut self, _changeset: &T::ChangeSet) -> Result<(), Self::WriteError> { + fn write_changes(&mut self, _changeset: &C) -> Result<(), Self::WriteError> { Ok(()) } @@ -86,11 +86,3 @@ impl PersistBackend for () { Ok(()) } } - -/// A trait that represents a structure which can be loaded with changesets. -pub trait Loadable { - /// The changeset to be loaded into `self`. 
- type ChangeSet: Default + Append; - /// Loads the `changeset` into `self`. - fn load_changeset(&mut self, changeset: Self::ChangeSet); -} diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs index d5f518ae7..1af635a10 100644 --- a/crates/file_store/src/store.rs +++ b/crates/file_store/src/store.rs @@ -5,7 +5,7 @@ use std::{ path::Path, }; -use bdk_chain::{Append, Loadable, PersistBackend}; +use bdk_chain::Append; use bincode::Options; use crate::{bincode_options, EntryIter, FileError, IterError}; @@ -14,16 +14,15 @@ use crate::{bincode_options, EntryIter, FileError, IterError}; /// /// The changesets are the results of altering a tracker implementation (`T`). #[derive(Debug)] -pub struct Store { +pub struct Store { magic: &'static [u8], db_file: File, - marker: PhantomData, + marker: PhantomData<(T, C)>, } -impl Store +impl Store where - T: Loadable, - T::ChangeSet: serde::Serialize + serde::de::DeserializeOwned, + C: Default + Append + serde::Serialize + serde::de::DeserializeOwned, { /// Creates a new store from a [`File`]. /// @@ -81,7 +80,7 @@ where /// **WARNING**: This method changes the write position in the underlying file. You should /// always iterate over all entries until `None` is returned if you want your next write to go /// at the end; otherwise, you will write over existing entries. - pub fn iter_changesets(&mut self) -> Result, io::Error> { + pub fn iter_changesets(&mut self) -> Result, io::Error> { self.db_file .seek(io::SeekFrom::Start(self.magic.len() as _))?; @@ -100,8 +99,8 @@ where /// /// **WARNING**: This method changes the write position of the underlying file. The next /// changeset will be written over the erroring entry (or the end of the file if none existed). 
- pub fn aggregate_changesets(&mut self) -> (T::ChangeSet, Result<(), IterError>) { - let mut changeset = T::ChangeSet::default(); + pub fn aggregate_changesets(&mut self) -> (C, Result<(), IterError>) { + let mut changeset = C::default(); let result = (|| { let iter_changeset = self.iter_changesets()?; for next_changeset in iter_changeset { @@ -121,7 +120,7 @@ where /// /// **WARNING**: This method does not detect whether the changeset is empty or not, and will /// append an empty changeset to the file (not catastrophic, just a waste of space). - pub fn append_changeset(&mut self, changeset: &T::ChangeSet) -> Result<(), io::Error> { + pub fn append_changeset(&mut self, changeset: &C) -> Result<(), io::Error> { bincode_options() .serialize_into(&mut self.db_file, changeset) .map_err(|e| match *e { @@ -139,25 +138,6 @@ where } } -impl PersistBackend for Store -where - T: Loadable, - T::ChangeSet: serde::de::DeserializeOwned + serde::Serialize, -{ - type WriteError = std::io::Error; - type LoadError = IterError; - - fn write_changes(&mut self, changeset: &T::ChangeSet) -> Result<(), Self::WriteError> { - Store::append_changeset(self, changeset) - } - - fn load_into_tracker(&mut self, tracker: &mut T) -> Result<(), Self::LoadError> { - let (changeset, result) = self.aggregate_changesets(); - tracker.load_changeset(changeset); - result - } -} - #[cfg(test)] mod test { use super::*; @@ -213,19 +193,13 @@ mod test { #[derive(Debug)] struct TestTracker; - impl Loadable for TestTracker { - type ChangeSet = TestChangeSet; - - fn load_changeset(&mut self, _changeset: Self::ChangeSet) {} - } - #[test] fn new_fails_if_file_is_too_short() { let mut file = NamedTempFile::new().unwrap(); file.write_all(&TEST_MAGIC_BYTES[..TEST_MAGIC_BYTES_LEN - 1]) .expect("should write"); - match Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { + match Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { Err(FileError::Io(e)) => assert_eq!(e.kind(), 
std::io::ErrorKind::UnexpectedEof), unexpected => panic!("unexpected result: {:?}", unexpected), }; @@ -239,7 +213,7 @@ mod test { file.write_all(invalid_magic_bytes.as_bytes()) .expect("should write"); - match Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { + match Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) { Err(FileError::InvalidMagicBytes { got, .. }) => { assert_eq!(got, invalid_magic_bytes.as_bytes()) } @@ -260,8 +234,9 @@ mod test { let mut file = NamedTempFile::new().unwrap(); file.write_all(&data).expect("should write"); - let mut store = Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) - .expect("should open"); + let mut store = + Store::::new(&TEST_MAGIC_BYTES, file.reopen().unwrap()) + .expect("should open"); match store.iter_changesets().expect("seek should succeed").next() { Some(Err(IterError::Bincode(_))) => {} unexpected_res => panic!("unexpected result: {:?}", unexpected_res), diff --git a/example-crates/tracker_electrum/src/main.rs b/example-crates/tracker_electrum/src/main.rs index 7034fae67..dec7388a1 100644 --- a/example-crates/tracker_electrum/src/main.rs +++ b/example-crates/tracker_electrum/src/main.rs @@ -58,7 +58,7 @@ pub struct ScanOptions { } fn main() -> anyhow::Result<()> { - let (args, keymap, tracker, db) = cli::init::( + let (args, keymap, tracker, db) = cli::init::( DB_MAGIC, DB_PATH, cli::Tracker::new_local(), diff --git a/example-crates/tracker_example_cli/src/lib.rs b/example-crates/tracker_example_cli/src/lib.rs index 4845ce720..6de66dbe6 100644 --- a/example-crates/tracker_example_cli/src/lib.rs +++ b/example-crates/tracker_example_cli/src/lib.rs @@ -1,3 +1,4 @@ +mod remote_chain; mod tracker; use bdk_chain::{ bitcoin::{ @@ -9,7 +10,7 @@ use bdk_chain::{ descriptor::{DescriptorSecretKey, KeyMap}, Descriptor, DescriptorPublicKey, }, - Anchor, Append, BlockId, ChainOracle, DescriptorExt, FullTxOut, Loadable, ObservedAs, + Anchor, Append, BlockId, ChainOracle, DescriptorExt, FullTxOut, ObservedAs, 
PersistBackend, }; use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue}; use clap::{Parser, Subcommand}; @@ -18,6 +19,7 @@ use std::{cmp::Reverse, collections::HashMap, path::PathBuf, sync::Mutex, time:: pub use anyhow; pub use bdk_file_store; pub use clap; +pub use remote_chain::*; pub use tracker::*; #[derive( @@ -185,15 +187,14 @@ pub struct AddrsOutput { used: bool, } -pub fn run_address_cmd( - tracker: &Mutex>, - db: &Mutex>, +pub fn run_address_cmd( + tracker: &Mutex>, + db: &Mutex>, addr_cmd: AddressCmd, network: Network, ) -> anyhow::Result<()> where - as Loadable>::ChangeSet: - serde::de::DeserializeOwned + serde::Serialize, + tracker::ChangeSet: Append + serde::Serialize + serde::de::DeserializeOwned, { let mut tracker = tracker.lock().unwrap(); let txout_index = &mut tracker.indexed_graph.index; @@ -247,17 +248,17 @@ where } } -pub fn run_balance_cmd( - tracker: &Mutex>, +pub fn run_balance_cmd( + tracker: &Mutex>, chain_tip: BlockId, ) -> anyhow::Result<()> where - ::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, { let tracker = tracker.lock().unwrap(); let utxos = tracker .try_list_owned_unspents(chain_tip) - .collect::, C::Error>>()?; + .collect::, B::Error>>()?; let (confirmed, unconfirmed) = utxos @@ -278,14 +279,14 @@ where Ok(()) } -pub fn run_txo_cmd( +pub fn run_txo_cmd( txout_cmd: TxOutCmd, - tracker: &Mutex>, + tracker: &Mutex>, chain_tip: BlockId, network: Network, ) -> anyhow::Result<()> where - ::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, { match txout_cmd { TxOutCmd::List { @@ -335,11 +336,11 @@ where } #[allow(clippy::type_complexity)] // FIXME -pub fn create_tx( +pub fn create_tx( value: u64, address: Address, coin_select: CoinSelectionAlgo, - tracker: &mut Tracker, + tracker: &mut Tracker, chain_tip: BlockId, keymap: &HashMap, ) -> anyhow::Result<( @@ -347,7 +348,7 @@ pub fn create_tx( 
Option<(DerivationAdditions, (Keychain, u32))>, )> where - ::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, { let mut additions = DerivationAdditions::default(); @@ -358,7 +359,7 @@ where // TODO use planning module let mut candidates = - planned_utxos(tracker, chain_tip, &assets).collect::, C::Error>>()?; + planned_utxos(tracker, chain_tip, &assets).collect::, B::Error>>()?; // apply coin selection algorithm match coin_select { @@ -554,13 +555,13 @@ where Ok((transaction, change_info)) } -pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, A: Anchor, C: ChainOracle>( - tracker: &'a Tracker, +pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, A: Anchor, B: ChainOracle>( + tracker: &'a Tracker, chain_tip: BlockId, assets: &'a bdk_tmp_plan::Assets, -) -> impl Iterator, FullTxOut>), C::Error>> + 'a +) -> impl Iterator, FullTxOut>), B::Error>> + 'a where - ::Error: std::error::Error + Send + Sync + 'static, + ::Error: std::error::Error + Send + Sync + 'static, { tracker .try_list_owned_unspents(chain_tip) @@ -581,21 +582,20 @@ where } #[allow(clippy::too_many_arguments)] // FIXME -pub fn handle_commands( +pub fn handle_commands( command: Commands, broadcast: impl FnOnce(&Transaction) -> anyhow::Result<()>, // we Mutex around these not because we need them for a simple CLI app but to demonstrate how // all the stuff we're doing can be made thread-safe and not keep locks up over an IO bound. 
- tracker: &Mutex>, - store: &Mutex>, + tracker: &Mutex>, + store: &Mutex>, chain_tip: BlockId, network: Network, keymap: &HashMap, ) -> anyhow::Result<()> where - ::Error: std::error::Error + Send + Sync + 'static, - as Loadable>::ChangeSet: - serde::de::DeserializeOwned + serde::Serialize, + ::Error: std::error::Error + Send + Sync + 'static, + tracker::ChangeSet: Append + serde::Serialize + serde::de::DeserializeOwned, { match command { // TODO: Make these functions return stuffs @@ -672,25 +672,24 @@ where } #[allow(clippy::type_complexity)] // FIXME -pub fn init( +pub fn init( db_magic: &'static [u8], db_default_path: &str, - mut tracker: Tracker, + mut tracker: Tracker, ) -> anyhow::Result<( Args, KeyMap, // These don't need to have mutexes around them, but we want the cli example code to make it obvious how they // are thread-safe, forcing the example developers to show where they would lock and unlock things. - Mutex>, - Mutex>, + Mutex>, + Mutex>, )> where - ::Error: std::error::Error + Send + Sync + 'static, - as Loadable>::ChangeSet: - serde::de::DeserializeOwned + serde::Serialize, + ::Error: std::error::Error + Send + Sync + 'static, + tracker::ChangeSet: Append + serde::Serialize + serde::de::DeserializeOwned, + TrackerStore: + PersistBackend, ChangeSet>, { - use bdk_chain::PersistBackend; - if std::env::var("BDK_DB_PATH").is_err() { std::env::set_var("BDK_DB_PATH", db_default_path); } @@ -718,7 +717,8 @@ where .add_keychain(Keychain::Internal, internal_descriptor); }; - let mut db = TrackerStore::::new_from_path(db_magic, args.db_path.as_path())?; + let mut db = + TrackerStore::::new_from_path(db_magic, args.db_path.as_path())?; if let Err(e) = db.load_into_tracker(&mut tracker) { // [TODO] Should we introduce a `TipChainOracle` trait? 
@@ -727,7 +727,7 @@ where // None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e), // } eprintln!( - "Failed to load changesets from {}: {}", + "Failed to load changesets from {}: {:?}", args.db_path.display(), e ); diff --git a/example-crates/tracker_example_cli/src/remote_chain.rs b/example-crates/tracker_example_cli/src/remote_chain.rs new file mode 100644 index 000000000..eba4e8f71 --- /dev/null +++ b/example-crates/tracker_example_cli/src/remote_chain.rs @@ -0,0 +1,57 @@ +use bdk_chain::{Append, BlockId, ChainOracle}; + +pub type RemoteChainChangeSet = Option; + +/// Contains a remote best-chain representation alongside the last-seen block's height. +/// +/// The last-seen block height is persisted locally and can be used to determine which height to +/// start syncing from for block-by-block chain sources. +pub struct RemoteChain { + oracle: O, + last_seen_height: Option, +} + +impl RemoteChain { + pub fn new(oracle: O) -> Self { + Self { + oracle, + last_seen_height: None, + } + } + + pub fn inner(&self) -> &O { + &self.oracle + } + + pub fn last_seen_height(&self) -> Option { + self.last_seen_height + } + + pub fn update_last_seen_height( + &mut self, + last_seen_height: Option, + ) -> RemoteChainChangeSet { + if self.last_seen_height < last_seen_height { + self.last_seen_height = last_seen_height; + last_seen_height + } else { + None + } + } + + pub fn apply_changeset(&mut self, changeset: RemoteChainChangeSet) { + Append::append(&mut self.last_seen_height, changeset) + } +} + +impl ChainOracle for RemoteChain { + type Error = O::Error; + + fn is_block_in_chain( + &self, + block: BlockId, + static_block: BlockId, + ) -> Result, Self::Error> { + self.oracle.is_block_in_chain(block, static_block) + } +} diff --git a/example-crates/tracker_example_cli/src/tracker.rs b/example-crates/tracker_example_cli/src/tracker.rs index c03140ffa..f3dca6ea1 100644 --- a/example-crates/tracker_example_cli/src/tracker.rs +++ 
b/example-crates/tracker_example_cli/src/tracker.rs @@ -6,31 +6,41 @@ use bdk_chain::{ keychain::{DerivationAdditions, KeychainTxOutIndex}, local_chain::{self, LocalChain}, tx_graph::CanonicalTx, - Anchor, Append, BlockId, ChainOracle, FullTxOut, Loadable, ObservedAs, + Anchor, Append, BlockId, ChainOracle, FullTxOut, ObservedAs, PersistBackend, }; -use bdk_file_store::Store; +use bdk_file_store::{IterError, Store}; + +use crate::{RemoteChain, RemoteChainChangeSet}; /// Structure for persisting [`Tracker`] data. -pub type TrackerStore = Store>; +pub type TrackerStore = Store, ChangeSet>; + +pub type LocalTracker = Tracker; +pub type LocalTrackerStore = TrackerStore; +pub type LocalTrackerChangeSet = ChangeSet; + +pub type RemoteTracker = Tracker>; +pub type RemoteTrackerStore = TrackerStore, RemoteChainChangeSet>; +pub type RemoteTrackerChangeSet = ChangeSet; /// An in-memory representation of chain data that we are tracking. /// /// * `A` is the [`Anchor`] implementation. /// * `K` is our keychain identifier. -/// * `C` is the representation of the best chain history. This can either be a [`LocalChain`] or a -/// remote [`ChainOracle`] implementation. +/// * `B` is the representation of the best chain history. This can either be a [`LocalChain`] or a +/// [`RemoteChain`] (which wraps a remote [`ChainOracle`] implementation). /// /// [`Tracker`] can be constructed with [`new_local`] or [`new_remote`] (depending on the /// chain-history type). /// /// [`new_local`]: Self::new_local /// [`new_remote`]: Self::new_remote -pub struct Tracker { +pub struct Tracker { pub indexed_graph: IndexedTxGraph>, - pub chain: C, + pub chain: B, } -impl Tracker { +impl LocalTracker { /// New [`Tracker`] with a [`LocalChain`] as the best-chain representation. pub fn new_local() -> Self { Self { @@ -40,24 +50,24 @@ impl Tracker { } } -impl Tracker> { +impl RemoteTracker { /// New [`Tracker`] with a remote [`ChainOracle`] as the best-chain representation. 
pub fn new_remote(oracle: O) -> Self { Self { indexed_graph: Default::default(), - chain: RemoteChain { - oracle, - last_seen_height: None, - }, + chain: RemoteChain::new(oracle), } } } -impl Tracker { +impl Tracker +where + K: Clone + Ord + Debug, +{ pub fn try_list_owned_txouts( &self, chain_tip: BlockId, - ) -> impl Iterator>), C::Error>> { + ) -> impl Iterator>), B::Error>> { self.indexed_graph .graph() .try_list_chain_txouts(&self.chain, chain_tip) @@ -75,7 +85,7 @@ impl Tracker { pub fn try_list_owned_unspents( &self, chain_tip: BlockId, - ) -> impl Iterator>), C::Error>> { + ) -> impl Iterator>), B::Error>> { self.try_list_owned_txouts(chain_tip).filter(|r| { if let Ok((_, full_txo)) = r { if full_txo.spent_by.is_some() { @@ -89,14 +99,17 @@ impl Tracker { pub fn try_list_txs( &self, chain_tip: BlockId, - ) -> impl Iterator, C::Error>> { + ) -> impl Iterator, B::Error>> { self.indexed_graph .graph() .try_list_chain_txs(&self.chain, chain_tip) } } -impl> Tracker { +impl> Tracker +where + K: Clone + Ord + Debug, +{ pub fn list_owned_txouts( &self, chain_tip: BlockId, @@ -122,27 +135,79 @@ impl> Trac } } -impl Loadable for Tracker { - type ChangeSet = ChangeSet; +impl PersistBackend, LocalTrackerChangeSet> + for LocalTrackerStore +where + K: Clone + Ord + Debug + serde::Serialize + serde::de::DeserializeOwned, + A: Anchor + serde::Serialize + serde::de::DeserializeOwned, +{ + type WriteError = std::io::Error; - fn load_changeset(&mut self, changeset: Self::ChangeSet) { - self.indexed_graph + type LoadError = IterError; + + fn write_changes( + &mut self, + changeset: &LocalTrackerChangeSet, + ) -> Result<(), Self::WriteError> { + self.append_changeset(changeset) + } + + fn load_into_tracker( + &mut self, + tracker: &mut LocalTracker, + ) -> Result<(), Self::LoadError> { + let (changeset, result) = self.aggregate_changesets(); + tracker + .indexed_graph + .apply_additions(changeset.indexed_graph_additions); + 
tracker.chain.apply_changeset(changeset.chain_changeset); + result + } +} + +impl PersistBackend, RemoteTrackerChangeSet> + for RemoteTrackerStore +where + K: Clone + Ord + Debug + serde::Serialize + serde::de::DeserializeOwned, + A: Anchor + serde::Serialize + serde::de::DeserializeOwned, + O: ChainOracle, +{ + type WriteError = std::io::Error; + + type LoadError = IterError; + + fn write_changes( + &mut self, + changeset: &RemoteTrackerChangeSet, + ) -> Result<(), Self::WriteError> { + self.append_changeset(changeset) + } + + fn load_into_tracker( + &mut self, + tracker: &mut RemoteTracker, + ) -> Result<(), Self::LoadError> { + let (changeset, result) = self.aggregate_changesets(); + tracker + .indexed_graph .apply_additions(changeset.indexed_graph_additions); - self.chain.load_changeset(changeset.chain_changeset); + tracker.chain.apply_changeset(changeset.chain_changeset); + result } } +/// A structure that represents changes to [`Tracker`]. #[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)] #[serde(bound( deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>, C: Ord + serde::Deserialize<'de>", serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize, C: Ord + serde::Serialize", ))] -pub struct ChangeSet { +pub struct ChangeSet { pub indexed_graph_additions: IndexedAdditions>, pub chain_changeset: C, } -impl Default for ChangeSet { +impl Default for ChangeSet { fn default() -> Self { Self { indexed_graph_additions: Default::default(), @@ -151,7 +216,7 @@ impl Default for ChangeSet { } } -impl Append for ChangeSet { +impl Append for ChangeSet { fn append(&mut self, other: Self) { Append::append( &mut self.indexed_graph_additions, @@ -161,7 +226,7 @@ impl Append for ChangeSet { } } -impl From>> for ChangeSet { +impl From>> for ChangeSet { fn from(inner_additions: IndexedAdditions>) -> Self { Self { indexed_graph_additions: inner_additions, @@ -170,7 +235,7 @@ impl From>> for Cha } } -impl From> for ChangeSet { 
+impl From> for ChangeSet { fn from(index_additions: DerivationAdditions) -> Self { Self { indexed_graph_additions: IndexedAdditions { @@ -182,7 +247,7 @@ impl From> for ChangeSet { } } -impl From for ChangeSet { +impl From for ChangeSet { fn from(chain_changeset: local_chain::ChangeSet) -> Self { Self { indexed_graph_additions: Default::default(), @@ -191,7 +256,7 @@ impl From for ChangeSet From> for ChangeSet> { +impl From> for ChangeSet> { fn from(chain_changeset: Option) -> Self { Self { indexed_graph_additions: Default::default(), @@ -199,51 +264,3 @@ impl From> for ChangeSet> { } } } - -/// Contains a remote best-chain representation alongside the last-seen block's height. -/// -/// The last-seen block height is persisted locally and can be used to determine which height to -/// start syncing from for block-by-block chain sources. -pub struct RemoteChain { - oracle: O, - last_seen_height: Option, -} - -impl RemoteChain { - pub fn inner(&self) -> &O { - &self.oracle - } - - pub fn last_seen_height(&self) -> Option { - self.last_seen_height - } - - pub fn update_last_seen_height(&mut self, last_seen_height: Option) -> Option { - if self.last_seen_height < last_seen_height { - self.last_seen_height = last_seen_height; - last_seen_height - } else { - None - } - } -} - -impl Loadable for RemoteChain { - type ChangeSet = Option; - - fn load_changeset(&mut self, changeset: Self::ChangeSet) { - self.last_seen_height.append(changeset) - } -} - -impl ChainOracle for RemoteChain { - type Error = O::Error; - - fn is_block_in_chain( - &self, - block: BlockId, - static_block: BlockId, - ) -> Result, Self::Error> { - self.oracle.is_block_in_chain(block, static_block) - } -} From 7fe84f0b19dc51537256a4ce0d590bbc2562b291 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Fri, 28 Apr 2023 18:09:07 +0800 Subject: [PATCH 14/15] [wallet_redesign] Initial attempt to replace `Wallet` internals The internals are replaced with redesigned structures. 
It turns out that we need a `Tracker` struct. Fixes: * Fix `FinalExectrumUpdate::into_confirmation_time_update()` * The behaviour of `TxGraph::try_get_chain_position` is also changed so that unconfirmed txs with `last_seen` value of 0 can be part of the chain. Additional changes: * Also introduce `TxGraph::all_anchors` method. --- crates/bdk/src/wallet/export.rs | 13 +- crates/bdk/src/wallet/mod.rs | 400 ++++++++++-------- crates/bdk/src/wallet/tracker.rs | 197 +++++++++ crates/bdk/src/wallet/tx_builder.rs | 7 +- crates/bdk/tests/common.rs | 16 +- crates/bdk/tests/wallet.rs | 84 +++- crates/chain/src/chain_data.rs | 26 ++ crates/chain/src/indexed_tx_graph.rs | 1 + crates/chain/src/local_chain.rs | 4 + crates/chain/src/persist.rs | 1 + crates/chain/src/tx_graph.rs | 17 +- crates/chain/tests/test_tx_graph.rs | 9 +- crates/electrum/Cargo.toml | 4 + crates/electrum/src/v2.rs | 97 ++++- crates/file_store/Cargo.toml | 5 + crates/file_store/src/store.rs | 20 + example-crates/wallet_electrum/Cargo.toml | 4 +- example-crates/wallet_electrum/src/main.rs | 47 +- example-crates/wallet_esplora/src/main.rs | 163 +++---- .../wallet_esplora_async/src/main.rs | 165 ++++---- 20 files changed, 875 insertions(+), 405 deletions(-) create mode 100644 crates/bdk/src/wallet/tracker.rs diff --git a/crates/bdk/src/wallet/export.rs b/crates/bdk/src/wallet/export.rs index 905638449..1e1245839 100644 --- a/crates/bdk/src/wallet/export.rs +++ b/crates/bdk/src/wallet/export.rs @@ -226,6 +226,11 @@ mod test { change_descriptor: Option<&str>, network: Network, ) -> Wallet<()> { + let anchor_block = BlockId { + height: 5001, + hash: BlockHash::all_zeros(), + }; + let mut wallet = Wallet::new_no_persist(descriptor, change_descriptor, network).unwrap(); let transaction = Transaction { input: vec![], @@ -233,12 +238,7 @@ mod test { version: 0, lock_time: bitcoin::PackedLockTime::ZERO, }; - wallet - .insert_checkpoint(BlockId { - height: 5001, - hash: BlockHash::all_zeros(), - }) - .unwrap(); + 
wallet.insert_checkpoint(anchor_block).unwrap(); wallet .insert_tx( transaction, @@ -246,6 +246,7 @@ mod test { height: 5000, time: 0, }, + None, ) .unwrap(); wallet diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index 67032cd3c..ce402c60a 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -21,9 +21,7 @@ use alloc::{ }; pub use bdk_chain::keychain::Balance; use bdk_chain::{ - chain_graph, - keychain::{persist, KeychainChangeSet, KeychainScan, KeychainTracker}, - sparse_chain, BlockId, ConfirmationTime, + local_chain, BlockId, ConfirmationTime, ConfirmationTimeAnchor, Persist, PersistBackend, }; use bitcoin::consensus::encode::serialize; use bitcoin::secp256k1::Secp256k1; @@ -42,6 +40,7 @@ use log::{debug, error, info, trace}; pub mod coin_selection; pub mod export; pub mod signer; +mod tracker; pub mod tx_builder; pub(crate) mod utils; @@ -49,6 +48,7 @@ pub(crate) mod utils; #[cfg_attr(docsrs, doc(cfg(feature = "hardware-signer")))] pub mod hardwaresigner; +pub use tracker::*; pub use utils::IsDust; #[allow(deprecated)] @@ -83,19 +83,17 @@ const COINBASE_MATURITY: u32 = 100; pub struct Wallet { signers: Arc, change_signers: Arc, - keychain_tracker: KeychainTracker, - persist: persist::Persist, + tracker: Tracker, + persist: Persist, network: Network, secp: SecpCtx, } -/// The update to a [`Wallet`] used in [`Wallet::apply_update`]. This is usually returned from blockchain data sources. -/// The type parameter `T` indicates the kind of transaction contained in the update. It's usually a [`bitcoin::Transaction`]. -pub type Update = KeychainScan; -/// Error indicating that something was wrong with an [`Update`]. -pub type UpdateError = chain_graph::UpdateError; -/// The changeset produced internally by applying an update -pub(crate) type ChangeSet = KeychainChangeSet; +// /// The update to a [`Wallet`] used in [`Wallet::apply_update`]. This is usually returned from blockchain data sources. 
+// /// The type parameter `T` indicates the kind of transaction contained in the update. It's usually a [`bitcoin::Transaction`]. +// pub type Update = KeychainScan; +// /// Error indicating that something was wrong with an [`Update`]. +// pub type UpdateError = chain_graph::UpdateError; /// The address index selection strategy to use to derived an address from the wallet's external /// descriptor. See [`Wallet::get_address`]. If you're unsure which one to use use `WalletIndex::New`. @@ -195,17 +193,18 @@ impl Wallet { network: Network, ) -> Result> where - D: persist::PersistBackend, + D: PersistBackend, { let secp = Secp256k1::new(); + let mut tracker = Tracker::default(); - let mut keychain_tracker = KeychainTracker::default(); let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, &secp, network) .map_err(NewError::Descriptor)?; - keychain_tracker - .txout_index + tracker + .index_mut() .add_keychain(KeychainKind::External, descriptor.clone()); let signers = Arc::new(SignersContainer::build(keymap, &descriptor, &secp)); + let change_signers = match change_descriptor { Some(desc) => { let (change_descriptor, change_keymap) = @@ -218,8 +217,8 @@ impl Wallet { &secp, )); - keychain_tracker - .txout_index + tracker + .index_mut() .add_keychain(KeychainKind::Internal, change_descriptor); change_signers @@ -227,10 +226,10 @@ impl Wallet { None => Arc::new(SignersContainer::new()), }; - db.load_into_keychain_tracker(&mut keychain_tracker) + db.load_into_tracker(&mut tracker) .map_err(NewError::Persist)?; - let persist = persist::Persist::new(db); + let persist = Persist::new(db); Ok(Wallet { signers, @@ -238,7 +237,7 @@ impl Wallet { network, persist, secp, - keychain_tracker, + tracker, }) } @@ -249,7 +248,7 @@ impl Wallet { /// Iterator over all keychains in this wallet pub fn keychanins(&self) -> &BTreeMap { - self.keychain_tracker.txout_index.keychains() + self.tracker.index().keychains() } /// Return a derived address using the external 
descriptor, see [`AddressIndex`] for @@ -257,7 +256,7 @@ impl Wallet { /// (i.e. does not end with /*) then the same address will always be returned for any [`AddressIndex`]. pub fn get_address(&mut self, address_index: AddressIndex) -> AddressInfo where - D: persist::PersistBackend, + D: PersistBackend, { self._get_address(address_index, KeychainKind::External) } @@ -271,17 +270,17 @@ impl Wallet { /// be returned for any [`AddressIndex`]. pub fn get_internal_address(&mut self, address_index: AddressIndex) -> AddressInfo where - D: persist::PersistBackend, + D: PersistBackend, { self._get_address(address_index, KeychainKind::Internal) } fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo where - D: persist::PersistBackend, + D: PersistBackend, { let keychain = self.map_keychain(keychain); - let txout_index = &mut self.keychain_tracker.txout_index; + let txout_index = self.tracker.index_mut(); let (index, spk) = match address_index { AddressIndex::New => { let ((index, spk), changeset) = txout_index.reveal_next_spk(&keychain); @@ -320,42 +319,45 @@ impl Wallet { /// Return whether or not a `script` is part of this wallet (either internal or external) pub fn is_mine(&self, script: &Script) -> bool { - self.keychain_tracker - .txout_index - .index_of_spk(script) - .is_some() + self.tracker.index().index_of_spk(script).is_some() } /// Finds how the wallet derived the script pubkey `spk`. /// /// Will only return `Some(_)` if the wallet has given out the spk. 
pub fn derivation_of_spk(&self, spk: &Script) -> Option<(KeychainKind, u32)> { - self.keychain_tracker.txout_index.index_of_spk(spk).copied() + self.tracker.index().index_of_spk(spk).copied() } /// Return the list of unspent outputs of this wallet pub fn list_unspent(&self) -> Vec { - self.keychain_tracker - .full_utxos() + self.tracker + .list_owned_unspents() .map(|(&(keychain, derivation_index), utxo)| LocalUtxo { outpoint: utxo.outpoint, txout: utxo.txout, keychain, is_spent: false, derivation_index, - confirmation_time: utxo.chain_position, + confirmation_time: match utxo.chain_position { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }, }) .collect() } /// Get all the checkpoints the wallet is currently storing indexed by height. pub fn checkpoints(&self) -> &BTreeMap { - self.keychain_tracker.chain().checkpoints() + self.tracker.chain().all_blocks() } /// Returns the latest checkpoint. pub fn latest_checkpoint(&self) -> Option { - self.keychain_tracker.chain().latest_checkpoint() + self.tracker.chain().tip() } /// Returns a iterators of all the script pubkeys for the `Internal` and External` variants in `KeychainKind`. @@ -369,7 +371,7 @@ impl Wallet { pub fn spks_of_all_keychains( &self, ) -> BTreeMap + Clone> { - self.keychain_tracker.txout_index.spks_of_all_keychains() + self.tracker.index().spks_of_all_keychains() } /// Gets an iterator over all the script pubkeys in a single keychain. @@ -381,16 +383,14 @@ impl Wallet { &self, keychain: KeychainKind, ) -> impl Iterator + Clone { - self.keychain_tracker - .txout_index - .spks_of_keychain(&keychain) + self.tracker.index().spks_of_keychain(&keychain) } /// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the /// wallet's database. 
pub fn get_utxo(&self, op: OutPoint) -> Option { - self.keychain_tracker - .full_utxos() + self.tracker + .list_owned_unspents() .find_map(|(&(keychain, derivation_index), txo)| { if op == txo.outpoint { Some(LocalUtxo { @@ -399,7 +399,13 @@ impl Wallet { keychain, is_spent: txo.spent_by.is_none(), derivation_index, - confirmation_time: txo.chain_position, + confirmation_time: match txo.chain_position { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }, }) } else { None @@ -412,11 +418,19 @@ impl Wallet { /// Optionally fill the [`TransactionDetails::transaction`] field with the raw transaction if /// `include_raw` is `true`. pub fn get_tx(&self, txid: Txid, include_raw: bool) -> Option { - let (&confirmation_time, tx) = self.keychain_tracker.chain_graph().get_tx_in_chain(txid)?; - let graph = self.keychain_tracker.graph(); - let txout_index = &self.keychain_tracker.txout_index; + let chain_tip = self.tracker.chain().tip().unwrap_or_default(); + + let raw_tx = self.tracker.graph().get_tx(txid)?; + let pos = self + .tracker + .graph() + .get_chain_position(self.tracker.chain(), chain_tip, txid)?; + + // let (&confirmation_time, tx) = self.keychain_tracker.chain_graph().get_tx_in_chain(txid)?; + let graph = self.tracker.graph(); + let txout_index = self.tracker.index(); - let received = tx + let received = raw_tx .output .iter() .map(|txout| { @@ -428,7 +442,7 @@ impl Wallet { }) .sum(); - let sent = tx + let sent = raw_tx .input .iter() .map(|txin| { @@ -440,7 +454,7 @@ impl Wallet { }) .sum(); - let inputs = tx + let inputs = raw_tx .input .iter() .map(|txin| { @@ -449,16 +463,26 @@ impl Wallet { .map(|txout| txout.value) }) .sum::>(); - let outputs = tx.output.iter().map(|txout| txout.value).sum(); + let outputs = raw_tx.output.iter().map(|txout| txout.value).sum(); let fee = inputs.map(|inputs| 
inputs.saturating_sub(outputs)); Some(TransactionDetails { - transaction: if include_raw { Some(tx.clone()) } else { None }, + transaction: if include_raw { + Some(raw_tx.clone()) + } else { + None + }, txid, received, sent, fee, - confirmation_time, + confirmation_time: match pos { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }, }) } @@ -472,10 +496,15 @@ impl Wallet { pub fn insert_checkpoint( &mut self, block_id: BlockId, - ) -> Result { - let changeset = self.keychain_tracker.insert_checkpoint(block_id)?; + ) -> Result + where + D: PersistBackend, + { + let changeset = self.tracker.insert_block(block_id)?; let changed = changeset.is_empty(); - self.persist.stage(changeset); + if changed { + self.persist.stage(changeset); + } Ok(changed) } @@ -496,42 +525,63 @@ impl Wallet { pub fn insert_tx( &mut self, tx: Transaction, - position: ConfirmationTime, - ) -> Result> { - let changeset = self.keychain_tracker.insert_tx(tx, position)?; - let changed = changeset.is_empty(); - self.persist.stage(changeset); + pos: ConfirmationTime, + last_seen: Option, + ) -> Result + where + D: PersistBackend, + { + // [TODO] This is not okay! + let anchor_block = self.tracker.chain().tip().unwrap_or_default(); + let anchor = match pos { + ConfirmationTime::Confirmed { height, time } if height <= anchor_block.height => { + Some(ConfirmationTimeAnchor { + anchor_block, + confirmation_height: height, + confirmation_time: time, + }) + } + _ => None, + }; + + let changeset = self.tracker.insert_tx(tx, anchor, last_seen)?; + let changed = !changeset.is_empty(); + if changed { + self.persist.stage(changeset); + } Ok(changed) } #[deprecated(note = "use Wallet::transactions instead")] /// Deprecated. use `Wallet::transactions` instead. 
pub fn list_transactions(&self, include_raw: bool) -> Vec { - self.keychain_tracker - .chain() - .txids() - .map(|&(_, txid)| self.get_tx(txid, include_raw).expect("must exist")) + self.tracker + .list_transactions() + .map(|tx| self.get_tx(tx.node.txid, include_raw).expect("must exist")) .collect() } /// Iterate over the transactions in the wallet in order of ascending confirmation time with /// unconfirmed transactions last. - pub fn transactions( - &self, - ) -> impl DoubleEndedIterator + '_ { - self.keychain_tracker - .chain_graph() - .transactions_in_chain() - .map(|(pos, tx)| (*pos, tx)) + pub fn transactions(&self) -> impl Iterator + '_ { + self.tracker.list_transactions().map(|canonical_tx| { + ( + match canonical_tx.observed_as { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }, + canonical_tx.node.tx, + ) + }) } /// Return the balance, separated into available, trusted-pending, untrusted-pending and immature /// values. pub fn get_balance(&self) -> Balance { - self.keychain_tracker.balance(|keychain| match keychain { - KeychainKind::External => false, - KeychainKind::Internal => true, - }) + self.tracker.balance() } /// Add an external signer @@ -613,17 +663,17 @@ impl Wallet { params: TxParams, ) -> Result<(psbt::PartiallySignedTransaction, TransactionDetails), Error> where - D: persist::PersistBackend, + D: PersistBackend, { let external_descriptor = self - .keychain_tracker - .txout_index + .tracker + .index() .keychains() .get(&KeychainKind::External) .expect("must exist"); let internal_descriptor = self - .keychain_tracker - .txout_index + .tracker + .index() .keychains() .get(&KeychainKind::Internal); @@ -700,9 +750,9 @@ impl Wallet { let current_height = match params.current_height { // If they didn't tell us the current height, we assume it's the latest sync height. 
None => self - .keychain_tracker + .tracker .chain() - .latest_checkpoint() + .tip() .and_then(|cp| cp.height.into()) .map(|height| LockTime::from_height(height).expect("Invalid height")), h => h, @@ -874,14 +924,10 @@ impl Wallet { Some(ref drain_recipient) => drain_recipient.clone(), None => { let change_keychain = self.map_keychain(KeychainKind::Internal); - let ((index, spk), changeset) = self - .keychain_tracker - .txout_index - .next_unused_spk(&change_keychain); + let ((index, spk), changeset) = + self.tracker.index_mut().next_unused_spk(&change_keychain); let spk = spk.clone(); - self.keychain_tracker - .txout_index - .mark_used(&change_keychain, index); + self.tracker.index_mut().mark_used(&change_keychain, index); self.persist.stage(changeset.into()); self.persist.commit().expect("TODO"); spk @@ -1019,15 +1065,15 @@ impl Wallet { &mut self, txid: Txid, ) -> Result, Error> { - let graph = self.keychain_tracker.graph(); - let txout_index = &self.keychain_tracker.txout_index; - let tx_and_height = self.keychain_tracker.chain_graph().get_tx_in_chain(txid); + let graph = self.tracker.graph(); + let txout_index = &self.tracker.index(); + let tx_and_height = self.tracker.get_tx(txid); let mut tx = match tx_and_height { None => return Err(Error::TransactionNotFound), - Some((ConfirmationTime::Confirmed { .. 
}, _tx)) => { - return Err(Error::TransactionConfirmed) - } - Some((_, tx)) => tx.clone(), + Some(canonical_tx) => match canonical_tx.observed_as { + bdk_chain::ObservedAs::Confirmed(_) => return Err(Error::TransactionConfirmed), + bdk_chain::ObservedAs::Unconfirmed(_) => canonical_tx.node.tx.clone(), + }, }; if !tx @@ -1051,11 +1097,19 @@ impl Wallet { let original_utxos = original_txin .iter() .map(|txin| -> Result<_, Error> { - let (&confirmation_time, prev_tx) = self - .keychain_tracker - .chain_graph() - .get_tx_in_chain(txin.previous_output.txid) + let canonical_prev_tx = self + .tracker + .get_tx(txin.previous_output.txid) .ok_or(Error::UnknownUtxo)?; + let confirmation_time = match canonical_prev_tx.observed_as { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }; + let prev_tx = canonical_prev_tx.node.tx; + let txout = &prev_tx.output[txin.previous_output.vout as usize]; let weighted_utxo = match txout_index.index_of_spk(&txout.script_pubkey) { @@ -1231,7 +1285,7 @@ impl Wallet { /// /// This can be used to build a watch-only version of a wallet pub fn public_descriptor(&self, keychain: KeychainKind) -> Option<&ExtendedDescriptor> { - self.keychain_tracker.txout_index.keychains().get(&keychain) + self.tracker.index().keychains().get(&keychain) } /// Finalize a PSBT, i.e., for each input determine if sufficient data is available to pass @@ -1258,19 +1312,14 @@ impl Wallet { if psbt_input.final_script_sig.is_some() || psbt_input.final_script_witness.is_some() { continue; } - let confirmation_height = self - .keychain_tracker - .chain() - .tx_position(input.previous_output.txid) - .map(|conftime| match conftime { - &ConfirmationTime::Confirmed { height, .. 
} => height, - ConfirmationTime::Unconfirmed => u32::MAX, - }); - let last_sync_height = self - .keychain_tracker - .chain() - .latest_checkpoint() - .map(|block_id| block_id.height); + let confirmation_height = + self.tracker + .get_tx(input.previous_output.txid) + .map(|tx| match tx.observed_as { + bdk_chain::ObservedAs::Confirmed(a) => a.confirmation_height, + bdk_chain::ObservedAs::Unconfirmed(_) => u32::MAX, + }); + let last_sync_height = self.tracker.chain().tip().map(|block_id| block_id.height); let current_height = sign_options.assume_height.or(last_sync_height); debug!( @@ -1288,8 +1337,8 @@ impl Wallet { .get_utxo_for(n) .and_then(|txout| self.get_descriptor_for_txout(&txout)) .or_else(|| { - self.keychain_tracker - .txout_index + self.tracker + .index() .keychains() .iter() .find_map(|(_, desc)| { @@ -1347,14 +1396,12 @@ impl Wallet { /// The derivation index of this wallet. It will return `None` if it has not derived any addresses. /// Otherwise, it will return the index of the highest address it has derived. pub fn derivation_index(&self, keychain: KeychainKind) -> Option { - self.keychain_tracker - .txout_index - .last_revealed_index(&keychain) + self.tracker.index().last_revealed_index(&keychain) } /// The index of the next address that you would get if you were to ask the wallet for a new address pub fn next_derivation_index(&self, keychain: KeychainKind) -> u32 { - self.keychain_tracker.txout_index.next_index(&keychain).0 + self.tracker.index().next_index(&keychain).0 } /// Informs the wallet that you no longer intend to broadcast a tx that was built from it. @@ -1362,7 +1409,7 @@ impl Wallet { /// This frees up the change address used when creating the tx for use in future transactions. 
// TODO: Make this free up reserved utxos when that's implemented pub fn cancel_tx(&mut self, tx: &Transaction) { - let txout_index = &mut self.keychain_tracker.txout_index; + let txout_index = self.tracker.index_mut(); for txout in &tx.output { if let Some(&(keychain, index)) = txout_index.index_of_spk(&txout.script_pubkey) { // NOTE: unmark_used will **not** make something unused if it has actually been used @@ -1383,10 +1430,7 @@ impl Wallet { } fn get_descriptor_for_txout(&self, txout: &TxOut) -> Option { - let &(keychain, child) = self - .keychain_tracker - .txout_index - .index_of_spk(&txout.script_pubkey)?; + let &(keychain, child) = self.tracker.index().index_of_spk(&txout.script_pubkey)?; let descriptor = self.get_descriptor_for_keychain(keychain); Some(descriptor.at_derivation_index(child)) } @@ -1440,13 +1484,23 @@ impl Wallet { .iter() .map(|u| { let txid = u.0.outpoint.txid; - let tx = self.keychain_tracker.chain_graph().get_tx_in_chain(txid); + let tx = self.tracker.get_tx(txid); match tx { // We don't have the tx in the db for some reason, // so we can't know for sure if it's mature or not. // We prefer not to spend it. None => false, - Some((confirmation_time, tx)) => { + // Some((confirmation_time, tx)) => { + Some(canonical_tx) => { + let tx = canonical_tx.node.tx; + let confirmation_time = match canonical_tx.observed_as { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }; + // Whether the UTXO is mature and, if needed, confirmed let mut spendable = true; if must_only_use_confirmed_tx && !confirmation_time.is_confirmed() { @@ -1461,7 +1515,7 @@ impl Wallet { match confirmation_time { ConfirmationTime::Confirmed { height, .. 
} => { // https://github.com/bitcoin/bitcoin/blob/c5e67be03bb06a5d7885c55db1f016fbf2333fe3/src/validation.cpp#L373-L375 - spendable &= (current_height.saturating_sub(*height)) + spendable &= (current_height.saturating_sub(height)) >= COINBASE_MATURITY; } ConfirmationTime::Unconfirmed => spendable = false, @@ -1590,8 +1644,8 @@ impl Wallet { // Try to find the prev_script in our db to figure out if this is internal or external, // and the derivation index let &(keychain, child) = self - .keychain_tracker - .txout_index + .tracker + .index() .index_of_spk(&utxo.txout.script_pubkey) .ok_or(Error::UnknownUtxo)?; @@ -1608,7 +1662,7 @@ impl Wallet { .map_err(MiniscriptPsbtError::Conversion)?; let prev_output = utxo.outpoint; - if let Some(prev_tx) = self.keychain_tracker.graph().get_tx(prev_output.txid) { + if let Some(prev_tx) = self.tracker.graph().get_tx(prev_output.txid) { if desc.is_witness() || desc.is_taproot() { psbt_input.witness_utxo = Some(prev_tx.output[prev_output.vout as usize].clone()); } @@ -1641,10 +1695,7 @@ impl Wallet { // Try to figure out the keychain and derivation for every input and output for (is_input, index, out) in utxos.into_iter() { - if let Some(&(keychain, child)) = self - .keychain_tracker - .txout_index - .index_of_spk(&out.script_pubkey) + if let Some(&(keychain, child)) = self.tracker.index().index_of_spk(&out.script_pubkey) { debug!( "Found descriptor for input #{} {:?}/{}", @@ -1679,27 +1730,27 @@ impl Wallet { .to_string() } - /// Applies an update to the wallet and stages the changes (but does not [`commit`] them). - /// - /// Usually you create an `update` by interacting with some blockchain data source and inserting - /// transactions related to your wallet into it. 
- /// - /// [`commit`]: Self::commit - pub fn apply_update(&mut self, update: Update) -> Result<(), UpdateError> - where - D: persist::PersistBackend, - { - let changeset = self.keychain_tracker.apply_update(update)?; - self.persist.stage(changeset); - Ok(()) - } + // /// Applies an update to the wallet and stages the changes (but does not [`commit`] them). + // /// + // /// Usually you create an `update` by interacting with some blockchain data source and inserting + // /// transactions related to your wallet into it. + // /// + // /// [`commit`]: Self::commit + // pub fn apply_update(&mut self, update: Update) -> Result<(), UpdateError> + // where + // D: PersistBackend, + // { + // let changeset = self.tracker.apply_update(update)?; + // self.persist.stage(changeset); + // Ok(()) + // } /// Commits all curently [`staged`] changed to the persistence backend returning and error when this fails. /// /// [`staged`]: Self::staged pub fn commit(&mut self) -> Result<(), D::WriteError> where - D: persist::PersistBackend, + D: PersistBackend, { self.persist.commit() } @@ -1707,33 +1758,48 @@ impl Wallet { /// Returns the changes that will be staged with the next call to [`commit`]. /// /// [`commit`]: Self::commit - pub fn staged(&self) -> &ChangeSet { + pub fn staged(&self) -> &ChangeSet + where + D: PersistBackend, + { self.persist.staged() } - /// Get a reference to the inner [`TxGraph`](bdk_chain::tx_graph::TxGraph). - pub fn as_graph(&self) -> &bdk_chain::tx_graph::TxGraph { - self.keychain_tracker.graph() + pub fn tracker(&self) -> &Tracker { + &self.tracker } - /// Get a reference to the inner [`ChainGraph`](bdk_chain::chain_graph::ChainGraph). 
- pub fn as_chain_graph(&self) -> &bdk_chain::chain_graph::ChainGraph { - self.keychain_tracker.chain_graph() + pub fn tracker_mut(&mut self) -> &mut Tracker { + &mut self.tracker } -} -impl AsRef for Wallet { - fn as_ref(&self) -> &bdk_chain::tx_graph::TxGraph { - self.keychain_tracker.graph() - } -} + pub fn update(&mut self, mut f: F) -> Result + where + D: PersistBackend, + F: FnMut(&mut Tracker) -> Result, + { + let changeset = f(&mut self.tracker)?; + if changeset.is_empty() { + return Ok(false); + } -impl AsRef> for Wallet { - fn as_ref(&self) -> &bdk_chain::chain_graph::ChainGraph { - self.keychain_tracker.chain_graph() + self.persist.stage(changeset); + Ok(true) } } +// impl AsRef for Wallet { +// fn as_ref(&self) -> &bdk_chain::tx_graph::TxGraph { +// self.keychain_tracker.graph() +// } +// } + +// impl AsRef> for Wallet { +// fn as_ref(&self) -> &bdk_chain::chain_graph::ChainGraph { +// self.keychain_tracker.chain_graph() +// } +// } + /// Deterministically generate a unique name given the descriptors defining the wallet /// /// Compatible with [`wallet_name_from_descriptor`] @@ -1796,7 +1862,7 @@ macro_rules! 
doctest_wallet { let _ = wallet.insert_tx(tx.clone(), ConfirmationTime::Confirmed { height: 500, time: 50_000 - }); + }, None); wallet }} diff --git a/crates/bdk/src/wallet/tracker.rs b/crates/bdk/src/wallet/tracker.rs new file mode 100644 index 000000000..07fdaeec2 --- /dev/null +++ b/crates/bdk/src/wallet/tracker.rs @@ -0,0 +1,197 @@ +use alloc::vec::Vec; +use bdk_chain::{ + indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, + keychain::{Balance, DerivationAdditions, KeychainTxOutIndex}, + local_chain::{self, LocalChain}, + tx_graph::{CanonicalTx, TxGraph}, + Append, BlockId, ConfirmationTimeAnchor, ObservedAs, +}; +use bitcoin::{Transaction, Txid}; + +use crate::KeychainKind; + +pub type FullTxOut = bdk_chain::FullTxOut>; + +#[derive(Debug, Default)] +pub struct Tracker { + pub indexed_graph: IndexedTxGraph>, + pub chain: LocalChain, +} + +impl Tracker { + pub fn chain(&self) -> &LocalChain { + &self.chain + } + + pub fn graph(&self) -> &TxGraph { + self.indexed_graph.graph() + } + + pub fn index(&self) -> &KeychainTxOutIndex { + &self.indexed_graph.index + } + + pub fn index_mut(&mut self) -> &mut KeychainTxOutIndex { + &mut self.indexed_graph.index + } + + pub fn insert_tx( + &mut self, + tx: Transaction, + anchors: impl IntoIterator, + seen_at: Option, + ) -> Result { + let txid = tx.txid(); + + let anchors = anchors + .into_iter() + .map(|a| { + if a.anchor_block.height < a.confirmation_height { + Err(InsertTxInvalidAnchorError { + txid, + tx_height: a.confirmation_height, + anchor_block_height: a.anchor_block.height, + }) + } else { + Ok(a) + } + }) + .collect::, _>>()?; + + Ok(self.indexed_graph.insert_tx(&tx, anchors, seen_at).into()) + } + + pub fn insert_block( + &mut self, + block_id: BlockId, + ) -> Result { + self.chain.insert_block(block_id).map(Into::into) + } + + pub fn apply_changeset(&mut self, changeset: ChangeSet) { + self.indexed_graph + .apply_additions(changeset.indexed_additions); + self.chain.apply_changeset(changeset.chain_changeset); 
+ } + + pub fn list_owned_txouts(&self) -> impl Iterator { + // [TODO] Use block id of correct genesis block + let chain_tip = self.chain.tip().unwrap_or_default(); + + self.indexed_graph + .graph() + .list_chain_txouts(&self.chain, chain_tip) + .filter_map(|full_txo| { + let keychain_ind = self.index().index_of_spk(&full_txo.txout.script_pubkey)?; + Some((keychain_ind, full_txo)) + }) + } + + pub fn list_owned_unspents(&self) -> impl Iterator { + self.list_owned_txouts() + .filter(|(_, full_txo)| full_txo.spent_by.is_none()) + } + + pub fn list_transactions( + &self, + ) -> impl Iterator> { + // [TODO] Use block id of correct genesis block + let chain_tip = self.chain.tip().unwrap_or_default(); + self.graph().list_chain_txs(&self.chain, chain_tip) + } + + pub fn balance(&self) -> Balance { + let chain_tip = self.chain.tip().unwrap_or_default(); + self.indexed_graph.balance(&self.chain, chain_tip, |spk| { + matches!( + self.indexed_graph.index.index_of_spk(spk), + Some(&(KeychainKind::Internal, _)) + ) + }) + } + + pub fn get_tx(&self, txid: Txid) -> Option> { + let chain_tip = self.chain().tip().unwrap_or_default(); + + let node = self.graph().get_tx_node(txid)?; + let observed_as = self + .graph() + .get_chain_position(self.chain(), chain_tip, txid)?; + Some(CanonicalTx { observed_as, node }) + } +} + +/// The changeset produced internally by applying an update +#[derive(Debug, Default, PartialEq, serde::Deserialize, serde::Serialize)] +pub struct ChangeSet { + pub indexed_additions: + IndexedAdditions>, + pub chain_changeset: local_chain::ChangeSet, +} + +impl ChangeSet { + pub fn is_empty(&self) -> bool { + self.indexed_additions.graph_additions.is_empty() + && self.indexed_additions.index_additions.is_empty() + && self.chain_changeset.is_empty() + } +} + +impl Append for ChangeSet { + fn append(&mut self, other: Self) { + Append::append(&mut self.indexed_additions, other.indexed_additions); + Append::append(&mut self.chain_changeset, other.chain_changeset); + 
} +} + +impl From> for ChangeSet { + fn from(index_additions: DerivationAdditions) -> Self { + Self { + indexed_additions: IndexedAdditions { + index_additions, + ..Default::default() + }, + ..Default::default() + } + } +} + +impl From for ChangeSet { + fn from(chain_changeset: local_chain::ChangeSet) -> Self { + Self { + chain_changeset, + ..Default::default() + } + } +} + +impl From>> + for ChangeSet +{ + fn from( + indexed_additions: IndexedAdditions< + ConfirmationTimeAnchor, + DerivationAdditions, + >, + ) -> Self { + Self { + indexed_additions, + chain_changeset: Default::default(), + } + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct InsertTxInvalidAnchorError { + pub txid: Txid, + pub tx_height: u32, + pub anchor_block_height: u32, +} + +impl std::fmt::Display for InsertTxInvalidAnchorError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "cannot insert tx ({}) with anchor block height ({}) lower than tx confirmation height ({})", self.txid, self.anchor_block_height, self.tx_height) + } +} + +impl std::error::Error for InsertTxInvalidAnchorError {} diff --git a/crates/bdk/src/wallet/tx_builder.rs b/crates/bdk/src/wallet/tx_builder.rs index dbd4811c1..f647142c4 100644 --- a/crates/bdk/src/wallet/tx_builder.rs +++ b/crates/bdk/src/wallet/tx_builder.rs @@ -39,7 +39,7 @@ use crate::collections::BTreeMap; use crate::collections::HashSet; use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec}; -use bdk_chain::ConfirmationTime; +use bdk_chain::PersistBackend; use core::cell::RefCell; use core::marker::PhantomData; @@ -47,7 +47,8 @@ use bitcoin::util::psbt::{self, PartiallySignedTransaction as Psbt}; use bitcoin::{LockTime, OutPoint, Script, Sequence, Transaction}; use super::coin_selection::{CoinSelectionAlgorithm, DefaultCoinSelectionAlgorithm}; -use super::persist; +use super::ChangeSet; +use super::Tracker; use crate::{ types::{FeeRate, KeychainKind, LocalUtxo, WeightedUtxo}, TransactionDetails, @@ -526,7 +527,7 @@ 
impl<'a, D, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> TxBuilder<'a, D, /// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error> where - D: persist::PersistBackend, + D: PersistBackend, { self.wallet .borrow_mut() diff --git a/crates/bdk/tests/common.rs b/crates/bdk/tests/common.rs index de9467032..7257625fd 100644 --- a/crates/bdk/tests/common.rs +++ b/crates/bdk/tests/common.rs @@ -1,6 +1,6 @@ #![allow(unused)] use bdk::{wallet::AddressIndex, Wallet}; -use bdk_chain::{BlockId, ConfirmationTime}; +use bdk_chain::{BlockId, ConfirmationTime, ConfirmationTimeAnchor}; use bitcoin::hashes::Hash; use bitcoin::{BlockHash, Network, Transaction, TxOut}; @@ -9,6 +9,10 @@ pub fn get_funded_wallet_with_change( descriptor: &str, change: Option<&str>, ) -> (Wallet, bitcoin::Txid) { + let anchor_block = BlockId { + height: 1_000, + hash: BlockHash::all_zeros(), + }; let mut wallet = Wallet::new_no_persist(descriptor, change, Network::Regtest).unwrap(); let address = wallet.get_address(AddressIndex::New).address; @@ -22,19 +26,15 @@ pub fn get_funded_wallet_with_change( }], }; - wallet - .insert_checkpoint(BlockId { - height: 1_000, - hash: BlockHash::all_zeros(), - }) - .unwrap(); + wallet.insert_checkpoint(anchor_block).unwrap(); wallet .insert_tx( tx.clone(), ConfirmationTime::Confirmed { - height: 1_000, + height: 1000, time: 100, }, + None, ) .unwrap(); diff --git a/crates/bdk/tests/wallet.rs b/crates/bdk/tests/wallet.rs index 0ada20d39..ebeea2592 100644 --- a/crates/bdk/tests/wallet.rs +++ b/crates/bdk/tests/wallet.rs @@ -44,6 +44,7 @@ fn receive_output(wallet: &mut Wallet, value: u64, height: TxHeight) -> OutPoint }, TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed, }, + None, ) .unwrap(); @@ -811,7 +812,7 @@ fn test_create_tx_add_utxo() { lock_time: PackedLockTime(0), }; wallet - .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed) + 
.insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed, None) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -848,7 +849,7 @@ fn test_create_tx_manually_selected_insufficient() { }; wallet - .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed) + .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed, None) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -889,7 +890,9 @@ fn test_create_tx_policy_path_no_csv() { script_pubkey: wallet.get_address(New).script_pubkey(), }], }; - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let external_policy = wallet.policies(KeychainKind::External).unwrap().unwrap(); let root_id = external_policy.id; @@ -1214,7 +1217,9 @@ fn test_bump_fee_irreplaceable_tx() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); wallet.build_fee_bump(txid).unwrap().finish().unwrap(); } @@ -1237,6 +1242,7 @@ fn test_bump_fee_confirmed_tx() { height: 42, time: 42_000, }, + None, ) .unwrap(); @@ -1257,7 +1263,9 @@ fn test_bump_fee_low_fee_rate() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(1.0)); @@ -1278,7 +1286,9 @@ fn test_bump_fee_low_abs() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(10); @@ -1298,7 +1308,9 @@ fn test_bump_fee_zero_abs() { 
let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(0); @@ -1316,7 +1328,9 @@ fn test_bump_fee_reduce_change() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(2.5)).enable_rbf(); @@ -1401,7 +1415,9 @@ fn test_bump_fee_reduce_single_recipient() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -1432,7 +1448,9 @@ fn test_bump_fee_absolute_reduce_single_recipient() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + assert!(wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .expect("can insert tx")); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -1471,6 +1489,7 @@ fn test_bump_fee_drain_wallet() { height: wallet.latest_checkpoint().unwrap().height, time: 42_000, }, + None, ) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -1488,7 +1507,9 @@ fn test_bump_fee_drain_wallet() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) 
+ .unwrap(); assert_eq!(original_details.sent, 25_000); // for the new feerate, it should be enough to reduce the output, but since we specify @@ -1523,7 +1544,11 @@ fn test_bump_fee_remove_output_manually_selected_only() { }], }; wallet - .insert_tx(init_tx.clone(), wallet.transactions().last().unwrap().0) + .insert_tx( + init_tx.clone(), + wallet.transactions().last().unwrap().0, + None, + ) .unwrap(); let outpoint = OutPoint { txid: init_tx.txid(), @@ -1540,7 +1565,9 @@ fn test_bump_fee_remove_output_manually_selected_only() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); assert_eq!(original_details.sent, 25_000); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1563,7 +1590,7 @@ fn test_bump_fee_add_input() { }], }; wallet - .insert_tx(init_tx, wallet.transactions().last().unwrap().0) + .insert_tx(init_tx, wallet.transactions().last().unwrap().0, None) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -1574,7 +1601,9 @@ fn test_bump_fee_add_input() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(50.0)); @@ -1618,7 +1647,9 @@ fn test_bump_fee_absolute_add_input() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(6_000); @@ -1668,7 +1699,9 @@ fn 
test_bump_fee_no_change_add_input_and_change() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); // now bump the fees without using `allow_shrinking`. the wallet should add an // extra input and a change output, and leave the original output untouched @@ -1724,7 +1757,9 @@ fn test_bump_fee_add_input_change_dust() { assert_eq!(tx.input.len(), 1); assert_eq!(tx.output.len(), 2); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); // We set a fee high enough that during rbf we are forced to add @@ -1784,7 +1819,7 @@ fn test_bump_fee_force_add_input() { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } wallet - .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed) + .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed, None) .unwrap(); // the new fee_rate is low enough that just reducing the change would be fine, but we force // the addition of an extra input with `add_utxo()` @@ -1839,7 +1874,7 @@ fn test_bump_fee_absolute_force_add_input() { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } wallet - .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed) + .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed, None) .unwrap(); // the new fee_rate is low enough that just reducing the change would be fine, but we force @@ -1899,7 +1934,9 @@ fn test_bump_fee_unconfirmed_inputs_only() { for txin in &mut tx.input { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(25.0)); 
builder.finish().unwrap(); @@ -1928,7 +1965,9 @@ fn test_bump_fee_unconfirmed_input() { for txin in &mut tx.input { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -3022,6 +3061,7 @@ fn test_spend_coinbase() { height: confirmation_height, time: 30_000, }, + None, ) .unwrap(); diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index 0ce74a841..55c1ac5f5 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -221,6 +221,32 @@ impl Anchor for ConfirmationHeightAnchor { } } +/// An [`Anchor`] implementation that also records the exact confirmation time and height of the +/// transaction. +#[derive(Debug, Default, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate") +)] +pub struct ConfirmationTimeAnchor { + /// The anchor block. + pub anchor_block: BlockId, + + pub confirmation_height: u32, + pub confirmation_time: u64, +} + +impl Anchor for ConfirmationTimeAnchor { + fn anchor_block(&self) -> BlockId { + self.anchor_block + } + + fn confirmation_height_upper_bound(&self) -> u32 { + self.confirmation_height + } +} + /// A `TxOut` with as much data as we can retrieve about it #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct FullTxOut

{ diff --git a/crates/chain/src/indexed_tx_graph.rs b/crates/chain/src/indexed_tx_graph.rs index 6d8c16ffa..51f2558ae 100644 --- a/crates/chain/src/indexed_tx_graph.rs +++ b/crates/chain/src/indexed_tx_graph.rs @@ -12,6 +12,7 @@ use crate::{ /// A struct that combines [`TxGraph`] and an [`Indexer`] implementation. /// /// This structure ensures that [`TxGraph`] and [`Indexer`] are updated atomically. +#[derive(Debug)] pub struct IndexedTxGraph { /// Transaction index. pub index: I, diff --git a/crates/chain/src/local_chain.rs b/crates/chain/src/local_chain.rs index 7c74e74cf..fc635173f 100644 --- a/crates/chain/src/local_chain.rs +++ b/crates/chain/src/local_chain.rs @@ -85,6 +85,10 @@ impl LocalChain { .map(|&hash| BlockId { height, hash }) } + pub fn all_blocks(&self) -> &BTreeMap { + &self.blocks + } + /// This is like the sparsechain's logic, expect we must guarantee that all invalidated heights /// are to be re-filled. pub fn determine_changeset(&self, update: &Self) -> Result { diff --git a/crates/chain/src/persist.rs b/crates/chain/src/persist.rs index 35d4b44ff..e23fc8fb7 100644 --- a/crates/chain/src/persist.rs +++ b/crates/chain/src/persist.rs @@ -8,6 +8,7 @@ use crate::Append; /// Not all changes to the tracker, which is an in-memory representation of wallet/blockchain /// data, needs to be written to disk right away, so [`Persist::stage`] can be used to *stage* /// changes first and then [`Persist::commit`] can be used to write changes to disk. +#[derive(Debug)] pub struct Persist { backend: B, stage: C, diff --git a/crates/chain/src/tx_graph.rs b/crates/chain/src/tx_graph.rs index cee688be7..09d91e0e1 100644 --- a/crates/chain/src/tx_graph.rs +++ b/crates/chain/src/tx_graph.rs @@ -349,6 +349,11 @@ impl TxGraph { .filter(move |(_, conflicting_txid)| *conflicting_txid != txid) } + /// Iterates over all transaction anchors known by [`TxGraph`]. 
+ pub fn all_anchors(&self) -> impl ExactSizeIterator + DoubleEndedIterator { + self.anchors.iter() + } + /// Whether the graph has any transactions or outputs in it. pub fn is_empty(&self) -> bool { self.txs.is_empty() @@ -624,11 +629,9 @@ impl TxGraph { chain_tip: BlockId, txid: Txid, ) -> Result>, C::Error> { - let (tx_node, anchors, &last_seen) = match self.txs.get(&txid) { - Some((tx, anchors, last_seen)) if !(anchors.is_empty() && *last_seen == 0) => { - (tx, anchors, last_seen) - } - _ => return Ok(None), + let (tx_node, anchors, last_seen) = match self.txs.get(&txid) { + Some(v) => v, + None => return Ok(None), }; for anchor in anchors { @@ -657,12 +660,12 @@ impl TxGraph { return Ok(None); } } - if conflicting_tx.last_seen_unconfirmed > last_seen { + if conflicting_tx.last_seen_unconfirmed > *last_seen { return Ok(None); } } - Ok(Some(ObservedAs::Unconfirmed(last_seen))) + Ok(Some(ObservedAs::Unconfirmed(*last_seen))) } /// Get the position of the transaction in `chain` with tip `chain_tip`. diff --git a/crates/chain/tests/test_tx_graph.rs b/crates/chain/tests/test_tx_graph.rs index 41b2ae02f..eb41115c4 100644 --- a/crates/chain/tests/test_tx_graph.rs +++ b/crates/chain/tests/test_tx_graph.rs @@ -717,10 +717,11 @@ fn test_chain_spends() { ObservedAs::Confirmed(&local_chain.get_block(95).expect("block expected")) ); - // As long the unconfirmed tx isn't marked as seen, chain_spend will return None. - assert!(graph - .get_chain_spend(&local_chain, tip, OutPoint::new(tx_0.txid(), 1)) - .is_none()); + // Even if seen is 0, chain_spend will still return some + assert_eq!( + graph.get_chain_spend(&local_chain, tip, OutPoint::new(tx_0.txid(), 1)), + Some((ObservedAs::Unconfirmed(0), tx_2.txid())) + ); // Mark the unconfirmed as seen and check correct ObservedAs status is returned. 
let _ = graph.insert_seen_at(tx_2.txid(), 1234567); diff --git a/crates/electrum/Cargo.toml b/crates/electrum/Cargo.toml index 20eac4d37..96eae5cf8 100644 --- a/crates/electrum/Cargo.toml +++ b/crates/electrum/Cargo.toml @@ -12,5 +12,9 @@ readme = "README.md" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +bdk = { path = "../bdk", version = "1.0.0-alpha.0", optional = true } bdk_chain = { path = "../chain", version = "0.4.0", features = ["serde", "miniscript"] } electrum-client = { version = "0.12" } + +[features] +wallet = ["bdk"] diff --git a/crates/electrum/src/v2.rs b/crates/electrum/src/v2.rs index 01c28d3a6..60d303103 100644 --- a/crates/electrum/src/v2.rs +++ b/crates/electrum/src/v2.rs @@ -3,18 +3,18 @@ use bdk_chain::{ indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, keychain::{DerivationAdditions, KeychainTxOutIndex}, local_chain::{self, LocalChain, UpdateNotConnectedError}, - tx_graph::TxGraph, - Anchor, Append, BlockId, ConfirmationHeightAnchor, + tx_graph::{self, TxGraph}, + Anchor, Append, BlockId, ConfirmationHeightAnchor, ConfirmationTimeAnchor, }; use electrum_client::{Client, ElectrumApi, Error}; use std::{ - collections::{BTreeMap, BTreeSet, HashMap}, + collections::{BTreeMap, BTreeSet, HashMap, HashSet}, fmt::Debug, }; use crate::InternalError; -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct ElectrumUpdate { pub graph_update: G, pub chain_update: LocalChain, @@ -34,9 +34,9 @@ impl Default for ElectrumUpdate { pub type IntermediaryElectrumUpdate = ElectrumUpdate>, K>; impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { - pub fn missing_full_txs( + pub fn missing_full_txs( &'a self, - graph: &'a TxGraph, + graph: &'a TxGraph, ) -> impl Iterator + 'a { self.graph_update .keys() @@ -56,6 +56,7 @@ impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { let _ = graph_update.insert_anchor(txid, anchor); } } + dbg!(graph_update.full_txs().count()); FinalElectrumUpdate { 
graph_update, chain_update: self.chain_update, @@ -66,6 +67,69 @@ impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { pub type FinalElectrumUpdate = ElectrumUpdate, K>; +impl FinalElectrumUpdate { + pub fn into_confirmation_time_update( + self, + client: &Client, + ) -> Result, Error> { + let relevant_heights = { + let mut visited_heights = HashSet::new(); + self.graph_update + .all_anchors() + .map(|(a, _)| a.confirmation_height_upper_bound()) + .filter(move |&h| visited_heights.insert(h)) + .collect::>() + }; + + // [TODO] We need to check whether block hashes is "anchored" to our anchor. + let height_to_time = relevant_heights + .clone() + .into_iter() + .zip( + client + .batch_block_header(relevant_heights)? + .into_iter() + .map(|bh| bh.time as u64), + ) + .collect::>(); + + let graph_additions = { + let old_additions = TxGraph::default().determine_additions(&self.graph_update); + tx_graph::Additions { + tx: old_additions.tx, + txout: old_additions.txout, + last_seen: old_additions.last_seen, + anchors: old_additions + .anchors + .into_iter() + .map(|(height_anchor, txid)| { + let confirmation_height = dbg!(height_anchor + .confirmation_height + .expect("must have confirmation height")); + let confirmation_time = height_to_time[&confirmation_height]; + let time_anchor = ConfirmationTimeAnchor { + anchor_block: height_anchor.anchor_block, + confirmation_height, + confirmation_time, + }; + (time_anchor, txid) + }) + .collect(), + } + }; + + Ok(FinalElectrumUpdate { + graph_update: { + let mut graph = TxGraph::default(); + graph.apply_additions(graph_additions); + graph + }, + chain_update: self.chain_update, + keychain_update: self.keychain_update, + }) + } +} + impl FinalElectrumUpdate { pub fn apply( self, @@ -94,6 +158,27 @@ impl FinalElectrumUpdate { } } +#[cfg(feature = "wallet")] +impl FinalElectrumUpdate { + pub fn apply_to_tracker( + self, + tracker: &mut bdk::wallet::Tracker, + ) -> Result { + Ok(bdk::wallet::ChangeSet { + indexed_additions: { + let mut 
additions = tracker.indexed_graph.apply_update(self.graph_update); + let (_, derivation_additions) = tracker + .indexed_graph + .index + .reveal_to_target_multi(&self.keychain_update); + additions.index_additions.append(derivation_additions); + additions + }, + chain_changeset: tracker.chain.apply_update(self.chain_update)?, + }) + } +} + pub trait ElectrumExt { fn get_tip(&self) -> Result<(u32, BlockHash), Error>; diff --git a/crates/file_store/Cargo.toml b/crates/file_store/Cargo.toml index 791780409..681d5cbce 100644 --- a/crates/file_store/Cargo.toml +++ b/crates/file_store/Cargo.toml @@ -15,5 +15,10 @@ bdk_chain = { path = "../chain", version = "0.4.0", features = [ "serde", "minis bincode = { version = "1" } serde = { version = "1", features = ["derive"] } +bdk = { path = "../bdk", version = "1.0.0-alpha.0", features = [ "default" ], optional = true } + [dev-dependencies] tempfile = "3" + +[features] +wallet = [ "bdk" ] diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs index 1af635a10..9404888e9 100644 --- a/crates/file_store/src/store.rs +++ b/crates/file_store/src/store.rs @@ -10,6 +10,26 @@ use bincode::Options; use crate::{bincode_options, EntryIter, FileError, IterError}; +#[cfg(feature = "wallet")] +use bdk::wallet::{ChangeSet, Tracker}; + +#[cfg(feature = "wallet")] +impl bdk_chain::PersistBackend for Store { + type WriteError = std::io::Error; + + type LoadError = IterError; + + fn write_changes(&mut self, changeset: &ChangeSet) -> Result<(), Self::WriteError> { + self.append_changeset(changeset) + } + + fn load_into_tracker(&mut self, tracker: &mut Tracker) -> Result<(), Self::LoadError> { + let (changeset, result) = self.aggregate_changesets(); + tracker.apply_changeset(changeset); + result + } +} + /// Persists an append-only list of changesets (`C`) to a single file. /// /// The changesets are the results of altering a tracker implementation (`T`). 
diff --git a/example-crates/wallet_electrum/Cargo.toml b/example-crates/wallet_electrum/Cargo.toml index da84e85f1..b39584a2e 100644 --- a/example-crates/wallet_electrum/Cargo.toml +++ b/example-crates/wallet_electrum/Cargo.toml @@ -5,5 +5,5 @@ edition = "2021" [dependencies] bdk = { path = "../../crates/bdk" } -bdk_electrum = { path = "../../crates/electrum" } -bdk_file_store = { path = "../../crates/file_store" } +bdk_electrum = { path = "../../crates/electrum", features = [ "wallet" ] } +bdk_file_store = { path = "../../crates/file_store", features = [ "wallet" ] } diff --git a/example-crates/wallet_electrum/src/main.rs b/example-crates/wallet_electrum/src/main.rs index 5145d593b..0421088c2 100644 --- a/example-crates/wallet_electrum/src/main.rs +++ b/example-crates/wallet_electrum/src/main.rs @@ -1,4 +1,4 @@ -use std::{io::Write, str::FromStr}; +use std::{io::Write, str::FromStr, time::UNIX_EPOCH}; use bdk::{ bitcoin::{Address, Network}, @@ -6,19 +6,20 @@ use bdk::{ }; use bdk_electrum::{ electrum_client::{self, ElectrumApi}, - ElectrumExt, + v2::ElectrumExt, }; -use bdk_file_store::KeychainStore; +use bdk_file_store::Store; const SEND_AMOUNT: u64 = 5000; const STOP_GAP: usize = 50; const BATCH_SIZE: usize = 5; +const DB_MAGIC: &[u8] = b"example-crates/wallet_electrum"; fn main() -> Result<(), Box> { println!("Hello, world!"); let db_path = std::env::temp_dir().join("bdk-electrum-example"); - let db = KeychainStore::new_from_path(db_path)?; + let db = Store::new_from_path(DB_MAGIC, db_path)?; let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; @@ -58,20 +59,32 @@ fn main() -> Result<(), Box> { ) }) .collect(); - let electrum_update = client - .scan( - local_chain, - spks, - core::iter::empty(), - 
core::iter::empty(), - STOP_GAP, - BATCH_SIZE, - )? - .into_confirmation_time_update(&client)?; + let electrum_update = client.scan( + local_chain, + spks, + core::iter::empty(), + core::iter::empty(), + STOP_GAP, + BATCH_SIZE, + )?; println!(); - let new_txs = client.batch_transaction_get(electrum_update.missing_full_txs(&wallet))?; - let update = electrum_update.into_keychain_scan(new_txs, &wallet)?; - wallet.apply_update(update)?; + + let missing_txs = electrum_update + .missing_full_txs(wallet.tracker().graph()) + .collect::>(); + let new_txs = client.batch_transaction_get(missing_txs)?; + + let now = UNIX_EPOCH + .elapsed() + .expect("must get system time") + .as_secs(); + + let update = electrum_update + .finalize(Some(now), new_txs) + .into_confirmation_time_update(&client)?; + + // update. + wallet.update(|tracker| update.clone().apply_to_tracker(tracker))?; wallet.commit()?; let balance = wallet.get_balance(); diff --git a/example-crates/wallet_esplora/src/main.rs b/example-crates/wallet_esplora/src/main.rs index d8eda32a2..a0e14951c 100644 --- a/example-crates/wallet_esplora/src/main.rs +++ b/example-crates/wallet_esplora/src/main.rs @@ -1,96 +1,97 @@ -use bdk::{ - bitcoin::{Address, Network}, - wallet::AddressIndex, - SignOptions, Wallet, -}; -use bdk_esplora::esplora_client; -use bdk_esplora::EsploraExt; -use bdk_file_store::KeychainStore; -use std::{io::Write, str::FromStr}; +// use bdk::{ +// bitcoin::{Address, Network}, +// wallet::AddressIndex, +// SignOptions, Wallet, +// }; +// use bdk_esplora::esplora_client; +// use bdk_esplora::EsploraExt; +// use bdk_file_store::KeychainStore; +// use std::{io::Write, str::FromStr}; -const SEND_AMOUNT: u64 = 5000; -const STOP_GAP: usize = 50; -const PARALLEL_REQUESTS: usize = 5; +// const SEND_AMOUNT: u64 = 5000; +// const STOP_GAP: usize = 50; +// const PARALLEL_REQUESTS: usize = 5; fn main() -> Result<(), Box> { - let db_path = std::env::temp_dir().join("bdk-esplora-example"); - let db = 
KeychainStore::new_from_path(db_path)?; - let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; - let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + todo!() + // let db_path = std::env::temp_dir().join("bdk-esplora-example"); + // let db = KeychainStore::new_from_path(db_path)?; + // let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + // let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; - let mut wallet = Wallet::new( - external_descriptor, - Some(internal_descriptor), - db, - Network::Testnet, - )?; + // let mut wallet = Wallet::new( + // external_descriptor, + // Some(internal_descriptor), + // db, + // Network::Testnet, + // )?; - let address = wallet.get_address(AddressIndex::New); - println!("Generated Address: {}", address); + // let address = wallet.get_address(AddressIndex::New); + // println!("Generated Address: {}", address); - let balance = wallet.get_balance(); - println!("Wallet balance before syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance before syncing: {} sats", balance.total()); - print!("Syncing..."); - // Scanning the chain... 
- let esplora_url = "https://mempool.space/testnet/api"; - let client = esplora_client::Builder::new(esplora_url).build_blocking()?; - let checkpoints = wallet.checkpoints(); - let spks = wallet - .spks_of_all_keychains() - .into_iter() - .map(|(k, spks)| { - let mut first = true; - ( - k, - spks.inspect(move |(spk_i, _)| { - if first { - first = false; - print!("\nScanning keychain [{:?}]:", k); - } - print!(" {}", spk_i); - let _ = std::io::stdout().flush(); - }), - ) - }) - .collect(); - let update = client.scan( - checkpoints, - spks, - core::iter::empty(), - core::iter::empty(), - STOP_GAP, - PARALLEL_REQUESTS, - )?; - println!(); - wallet.apply_update(update)?; - wallet.commit()?; + // print!("Syncing..."); + // // Scanning the chain... + // let esplora_url = "https://mempool.space/testnet/api"; + // let client = esplora_client::Builder::new(esplora_url).build_blocking()?; + // let checkpoints = wallet.checkpoints(); + // let spks = wallet + // .spks_of_all_keychains() + // .into_iter() + // .map(|(k, spks)| { + // let mut first = true; + // ( + // k, + // spks.inspect(move |(spk_i, _)| { + // if first { + // first = false; + // print!("\nScanning keychain [{:?}]:", k); + // } + // print!(" {}", spk_i); + // let _ = std::io::stdout().flush(); + // }), + // ) + // }) + // .collect(); + // let update = client.scan( + // checkpoints, + // spks, + // core::iter::empty(), + // core::iter::empty(), + // STOP_GAP, + // PARALLEL_REQUESTS, + // )?; + // println!(); + // wallet.apply_update(update)?; + // wallet.commit()?; - let balance = wallet.get_balance(); - println!("Wallet balance after syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance after syncing: {} sats", balance.total()); - if balance.total() < SEND_AMOUNT { - println!( - "Please send at least {} sats to the receiving address", - SEND_AMOUNT - ); - std::process::exit(0); - } + // if balance.total() < SEND_AMOUNT { + // println!( + // "Please send at 
least {} sats to the receiving address", + // SEND_AMOUNT + // ); + // std::process::exit(0); + // } - let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + // let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; - let mut tx_builder = wallet.build_tx(); - tx_builder - .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) - .enable_rbf(); + // let mut tx_builder = wallet.build_tx(); + // tx_builder + // .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + // .enable_rbf(); - let (mut psbt, _) = tx_builder.finish()?; - let finalized = wallet.sign(&mut psbt, SignOptions::default())?; - assert!(finalized); + // let (mut psbt, _) = tx_builder.finish()?; + // let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + // assert!(finalized); - let tx = psbt.extract_tx(); - client.broadcast(&tx)?; - println!("Tx broadcasted! Txid: {}", tx.txid()); + // let tx = psbt.extract_tx(); + // client.broadcast(&tx)?; + // println!("Tx broadcasted! 
Txid: {}", tx.txid()); - Ok(()) + // Ok(()) } diff --git a/example-crates/wallet_esplora_async/src/main.rs b/example-crates/wallet_esplora_async/src/main.rs index b78b09dfa..094ad5937 100644 --- a/example-crates/wallet_esplora_async/src/main.rs +++ b/example-crates/wallet_esplora_async/src/main.rs @@ -1,99 +1,100 @@ -use std::{io::Write, str::FromStr}; +// use std::{io::Write, str::FromStr}; -use bdk::{ - bitcoin::{Address, Network}, - wallet::AddressIndex, - SignOptions, Wallet, -}; -use bdk_esplora::{esplora_client, EsploraAsyncExt}; -use bdk_file_store::KeychainStore; +// use bdk::{ +// bitcoin::{Address, Network}, +// wallet::AddressIndex, +// SignOptions, Wallet, +// }; +// use bdk_esplora::{esplora_client, EsploraAsyncExt}; +// use bdk_file_store::KeychainStore; -const SEND_AMOUNT: u64 = 5000; -const STOP_GAP: usize = 50; -const PARALLEL_REQUESTS: usize = 5; +// const SEND_AMOUNT: u64 = 5000; +// const STOP_GAP: usize = 50; +// const PARALLEL_REQUESTS: usize = 5; #[tokio::main] async fn main() -> Result<(), Box> { - let db_path = std::env::temp_dir().join("bdk-esplora-example"); - let db = KeychainStore::new_from_path(db_path)?; - let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; - let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + todo!() + // let db_path = std::env::temp_dir().join("bdk-esplora-example"); + // let db = KeychainStore::new_from_path(db_path)?; + // let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + // let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; - let mut wallet = Wallet::new( - external_descriptor, 
- Some(internal_descriptor), - db, - Network::Testnet, - )?; + // let mut wallet = Wallet::new( + // external_descriptor, + // Some(internal_descriptor), + // db, + // Network::Testnet, + // )?; - let address = wallet.get_address(AddressIndex::New); - println!("Generated Address: {}", address); + // let address = wallet.get_address(AddressIndex::New); + // println!("Generated Address: {}", address); - let balance = wallet.get_balance(); - println!("Wallet balance before syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance before syncing: {} sats", balance.total()); - print!("Syncing..."); - // Scanning the blockchain - let esplora_url = "https://mempool.space/testnet/api"; - let client = esplora_client::Builder::new(esplora_url).build_async()?; - let checkpoints = wallet.checkpoints(); - let spks = wallet - .spks_of_all_keychains() - .into_iter() - .map(|(k, spks)| { - let mut first = true; - ( - k, - spks.inspect(move |(spk_i, _)| { - if first { - first = false; - print!("\nScanning keychain [{:?}]:", k); - } - print!(" {}", spk_i); - let _ = std::io::stdout().flush(); - }), - ) - }) - .collect(); - let update = client - .scan( - checkpoints, - spks, - std::iter::empty(), - std::iter::empty(), - STOP_GAP, - PARALLEL_REQUESTS, - ) - .await?; - println!(); - wallet.apply_update(update)?; - wallet.commit()?; + // print!("Syncing..."); + // // Scanning the blockchain + // let esplora_url = "https://mempool.space/testnet/api"; + // let client = esplora_client::Builder::new(esplora_url).build_async()?; + // let checkpoints = wallet.checkpoints(); + // let spks = wallet + // .spks_of_all_keychains() + // .into_iter() + // .map(|(k, spks)| { + // let mut first = true; + // ( + // k, + // spks.inspect(move |(spk_i, _)| { + // if first { + // first = false; + // print!("\nScanning keychain [{:?}]:", k); + // } + // print!(" {}", spk_i); + // let _ = std::io::stdout().flush(); + // }), + // ) + // }) + // .collect(); + 
// let update = client + // .scan( + // checkpoints, + // spks, + // std::iter::empty(), + // std::iter::empty(), + // STOP_GAP, + // PARALLEL_REQUESTS, + // ) + // .await?; + // println!(); + // wallet.apply_update(update)?; + // wallet.commit()?; - let balance = wallet.get_balance(); - println!("Wallet balance after syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance after syncing: {} sats", balance.total()); - if balance.total() < SEND_AMOUNT { - println!( - "Please send at least {} sats to the receiving address", - SEND_AMOUNT - ); - std::process::exit(0); - } + // if balance.total() < SEND_AMOUNT { + // println!( + // "Please send at least {} sats to the receiving address", + // SEND_AMOUNT + // ); + // std::process::exit(0); + // } - let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + // let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; - let mut tx_builder = wallet.build_tx(); - tx_builder - .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) - .enable_rbf(); + // let mut tx_builder = wallet.build_tx(); + // tx_builder + // .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + // .enable_rbf(); - let (mut psbt, _) = tx_builder.finish()?; - let finalized = wallet.sign(&mut psbt, SignOptions::default())?; - assert!(finalized); + // let (mut psbt, _) = tx_builder.finish()?; + // let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + // assert!(finalized); - let tx = psbt.extract_tx(); - client.broadcast(&tx).await?; - println!("Tx broadcasted! Txid: {}", tx.txid()); + // let tx = psbt.extract_tx(); + // client.broadcast(&tx).await?; + // println!("Tx broadcasted! 
Txid: {}", tx.txid()); - Ok(()) + // Ok(()) } From 26dc69a9cd0727e9087dc250f997152f57d468e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Tue, 2 May 2023 22:20:56 +0800 Subject: [PATCH 15/15] [wallet_redesign] Have a single `Tracker` implementation in `bdk_chain` --- crates/bdk/src/wallet/export.rs | 18 +- crates/bdk/src/wallet/mod.rs | 927 +++++++++--------- crates/bdk/src/wallet/tracker.rs | 197 ---- crates/bdk/src/wallet/tx_builder.rs | 6 +- crates/bdk/tests/common.rs | 18 +- crates/bdk/tests/wallet.rs | 170 ++-- crates/chain/src/indexed_tx_graph.rs | 4 +- crates/chain/src/lib.rs | 2 + crates/chain/src/local_chain.rs | 8 +- crates/chain/src/remote_chain.rs | 71 ++ crates/chain/src/tracker.rs | 357 +++++++ crates/chain/src/tx_data_traits.rs | 8 - crates/electrum/Cargo.toml | 4 - crates/electrum/src/v2.rs | 121 +-- crates/file_store/Cargo.toml | 5 - crates/file_store/src/store.rs | 50 +- example-crates/tracker_electrum/src/main.rs | 36 +- example-crates/tracker_example_cli/src/lib.rs | 238 +++-- .../tracker_example_cli/src/remote_chain.rs | 57 -- .../tracker_example_cli/src/tracker.rs | 266 ----- example-crates/wallet_electrum/Cargo.toml | 4 +- example-crates/wallet_electrum/src/main.rs | 6 +- 22 files changed, 1212 insertions(+), 1361 deletions(-) delete mode 100644 crates/bdk/src/wallet/tracker.rs create mode 100644 crates/chain/src/remote_chain.rs create mode 100644 crates/chain/src/tracker.rs delete mode 100644 example-crates/tracker_example_cli/src/remote_chain.rs delete mode 100644 example-crates/tracker_example_cli/src/tracker.rs diff --git a/crates/bdk/src/wallet/export.rs b/crates/bdk/src/wallet/export.rs index 1e1245839..5b6d3cbc2 100644 --- a/crates/bdk/src/wallet/export.rs +++ b/crates/bdk/src/wallet/export.rs @@ -239,16 +239,14 @@ mod test { lock_time: bitcoin::PackedLockTime::ZERO, }; wallet.insert_checkpoint(anchor_block).unwrap(); - wallet - .insert_tx( - transaction, - ConfirmationTime::Confirmed { - height: 5000, - time: 0, 
- }, - None, - ) - .unwrap(); + assert!(wallet.insert_tx( + &transaction, + ConfirmationTime::Confirmed { + height: 5000, + time: 0, + }, + None, + )); wallet } diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index ce402c60a..43df487ee 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -21,11 +21,14 @@ use alloc::{ }; pub use bdk_chain::keychain::Balance; use bdk_chain::{ - local_chain, BlockId, ConfirmationTime, ConfirmationTimeAnchor, Persist, PersistBackend, + indexed_tx_graph::IndexedAdditions, + local_chain, + tracker::{LocalChangeSet, LocalTracker, LocalUpdate}, + BlockId, ConfirmationTime, ConfirmationTimeAnchor, Persist, PersistBackend, }; -use bitcoin::consensus::encode::serialize; use bitcoin::secp256k1::Secp256k1; use bitcoin::util::psbt; +use bitcoin::{consensus::encode::serialize, hashes::Hash}; use bitcoin::{ Address, BlockHash, EcdsaSighashType, LockTime, Network, OutPoint, SchnorrSighashType, Script, Sequence, Transaction, TxOut, Txid, Witness, @@ -40,7 +43,7 @@ use log::{debug, error, info, trace}; pub mod coin_selection; pub mod export; pub mod signer; -mod tracker; +// mod tracker; pub mod tx_builder; pub(crate) mod utils; @@ -48,7 +51,7 @@ pub(crate) mod utils; #[cfg_attr(docsrs, doc(cfg(feature = "hardware-signer")))] pub mod hardwaresigner; -pub use tracker::*; +// pub use tracker::*; pub use utils::IsDust; #[allow(deprecated)] @@ -83,12 +86,16 @@ const COINBASE_MATURITY: u32 = 100; pub struct Wallet { signers: Arc, change_signers: Arc, - tracker: Tracker, - persist: Persist, + tracker: WalletTracker, + persist: Persist, network: Network, secp: SecpCtx, } +type WalletTracker = LocalTracker; +type WalletChangeSet = LocalChangeSet; +pub type WalletUpdate = LocalUpdate; + // /// The update to a [`Wallet`] used in [`Wallet::apply_update`]. This is usually returned from blockchain data sources. // /// The type parameter `T` indicates the kind of transaction contained in the update. 
It's usually a [`bitcoin::Transaction`]. // pub type Update = KeychainScan; @@ -183,7 +190,10 @@ where #[cfg(feature = "std")] impl std::error::Error for NewError

{} -impl Wallet { +impl Wallet +where + D: PersistBackend, +{ /// Create a wallet from a `descriptor` (and an optional `change_descriptor`) and load related /// transaction data from `db`. pub fn new( @@ -191,18 +201,13 @@ impl Wallet { change_descriptor: Option, mut db: D, network: Network, - ) -> Result> - where - D: PersistBackend, - { + ) -> Result> { let secp = Secp256k1::new(); - let mut tracker = Tracker::default(); + let mut tracker_keychains = Vec::new(); let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, &secp, network) .map_err(NewError::Descriptor)?; - tracker - .index_mut() - .add_keychain(KeychainKind::External, descriptor.clone()); + tracker_keychains.push((KeychainKind::External, descriptor.clone())); let signers = Arc::new(SignersContainer::build(keymap, &descriptor, &secp)); let change_signers = match change_descriptor { @@ -217,15 +222,15 @@ impl Wallet { &secp, )); - tracker - .index_mut() - .add_keychain(KeychainKind::Internal, change_descriptor); - + tracker_keychains.push((KeychainKind::Internal, change_descriptor)); change_signers } None => Arc::new(SignersContainer::new()), }; + let mut tracker = + WalletTracker::new_local(BlockHash::from_inner([0_u8; 32]), tracker_keychains); + db.load_into_tracker(&mut tracker) .map_err(NewError::Persist)?; @@ -241,23 +246,10 @@ impl Wallet { }) } - /// Get the Bitcoin network the wallet is using. - pub fn network(&self) -> Network { - self.network - } - - /// Iterator over all keychains in this wallet - pub fn keychanins(&self) -> &BTreeMap { - self.tracker.index().keychains() - } - /// Return a derived address using the external descriptor, see [`AddressIndex`] for /// available address index selection strategies. If none of the keys in the descriptor are derivable /// (i.e. does not end with /*) then the same address will always be returned for any [`AddressIndex`]. 
- pub fn get_address(&mut self, address_index: AddressIndex) -> AddressInfo - where - D: PersistBackend, - { + pub fn get_address(&mut self, address_index: AddressIndex) -> AddressInfo { self._get_address(address_index, KeychainKind::External) } @@ -268,27 +260,27 @@ impl Wallet { /// see [`AddressIndex`] for available address index selection strategies. If none of the keys /// in the descriptor are derivable (i.e. does not end with /*) then the same address will always /// be returned for any [`AddressIndex`]. - pub fn get_internal_address(&mut self, address_index: AddressIndex) -> AddressInfo - where - D: PersistBackend, - { + pub fn get_internal_address(&mut self, address_index: AddressIndex) -> AddressInfo { self._get_address(address_index, KeychainKind::Internal) } - fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo - where - D: PersistBackend, - { + fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo { let keychain = self.map_keychain(keychain); let txout_index = self.tracker.index_mut(); let (index, spk) = match address_index { AddressIndex::New => { - let ((index, spk), changeset) = txout_index.reveal_next_spk(&keychain); - let spk = spk.clone(); + let ((index, spk), index_additions) = txout_index.reveal_next_spk(&keychain); + + self.persist.stage(WalletChangeSet { + indexed_additions: IndexedAdditions { + index_additions, + ..Default::default() + }, + ..Default::default() + }); - self.persist.stage(changeset.into()); self.persist.commit().expect("TODO"); - (index, spk) + (index, spk.clone()) } AddressIndex::LastUnused => { let index = txout_index.last_revealed_index(&keychain); @@ -317,175 +309,6 @@ impl Wallet { } } - /// Return whether or not a `script` is part of this wallet (either internal or external) - pub fn is_mine(&self, script: &Script) -> bool { - self.tracker.index().index_of_spk(script).is_some() - } - - /// Finds how the wallet derived the script pubkey 
`spk`. - /// - /// Will only return `Some(_)` if the wallet has given out the spk. - pub fn derivation_of_spk(&self, spk: &Script) -> Option<(KeychainKind, u32)> { - self.tracker.index().index_of_spk(spk).copied() - } - - /// Return the list of unspent outputs of this wallet - pub fn list_unspent(&self) -> Vec { - self.tracker - .list_owned_unspents() - .map(|(&(keychain, derivation_index), utxo)| LocalUtxo { - outpoint: utxo.outpoint, - txout: utxo.txout, - keychain, - is_spent: false, - derivation_index, - confirmation_time: match utxo.chain_position { - bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { - height: a.confirmation_height, - time: a.confirmation_time, - }, - bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, - }, - }) - .collect() - } - - /// Get all the checkpoints the wallet is currently storing indexed by height. - pub fn checkpoints(&self) -> &BTreeMap { - self.tracker.chain().all_blocks() - } - - /// Returns the latest checkpoint. - pub fn latest_checkpoint(&self) -> Option { - self.tracker.chain().tip() - } - - /// Returns a iterators of all the script pubkeys for the `Internal` and External` variants in `KeychainKind`. - /// - /// This is inteded to be used when doing a full scan of your addresses (e.g. after restoring - /// from seed words). You pass the `BTreeMap` of iterators to a blockchain data source (e.g. - /// electrum server) which will go through each address until it reaches a *stop grap*. - /// - /// Note carefully that iterators go over **all** script pubkeys on the keychains (not what - /// script pubkeys the wallet is storing internally). - pub fn spks_of_all_keychains( - &self, - ) -> BTreeMap + Clone> { - self.tracker.index().spks_of_all_keychains() - } - - /// Gets an iterator over all the script pubkeys in a single keychain. 
- /// - /// See [`spks_of_all_keychains`] for more documentation - /// - /// [`spks_of_all_keychains`]: Self::spks_of_all_keychains - pub fn spks_of_keychain( - &self, - keychain: KeychainKind, - ) -> impl Iterator + Clone { - self.tracker.index().spks_of_keychain(&keychain) - } - - /// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the - /// wallet's database. - pub fn get_utxo(&self, op: OutPoint) -> Option { - self.tracker - .list_owned_unspents() - .find_map(|(&(keychain, derivation_index), txo)| { - if op == txo.outpoint { - Some(LocalUtxo { - outpoint: txo.outpoint, - txout: txo.txout, - keychain, - is_spent: txo.spent_by.is_none(), - derivation_index, - confirmation_time: match txo.chain_position { - bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { - height: a.confirmation_height, - time: a.confirmation_time, - }, - bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, - }, - }) - } else { - None - } - }) - } - - /// Return a single transactions made and received by the wallet - /// - /// Optionally fill the [`TransactionDetails::transaction`] field with the raw transaction if - /// `include_raw` is `true`. 
- pub fn get_tx(&self, txid: Txid, include_raw: bool) -> Option { - let chain_tip = self.tracker.chain().tip().unwrap_or_default(); - - let raw_tx = self.tracker.graph().get_tx(txid)?; - let pos = self - .tracker - .graph() - .get_chain_position(self.tracker.chain(), chain_tip, txid)?; - - // let (&confirmation_time, tx) = self.keychain_tracker.chain_graph().get_tx_in_chain(txid)?; - let graph = self.tracker.graph(); - let txout_index = self.tracker.index(); - - let received = raw_tx - .output - .iter() - .map(|txout| { - if txout_index.index_of_spk(&txout.script_pubkey).is_some() { - txout.value - } else { - 0 - } - }) - .sum(); - - let sent = raw_tx - .input - .iter() - .map(|txin| { - if let Some((_, txout)) = txout_index.txout(txin.previous_output) { - txout.value - } else { - 0 - } - }) - .sum(); - - let inputs = raw_tx - .input - .iter() - .map(|txin| { - graph - .get_txout(txin.previous_output) - .map(|txout| txout.value) - }) - .sum::>(); - let outputs = raw_tx.output.iter().map(|txout| txout.value).sum(); - let fee = inputs.map(|inputs| inputs.saturating_sub(outputs)); - - Some(TransactionDetails { - transaction: if include_raw { - Some(raw_tx.clone()) - } else { - None - }, - txid, - received, - sent, - fee, - confirmation_time: match pos { - bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { - height: a.confirmation_height, - time: a.confirmation_time, - }, - bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, - }, - }) - } - /// Add a new checkpoint to the wallet's internal view of the chain. /// This stages but does not [`commit`] the change. 
/// @@ -496,10 +319,7 @@ impl Wallet { pub fn insert_checkpoint( &mut self, block_id: BlockId, - ) -> Result - where - D: PersistBackend, - { + ) -> Result { let changeset = self.tracker.insert_block(block_id)?; let changed = changeset.is_empty(); if changed { @@ -524,13 +344,10 @@ impl Wallet { /// [`insert_checkpoint`]: Self::insert_checkpoint pub fn insert_tx( &mut self, - tx: Transaction, + tx: &Transaction, pos: ConfirmationTime, last_seen: Option, - ) -> Result - where - D: PersistBackend, - { + ) -> bool { // [TODO] This is not okay! let anchor_block = self.tracker.chain().tip().unwrap_or_default(); let anchor = match pos { @@ -544,127 +361,19 @@ impl Wallet { _ => None, }; - let changeset = self.tracker.insert_tx(tx, anchor, last_seen)?; + let changeset: WalletChangeSet = self.tracker.insert_tx(tx, anchor, last_seen); let changed = !changeset.is_empty(); if changed { self.persist.stage(changeset); } - Ok(changed) - } - - #[deprecated(note = "use Wallet::transactions instead")] - /// Deprecated. use `Wallet::transactions` instead. - pub fn list_transactions(&self, include_raw: bool) -> Vec { - self.tracker - .list_transactions() - .map(|tx| self.get_tx(tx.node.txid, include_raw).expect("must exist")) - .collect() - } - - /// Iterate over the transactions in the wallet in order of ascending confirmation time with - /// unconfirmed transactions last. - pub fn transactions(&self) -> impl Iterator + '_ { - self.tracker.list_transactions().map(|canonical_tx| { - ( - match canonical_tx.observed_as { - bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { - height: a.confirmation_height, - time: a.confirmation_time, - }, - bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, - }, - canonical_tx.node.tx, - ) - }) - } - - /// Return the balance, separated into available, trusted-pending, untrusted-pending and immature - /// values. 
- pub fn get_balance(&self) -> Balance { - self.tracker.balance() - } - - /// Add an external signer - /// - /// See [the `signer` module](signer) for an example. - pub fn add_signer( - &mut self, - keychain: KeychainKind, - ordering: SignerOrdering, - signer: Arc, - ) { - let signers = match keychain { - KeychainKind::External => Arc::make_mut(&mut self.signers), - KeychainKind::Internal => Arc::make_mut(&mut self.change_signers), - }; - - signers.add_external(signer.id(&self.secp), ordering, signer); - } - - /// Get the signers - /// - /// ## Example - /// - /// ``` - /// # use bdk::{Wallet, KeychainKind}; - /// # use bdk::bitcoin::Network; - /// let wallet = Wallet::new_no_persist("wpkh(tprv8ZgxMBicQKsPe73PBRSmNbTfbcsZnwWhz5eVmhHpi31HW29Z7mc9B4cWGRQzopNUzZUT391DeDJxL2PefNunWyLgqCKRMDkU1s2s8bAfoSk/84'/0'/0'/0/*)", None, Network::Testnet)?; - /// for secret_key in wallet.get_signers(KeychainKind::External).signers().iter().filter_map(|s| s.descriptor_secret_key()) { - /// // secret_key: tprv8ZgxMBicQKsPe73PBRSmNbTfbcsZnwWhz5eVmhHpi31HW29Z7mc9B4cWGRQzopNUzZUT391DeDJxL2PefNunWyLgqCKRMDkU1s2s8bAfoSk/84'/0'/0'/0/* - /// println!("secret_key: {}", secret_key); - /// } - /// - /// Ok::<(), Box>(()) - /// ``` - pub fn get_signers(&self, keychain: KeychainKind) -> Arc { - match keychain { - KeychainKind::External => Arc::clone(&self.signers), - KeychainKind::Internal => Arc::clone(&self.change_signers), - } - } - - /// Start building a transaction. - /// - /// This returns a blank [`TxBuilder`] from which you can specify the parameters for the transaction. 
- /// - /// ## Example - /// - /// ``` - /// # use std::str::FromStr; - /// # use bitcoin::*; - /// # use bdk::*; - /// # let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)"; - /// # let mut wallet = doctest_wallet!(); - /// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); - /// let (psbt, details) = { - /// let mut builder = wallet.build_tx(); - /// builder - /// .add_recipient(to_address.script_pubkey(), 50_000); - /// builder.finish()? - /// }; - /// - /// // sign and broadcast ... - /// # Ok::<(), bdk::Error>(()) - /// ``` - /// - /// [`TxBuilder`]: crate::TxBuilder - pub fn build_tx(&mut self) -> TxBuilder<'_, D, DefaultCoinSelectionAlgorithm, CreateTx> { - TxBuilder { - wallet: alloc::rc::Rc::new(core::cell::RefCell::new(self)), - params: TxParams::default(), - coin_selection: DefaultCoinSelectionAlgorithm::default(), - phantom: core::marker::PhantomData, - } + changed } pub(crate) fn create_tx( &mut self, coin_selection: Cs, params: TxParams, - ) -> Result<(psbt::PartiallySignedTransaction, TransactionDetails), Error> - where - D: PersistBackend, - { + ) -> Result<(psbt::PartiallySignedTransaction, TransactionDetails), Error> { let external_descriptor = self .tracker .index() @@ -924,102 +633,424 @@ impl Wallet { Some(ref drain_recipient) => drain_recipient.clone(), None => { let change_keychain = self.map_keychain(KeychainKind::Internal); - let ((index, spk), changeset) = + let ((index, spk), index_additions) = self.tracker.index_mut().next_unused_spk(&change_keychain); let spk = spk.clone(); self.tracker.index_mut().mark_used(&change_keychain, index); - self.persist.stage(changeset.into()); + self.persist.stage(WalletChangeSet { + indexed_additions: IndexedAdditions { + index_additions, + ..Default::default() + }, + ..Default::default() + }); self.persist.commit().expect("TODO"); spk } }; - let coin_selection = 
coin_selection.coin_select( - required_utxos, - optional_utxos, - fee_rate, - outgoing + fee_amount, - &drain_script, - )?; - fee_amount += coin_selection.fee_amount; - let excess = &coin_selection.excess; + let coin_selection = coin_selection.coin_select( + required_utxos, + optional_utxos, + fee_rate, + outgoing + fee_amount, + &drain_script, + )?; + fee_amount += coin_selection.fee_amount; + let excess = &coin_selection.excess; + + tx.input = coin_selection + .selected + .iter() + .map(|u| bitcoin::TxIn { + previous_output: u.outpoint(), + script_sig: Script::default(), + sequence: n_sequence, + witness: Witness::new(), + }) + .collect(); + + if tx.output.is_empty() { + // Uh oh, our transaction has no outputs. + // We allow this when: + // - We have a drain_to address and the utxos we must spend (this happens, + // for example, when we RBF) + // - We have a drain_to address and drain_wallet set + // Otherwise, we don't know who we should send the funds to, and how much + // we should send! + if params.drain_to.is_some() && (params.drain_wallet || !params.utxos.is_empty()) { + if let NoChange { + dust_threshold, + remaining_amount, + change_fee, + } = excess + { + return Err(Error::InsufficientFunds { + needed: *dust_threshold, + available: remaining_amount.saturating_sub(*change_fee), + }); + } + } else { + return Err(Error::NoRecipients); + } + } + + match excess { + NoChange { + remaining_amount, .. 
+ } => fee_amount += remaining_amount,
+ Change { amount, fee } => {
+ if self.is_mine(&drain_script) {
+ received += amount;
+ }
+ fee_amount += fee;
+
+ // create drain output
+ let drain_output = TxOut {
+ value: *amount,
+ script_pubkey: drain_script,
+ };
+
+ // TODO: We should pay attention when adding a new output: this might increase
+ // the length of the "number of vouts" parameter by 2 bytes, potentially making
+ // our feerate too low
+ tx.output.push(drain_output);
+ }
+ };
+
+ // sort input/outputs according to the chosen algorithm
+ params.ordering.sort_tx(&mut tx);
+
+ let txid = tx.txid();
+ let sent = coin_selection.local_selected_amount();
+ let psbt = self.complete_transaction(tx, coin_selection.selected, params)?;
+
+ let transaction_details = TransactionDetails {
+ transaction: None,
+ txid,
+ confirmation_time: ConfirmationTime::Unconfirmed,
+ received,
+ sent,
+ fee: Some(fee_amount),
+ };
+
+ Ok((psbt, transaction_details))
+ }
+
+ /// Commits all currently [`staged`] changes to the persistence backend, returning an error when this fails.
+ ///
+ /// [`staged`]: Self::staged
+ pub fn commit(&mut self) -> Result<(), D::WriteError> {
+ self.persist.commit()
+ }
+
+ /// Returns the changes that will be staged with the next call to [`commit`].
+ ///
+ /// [`commit`]: Self::commit
+ pub fn staged(&self) -> &WalletChangeSet {
+ self.persist.staged()
+ }
+
+ /// Applies an update to the wallet and stages the changes (but does not [`commit`] them).
+ ///
+ /// Usually you create an `update` by interacting with some blockchain data source and inserting
+ /// transactions related to your wallet into it.
+ ///
+ /// [`commit`]: Self::commit
+ pub fn apply_update(
+ &mut self,
+ update: WalletUpdate,
+ ) -> Result<(), local_chain::UpdateNotConnectedError> {
+ let (changeset, result) = self.tracker.apply_update(update);
+ self.persist.stage(changeset);
+ result
+ }
+}
+
+impl Wallet {
+ /// Get the Bitcoin network the wallet is using. 
+ pub fn network(&self) -> Network {
+ self.network
+ }
+
+ /// Iterator over all keychains in this wallet
+ pub fn keychains(&self) -> &BTreeMap {
+ self.tracker.index().keychains()
+ }
+
+ /// Get all the checkpoints the wallet is currently storing indexed by height.
+ pub fn checkpoints(&self) -> &BTreeMap {
+ self.tracker.chain().all_blocks()
+ }
+
+ /// Returns the latest checkpoint.
+ pub fn latest_checkpoint(&self) -> Option {
+ self.tracker.chain().tip()
+ }
+
+ /// Return whether or not a `script` is part of this wallet (either internal or external)
+ pub fn is_mine(&self, script: &Script) -> bool {
+ self.tracker.index().index_of_spk(script).is_some()
+ }
+
+ /// Finds how the wallet derived the script pubkey `spk`.
+ ///
+ /// Will only return `Some(_)` if the wallet has given out the spk.
+ pub fn derivation_of_spk(&self, spk: &Script) -> Option<(KeychainKind, u32)> {
+ self.tracker.index().index_of_spk(spk).copied()
+ }
+
+ /// Returns iterators of all the script pubkeys for the `Internal` and `External` variants in `KeychainKind`.
+ ///
+ /// This is intended to be used when doing a full scan of your addresses (e.g. after restoring
+ /// from seed words). You pass the `BTreeMap` of iterators to a blockchain data source (e.g.
+ /// electrum server) which will go through each address until it reaches a *stop gap*.
+ ///
+ /// Note carefully that iterators go over **all** script pubkeys on the keychains (not what
+ /// script pubkeys the wallet is storing internally).
+ pub fn spks_of_all_keychains(
+ &self,
+ ) -> BTreeMap + Clone> {
+ self.tracker.index().spks_of_all_keychains()
+ }
+
+ /// Gets an iterator over all the script pubkeys in a single keychain. 
+ /// + /// See [`spks_of_all_keychains`] for more documentation + /// + /// [`spks_of_all_keychains`]: Self::spks_of_all_keychains + pub fn spks_of_keychain( + &self, + keychain: KeychainKind, + ) -> impl Iterator + Clone { + self.tracker.index().spks_of_keychain(&keychain) + } + + /// Return the list of unspent outputs of this wallet + pub fn list_unspent(&self) -> Vec { + self.tracker + .list_owned_unspents() + .map(|(&(keychain, derivation_index), utxo)| LocalUtxo { + outpoint: utxo.outpoint, + txout: utxo.txout, + keychain, + is_spent: false, + derivation_index, + confirmation_time: match utxo.chain_position { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }, + }) + .collect() + } + + /// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the + /// wallet's database. + pub fn get_utxo(&self, op: OutPoint) -> Option { + self.tracker + .list_owned_unspents() + .find_map(|(&(keychain, derivation_index), txo)| { + if op == txo.outpoint { + Some(LocalUtxo { + outpoint: txo.outpoint, + txout: txo.txout, + keychain, + is_spent: txo.spent_by.is_none(), + derivation_index, + confirmation_time: match txo.chain_position { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }, + }) + } else { + None + } + }) + } + + /// Return a single transactions made and received by the wallet + /// + /// Optionally fill the [`TransactionDetails::transaction`] field with the raw transaction if + /// `include_raw` is `true`. 
+ pub fn get_tx(&self, txid: Txid, include_raw: bool) -> Option { + let chain_tip = self.tracker.chain().tip().unwrap_or_default(); + + let raw_tx = self.tracker.graph().get_tx(txid)?; + let pos = self + .tracker + .graph() + .get_chain_position(self.tracker.chain(), chain_tip, txid)?; - tx.input = coin_selection - .selected + // let (&confirmation_time, tx) = self.keychain_tracker.chain_graph().get_tx_in_chain(txid)?; + let graph = self.tracker.graph(); + let txout_index = self.tracker.index(); + + let received = raw_tx + .output .iter() - .map(|u| bitcoin::TxIn { - previous_output: u.outpoint(), - script_sig: Script::default(), - sequence: n_sequence, - witness: Witness::new(), + .map(|txout| { + if txout_index.index_of_spk(&txout.script_pubkey).is_some() { + txout.value + } else { + 0 + } }) - .collect(); + .sum(); - if tx.output.is_empty() { - // Uh oh, our transaction has no outputs. - // We allow this when: - // - We have a drain_to address and the utxos we must spend (this happens, - // for example, when we RBF) - // - We have a drain_to address and drain_wallet set - // Otherwise, we don't know who we should send the funds to, and how much - // we should send! - if params.drain_to.is_some() && (params.drain_wallet || !params.utxos.is_empty()) { - if let NoChange { - dust_threshold, - remaining_amount, - change_fee, - } = excess - { - return Err(Error::InsufficientFunds { - needed: *dust_threshold, - available: remaining_amount.saturating_sub(*change_fee), - }); + let sent = raw_tx + .input + .iter() + .map(|txin| { + if let Some((_, txout)) = txout_index.txout(txin.previous_output) { + txout.value + } else { + 0 } - } else { - return Err(Error::NoRecipients); - } - } + }) + .sum(); - match excess { - NoChange { - remaining_amount, .. 
- } => fee_amount += remaining_amount, - Change { amount, fee } => { - if self.is_mine(&drain_script) { - received += amount; - } - fee_amount += fee; + let inputs = raw_tx + .input + .iter() + .map(|txin| { + graph + .get_txout(txin.previous_output) + .map(|txout| txout.value) + }) + .sum::>(); + let outputs = raw_tx.output.iter().map(|txout| txout.value).sum(); + let fee = inputs.map(|inputs| inputs.saturating_sub(outputs)); - // create drain output - let drain_output = TxOut { - value: *amount, - script_pubkey: drain_script, - }; + Some(TransactionDetails { + transaction: if include_raw { + Some(raw_tx.clone()) + } else { + None + }, + txid, + received, + sent, + fee, + confirmation_time: match pos { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }, + }) + } - // TODO: We should pay attention when adding a new output: this might increase - // the lenght of the "number of vouts" parameter by 2 bytes, potentially making - // our feerate too low - tx.output.push(drain_output); - } - }; + #[deprecated(note = "use Wallet::transactions instead")] + /// Deprecated. use `Wallet::transactions` instead. + pub fn list_transactions(&self, include_raw: bool) -> Vec { + self.tracker + .list_transactions() + .map(|tx| self.get_tx(tx.node.txid, include_raw).expect("must exist")) + .collect() + } - // sort input/outputs according to the chosen algorithm - params.ordering.sort_tx(&mut tx); + /// Iterate over the transactions in the wallet in order of ascending confirmation time with + /// unconfirmed transactions last. 
+ pub fn transactions(&self) -> impl Iterator + '_ { + self.tracker.list_transactions().map(|canonical_tx| { + ( + match canonical_tx.observed_as { + bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + bdk_chain::ObservedAs::Unconfirmed(_) => ConfirmationTime::Unconfirmed, + }, + canonical_tx.node.tx, + ) + }) + } - let txid = tx.txid(); - let sent = coin_selection.local_selected_amount(); - let psbt = self.complete_transaction(tx, coin_selection.selected, params)?; + /// Return the balance, separated into available, trusted-pending, untrusted-pending and immature + /// values. + pub fn get_balance(&self) -> Balance { + self.tracker + .balance(|&keychain| keychain == KeychainKind::Internal) + } - let transaction_details = TransactionDetails { - transaction: None, - txid, - confirmation_time: ConfirmationTime::Unconfirmed, - received, - sent, - fee: Some(fee_amount), + /// Add an external signer + /// + /// See [the `signer` module](signer) for an example. 
+ pub fn add_signer( + &mut self, + keychain: KeychainKind, + ordering: SignerOrdering, + signer: Arc, + ) { + let signers = match keychain { + KeychainKind::External => Arc::make_mut(&mut self.signers), + KeychainKind::Internal => Arc::make_mut(&mut self.change_signers), }; - Ok((psbt, transaction_details)) + signers.add_external(signer.id(&self.secp), ordering, signer); + } + + /// Get the signers + /// + /// ## Example + /// + /// ``` + /// # use bdk::{Wallet, KeychainKind}; + /// # use bdk::bitcoin::Network; + /// let wallet = Wallet::new_no_persist("wpkh(tprv8ZgxMBicQKsPe73PBRSmNbTfbcsZnwWhz5eVmhHpi31HW29Z7mc9B4cWGRQzopNUzZUT391DeDJxL2PefNunWyLgqCKRMDkU1s2s8bAfoSk/84'/0'/0'/0/*)", None, Network::Testnet)?; + /// for secret_key in wallet.get_signers(KeychainKind::External).signers().iter().filter_map(|s| s.descriptor_secret_key()) { + /// // secret_key: tprv8ZgxMBicQKsPe73PBRSmNbTfbcsZnwWhz5eVmhHpi31HW29Z7mc9B4cWGRQzopNUzZUT391DeDJxL2PefNunWyLgqCKRMDkU1s2s8bAfoSk/84'/0'/0'/0/* + /// println!("secret_key: {}", secret_key); + /// } + /// + /// Ok::<(), Box>(()) + /// ``` + pub fn get_signers(&self, keychain: KeychainKind) -> Arc { + match keychain { + KeychainKind::External => Arc::clone(&self.signers), + KeychainKind::Internal => Arc::clone(&self.change_signers), + } + } + + /// Start building a transaction. + /// + /// This returns a blank [`TxBuilder`] from which you can specify the parameters for the transaction. 
+ /// + /// ## Example + /// + /// ``` + /// # use std::str::FromStr; + /// # use bitcoin::*; + /// # use bdk::*; + /// # let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)"; + /// # let mut wallet = doctest_wallet!(); + /// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); + /// let (psbt, details) = { + /// let mut builder = wallet.build_tx(); + /// builder + /// .add_recipient(to_address.script_pubkey(), 50_000); + /// builder.finish()? + /// }; + /// + /// // sign and broadcast ... + /// # Ok::<(), bdk::Error>(()) + /// ``` + /// + /// [`TxBuilder`]: crate::TxBuilder + pub fn build_tx(&mut self) -> TxBuilder<'_, D, DefaultCoinSelectionAlgorithm, CreateTx> { + TxBuilder { + wallet: alloc::rc::Rc::new(core::cell::RefCell::new(self)), + params: TxParams::default(), + coin_selection: DefaultCoinSelectionAlgorithm::default(), + phantom: core::marker::PhantomData, + } } /// Bump the fee of a transaction previously created with this wallet. 
@@ -1067,7 +1098,7 @@ impl Wallet { ) -> Result, Error> { let graph = self.tracker.graph(); let txout_index = &self.tracker.index(); - let tx_and_height = self.tracker.get_tx(txid); + let tx_and_height = self.tracker.get_transaction(txid); let mut tx = match tx_and_height { None => return Err(Error::TransactionNotFound), Some(canonical_tx) => match canonical_tx.observed_as { @@ -1099,7 +1130,7 @@ impl Wallet { .map(|txin| -> Result<_, Error> { let canonical_prev_tx = self .tracker - .get_tx(txin.previous_output.txid) + .get_transaction(txin.previous_output.txid) .ok_or(Error::UnknownUtxo)?; let confirmation_time = match canonical_prev_tx.observed_as { bdk_chain::ObservedAs::Confirmed(a) => ConfirmationTime::Confirmed { @@ -1189,7 +1220,6 @@ impl Wallet { phantom: core::marker::PhantomData, }) } - /// Sign a transaction with all the wallet's signers, in the order specified by every signer's /// [`SignerOrdering`]. This function returns the `Result` type with an encapsulated `bool` that has the value true if the PSBT was finalized, or false otherwise. 
/// @@ -1312,13 +1342,13 @@ impl Wallet { if psbt_input.final_script_sig.is_some() || psbt_input.final_script_witness.is_some() { continue; } - let confirmation_height = - self.tracker - .get_tx(input.previous_output.txid) - .map(|tx| match tx.observed_as { - bdk_chain::ObservedAs::Confirmed(a) => a.confirmation_height, - bdk_chain::ObservedAs::Unconfirmed(_) => u32::MAX, - }); + let confirmation_height = self + .tracker + .get_transaction(input.previous_output.txid) + .map(|tx| match tx.observed_as { + bdk_chain::ObservedAs::Confirmed(a) => a.confirmation_height, + bdk_chain::ObservedAs::Unconfirmed(_) => u32::MAX, + }); let last_sync_height = self.tracker.chain().tip().map(|block_id| block_id.height); let current_height = sign_options.assume_height.or(last_sync_height); @@ -1484,7 +1514,7 @@ impl Wallet { .iter() .map(|u| { let txid = u.0.outpoint.txid; - let tx = self.tracker.get_tx(txid); + let tx = self.tracker.get_transaction(txid); match tx { // We don't have the tx in the db for some reason, // so we can't know for sure if it's mature or not. @@ -1562,7 +1592,7 @@ impl Wallet { if params.add_global_xpubs { let all_xpubs = self - .keychanins() + .keychains() .iter() .flat_map(|(_, desc)| desc.get_extended_keys()) .collect::>(); @@ -1730,62 +1760,13 @@ impl Wallet { .to_string() } - // /// Applies an update to the wallet and stages the changes (but does not [`commit`] them). - // /// - // /// Usually you create an `update` by interacting with some blockchain data source and inserting - // /// transactions related to your wallet into it. - // /// - // /// [`commit`]: Self::commit - // pub fn apply_update(&mut self, update: Update) -> Result<(), UpdateError> - // where - // D: PersistBackend, - // { - // let changeset = self.tracker.apply_update(update)?; - // self.persist.stage(changeset); - // Ok(()) - // } - - /// Commits all curently [`staged`] changed to the persistence backend returning and error when this fails. 
- /// - /// [`staged`]: Self::staged - pub fn commit(&mut self) -> Result<(), D::WriteError> - where - D: PersistBackend, - { - self.persist.commit() - } - - /// Returns the changes that will be staged with the next call to [`commit`]. - /// - /// [`commit`]: Self::commit - pub fn staged(&self) -> &ChangeSet - where - D: PersistBackend, - { - self.persist.staged() - } - - pub fn tracker(&self) -> &Tracker { + pub fn tracker(&self) -> &WalletTracker { &self.tracker } - pub fn tracker_mut(&mut self) -> &mut Tracker { + pub fn tracker_mut(&mut self) -> &mut WalletTracker { &mut self.tracker } - - pub fn update(&mut self, mut f: F) -> Result - where - D: PersistBackend, - F: FnMut(&mut Tracker) -> Result, - { - let changeset = f(&mut self.tracker)?; - if changeset.is_empty() { - return Ok(false); - } - - self.persist.stage(changeset); - Ok(true) - } } // impl AsRef for Wallet { @@ -1859,7 +1840,7 @@ macro_rules! doctest_wallet { }], }; let _ = wallet.insert_checkpoint(BlockId { height: 1_000, hash: BlockHash::all_zeros() }); - let _ = wallet.insert_tx(tx.clone(), ConfirmationTime::Confirmed { + let _ = wallet.insert_tx(&tx, ConfirmationTime::Confirmed { height: 500, time: 50_000 }, None); diff --git a/crates/bdk/src/wallet/tracker.rs b/crates/bdk/src/wallet/tracker.rs deleted file mode 100644 index 07fdaeec2..000000000 --- a/crates/bdk/src/wallet/tracker.rs +++ /dev/null @@ -1,197 +0,0 @@ -use alloc::vec::Vec; -use bdk_chain::{ - indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, - keychain::{Balance, DerivationAdditions, KeychainTxOutIndex}, - local_chain::{self, LocalChain}, - tx_graph::{CanonicalTx, TxGraph}, - Append, BlockId, ConfirmationTimeAnchor, ObservedAs, -}; -use bitcoin::{Transaction, Txid}; - -use crate::KeychainKind; - -pub type FullTxOut = bdk_chain::FullTxOut>; - -#[derive(Debug, Default)] -pub struct Tracker { - pub indexed_graph: IndexedTxGraph>, - pub chain: LocalChain, -} - -impl Tracker { - pub fn chain(&self) -> &LocalChain { - &self.chain - } 
- - pub fn graph(&self) -> &TxGraph { - self.indexed_graph.graph() - } - - pub fn index(&self) -> &KeychainTxOutIndex { - &self.indexed_graph.index - } - - pub fn index_mut(&mut self) -> &mut KeychainTxOutIndex { - &mut self.indexed_graph.index - } - - pub fn insert_tx( - &mut self, - tx: Transaction, - anchors: impl IntoIterator, - seen_at: Option, - ) -> Result { - let txid = tx.txid(); - - let anchors = anchors - .into_iter() - .map(|a| { - if a.anchor_block.height < a.confirmation_height { - Err(InsertTxInvalidAnchorError { - txid, - tx_height: a.confirmation_height, - anchor_block_height: a.anchor_block.height, - }) - } else { - Ok(a) - } - }) - .collect::, _>>()?; - - Ok(self.indexed_graph.insert_tx(&tx, anchors, seen_at).into()) - } - - pub fn insert_block( - &mut self, - block_id: BlockId, - ) -> Result { - self.chain.insert_block(block_id).map(Into::into) - } - - pub fn apply_changeset(&mut self, changeset: ChangeSet) { - self.indexed_graph - .apply_additions(changeset.indexed_additions); - self.chain.apply_changeset(changeset.chain_changeset); - } - - pub fn list_owned_txouts(&self) -> impl Iterator { - // [TODO] Use block id of correct genesis block - let chain_tip = self.chain.tip().unwrap_or_default(); - - self.indexed_graph - .graph() - .list_chain_txouts(&self.chain, chain_tip) - .filter_map(|full_txo| { - let keychain_ind = self.index().index_of_spk(&full_txo.txout.script_pubkey)?; - Some((keychain_ind, full_txo)) - }) - } - - pub fn list_owned_unspents(&self) -> impl Iterator { - self.list_owned_txouts() - .filter(|(_, full_txo)| full_txo.spent_by.is_none()) - } - - pub fn list_transactions( - &self, - ) -> impl Iterator> { - // [TODO] Use block id of correct genesis block - let chain_tip = self.chain.tip().unwrap_or_default(); - self.graph().list_chain_txs(&self.chain, chain_tip) - } - - pub fn balance(&self) -> Balance { - let chain_tip = self.chain.tip().unwrap_or_default(); - self.indexed_graph.balance(&self.chain, chain_tip, |spk| { - 
matches!( - self.indexed_graph.index.index_of_spk(spk), - Some(&(KeychainKind::Internal, _)) - ) - }) - } - - pub fn get_tx(&self, txid: Txid) -> Option> { - let chain_tip = self.chain().tip().unwrap_or_default(); - - let node = self.graph().get_tx_node(txid)?; - let observed_as = self - .graph() - .get_chain_position(self.chain(), chain_tip, txid)?; - Some(CanonicalTx { observed_as, node }) - } -} - -/// The changeset produced internally by applying an update -#[derive(Debug, Default, PartialEq, serde::Deserialize, serde::Serialize)] -pub struct ChangeSet { - pub indexed_additions: - IndexedAdditions>, - pub chain_changeset: local_chain::ChangeSet, -} - -impl ChangeSet { - pub fn is_empty(&self) -> bool { - self.indexed_additions.graph_additions.is_empty() - && self.indexed_additions.index_additions.is_empty() - && self.chain_changeset.is_empty() - } -} - -impl Append for ChangeSet { - fn append(&mut self, other: Self) { - Append::append(&mut self.indexed_additions, other.indexed_additions); - Append::append(&mut self.chain_changeset, other.chain_changeset); - } -} - -impl From> for ChangeSet { - fn from(index_additions: DerivationAdditions) -> Self { - Self { - indexed_additions: IndexedAdditions { - index_additions, - ..Default::default() - }, - ..Default::default() - } - } -} - -impl From for ChangeSet { - fn from(chain_changeset: local_chain::ChangeSet) -> Self { - Self { - chain_changeset, - ..Default::default() - } - } -} - -impl From>> - for ChangeSet -{ - fn from( - indexed_additions: IndexedAdditions< - ConfirmationTimeAnchor, - DerivationAdditions, - >, - ) -> Self { - Self { - indexed_additions, - chain_changeset: Default::default(), - } - } -} - -#[derive(Debug, PartialEq, Eq)] -pub struct InsertTxInvalidAnchorError { - pub txid: Txid, - pub tx_height: u32, - pub anchor_block_height: u32, -} - -impl std::fmt::Display for InsertTxInvalidAnchorError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "cannot insert tx 
({}) with anchor block height ({}) higher than tx confirmation height ({})", self.txid, self.anchor_block_height, self.tx_height) - } -} - -impl std::error::Error for InsertTxInvalidAnchorError {} diff --git a/crates/bdk/src/wallet/tx_builder.rs b/crates/bdk/src/wallet/tx_builder.rs index f647142c4..d54da9fe7 100644 --- a/crates/bdk/src/wallet/tx_builder.rs +++ b/crates/bdk/src/wallet/tx_builder.rs @@ -47,8 +47,8 @@ use bitcoin::util::psbt::{self, PartiallySignedTransaction as Psbt}; use bitcoin::{LockTime, OutPoint, Script, Sequence, Transaction}; use super::coin_selection::{CoinSelectionAlgorithm, DefaultCoinSelectionAlgorithm}; -use super::ChangeSet; -use super::Tracker; +use super::WalletChangeSet; +use super::WalletTracker; use crate::{ types::{FeeRate, KeychainKind, LocalUtxo, WeightedUtxo}, TransactionDetails, @@ -527,7 +527,7 @@ impl<'a, D, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> TxBuilder<'a, D, /// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error> where - D: PersistBackend, + D: PersistBackend, { self.wallet .borrow_mut() diff --git a/crates/bdk/tests/common.rs b/crates/bdk/tests/common.rs index 7257625fd..84868c92f 100644 --- a/crates/bdk/tests/common.rs +++ b/crates/bdk/tests/common.rs @@ -27,16 +27,14 @@ pub fn get_funded_wallet_with_change( }; wallet.insert_checkpoint(anchor_block).unwrap(); - wallet - .insert_tx( - tx.clone(), - ConfirmationTime::Confirmed { - height: 1000, - time: 100, - }, - None, - ) - .unwrap(); + wallet.insert_tx( + &tx, + ConfirmationTime::Confirmed { + height: 1000, + time: 100, + }, + None, + ); (wallet, tx.txid()) } diff --git a/crates/bdk/tests/wallet.rs b/crates/bdk/tests/wallet.rs index ebeea2592..4ff73f4b8 100644 --- a/crates/bdk/tests/wallet.rs +++ b/crates/bdk/tests/wallet.rs @@ -34,19 +34,17 @@ fn receive_output(wallet: &mut Wallet, value: u64, height: TxHeight) -> OutPoint }], }; - wallet - .insert_tx( - tx.clone(), - 
match height { - TxHeight::Confirmed(height) => ConfirmationTime::Confirmed { - height, - time: 42_000, - }, - TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed, + wallet.insert_tx( + &tx, + match height { + TxHeight::Confirmed(height) => ConfirmationTime::Confirmed { + height, + time: 42_000, }, - None, - ) - .unwrap(); + TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed, + }, + None, + ); OutPoint { txid: tx.txid(), @@ -74,7 +72,7 @@ fn test_descriptor_checksum() { assert_eq!(checksum.len(), 8); let raw_descriptor = wallet - .keychanins() + .keychains() .iter() .next() .unwrap() @@ -811,9 +809,7 @@ fn test_create_tx_add_utxo() { version: 0, lock_time: PackedLockTime(0), }; - wallet - .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&small_output_tx, ConfirmationTime::Unconfirmed, None); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); let mut builder = wallet.build_tx(); @@ -848,9 +844,7 @@ fn test_create_tx_manually_selected_insufficient() { lock_time: PackedLockTime(0), }; - wallet - .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&small_output_tx, ConfirmationTime::Unconfirmed, None); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); let mut builder = wallet.build_tx(); @@ -890,9 +884,7 @@ fn test_create_tx_policy_path_no_csv() { script_pubkey: wallet.get_address(New).script_pubkey(), }], }; - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let external_policy = wallet.policies(KeychainKind::External).unwrap().unwrap(); let root_id = external_policy.id; @@ -1217,9 +1209,7 @@ fn test_bump_fee_irreplaceable_tx() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); 
wallet.build_fee_bump(txid).unwrap().finish().unwrap(); } @@ -1235,16 +1225,14 @@ fn test_bump_fee_confirmed_tx() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx( - tx, - ConfirmationTime::Confirmed { - height: 42, - time: 42_000, - }, - None, - ) - .unwrap(); + wallet.insert_tx( + &tx, + ConfirmationTime::Confirmed { + height: 42, + time: 42_000, + }, + None, + ); wallet.build_fee_bump(txid).unwrap().finish().unwrap(); } @@ -1263,9 +1251,7 @@ fn test_bump_fee_low_fee_rate() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(1.0)); @@ -1286,9 +1272,7 @@ fn test_bump_fee_low_abs() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(10); @@ -1308,9 +1292,7 @@ fn test_bump_fee_zero_abs() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(0); @@ -1328,9 +1310,7 @@ fn test_bump_fee_reduce_change() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(2.5)).enable_rbf(); @@ -1415,9 +1395,7 @@ fn test_bump_fee_reduce_single_recipient() { let (psbt, original_details) = builder.finish().unwrap(); let tx = 
psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -1448,9 +1426,7 @@ fn test_bump_fee_absolute_reduce_single_recipient() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - assert!(wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .expect("can insert tx")); + assert!(wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None)); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -1482,16 +1458,14 @@ fn test_bump_fee_drain_wallet() { script_pubkey: wallet.get_address(New).script_pubkey(), }], }; - wallet - .insert_tx( - tx.clone(), - ConfirmationTime::Confirmed { - height: wallet.latest_checkpoint().unwrap().height, - time: 42_000, - }, - None, - ) - .unwrap(); + wallet.insert_tx( + &tx, + ConfirmationTime::Confirmed { + height: wallet.latest_checkpoint().unwrap().height, + time: 42_000, + }, + None, + ); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); let mut builder = wallet.build_tx(); @@ -1507,9 +1481,7 @@ fn test_bump_fee_drain_wallet() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); assert_eq!(original_details.sent, 25_000); // for the new feerate, it should be enough to reduce the output, but since we specify @@ -1543,13 +1515,7 @@ fn test_bump_fee_remove_output_manually_selected_only() { value: 25_000, }], }; - wallet - .insert_tx( - init_tx.clone(), - wallet.transactions().last().unwrap().0, - None, - ) - .unwrap(); + wallet.insert_tx(&init_tx, wallet.transactions().last().unwrap().0, None); let outpoint = OutPoint { txid: init_tx.txid(), vout: 0, @@ 
-1565,9 +1531,7 @@ fn test_bump_fee_remove_output_manually_selected_only() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); assert_eq!(original_details.sent, 25_000); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1589,9 +1553,7 @@ fn test_bump_fee_add_input() { value: 25_000, }], }; - wallet - .insert_tx(init_tx, wallet.transactions().last().unwrap().0, None) - .unwrap(); + wallet.insert_tx(&init_tx, wallet.transactions().last().unwrap().0, None); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); let mut builder = wallet.build_tx().coin_selection(LargestFirstCoinSelection); @@ -1601,9 +1563,7 @@ fn test_bump_fee_add_input() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(50.0)); @@ -1647,9 +1607,7 @@ fn test_bump_fee_absolute_add_input() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(6_000); @@ -1699,9 +1657,7 @@ fn test_bump_fee_no_change_add_input_and_change() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); // now bump the fees without using `allow_shrinking`. 
the wallet should add an // extra input and a change output, and leave the original output untouched @@ -1757,9 +1713,7 @@ fn test_bump_fee_add_input_change_dust() { assert_eq!(tx.input.len(), 1); assert_eq!(tx.output.len(), 2); let txid = tx.txid(); - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); // We set a fee high enough that during rbf we are forced to add @@ -1818,9 +1772,7 @@ fn test_bump_fee_force_add_input() { for txin in &mut tx.input { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } - wallet - .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); // the new fee_rate is low enough that just reducing the change would be fine, but we force // the addition of an extra input with `add_utxo()` let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1873,9 +1825,7 @@ fn test_bump_fee_absolute_force_add_input() { for txin in &mut tx.input { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } - wallet - .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); // the new fee_rate is low enough that just reducing the change would be fine, but we force // the addition of an extra input with `add_utxo()` @@ -1934,9 +1884,7 @@ fn test_bump_fee_unconfirmed_inputs_only() { for txin in &mut tx.input { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(25.0)); builder.finish().unwrap(); @@ -1965,9 +1913,7 @@ fn test_bump_fee_unconfirmed_input() { for txin in &mut 
tx.input { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } - wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) - .unwrap(); + wallet.insert_tx(&tx, ConfirmationTime::Unconfirmed, None); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -3054,16 +3000,14 @@ fn test_spend_coinbase() { script_pubkey: wallet.get_address(New).address.script_pubkey(), }], }; - wallet - .insert_tx( - coinbase_tx, - ConfirmationTime::Confirmed { - height: confirmation_height, - time: 30_000, - }, - None, - ) - .unwrap(); + wallet.insert_tx( + &coinbase_tx, + ConfirmationTime::Confirmed { + height: confirmation_height, + time: 30_000, + }, + None, + ); let not_yet_mature_time = confirmation_height + COINBASE_MATURITY - 1; let maturity_time = confirmation_height + COINBASE_MATURITY; diff --git a/crates/chain/src/indexed_tx_graph.rs b/crates/chain/src/indexed_tx_graph.rs index 51f2558ae..0c7bd0b96 100644 --- a/crates/chain/src/indexed_tx_graph.rs +++ b/crates/chain/src/indexed_tx_graph.rs @@ -28,12 +28,14 @@ impl Default for IndexedTxGraph { } } -impl IndexedTxGraph { +impl IndexedTxGraph { /// Get a reference of the internal transaction graph. pub fn graph(&self) -> &TxGraph { &self.graph } +} +impl IndexedTxGraph { /// Applies the [`IndexedAdditions`] to the [`IndexedTxGraph`]. 
pub fn apply_additions(&mut self, additions: IndexedAdditions) { let IndexedAdditions { diff --git a/crates/chain/src/lib.rs b/crates/chain/src/lib.rs index ee518f4cd..053998ba7 100644 --- a/crates/chain/src/lib.rs +++ b/crates/chain/src/lib.rs @@ -27,6 +27,7 @@ pub use chain_data::*; pub mod indexed_tx_graph; pub mod keychain; pub mod local_chain; +pub mod remote_chain; pub mod sparse_chain; mod tx_data_traits; pub mod tx_graph; @@ -35,6 +36,7 @@ mod chain_oracle; pub use chain_oracle::*; mod persist; pub use persist::*; +pub mod tracker; #[doc(hidden)] pub mod example_utils; diff --git a/crates/chain/src/local_chain.rs b/crates/chain/src/local_chain.rs index fc635173f..c77f33738 100644 --- a/crates/chain/src/local_chain.rs +++ b/crates/chain/src/local_chain.rs @@ -3,7 +3,7 @@ use core::convert::Infallible; use alloc::collections::{BTreeMap, BTreeSet}; use bitcoin::BlockHash; -use crate::{BlockId, ChainOracle}; +use crate::{tracker::LastSeenBlock, BlockId, ChainOracle}; /// This is a local implementation of [`ChainOracle`]. 
/// @@ -18,6 +18,12 @@ pub struct LocalChain { blocks: BTreeMap, } +impl LastSeenBlock for LocalChain { + fn last_seen_block(&self) -> Option { + self.tip() + } +} + impl ChainOracle for LocalChain { type Error = Infallible; diff --git a/crates/chain/src/remote_chain.rs b/crates/chain/src/remote_chain.rs new file mode 100644 index 000000000..2da420393 --- /dev/null +++ b/crates/chain/src/remote_chain.rs @@ -0,0 +1,71 @@ +use crate::{tracker::LastSeenBlock, Append, BlockId, ChainOracle}; + +pub struct RemoteChain { + oracle: O, + last_seen: Option, +} + +impl LastSeenBlock for RemoteChain { + fn last_seen_block(&self) -> Option { + self.last_seen + } +} + +impl ChainOracle for RemoteChain { + type Error = O::Error; + + fn is_block_in_chain( + &self, + block: BlockId, + static_block: BlockId, + ) -> Result, Self::Error> { + self.oracle.is_block_in_chain(block, static_block) + } +} + +impl RemoteChain { + pub fn new(oracle: O) -> Self { + Self { + oracle, + last_seen: None, + } + } + + pub fn inner(&self) -> &O { + &self.oracle + } + + pub fn last_seen_block(&self) -> Option { + self.last_seen + } + + pub fn update_last_seen_block(&mut self, last_seen_block: BlockId) -> ChangeSet { + let update = match self.last_seen { + Some(original_ls) => { + original_ls.height < last_seen_block.height || original_ls == last_seen_block + } + None => true, + }; + if update { + let changeset = Some(last_seen_block); + self.last_seen = changeset; + changeset + } else { + None + } + } + + pub fn apply_changeset(&mut self, changeset: ChangeSet) { + Append::append(&mut self.last_seen, changeset) + } +} + +pub type ChangeSet = Option; + +impl Append for ChangeSet { + fn append(&mut self, other: Self) { + if *self != other && self.map(|b| b.height) <= other.map(|b| b.height) { + *self = other; + } + } +} diff --git a/crates/chain/src/tracker.rs b/crates/chain/src/tracker.rs new file mode 100644 index 000000000..9a8dfe915 --- /dev/null +++ b/crates/chain/src/tracker.rs @@ -0,0 +1,357 @@ 
+use core::{convert::Infallible, fmt::Debug}; + +use alloc::collections::BTreeMap; +use bitcoin::{BlockHash, Transaction, Txid}; +use miniscript::{Descriptor, DescriptorPublicKey}; + +use crate::{ + indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, + keychain::{Balance, DerivationAdditions, KeychainTxOutIndex}, + local_chain::{self, LocalChain}, + remote_chain::{self, RemoteChain}, + tx_graph::{CanonicalTx, TxGraph}, + Anchor, Append, BlockId, ChainOracle, FullTxOut, ObservedAs, +}; + +/// An trait which represents the last seen chain-source state. +/// +/// Only [`RemoteChain`] and [`LocalChain`] should implement this. +pub trait LastSeenBlock { + /// Get the last seen block height and hash. + fn last_seen_block(&self) -> Option; +} + +pub type LocalTracker = Tracker; +pub type RemoteTracker = Tracker>; + +pub type LocalUpdate = Update; +pub type RemoteUpdate = Update>; + +pub type LocalChangeSet = ChangeSet; +pub type RemoteChangeSet = ChangeSet; + +/// An in-memory representation of chain data that we are tracking. +/// +/// * `K` is our keychain identifier. +/// * `A` is the [`Anchor`] implementation. +/// * `B` is the representation of the best chain history. This can either be a [`LocalChain`] or a +/// [`RemoteChain`] (which wraps a remote [`ChainOracle`] implementation). +/// +/// [`Tracker`] can be constructed with [`new_local`] or [`new_remote`] (depending on the +/// chain-history type). 
+/// +/// [`new_local`]: Self::new_local +/// [`new_remote`]: Self::new_remote +#[derive(Debug)] +pub struct Tracker { + indexed_graph: IndexedTxGraph>, + chain: C, + genesis_blockhash: BlockHash, +} + +impl LocalTracker { + pub fn new_local( + genesis_blockhash: BlockHash, + keychains: impl IntoIterator)>, + ) -> Self { + Self { + genesis_blockhash, + indexed_graph: { + let mut indexed_graph = IndexedTxGraph::>::default(); + for (keychain, descriptor) in keychains.into_iter() { + indexed_graph.index.add_keychain(keychain, descriptor); + } + indexed_graph + }, + chain: LocalChain::default(), + } + } + + pub fn apply_update( + &mut self, + update: LocalUpdate, + ) -> ( + LocalChangeSet, + Result<(), local_chain::UpdateNotConnectedError>, + ) { + let mut changeset = LocalChangeSet { + indexed_additions: { + let (_, derivation_additions) = self + .indexed_graph + .index + .reveal_to_target_multi(&update.index_update); + let mut additions = self.indexed_graph.apply_update(update.graph_update); + additions.index_additions.append(derivation_additions); + additions + }, + ..Default::default() + }; + + match self.chain.apply_update(update.chain_update) { + Ok(chain_changeset) => { + changeset.chain_changeset = chain_changeset; + (changeset, Ok(())) + } + Err(err) => (changeset, Err(err)), + } + } + + pub fn apply_changeset(&mut self, changeset: LocalChangeSet) { + self.indexed_graph + .apply_additions(changeset.indexed_additions); + self.chain.apply_changeset(changeset.chain_changeset); + } + + pub fn insert_block( + &mut self, + block_id: BlockId, + ) -> Result, local_chain::InsertBlockNotMatchingError> { + self.chain + .insert_block(block_id) + .map(|chain_changeset| LocalChangeSet { + chain_changeset, + ..Default::default() + }) + } +} + +impl RemoteTracker { + pub fn new_remote( + genesis_blockhash: BlockHash, + keychains: impl IntoIterator)>, + oracle: O, + ) -> Self { + Self { + genesis_blockhash, + indexed_graph: { + let mut indexed_graph = 
IndexedTxGraph::>::default(); + for (keychain, descriptor) in keychains.into_iter() { + indexed_graph.index.add_keychain(keychain, descriptor); + } + indexed_graph + }, + chain: RemoteChain::new(oracle), + } + } + + pub fn apply_update(&mut self, update: RemoteUpdate) -> RemoteChangeSet { + RemoteChangeSet { + indexed_additions: { + let (_, derivation_additions) = self + .indexed_graph + .index + .reveal_to_target_multi(&update.index_update); + let mut additions = self.indexed_graph.apply_update(update.graph_update); + additions.index_additions.append(derivation_additions); + additions + }, + chain_changeset: match update.chain_update { + Some(last_seen_block) => self.chain.update_last_seen_block(last_seen_block), + None => Default::default(), + }, + } + } + + pub fn apply_changeset(&mut self, changeset: RemoteChangeSet) { + self.indexed_graph + .apply_additions(changeset.indexed_additions); + self.chain.apply_changeset(changeset.chain_changeset); + } +} + +impl Tracker { + pub fn index(&self) -> &KeychainTxOutIndex { + &self.indexed_graph.index + } + + pub fn index_mut(&mut self) -> &mut KeychainTxOutIndex { + &mut self.indexed_graph.index + } + + pub fn graph(&self) -> &TxGraph { + self.indexed_graph.graph() + } + + pub fn chain(&self) -> &C { + &self.chain + } + + pub fn genesis_block(&self) -> BlockId { + BlockId { + height: 0, + hash: self.genesis_blockhash, + } + } +} + +impl Tracker +where + K: Debug + Clone + Ord, +{ + pub fn insert_tx( + &mut self, + tx: &Transaction, + anchors: impl IntoIterator, + seen_at: Option, + ) -> ChangeSet { + ChangeSet { + indexed_additions: self.indexed_graph.insert_tx(tx, anchors, seen_at), + ..Default::default() + } + } + + pub fn try_get_transaction( + &self, + txid: Txid, + ) -> Result>, C::Error> { + let chain_tip = self.chain.last_seen_block().unwrap_or(self.genesis_block()); + let node = match self.graph().get_tx_node(txid) { + Some(node) => node, + None => return Ok(None), + }; + self.graph() + 
.try_get_chain_position(&self.chain, chain_tip, txid) + .map(|opt| opt.map(|observed_as| CanonicalTx { observed_as, node })) + } + + pub fn try_list_transactions( + &self, + ) -> impl Iterator, C::Error>> { + let chain_tip = self.chain.last_seen_block().unwrap_or(self.genesis_block()); + self.indexed_graph + .graph() + .try_list_chain_txs(&self.chain, chain_tip) + } + + pub fn try_list_owned_txouts( + &self, + ) -> impl Iterator>), C::Error>> { + let chain_tip = self.chain.last_seen_block().unwrap_or(self.genesis_block()); + self.indexed_graph + .graph() + .try_list_chain_txouts(&self.chain, chain_tip) + .filter_map(|r| match r { + Err(err) => Some(Err(err)), + Ok(full_txo) => Some(Ok(( + self.indexed_graph + .index + .index_of_spk(&full_txo.txout.script_pubkey)?, + full_txo, + ))), + }) + } + + pub fn try_list_owned_unspents( + &self, + ) -> impl Iterator>), C::Error>> { + self.try_list_owned_txouts().filter(|r| { + if let Ok((_, full_txo)) = r { + if full_txo.spent_by.is_some() { + return false; + } + } + true + }) + } + + pub fn try_balance(&self, should_trust: impl Fn(&K) -> bool) -> Result { + let chain_tip = self.chain.last_seen_block().unwrap_or(self.genesis_block()); + self.indexed_graph + .try_balance(&self.chain, chain_tip, |script| { + match self.index().index_of_spk(script) { + Some((keychain, _)) => should_trust(keychain), + None => false, + } + }) + } +} + +impl + LastSeenBlock> Tracker +where + K: Debug + Clone + Ord, +{ + pub fn get_transaction(&self, txid: Txid) -> Option> { + self.try_get_transaction(txid) + .expect("oracle is infallible") + } + + pub fn list_transactions(&self) -> impl Iterator> { + self.try_list_transactions() + .map(|r| r.expect("oracle is infallible")) + } + + pub fn list_owned_txouts(&self) -> impl Iterator>)> { + self.try_list_owned_txouts() + .map(|r| r.expect("oracle is infallible")) + } + + pub fn list_owned_unspents( + &self, + ) -> impl Iterator>)> { + self.try_list_owned_unspents() + .map(|r| r.expect("oracle is 
infallible")) + } + + pub fn balance(&self, should_trust: impl Fn(&K) -> bool) -> Balance { + self.try_balance(should_trust) + .expect("oracle is infallible") + } +} + +#[derive(Debug, PartialEq)] +pub struct Update { + pub index_update: BTreeMap, + pub graph_update: TxGraph, + pub chain_update: CC, +} + +/// A structure containing the resultant changes to [`Tracker`] after an update. +#[derive(Debug, PartialEq)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde( + crate = "serde_crate", + bound( + deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>, CC: Ord + serde::Deserialize<'de>", + serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize, CC: Ord + serde::Serialize", + ) + ) +)] +#[must_use] +pub struct ChangeSet { + pub indexed_additions: IndexedAdditions>, + pub chain_changeset: CC, +} + +impl Default for ChangeSet { + fn default() -> Self { + Self { + indexed_additions: Default::default(), + chain_changeset: Default::default(), + } + } +} + +impl Append for ChangeSet { + fn append(&mut self, other: Self) { + Append::append(&mut self.indexed_additions, other.indexed_additions); + Append::append(&mut self.chain_changeset, other.chain_changeset) + } +} + +impl LocalChangeSet { + pub fn is_empty(&self) -> bool { + self.indexed_additions.index_additions.is_empty() + && self.indexed_additions.graph_additions.is_empty() + && self.chain_changeset.is_empty() + } +} + +impl RemoteChangeSet { + pub fn is_empty(&self) -> bool { + self.indexed_additions.index_additions.is_empty() + && self.indexed_additions.graph_additions.is_empty() + && self.chain_changeset.is_none() + } +} diff --git a/crates/chain/src/tx_data_traits.rs b/crates/chain/src/tx_data_traits.rs index 7bf871cfd..8ec695add 100644 --- a/crates/chain/src/tx_data_traits.rs +++ b/crates/chain/src/tx_data_traits.rs @@ -81,11 +81,3 @@ impl Append for BTreeSet { BTreeSet::append(self, &mut other) } } - -impl Append for Option { - fn 
append(&mut self, other: Self) { - if *self < other { - *self = other; - } - } -} diff --git a/crates/electrum/Cargo.toml b/crates/electrum/Cargo.toml index 96eae5cf8..20eac4d37 100644 --- a/crates/electrum/Cargo.toml +++ b/crates/electrum/Cargo.toml @@ -12,9 +12,5 @@ readme = "README.md" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -bdk = { path = "../bdk", version = "1.0.0-alpha.0", optional = true } bdk_chain = { path = "../chain", version = "0.4.0", features = ["serde", "miniscript"] } electrum-client = { version = "0.12" } - -[features] -wallet = ["bdk"] diff --git a/crates/electrum/src/v2.rs b/crates/electrum/src/v2.rs index 60d303103..5f4041d02 100644 --- a/crates/electrum/src/v2.rs +++ b/crates/electrum/src/v2.rs @@ -1,10 +1,9 @@ use bdk_chain::{ bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid}, - indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, - keychain::{DerivationAdditions, KeychainTxOutIndex}, - local_chain::{self, LocalChain, UpdateNotConnectedError}, + local_chain::LocalChain, + tracker::LocalUpdate, tx_graph::{self, TxGraph}, - Anchor, Append, BlockId, ConfirmationHeightAnchor, ConfirmationTimeAnchor, + Anchor, BlockId, ConfirmationHeightAnchor, ConfirmationTimeAnchor, }; use electrum_client::{Client, ElectrumApi, Error}; use std::{ @@ -15,13 +14,13 @@ use std::{ use crate::InternalError; #[derive(Debug, Clone)] -pub struct ElectrumUpdate { - pub graph_update: G, +pub struct ElectrumUpdate { + pub graph_update: HashMap>, pub chain_update: LocalChain, pub keychain_update: BTreeMap, } -impl Default for ElectrumUpdate { +impl Default for ElectrumUpdate { fn default() -> Self { Self { graph_update: Default::default(), @@ -31,9 +30,7 @@ impl Default for ElectrumUpdate { } } -pub type IntermediaryElectrumUpdate = ElectrumUpdate>, K>; - -impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { +impl<'a, K, A: Anchor> ElectrumUpdate { pub fn missing_full_txs( 
&'a self, graph: &'a TxGraph, @@ -43,7 +40,7 @@ impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { .filter(move |&&txid| graph.as_ref().get_tx(txid).is_none()) } - pub fn finalize(self, seen_at: Option, new_txs: T) -> FinalElectrumUpdate + pub fn finalize(self, seen_at: Option, new_txs: T) -> LocalUpdate where T: IntoIterator, { @@ -57,24 +54,31 @@ impl<'a, A: Anchor, K> IntermediaryElectrumUpdate { } } dbg!(graph_update.full_txs().count()); - FinalElectrumUpdate { + LocalUpdate { + index_update: self.keychain_update, graph_update, chain_update: self.chain_update, - keychain_update: self.keychain_update, } } } -pub type FinalElectrumUpdate = ElectrumUpdate, K>; - -impl FinalElectrumUpdate { - pub fn into_confirmation_time_update( +impl ElectrumUpdate { + pub fn finalize_as_confirmation_time( self, client: &Client, - ) -> Result, Error> { + seen_at: Option, + new_txs: T, + ) -> Result, Error> + where + T: IntoIterator, + { + let update = self.finalize(seen_at, new_txs); + let update_tip = update.chain_update.tip().expect("must have tip"); + let relevant_heights = { let mut visited_heights = HashSet::new(); - self.graph_update + update + .graph_update .all_anchors() .map(|(a, _)| a.confirmation_height_upper_bound()) .filter(move |&h| visited_heights.insert(h)) @@ -94,7 +98,7 @@ impl FinalElectrumUpdate { .collect::>(); let graph_additions = { - let old_additions = TxGraph::default().determine_additions(&self.graph_update); + let old_additions = TxGraph::default().determine_additions(&update.graph_update); tx_graph::Additions { tx: old_additions.tx, txout: old_additions.txout, @@ -118,63 +122,22 @@ impl FinalElectrumUpdate { } }; - Ok(FinalElectrumUpdate { + if update_tip.hash != client.block_header(update_tip.height as _)?.block_hash() { + // [TODO] We should alter the logic so we won't have to return an error + return Err(Error::Message(format!( + "tip changed during update: update_tip={:?}", + update_tip + ))); + } + + Ok(LocalUpdate { + index_update: 
update.index_update, graph_update: { let mut graph = TxGraph::default(); graph.apply_additions(graph_additions); graph }, - chain_update: self.chain_update, - keychain_update: self.keychain_update, - }) - } -} - -impl FinalElectrumUpdate { - pub fn apply( - self, - indexed_graph: &mut IndexedTxGraph>, - chain: &mut LocalChain, - ) -> Result< - ( - IndexedAdditions>, - local_chain::ChangeSet, - ), - UpdateNotConnectedError, - > { - let (_, derivation_additions) = indexed_graph - .index - .reveal_to_target_multi(&self.keychain_update); - - let additions = { - let mut additions = indexed_graph.apply_update(self.graph_update); - additions.index_additions.append(derivation_additions); - additions - }; - - let changeset = chain.apply_update(self.chain_update)?; - - Ok((additions, changeset)) - } -} - -#[cfg(feature = "wallet")] -impl FinalElectrumUpdate { - pub fn apply_to_tracker( - self, - tracker: &mut bdk::wallet::Tracker, - ) -> Result { - Ok(bdk::wallet::ChangeSet { - indexed_additions: { - let mut additions = tracker.indexed_graph.apply_update(self.graph_update); - let (_, derivation_additions) = tracker - .indexed_graph - .index - .reveal_to_target_multi(&self.keychain_update); - additions.index_additions.append(derivation_additions); - additions - }, - chain_changeset: tracker.chain.apply_update(self.chain_update)?, + chain_update: update.chain_update, }) } } @@ -190,7 +153,7 @@ pub trait ElectrumExt { outpoints: impl IntoIterator, stop_gap: usize, batch_size: usize, - ) -> Result, Error>; + ) -> Result, Error>; fn scan_without_keychain( &self, @@ -199,7 +162,7 @@ pub trait ElectrumExt { txids: impl IntoIterator, outpoints: impl IntoIterator, batch_size: usize, - ) -> Result, Error> { + ) -> Result, Error> { let spk_iter = misc_spks .into_iter() .enumerate() @@ -231,7 +194,7 @@ impl ElectrumExt for Client { outpoints: impl IntoIterator, stop_gap: usize, batch_size: usize, - ) -> Result, Error> { + ) -> Result, Error> { let mut request_spks = keychain_spks 
.into_iter() .map(|(k, s)| (k, s.into_iter())) @@ -242,7 +205,7 @@ impl ElectrumExt for Client { let outpoints = outpoints.into_iter().collect::>(); let update = loop { - let mut update = IntermediaryElectrumUpdate:: { + let mut update = ElectrumUpdate:: { chain_update: prepare_chain_update(self, local_chain)?, ..Default::default() }; @@ -404,7 +367,7 @@ fn determine_tx_anchor( fn populate_with_outpoints( client: &Client, anchor_block: BlockId, - update: &mut IntermediaryElectrumUpdate, + update: &mut ElectrumUpdate, outpoints: &mut impl Iterator, ) -> Result, InternalError> { let mut full_txs = HashMap::new(); @@ -466,7 +429,7 @@ fn populate_with_outpoints( fn populate_with_txids( client: &Client, anchor_block: BlockId, - update: &mut IntermediaryElectrumUpdate, + update: &mut ElectrumUpdate, txids: &mut impl Iterator, ) -> Result<(), InternalError> { for txid in txids { @@ -502,7 +465,7 @@ fn populate_with_txids( fn populate_with_spks( client: &Client, anchor_block: BlockId, - update: &mut IntermediaryElectrumUpdate, + update: &mut ElectrumUpdate, spks: &mut impl Iterator, stop_gap: usize, batch_size: usize, diff --git a/crates/file_store/Cargo.toml b/crates/file_store/Cargo.toml index 681d5cbce..791780409 100644 --- a/crates/file_store/Cargo.toml +++ b/crates/file_store/Cargo.toml @@ -15,10 +15,5 @@ bdk_chain = { path = "../chain", version = "0.4.0", features = [ "serde", "minis bincode = { version = "1" } serde = { version = "1", features = ["derive"] } -bdk = { path = "../bdk", version = "1.0.0-alpha.0", features = [ "default" ], optional = true } - [dev-dependencies] tempfile = "3" - -[features] -wallet = [ "bdk" ] diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs index 9404888e9..03d5f6ee8 100644 --- a/crates/file_store/src/store.rs +++ b/crates/file_store/src/store.rs @@ -1,29 +1,65 @@ use std::{ + fmt::Debug, fs::{File, OpenOptions}, io::{self, Read, Seek, Write}, marker::PhantomData, path::Path, }; -use bdk_chain::Append; +use 
bdk_chain::{ + tracker::{ChangeSet, LocalChangeSet, LocalTracker, RemoteChangeSet, RemoteTracker, Tracker}, + Anchor, Append, ChainOracle, PersistBackend, +}; use bincode::Options; use crate::{bincode_options, EntryIter, FileError, IterError}; -#[cfg(feature = "wallet")] -use bdk::wallet::{ChangeSet, Tracker}; +pub type TrackerStore = Store, ChangeSet>; +pub type LocalTrackerStore = Store, LocalChangeSet>; +pub type RemoteTrackerStore = Store, RemoteChangeSet>; + +impl PersistBackend, LocalChangeSet> for LocalTrackerStore +where + K: serde::de::DeserializeOwned + serde::Serialize + Debug + Clone + Ord, + A: serde::de::DeserializeOwned + serde::Serialize + Anchor, +{ + type WriteError = std::io::Error; + + type LoadError = IterError; + + fn write_changes(&mut self, changeset: &LocalChangeSet) -> Result<(), Self::WriteError> { + self.append_changeset(changeset) + } + + fn load_into_tracker( + &mut self, + tracker: &mut LocalTracker, + ) -> Result<(), Self::LoadError> { + let (changeset, result) = self.aggregate_changesets(); + tracker.apply_changeset(changeset); + result + } +} -#[cfg(feature = "wallet")] -impl bdk_chain::PersistBackend for Store { +impl PersistBackend, RemoteChangeSet> + for RemoteTrackerStore +where + K: serde::de::DeserializeOwned + serde::Serialize + Debug + Clone + Ord, + A: serde::de::DeserializeOwned + serde::Serialize + Anchor, + O: ChainOracle, +{ type WriteError = std::io::Error; type LoadError = IterError; - fn write_changes(&mut self, changeset: &ChangeSet) -> Result<(), Self::WriteError> { + fn write_changes(&mut self, changeset: &RemoteChangeSet) -> Result<(), Self::WriteError> { self.append_changeset(changeset) } - fn load_into_tracker(&mut self, tracker: &mut Tracker) -> Result<(), Self::LoadError> { + fn load_into_tracker( + &mut self, + tracker: &mut RemoteTracker, + ) -> Result<(), Self::LoadError> { let (changeset, result) = self.aggregate_changesets(); tracker.apply_changeset(changeset); result diff --git 
a/example-crates/tracker_electrum/src/main.rs b/example-crates/tracker_electrum/src/main.rs index dec7388a1..018aa4abc 100644 --- a/example-crates/tracker_electrum/src/main.rs +++ b/example-crates/tracker_electrum/src/main.rs @@ -6,7 +6,7 @@ use std::{ use bdk_chain::{ bitcoin::{Address, BlockHash, Network, OutPoint, Txid}, - Append, ConfirmationHeightAnchor, + ConfirmationHeightAnchor, }; use bdk_electrum::{ electrum_client::{self, ElectrumApi}, @@ -58,11 +58,8 @@ pub struct ScanOptions { } fn main() -> anyhow::Result<()> { - let (args, keymap, tracker, db) = cli::init::( - DB_MAGIC, - DB_PATH, - cli::Tracker::new_local(), - )?; + let (args, keymap, tracker, db) = + cli::init_local::(DB_MAGIC, DB_PATH)?; let electrum_url = match args.network { Network::Bitcoin => "ssl://electrum.blockstream.info:50002", @@ -77,7 +74,7 @@ fn main() -> anyhow::Result<()> { let client = electrum_client::Client::from_config(electrum_url, config)?; // [TODO]: Use genesis block based on network! - let chain_tip = tracker.lock().unwrap().chain.tip().unwrap_or_default(); + let chain_tip = tracker.lock().unwrap().chain().tip().unwrap_or_default(); let electrum_cmd = match args.command.clone() { cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd, @@ -105,8 +102,7 @@ fn main() -> anyhow::Result<()> { let (spk_iters, local_chain) = { let tracker = &*tracker.lock().unwrap(); let spk_iters = tracker - .indexed_graph - .index + .index() .spks_of_all_keychains() .into_iter() .map(|(keychain, iter)| { @@ -123,7 +119,7 @@ fn main() -> anyhow::Result<()> { (keychain, spk_iter) }) .collect::>(); - let local_chain: BTreeMap = tracker.chain.clone().into(); + let local_chain: BTreeMap = tracker.chain().clone().into(); (spk_iters, local_chain) }; @@ -157,7 +153,7 @@ fn main() -> anyhow::Result<()> { let mut spks: Box> = Box::new(core::iter::empty()); if all_spks { - let index = &tracker.indexed_graph.index; + let index = tracker.index(); let all_spks = index .all_spks() .iter() @@ -169,7 +165,7 
@@ fn main() -> anyhow::Result<()> { }))); } if unused_spks { - let index = &tracker.indexed_graph.index; + let index = tracker.index(); let unused_spks = index .unused_spks(..) .map(|(k, v)| (*k, v.clone())) @@ -189,7 +185,7 @@ fn main() -> anyhow::Result<()> { if utxos { let utxos = tracker - .list_owned_unspents(chain_tip) + .list_owned_unspents() .map(|(_, utxo)| utxo) .collect::>(); outpoints = Box::new( @@ -209,7 +205,7 @@ fn main() -> anyhow::Result<()> { if unconfirmed { let unconfirmed_txids = tracker - .list_txs(chain_tip) + .list_transactions() .filter(|ctx| !ctx.observed_as.is_confirmed()) .map(|ctx| ctx.node.txid) .collect::>(); @@ -219,7 +215,7 @@ fn main() -> anyhow::Result<()> { })); } - let local_chain: BTreeMap = tracker.chain.clone().into(); + let local_chain: BTreeMap = tracker.chain().clone().into(); drop(tracker); let update = client.scan_without_keychain( @@ -241,7 +237,7 @@ fn main() -> anyhow::Result<()> { let missing_txids = { let tracker = &*tracker.lock().unwrap(); response - .missing_full_txs(tracker.indexed_graph.graph()) + .missing_full_txs(tracker.graph()) .cloned() .collect::>() }; @@ -258,16 +254,12 @@ fn main() -> anyhow::Result<()> { let tracker = &mut *tracker.lock().unwrap(); let db = &mut *db.lock().unwrap(); - let (additions, changeset) = - update.apply(&mut tracker.indexed_graph, &mut tracker.chain)?; - - let mut tracker_changeset = cli::ChangeSet::default(); - tracker_changeset.append(additions.into()); - tracker_changeset.append(changeset.into()); + let (tracker_changeset, result) = tracker.apply_update(update); // [TODO] How do we check if changeset is empty? // [TODO] When should we flush? 
db.write_changes(&tracker_changeset)?; + result?; } Ok(()) diff --git a/example-crates/tracker_example_cli/src/lib.rs b/example-crates/tracker_example_cli/src/lib.rs index 6de66dbe6..f2c25bf31 100644 --- a/example-crates/tracker_example_cli/src/lib.rs +++ b/example-crates/tracker_example_cli/src/lib.rs @@ -1,26 +1,28 @@ -mod remote_chain; -mod tracker; use bdk_chain::{ bitcoin::{ - psbt::Prevouts, secp256k1::Secp256k1, util::sighash::SighashCache, Address, LockTime, - Network, Sequence, Transaction, TxIn, TxOut, + hashes::Hash, + psbt::Prevouts, + secp256k1::{self, Secp256k1}, + util::sighash::SighashCache, + Address, BlockHash, LockTime, Network, Sequence, Transaction, TxIn, TxOut, }, + indexed_tx_graph::IndexedAdditions, keychain::DerivationAdditions, miniscript::{ descriptor::{DescriptorSecretKey, KeyMap}, Descriptor, DescriptorPublicKey, }, + tracker::{ChangeSet, LastSeenBlock, LocalTracker, RemoteTracker, Tracker}, Anchor, Append, BlockId, ChainOracle, DescriptorExt, FullTxOut, ObservedAs, PersistBackend, }; use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue}; +use bdk_file_store::{LocalTrackerStore, RemoteTrackerStore, TrackerStore}; use clap::{Parser, Subcommand}; use std::{cmp::Reverse, collections::HashMap, path::PathBuf, sync::Mutex, time::Duration}; pub use anyhow; pub use bdk_file_store; pub use clap; -pub use remote_chain::*; -pub use tracker::*; #[derive( Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, serde::Deserialize, serde::Serialize, @@ -67,6 +69,32 @@ pub struct Args { pub command: Commands, } +impl Args { + #[allow(clippy::type_complexity)] // FIXME + pub fn prepare_keychains( + &self, + secp: &Secp256k1, + ) -> anyhow::Result<(Vec<(Keychain, Descriptor)>, KeyMap)> { + let mut tracker_keychains = Vec::new(); + + let (descriptor, mut keymap) = + Descriptor::::parse_descriptor(secp, &self.descriptor)?; + tracker_keychains.push((Keychain::External, descriptor)); + + if let Some((internal_descriptor, 
internal_keymap)) = self + .change_descriptor + .as_ref() + .map(|desc_str| Descriptor::::parse_descriptor(secp, desc_str)) + .transpose()? + { + keymap.extend(internal_keymap); + tracker_keychains.push((Keychain::Internal, internal_descriptor)); + } + + Ok((tracker_keychains, keymap)) + } +} + #[derive(Subcommand, Debug, Clone)] pub enum Commands { #[clap(flatten)] @@ -194,10 +222,10 @@ pub fn run_address_cmd( network: Network, ) -> anyhow::Result<()> where - tracker::ChangeSet: Append + serde::Serialize + serde::de::DeserializeOwned, + ChangeSet: Append + serde::Serialize + serde::de::DeserializeOwned, { let mut tracker = tracker.lock().unwrap(); - let txout_index = &mut tracker.indexed_graph.index; + let txout_index = tracker.index_mut(); let addr_cmd_output = match addr_cmd { AddressCmd::Next => Some(txout_index.next_unused_spk(&Keychain::External)), @@ -208,7 +236,13 @@ where if let Some(((index, spk), additions)) = addr_cmd_output { let mut db = db.lock().unwrap(); // update database since we're about to give out a new address - db.append_changeset(&additions.into())?; + db.append_changeset(&ChangeSet { + indexed_additions: IndexedAdditions { + index_additions: additions, + ..Default::default() + }, + ..Default::default() + })?; let spk = spk.clone(); let address = @@ -248,16 +282,15 @@ where } } -pub fn run_balance_cmd( +pub fn run_balance_cmd( tracker: &Mutex>, - chain_tip: BlockId, ) -> anyhow::Result<()> where ::Error: std::error::Error + Send + Sync + 'static, { let tracker = tracker.lock().unwrap(); let utxos = tracker - .try_list_owned_unspents(chain_tip) + .try_list_owned_unspents() .collect::, B::Error>>()?; let (confirmed, unconfirmed) = @@ -279,10 +312,9 @@ where Ok(()) } -pub fn run_txo_cmd( +pub fn run_txo_cmd( txout_cmd: TxOutCmd, tracker: &Mutex>, - chain_tip: BlockId, network: Network, ) -> anyhow::Result<()> where @@ -298,7 +330,7 @@ where let tracker = tracker.lock().unwrap(); let txouts = tracker - .try_list_owned_txouts(chain_tip) + 
.try_list_owned_txouts() .filter(|r| match r { Ok((_, full_txo)) => match (unspent, spent) { (true, false) => full_txo.spent_by.is_none(), @@ -336,7 +368,7 @@ where } #[allow(clippy::type_complexity)] // FIXME -pub fn create_tx( +pub fn create_tx( value: u64, address: Address, coin_select: CoinSelectionAlgo, @@ -358,8 +390,7 @@ where }; // TODO use planning module - let mut candidates = - planned_utxos(tracker, chain_tip, &assets).collect::, B::Error>>()?; + let mut candidates = planned_utxos(tracker, &assets).collect::, B::Error>>()?; // apply coin selection algorithm match coin_select { @@ -394,8 +425,7 @@ where }]; let internal_keychain = if tracker - .indexed_graph - .index + .index() .keychains() .get(&Keychain::Internal) .is_some() @@ -405,10 +435,8 @@ where Keychain::External }; - let ((change_index, change_script), change_additions) = tracker - .indexed_graph - .index - .next_unused_spk(&internal_keychain); + let ((change_index, change_script), change_additions) = + tracker.index_mut().next_unused_spk(&internal_keychain); additions.append(change_additions); // Clone to drop the immutable reference. 
@@ -416,8 +444,7 @@ where let change_plan = bdk_tmp_plan::plan_satisfaction( &tracker - .indexed_graph - .index + .index() .keychains() .get(&internal_keychain) .expect("must exist") @@ -434,8 +461,7 @@ where let cs_opts = CoinSelectorOpt { target_feerate: 0.5, min_drain_value: tracker - .indexed_graph - .index + .index() .keychains() .get(&internal_keychain) .expect("must exist") @@ -555,34 +581,35 @@ where Ok((transaction, change_info)) } -pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, A: Anchor, B: ChainOracle>( +pub fn planned_utxos< + 'a, + AK: bdk_tmp_plan::CanDerive + Clone, + A: Anchor, + B: ChainOracle + LastSeenBlock, +>( tracker: &'a Tracker, - chain_tip: BlockId, assets: &'a bdk_tmp_plan::Assets, ) -> impl Iterator, FullTxOut>), B::Error>> + 'a where ::Error: std::error::Error + Send + Sync + 'static, { - tracker - .try_list_owned_unspents(chain_tip) - .filter_map(|r| match r { - Ok(((keychain, derivation_index), full_txo)) => { - let desc = tracker - .indexed_graph - .index - .keychains() - .get(keychain) - .expect("must exist") - .at_derivation_index(*derivation_index); - let plan = bdk_tmp_plan::plan_satisfaction(&desc, assets)?; - Some(Ok((plan, full_txo))) - } - Err(err) => Some(Err(err)), - }) + tracker.try_list_owned_unspents().filter_map(|r| match r { + Ok(((keychain, derivation_index), full_txo)) => { + let desc = tracker + .index() + .keychains() + .get(keychain) + .expect("must exist") + .at_derivation_index(*derivation_index); + let plan = bdk_tmp_plan::plan_satisfaction(&desc, assets)?; + Some(Ok((plan, full_txo))) + } + Err(err) => Some(Err(err)), + }) } #[allow(clippy::too_many_arguments)] // FIXME -pub fn handle_commands( +pub fn handle_commands( command: Commands, broadcast: impl FnOnce(&Transaction) -> anyhow::Result<()>, // we Mutex around these not because we need them for a simple CLI app but to demonstrate how @@ -595,13 +622,13 @@ pub fn handle_commands anyhow::Result<()> where ::Error: std::error::Error + Send + 
Sync + 'static, - tracker::ChangeSet: Append + serde::Serialize + serde::de::DeserializeOwned, + ChangeSet: Append + serde::Serialize + serde::de::DeserializeOwned, { match command { // TODO: Make these functions return stuffs Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network), - Commands::Balance => run_balance_cmd(tracker, chain_tip), - Commands::TxOut { txout_cmd } => run_txo_cmd(txout_cmd, tracker, chain_tip, network), + Commands::Balance => run_balance_cmd(tracker), + Commands::TxOut { txout_cmd } => run_txo_cmd(txout_cmd, tracker, network), Commands::Send { value, address, @@ -619,15 +646,18 @@ where // change keychain so future scans will find the tx we're about to broadcast. // If we're unable to persist this, then we don't want to broadcast. let store = &mut *store.lock().unwrap(); - store.append_changeset(&change_derivation_changes.into())?; + store.append_changeset(&ChangeSet { + indexed_additions: IndexedAdditions { + index_additions: change_derivation_changes, + ..Default::default() + }, + ..Default::default() + })?; // We don't want other callers/threads to use this address while we're using it // but we also don't want to scan the tx we just created because it's not // technically in the blockchain yet. 
- tracker - .indexed_graph - .index - .mark_used(&change_keychain, index); + tracker.index_mut().mark_used(&change_keychain, index); (transaction, Some((change_keychain, index))) } else { (transaction, None) @@ -640,18 +670,15 @@ where let now = std::time::SystemTime::elapsed(&std::time::UNIX_EPOCH).unwrap(); let mut tracker = tracker.lock().unwrap(); - let additions = - tracker - .indexed_graph - .insert_tx(&transaction, None, Some(now.as_secs())); - if !additions.graph_additions.is_empty() - || !additions.index_additions.is_empty() + let additions = tracker.insert_tx(&transaction, None, Some(now.as_secs())); + if !additions.indexed_additions.index_additions.is_empty() + || !additions.indexed_additions.graph_additions.is_empty() { let store = &mut *store.lock().unwrap(); // We know the tx is at least unconfirmed now. Note if persisting here fails, // it's not a big deal since we can always find it again form // blockchain. - store.append_changeset(&additions.into())?; + store.append_changeset(&additions)?; } Ok(()) } @@ -659,7 +686,7 @@ where let tracker = &mut *tracker.lock().unwrap(); if let Some((keychain, index)) = change_index { // We failed to broadcast, so allow our change address to be used in the future - tracker.indexed_graph.index.unmark_used(&keychain, index); + tracker.index_mut().unmark_used(&keychain, index); } Err(e) } @@ -672,60 +699,73 @@ where } #[allow(clippy::type_complexity)] // FIXME -pub fn init( +pub fn init_local( db_magic: &'static [u8], db_default_path: &str, - mut tracker: Tracker, ) -> anyhow::Result<( Args, KeyMap, - // These don't need to have mutexes around them, but we want the cli example code to make it obvious how they - // are thread-safe, forcing the example developers to show where they would lock and unlock things. 
- Mutex>, - Mutex>, + Mutex>, + Mutex>, )> where - ::Error: std::error::Error + Send + Sync + 'static, - tracker::ChangeSet: Append + serde::Serialize + serde::de::DeserializeOwned, - TrackerStore: - PersistBackend, ChangeSet>, + A: serde::Serialize + serde::de::DeserializeOwned, { if std::env::var("BDK_DB_PATH").is_err() { std::env::set_var("BDK_DB_PATH", db_default_path); } + let args = Args::::parse(); + let secp = Secp256k1::default(); + let (tracker_keychains, keymap) = args.prepare_keychains(&secp)?; + + let mut tracker = LocalTracker::::new_local( + BlockHash::from_inner([0_u8; 32]), + tracker_keychains, + ); + let mut db = LocalTrackerStore::::new_from_path(db_magic, args.db_path.as_path())?; + if let Err(e) = db.load_into_tracker(&mut tracker) { + eprintln!( + "Failed to load changesets from {}: {:?}", + args.db_path.display(), + e + ); + eprintln!("⚠ Consider running a rescan of chain data."); + } + + Ok((args, keymap, Mutex::new(tracker), Mutex::new(db))) +} + +#[allow(clippy::type_complexity)] // FIXME +pub fn init_remote( + db_magic: &'static [u8], + db_default_path: &str, + oracle: O, +) -> anyhow::Result<( + Args, + KeyMap, + Mutex>, + Mutex>, +)> +where + A: serde::Serialize + serde::de::DeserializeOwned, +{ + if std::env::var("BDK_DB_PATH").is_err() { + std::env::set_var("BDK_DB_PATH", db_default_path); + } let args = Args::::parse(); let secp = Secp256k1::default(); - let (descriptor, mut keymap) = - Descriptor::::parse_descriptor(&secp, &args.descriptor)?; - - tracker - .indexed_graph - .index - .add_keychain(Keychain::External, descriptor); - - let internal = args - .change_descriptor - .clone() - .map(|descriptor| Descriptor::::parse_descriptor(&secp, &descriptor)) - .transpose()?; - if let Some((internal_descriptor, internal_keymap)) = internal { - keymap.extend(internal_keymap); - tracker - .indexed_graph - .index - .add_keychain(Keychain::Internal, internal_descriptor); - }; + let (tracker_keychains, keymap) = args.prepare_keychains(&secp)?; 
+ let mut tracker = RemoteTracker::::new_remote( + BlockHash::from_inner([0_u8; 32]), + tracker_keychains, + oracle, + ); let mut db = - TrackerStore::::new_from_path(db_magic, args.db_path.as_path())?; + RemoteTrackerStore::::new_from_path(db_magic, args.db_path.as_path())?; if let Err(e) = db.load_into_tracker(&mut tracker) { - // [TODO] Should we introduce a `TipChainOracle` trait? - // match tracker.last_seen_height() { - // Some(tip) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. Error: {}", args.db_path.display(), tip, e), - // None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e), - // } eprintln!( "Failed to load changesets from {}: {:?}", args.db_path.display(), diff --git a/example-crates/tracker_example_cli/src/remote_chain.rs b/example-crates/tracker_example_cli/src/remote_chain.rs deleted file mode 100644 index eba4e8f71..000000000 --- a/example-crates/tracker_example_cli/src/remote_chain.rs +++ /dev/null @@ -1,57 +0,0 @@ -use bdk_chain::{Append, BlockId, ChainOracle}; - -pub type RemoteChainChangeSet = Option; - -/// Contains a remote best-chain representation alongside the last-seen block's height. -/// -/// The last-seen block height is persisted locally and can be used to determine which height to -/// start syncing from for block-by-block chain sources. 
-pub struct RemoteChain { - oracle: O, - last_seen_height: Option, -} - -impl RemoteChain { - pub fn new(oracle: O) -> Self { - Self { - oracle, - last_seen_height: None, - } - } - - pub fn inner(&self) -> &O { - &self.oracle - } - - pub fn last_seen_height(&self) -> Option { - self.last_seen_height - } - - pub fn update_last_seen_height( - &mut self, - last_seen_height: Option, - ) -> RemoteChainChangeSet { - if self.last_seen_height < last_seen_height { - self.last_seen_height = last_seen_height; - last_seen_height - } else { - None - } - } - - pub fn apply_changeset(&mut self, changeset: RemoteChainChangeSet) { - Append::append(&mut self.last_seen_height, changeset) - } -} - -impl ChainOracle for RemoteChain { - type Error = O::Error; - - fn is_block_in_chain( - &self, - block: BlockId, - static_block: BlockId, - ) -> Result, Self::Error> { - self.oracle.is_block_in_chain(block, static_block) - } -} diff --git a/example-crates/tracker_example_cli/src/tracker.rs b/example-crates/tracker_example_cli/src/tracker.rs deleted file mode 100644 index f3dca6ea1..000000000 --- a/example-crates/tracker_example_cli/src/tracker.rs +++ /dev/null @@ -1,266 +0,0 @@ -use std::{convert::Infallible, fmt::Debug}; - -use bdk_chain::{ - bitcoin::Transaction, - indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, - keychain::{DerivationAdditions, KeychainTxOutIndex}, - local_chain::{self, LocalChain}, - tx_graph::CanonicalTx, - Anchor, Append, BlockId, ChainOracle, FullTxOut, ObservedAs, PersistBackend, -}; -use bdk_file_store::{IterError, Store}; - -use crate::{RemoteChain, RemoteChainChangeSet}; - -/// Structure for persisting [`Tracker`] data. 
-pub type TrackerStore = Store, ChangeSet>; - -pub type LocalTracker = Tracker; -pub type LocalTrackerStore = TrackerStore; -pub type LocalTrackerChangeSet = ChangeSet; - -pub type RemoteTracker = Tracker>; -pub type RemoteTrackerStore = TrackerStore, RemoteChainChangeSet>; -pub type RemoteTrackerChangeSet = ChangeSet; - -/// An in-memory representation of chain data that we are tracking. -/// -/// * `A` is the [`Anchor`] implementation. -/// * `K` is our keychain identifier. -/// * `B` is the representation of the best chain history. This can either be a [`LocalChain`] or a -/// [`RemoteChain`] (which wraps a remote [`ChainOracle`] implementation). -/// -/// [`Tracker`] can be constructed with [`new_local`] or [`new_remote`] (depending on the -/// chain-history type). -/// -/// [`new_local`]: Self::new_local -/// [`new_remote`]: Self::new_remote -pub struct Tracker { - pub indexed_graph: IndexedTxGraph>, - pub chain: B, -} - -impl LocalTracker { - /// New [`Tracker`] with a [`LocalChain`] as the best-chain representation. - pub fn new_local() -> Self { - Self { - indexed_graph: Default::default(), - chain: LocalChain::default(), - } - } -} - -impl RemoteTracker { - /// New [`Tracker`] with a remote [`ChainOracle`] as the best-chain representation. 
- pub fn new_remote(oracle: O) -> Self { - Self { - indexed_graph: Default::default(), - chain: RemoteChain::new(oracle), - } - } -} - -impl Tracker -where - K: Clone + Ord + Debug, -{ - pub fn try_list_owned_txouts( - &self, - chain_tip: BlockId, - ) -> impl Iterator>), B::Error>> { - self.indexed_graph - .graph() - .try_list_chain_txouts(&self.chain, chain_tip) - .filter_map(|r| match r { - Err(err) => Some(Err(err)), - Ok(full_txo) => Some(Ok(( - self.indexed_graph - .index - .index_of_spk(&full_txo.txout.script_pubkey)?, - full_txo, - ))), - }) - } - - pub fn try_list_owned_unspents( - &self, - chain_tip: BlockId, - ) -> impl Iterator>), B::Error>> { - self.try_list_owned_txouts(chain_tip).filter(|r| { - if let Ok((_, full_txo)) = r { - if full_txo.spent_by.is_some() { - return false; - } - } - true - }) - } - - pub fn try_list_txs( - &self, - chain_tip: BlockId, - ) -> impl Iterator, B::Error>> { - self.indexed_graph - .graph() - .try_list_chain_txs(&self.chain, chain_tip) - } -} - -impl> Tracker -where - K: Clone + Ord + Debug, -{ - pub fn list_owned_txouts( - &self, - chain_tip: BlockId, - ) -> impl Iterator>)> { - self.try_list_owned_txouts(chain_tip) - .map(|r| r.expect("oracle is infallible")) - } - - pub fn list_owned_unspents( - &self, - chain_tip: BlockId, - ) -> impl Iterator>)> { - self.try_list_owned_unspents(chain_tip) - .map(|r| r.expect("oracle is infallible")) - } - - pub fn list_txs( - &self, - chain_tip: BlockId, - ) -> impl Iterator> { - self.try_list_txs(chain_tip) - .map(|r| r.expect("oracle is infallible")) - } -} - -impl PersistBackend, LocalTrackerChangeSet> - for LocalTrackerStore -where - K: Clone + Ord + Debug + serde::Serialize + serde::de::DeserializeOwned, - A: Anchor + serde::Serialize + serde::de::DeserializeOwned, -{ - type WriteError = std::io::Error; - - type LoadError = IterError; - - fn write_changes( - &mut self, - changeset: &LocalTrackerChangeSet, - ) -> Result<(), Self::WriteError> { - self.append_changeset(changeset) - 
} - - fn load_into_tracker( - &mut self, - tracker: &mut LocalTracker, - ) -> Result<(), Self::LoadError> { - let (changeset, result) = self.aggregate_changesets(); - tracker - .indexed_graph - .apply_additions(changeset.indexed_graph_additions); - tracker.chain.apply_changeset(changeset.chain_changeset); - result - } -} - -impl PersistBackend, RemoteTrackerChangeSet> - for RemoteTrackerStore -where - K: Clone + Ord + Debug + serde::Serialize + serde::de::DeserializeOwned, - A: Anchor + serde::Serialize + serde::de::DeserializeOwned, - O: ChainOracle, -{ - type WriteError = std::io::Error; - - type LoadError = IterError; - - fn write_changes( - &mut self, - changeset: &RemoteTrackerChangeSet, - ) -> Result<(), Self::WriteError> { - self.append_changeset(changeset) - } - - fn load_into_tracker( - &mut self, - tracker: &mut RemoteTracker, - ) -> Result<(), Self::LoadError> { - let (changeset, result) = self.aggregate_changesets(); - tracker - .indexed_graph - .apply_additions(changeset.indexed_graph_additions); - tracker.chain.apply_changeset(changeset.chain_changeset); - result - } -} - -/// A structure that represents changes to [`Tracker`]. 
-#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)] -#[serde(bound( - deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>, C: Ord + serde::Deserialize<'de>", - serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize, C: Ord + serde::Serialize", -))] -pub struct ChangeSet { - pub indexed_graph_additions: IndexedAdditions>, - pub chain_changeset: C, -} - -impl Default for ChangeSet { - fn default() -> Self { - Self { - indexed_graph_additions: Default::default(), - chain_changeset: Default::default(), - } - } -} - -impl Append for ChangeSet { - fn append(&mut self, other: Self) { - Append::append( - &mut self.indexed_graph_additions, - other.indexed_graph_additions, - ); - Append::append(&mut self.chain_changeset, other.chain_changeset) - } -} - -impl From>> for ChangeSet { - fn from(inner_additions: IndexedAdditions>) -> Self { - Self { - indexed_graph_additions: inner_additions, - chain_changeset: Default::default(), - } - } -} - -impl From> for ChangeSet { - fn from(index_additions: DerivationAdditions) -> Self { - Self { - indexed_graph_additions: IndexedAdditions { - graph_additions: Default::default(), - index_additions, - }, - chain_changeset: Default::default(), - } - } -} - -impl From for ChangeSet { - fn from(chain_changeset: local_chain::ChangeSet) -> Self { - Self { - indexed_graph_additions: Default::default(), - chain_changeset, - } - } -} - -impl From> for ChangeSet> { - fn from(chain_changeset: Option) -> Self { - Self { - indexed_graph_additions: Default::default(), - chain_changeset, - } - } -} diff --git a/example-crates/wallet_electrum/Cargo.toml b/example-crates/wallet_electrum/Cargo.toml index b39584a2e..da84e85f1 100644 --- a/example-crates/wallet_electrum/Cargo.toml +++ b/example-crates/wallet_electrum/Cargo.toml @@ -5,5 +5,5 @@ edition = "2021" [dependencies] bdk = { path = "../../crates/bdk" } -bdk_electrum = { path = "../../crates/electrum", features = [ "wallet" ] } -bdk_file_store = { 
path = "../../crates/file_store", features = [ "wallet" ] } +bdk_electrum = { path = "../../crates/electrum" } +bdk_file_store = { path = "../../crates/file_store" } diff --git a/example-crates/wallet_electrum/src/main.rs b/example-crates/wallet_electrum/src/main.rs index 0421088c2..feeb3040f 100644 --- a/example-crates/wallet_electrum/src/main.rs +++ b/example-crates/wallet_electrum/src/main.rs @@ -79,12 +79,10 @@ fn main() -> Result<(), Box> { .expect("must get system time") .as_secs(); - let update = electrum_update - .finalize(Some(now), new_txs) - .into_confirmation_time_update(&client)?; + let update = electrum_update.finalize_as_confirmation_time(&client, Some(now), new_txs)?; // update. - wallet.update(|tracker| update.clone().apply_to_tracker(tracker))?; + wallet.apply_update(update)?; wallet.commit()?; let balance = wallet.get_balance();