Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 1 addition & 6 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

10 changes: 0 additions & 10 deletions forester-utils/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,21 +17,18 @@ light-hash-set = { workspace = true }
light-hasher = { workspace = true }
light-concurrent-merkle-tree = { workspace = true }
light-indexed-merkle-tree = { workspace = true }
light-indexed-array = { workspace = true }
light-compressed-account = { workspace = true }
light-batched-merkle-tree = { workspace = true }
light-merkle-tree-metadata = { workspace = true }
light-sparse-merkle-tree = { workspace = true }
light-account-checks = { workspace = true }
light-sdk = { workspace = true }

# unreleased
light-client = { workspace = true }
light-prover-client = { workspace = true }
light-registry = { workspace = true, features = ["cpi"] }
account-compression = { workspace = true, features = ["cpi"] }


tokio = { workspace = true }
futures = { workspace = true }
async-stream = "0.3"
Expand All @@ -45,14 +42,7 @@ thiserror = { workspace = true }
tracing = { workspace = true }

num-traits = { workspace = true }
num-bigint = { workspace = true }

rand = { workspace = true }

reqwest = { workspace = true }
bb8 = { workspace = true }
async-trait = { workspace = true }
governor = { workspace = true }

serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
23 changes: 16 additions & 7 deletions forester-utils/src/instructions/address_batch_update.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,10 @@ use light_prover_client::{
use light_sparse_merkle_tree::SparseMerkleTree;
use tracing::{debug, error, info, warn};

use crate::{error::ForesterUtilsError, rpc_pool::SolanaRpcPool};
use crate::{error::ForesterUtilsError, rpc_pool::SolanaRpcPool, utils::wait_for_indexer};

const MAX_PHOTON_ELEMENTS_PER_CALL: usize = 500;
const MAX_PROOFS_PER_TX: usize = 3;

pub struct AddressUpdateConfig<R: Rpc> {
pub rpc_pool: Arc<SolanaRpcPool<R>>,
Expand All @@ -32,7 +33,6 @@ pub struct AddressUpdateConfig<R: Rpc> {
pub prover_api_key: Option<String>,
pub polling_interval: Duration,
pub max_wait_time: Duration,
pub ixs_per_tx: usize,
}

#[allow(clippy::too_many_arguments)]
Expand All @@ -47,7 +47,6 @@ async fn stream_instruction_data<'a, R: Rpc>(
start_index: u64,
zkp_batch_size: u16,
mut current_root: [u8; 32],
yield_batch_size: usize,
) -> impl Stream<Item = Result<Vec<InstructionDataAddressAppendInputs>, ForesterUtilsError>> + Send + 'a
{
stream! {
Expand All @@ -63,6 +62,17 @@ async fn stream_instruction_data<'a, R: Rpc>(
let elements_for_chunk = chunk_hash_chains.len() * zkp_batch_size as usize;
let processed_items_offset = chunk_start * zkp_batch_size as usize;

{
if chunk_idx > 0 {
debug!("Waiting for indexer to sync before fetching chunk {} data", chunk_idx);
}
let connection = rpc_pool.get_connection().await?;
wait_for_indexer(&*connection).await?;
if chunk_idx > 0 {
debug!("Indexer synced, proceeding with chunk {} fetch", chunk_idx);
}
}

let indexer_update_info = {
let mut connection = rpc_pool.get_connection().await?;
let indexer = connection.indexer_mut()?;
Expand Down Expand Up @@ -125,8 +135,8 @@ async fn stream_instruction_data<'a, R: Rpc>(
});
pending_count += 1;

if pending_count >= yield_batch_size {
for _ in 0..yield_batch_size.min(pending_count) {
if pending_count >= MAX_PROOFS_PER_TX {
for _ in 0..MAX_PROOFS_PER_TX.min(pending_count) {
if let Some((idx, result)) = futures_ordered.next().await {
match result {
Ok((compressed_proof, new_root)) => {
Expand Down Expand Up @@ -173,7 +183,7 @@ async fn stream_instruction_data<'a, R: Rpc>(
};
proof_buffer.push(instruction_data);

if proof_buffer.len() >= yield_batch_size {
if proof_buffer.len() >= MAX_PROOFS_PER_TX {
yield Ok(proof_buffer.clone());
proof_buffer.clear();
}
Expand Down Expand Up @@ -333,7 +343,6 @@ pub async fn get_address_update_instruction_stream<'a, R: Rpc>(
start_index,
zkp_batch_size,
current_root,
config.ixs_per_tx,
)
.await;

Expand Down
2 changes: 0 additions & 2 deletions forester-utils/src/instructions/mod.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
pub mod address_batch_update;
pub mod create_account;
pub mod state_batch_append;
pub mod state_batch_nullify;

pub use create_account::create_account_instruction;
208 changes: 0 additions & 208 deletions forester-utils/src/instructions/state_batch_append.rs

This file was deleted.

Loading
Loading