Commit 241b7fc

use indexmap to request outputs
Boog900 committed Feb 9, 2025
1 parent 4e805b3 commit 241b7fc
Showing 19 changed files with 95 additions and 101 deletions.
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

13 changes: 8 additions & 5 deletions binaries/cuprated/src/blockchain/manager/handler.rs
@@ -166,9 +166,12 @@ impl super::BlockchainManager {
/// This function will panic if any internal service returns an unexpected error that we cannot
/// recover from or if the incoming batch contains no blocks.
async fn handle_incoming_block_batch_main_chain(&mut self, batch: BlockBatch) {
let Ok((prepped_blocks, mut output_cache)) =
batch_prepare_main_chain_blocks(batch.blocks, &mut self.blockchain_context_service, self.blockchain_read_handle.clone())
.await
let Ok((prepped_blocks, mut output_cache)) = batch_prepare_main_chain_blocks(
batch.blocks,
&mut self.blockchain_context_service,
self.blockchain_read_handle.clone(),
)
.await
else {
batch.peer_handle.ban_peer(LONG_BAN);
self.stop_current_block_downloader.notify_one();
@@ -181,7 +184,7 @@ impl super::BlockchainManager {
txs,
&mut self.blockchain_context_service,
self.blockchain_read_handle.clone(),
Some(&mut output_cache)
Some(&mut output_cache),
)
.await
else {
@@ -405,7 +408,7 @@ impl super::BlockchainManager {
prepped_txs,
&mut self.blockchain_context_service,
self.blockchain_read_handle.clone(),
None
None,
)
.await?;

1 change: 1 addition & 0 deletions binaries/cuprated/src/blockchain/syncer.rs
@@ -6,6 +6,7 @@ use tokio::{
sync::{mpsc, Notify},
time::interval,
};
use tokio::sync::oneshot;
use tower::{Service, ServiceExt};
use tracing::instrument;

7 changes: 3 additions & 4 deletions binaries/cuprated/src/main.rs
@@ -23,14 +23,14 @@ use tower::{Service, ServiceExt};
use tracing::level_filters::LevelFilter;
use tracing_subscriber::{layer::SubscriberExt, reload::Handle, util::SubscriberInitExt, Registry};

use crate::{
config::Config, constants::PANIC_CRITICAL_SERVICE_ERROR, logging::CupratedTracingFilter,
};
use cuprate_consensus_context::{
BlockChainContextRequest, BlockChainContextResponse, BlockchainContextService,
};
use cuprate_helper::time::secs_to_hms;
use cuprate_types::blockchain::BlockchainWriteRequest;
use crate::{
config::Config, constants::PANIC_CRITICAL_SERVICE_ERROR, logging::CupratedTracingFilter,
};

mod blockchain;
mod commands;
@@ -77,7 +77,6 @@ fn main() {
// Initialize async tasks.

rt.block_on(async move {

// Check add the genesis block to the blockchain.
blockchain::check_add_genesis(
&mut blockchain_read_handle,
2 changes: 1 addition & 1 deletion binaries/cuprated/src/rpc/request/blockchain.rs
@@ -190,7 +190,7 @@ pub(crate) async fn outputs(
let BlockchainResponse::Outputs(outputs) = blockchain_read
.ready()
.await?
.call(BlockchainReadRequest::Outputs(outputs))
.call(BlockchainReadRequest::Outputs(todo!()))
.await?
else {
unreachable!();
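
This hunk replaces the handler's `outputs` argument with `todo!()`: the read request now expects the indexmap-based type introduced by this commit, and rewiring the RPC side is left for later. Purely as an illustration (and assuming the RPC layer still holds the old `HashMap<u64, HashSet<u64>>` shape, which this commit does not show), a bridging helper could look like:

use std::collections::{HashMap, HashSet};

use indexmap::{IndexMap, IndexSet};

// Hypothetical helper, not part of the commit: convert a HashMap-based output
// request into the insertion-ordered form the read request presumably now
// expects (inferred from the rest of the commit).
fn to_ordered_outputs(outputs: HashMap<u64, HashSet<u64>>) -> IndexMap<u64, IndexSet<u64>> {
    outputs
        .into_iter()
        .map(|(amount, indices)| (amount, indices.into_iter().collect()))
        .collect()
}

A conversion like this would inherit `HashMap`'s arbitrary iteration order, which may be why the handler was left as `todo!()` rather than converted in place.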
2 changes: 1 addition & 1 deletion binaries/cuprated/src/txpool/incoming_tx.rs
@@ -172,7 +172,7 @@ async fn handle_incoming_txs(
context.current_adjusted_timestamp_for_time_lock(),
context.current_hf,
blockchain_read_handle,
None
None,
)
.verify()
.await
1 change: 1 addition & 0 deletions consensus/Cargo.toml
@@ -23,6 +23,7 @@ monero-serai = { workspace = true, features = ["std"] }
rayon = { workspace = true }
thread_local = { workspace = true }

indexmap = { workspace = true, features = ["std"] }
hex = { workspace = true }
rand = { workspace = true }

1 change: 1 addition & 0 deletions consensus/rules/Cargo.toml
@@ -21,6 +21,7 @@ curve25519-dalek = { workspace = true, features = ["alloc", "zeroize", "precompu

rand = { workspace = true, features = ["std", "std_rng"] }

indexmap = { workspace = true, features = ["std"] }
hex = { workspace = true, features = ["std"] }
hex-literal = { workspace = true }
crypto-bigint = { workspace = true }
4 changes: 2 additions & 2 deletions consensus/rules/src/transactions/contextual_data.rs
@@ -1,9 +1,9 @@
use std::{
cmp::{max, min},
collections::{HashMap, HashSet},
};

use curve25519_dalek::EdwardsPoint;
use indexmap::{IndexMap, IndexSet};
use monero_serai::transaction::{Input, Timelock};

use crate::{transactions::TransactionError, HardFork};
@@ -33,7 +33,7 @@ pub fn get_absolute_offsets(relative_offsets: &[u64]) -> Result<Vec<u64>, Transa
///
pub fn insert_ring_member_ids(
inputs: &[Input],
output_ids: &mut HashMap<u64, HashSet<u64>>,
output_ids: &mut IndexMap<u64, IndexSet<u64>>,
) -> Result<(), TransactionError> {
if inputs.is_empty() {
return Err(TransactionError::NoInputs);
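
The signature change above is the core of the commit: `insert_ring_member_ids` now fills an `IndexMap<u64, IndexSet<u64>>`, so the collected output IDs keep their insertion order rather than the run-to-run arbitrary order of `HashMap`/`HashSet`, presumably so that the outputs are later requested in a stable, deterministic order (the "request outputs" of the commit title). A minimal sketch of that collection pattern (illustrative names, not the crate's code):

use indexmap::{IndexMap, IndexSet};

// Illustrative only: record, per amount, the absolute output offsets used as
// ring members. Iterating `output_ids` afterwards yields amounts and offsets
// in the order they were first inserted.
fn collect_output_ids(
    rings: &[(u64, Vec<u64>)], // (amount, absolute output offsets)
    output_ids: &mut IndexMap<u64, IndexSet<u64>>,
) {
    for (amount, offsets) in rings {
        output_ids
            .entry(*amount)
            .or_default()
            .extend(offsets.iter().copied());
    }
}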
3 changes: 1 addition & 2 deletions consensus/src/block.rs
@@ -37,7 +37,6 @@ mod free;

pub use alt_block::sanity_check_alt_block;
pub use batch_prepare::{batch_prepare_main_chain_blocks, BatchPrepareCache};
use cuprate_types::output_cache::OutputCache;
use free::pull_ordered_transactions;

/// A pre-prepared block with all data needed to verify it, except the block's proof of work.
@@ -285,7 +284,7 @@ where
context.current_adjusted_timestamp_for_time_lock(),
context.current_hf,
database,
batch_prep_cache.as_ref().map(|o| &**o)
batch_prep_cache.as_ref().map(|o| &**o),
)
.verify()
.await?;
32 changes: 21 additions & 11 deletions consensus/src/block/batch_prepare.rs
@@ -16,28 +16,34 @@ use cuprate_helper::asynch::rayon_spawn_async;
use cuprate_types::output_cache::OutputCache;
use cuprate_types::TransactionVerificationData;

use crate::__private::Database;
use crate::transactions::check_kis_unique;
use crate::transactions::contextual_data::get_output_cache;
use crate::{
batch_verifier::MultiThreadedBatchVerifier,
block::{free::order_transactions, PreparedBlock, PreparedBlockExPow},
transactions::start_tx_verification,
BlockChainContextRequest, BlockChainContextResponse, ExtendedConsensusError,
};
use crate::__private::Database;
use crate::transactions::check_kis_unique;
use crate::transactions::contextual_data::get_output_cache;

pub struct BatchPrepareCache {
pub(crate) output_cache: OutputCache,
pub(crate) key_images_spent_checked: bool
pub(crate) key_images_spent_checked: bool,
}

/// Batch prepares a list of blocks for verification.
#[instrument(level = "debug", name = "batch_prep_blocks", skip_all, fields(amt = blocks.len()))]
pub async fn batch_prepare_main_chain_blocks<D: Database>(
blocks: Vec<(Block, Vec<Transaction>)>,
context_svc: &mut BlockchainContextService,
mut database: D
) -> Result<(Vec<(PreparedBlock, Vec<TransactionVerificationData>)>, BatchPrepareCache), ExtendedConsensusError> {
mut database: D,
) -> Result<
(
Vec<(PreparedBlock, Vec<TransactionVerificationData>)>,
BatchPrepareCache,
),
ExtendedConsensusError,
> {
let (blocks, txs): (Vec<_>, Vec<_>) = blocks.into_iter().unzip();

tracing::debug!("Calculating block hashes.");
@@ -201,10 +207,14 @@ pub async fn batch_prepare_main_chain_blocks<D: Database>(

check_kis_unique(blocks.iter().flat_map(|(_, txs)| txs.iter()), &mut database).await?;

let output_cache = get_output_cache(blocks.iter().flat_map(|(_, txs)| txs.iter()), database).await?;
let output_cache =
get_output_cache(blocks.iter().flat_map(|(_, txs)| txs.iter()), database).await?;

Ok((blocks, BatchPrepareCache {
output_cache,
key_images_spent_checked: true
}))
Ok((
blocks,
BatchPrepareCache {
output_cache,
key_images_spent_checked: true,
},
))
}
2 changes: 1 addition & 1 deletion consensus/src/lib.rs
@@ -75,7 +75,7 @@ pub mod __private {
BlockchainReadRequest,
Response = BlockchainResponse,
Error = tower::BoxError,
Future: Send + 'static,
Future: Send + 'static,
>
{
}
33 changes: 18 additions & 15 deletions consensus/src/transactions.rs
@@ -31,6 +31,11 @@ use monero_serai::transaction::{Input, Timelock, Transaction};
use rayon::prelude::*;
use tower::ServiceExt;

use crate::{
batch_verifier::MultiThreadedBatchVerifier,
transactions::contextual_data::{batch_get_decoy_info, batch_get_ring_member_info},
Database, ExtendedConsensusError,
};
use cuprate_consensus_rules::{
transactions::{
check_decoy_info, check_transaction_contextual, check_transaction_semantic,
@@ -39,22 +44,17 @@ use cuprate_consensus_rules::{
ConsensusError, HardFork,
};
use cuprate_helper::asynch::rayon_spawn_async;
use cuprate_types::output_cache::OutputCache;
use cuprate_types::{
blockchain::{BlockchainReadRequest, BlockchainResponse},
CachedVerificationState, TransactionVerificationData, TxVersion,
};
use cuprate_types::output_cache::OutputCache;
use crate::{
batch_verifier::MultiThreadedBatchVerifier,
transactions::contextual_data::{batch_get_decoy_info, batch_get_ring_member_info},
Database, ExtendedConsensusError,
};

pub mod contextual_data;
mod free;

pub use free::new_tx_verification_data;
use crate::block::BatchPrepareCache;
pub use free::new_tx_verification_data;

/// An enum representing the type of validation that needs to be completed for this transaction.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
@@ -156,7 +156,7 @@ impl VerificationWanted {
time_for_time_lock: u64,
hf: HardFork,
database: D,
batch_prep_cache: Option<&BatchPrepareCache>
batch_prep_cache: Option<&BatchPrepareCache>,
) -> FullVerification<D> {
FullVerification {
prepped_txs: self.prepped_txs,
Expand All @@ -165,7 +165,7 @@ impl VerificationWanted {
time_for_time_lock,
hf,
database,
batch_prep_cache
batch_prep_cache,
}
}
}
@@ -219,15 +219,18 @@ pub struct FullVerification<'a, D> {
time_for_time_lock: u64,
hf: HardFork,
database: D,
batch_prep_cache: Option<&'a BatchPrepareCache>
batch_prep_cache: Option<&'a BatchPrepareCache>,
}

impl<D: Database + Clone> FullVerification<'_, D> {
/// Fully verify each transaction.
pub async fn verify(
mut self,
) -> Result<Vec<TransactionVerificationData>, ExtendedConsensusError> {
if self.batch_prep_cache.is_none_or(|c| !c.key_images_spent_checked) {
if self
.batch_prep_cache
.is_none_or(|c| !c.key_images_spent_checked)
{
check_kis_unique(self.prepped_txs.iter(), &mut self.database).await?;
}

@@ -269,7 +272,7 @@ impl<D: Database + Clone> FullVerification<'_, D> {
self.time_for_time_lock,
self.hf,
self.database,
self.batch_prep_cache.map(|c| &c.output_cache)
self.batch_prep_cache.map(|c| &c.output_cache),
)
.await
}
@@ -440,7 +443,7 @@ async fn verify_transactions_decoy_info<D: Database>(
txs: impl Iterator<Item = &TransactionVerificationData> + Clone,
hf: HardFork,
database: D,
output_cache: Option<&OutputCache>
output_cache: Option<&OutputCache>,
) -> Result<(), ExtendedConsensusError> {
// Decoy info is not validated for V1 txs.
if hf == HardFork::V1 {
@@ -467,7 +470,7 @@ async fn verify_transactions<D>(
current_time_lock_timestamp: u64,
hf: HardFork,
database: D,
output_cache: Option<&OutputCache>
output_cache: Option<&OutputCache>,
) -> Result<Vec<TransactionVerificationData>, ExtendedConsensusError>
where
D: Database,
@@ -488,7 +491,7 @@ where
.map(|(tx, _)| tx),
hf,
database,
output_cache
output_cache,
)
.await?;

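
The `is_none_or` guard added in `FullVerification::verify` means the key-image uniqueness check is skipped only when a batch-prepare cache is supplied and it records that the check already ran during batch preparation. A small self-contained model of that gate (stand-in types, not the crate's code):

// Stand-in for BatchPrepareCache, for illustration only.
struct Cache {
    key_images_spent_checked: bool,
}

fn needs_key_image_check(cache: Option<&Cache>) -> bool {
    // Re-check unless a cache is present and says the check already happened.
    cache.is_none_or(|c| !c.key_images_spent_checked)
}

fn main() {
    assert!(needs_key_image_check(None));
    assert!(needs_key_image_check(Some(&Cache { key_images_spent_checked: false })));
    assert!(!needs_key_image_check(Some(&Cache { key_images_spent_checked: true })));
}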
(Diffs for the remaining changed files were not loaded.)