From 5eb9e7c3fcc59fc46535e9a80f0c87ae249f684d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 11:26:41 -0300 Subject: [PATCH 01/85] feat: WIP removal of pallas from core --- crates/core/src/lib.rs | 320 ++++++++++++++++++++++++----------------- 1 file changed, 185 insertions(+), 135 deletions(-) diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 308d99df5..487fe4b87 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -8,14 +8,16 @@ //! be processed together). A batch is usually split into chunks for parallel //! processing. -use pallas::{ - codec::minicbor::{self, Decode, Encode}, - crypto::hash::{Hash, Hasher}, - ledger::{ - primitives::Epoch, - traverse::{MultiEraInput, MultiEraOutput, MultiEraTx, MultiEraUpdate}, - }, -}; +//use pallas::{ +// codec::minicbor::{self, Decode, Encode}, +// crypto::hash::{Hash, Hasher}, +// ledger::{ +// primitives::Epoch, +// traverse::{MultiEraInput, MultiEraOutput, MultiEraTx, MultiEraUpdate}, +// }, +//}; +use hash::Hash; +use minicbor::{Decode, Encode}; use serde::{Deserialize, Serialize}; use std::{ collections::{HashMap, HashSet}, @@ -33,6 +35,7 @@ pub mod bootstrap; pub mod builtin; pub mod config; pub mod crawl; +pub mod hash; pub mod import; pub mod indexes; pub mod mempool; @@ -51,6 +54,8 @@ pub use work_unit::{MempoolUpdate, WorkUnit}; pub type Era = u16; +pub type Epoch = u64; + /// The index of an output in a tx pub type TxoIdx = u32; @@ -68,7 +73,6 @@ pub type BlockBody = Cbor; pub type RawBlock = Arc; pub type RawBlockBatch = Vec; pub type RawUtxoMap = HashMap>; -pub type BlockEra = pallas::ledger::traverse::Era; pub type BlockHash = Hash<32>; pub type BlockHeader = Cbor; pub type TxHash = Hash<32>; @@ -84,7 +88,12 @@ pub struct UndoBlockData { } pub type OutputIdx = u64; pub type UtxoBody = (u16, Cbor); -pub type ChainTip = pallas::network::miniprotocols::chainsync::Tip; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChainTip { + pub 
point: ChainPoint, + pub block_number: u64, +} pub type LogSeq = u64; pub use archive::*; @@ -98,9 +107,14 @@ pub use wal::*; #[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize, Encode, Decode)] pub struct EraCbor( #[n(0)] pub Era, + // TODO: Rename the Cbor field later. Not necessary just yet + // We might have another way to do this. Not sure yet so let's keep it for the time being #[cbor(n(1), with = "minicbor::bytes")] pub Cbor, ); +#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)] +pub struct EraBody(pub Era, pub Vec); + impl EraCbor { pub fn era(&self) -> Era { self.0 @@ -129,52 +143,52 @@ impl From for (Era, Cbor) { } } -impl From> for EraCbor { - fn from(value: MultiEraOutput<'_>) -> Self { - EraCbor(value.era().into(), value.encode()) - } -} - -impl<'a> TryFrom<&'a EraCbor> for MultiEraOutput<'a> { - type Error = pallas::codec::minicbor::decode::Error; - - fn try_from(value: &'a EraCbor) -> Result { - let era = value.0.try_into().expect("era out of range"); - MultiEraOutput::decode(era, &value.1) - } -} - -impl<'a> TryFrom<&'a EraCbor> for MultiEraTx<'a> { - type Error = pallas::codec::minicbor::decode::Error; - - fn try_from(value: &'a EraCbor) -> Result { - let era = value.0.try_into().expect("era out of range"); - MultiEraTx::decode_for_era(era, &value.1) - } -} - -impl TryFrom for MultiEraUpdate<'_> { - type Error = pallas::codec::minicbor::decode::Error; - - fn try_from(value: EraCbor) -> Result { - let era = value.0.try_into().expect("era out of range"); - MultiEraUpdate::decode_for_era(era, &value.1) - } -} - -impl From<&MultiEraInput<'_>> for TxoRef { - fn from(value: &MultiEraInput<'_>) -> Self { - TxoRef(*value.hash(), value.index() as u32) - } -} - -impl From for Vec { - fn from(value: TxoRef) -> Self { - let mut bytes = value.0.to_vec(); - bytes.extend_from_slice(value.1.to_be_bytes().as_slice()); - bytes - } -} +//impl From> for EraCbor { +// fn from(value: MultiEraOutput<'_>) -> Self { +// EraCbor(value.era().into(), 
value.encode()) +// } +//} +// +//impl<'a> TryFrom<&'a EraCbor> for MultiEraOutput<'a> { +// type Error = pallas::codec::minicbor::decode::Error; +// +// fn try_from(value: &'a EraCbor) -> Result { +// let era = value.0.try_into().expect("era out of range"); +// MultiEraOutput::decode(era, &value.1) +// } +//} +// +//impl<'a> TryFrom<&'a EraCbor> for MultiEraTx<'a> { +// type Error = pallas::codec::minicbor::decode::Error; +// +// fn try_from(value: &'a EraCbor) -> Result { +// let era = value.0.try_into().expect("era out of range"); +// MultiEraTx::decode_for_era(era, &value.1) +// } +//} +// +//impl TryFrom for MultiEraUpdate<'_> { +// type Error = pallas::codec::minicbor::decode::Error; +// +// fn try_from(value: EraCbor) -> Result { +// let era = value.0.try_into().expect("era out of range"); +// MultiEraUpdate::decode_for_era(era, &value.1) +// } +//} +// +//impl From<&MultiEraInput<'_>> for TxoRef { +// fn from(value: &MultiEraInput<'_>) -> Self { +// TxoRef(*value.hash(), value.index() as u32) +// } +//} +// +//impl From for Vec { +// fn from(value: TxoRef) -> Self { +// let mut bytes = value.0.to_vec(); +// bytes.extend_from_slice(value.1.to_be_bytes().as_slice()); +// bytes +// } +//} #[derive(Debug, Eq, PartialEq, Hash, Clone, Serialize, Deserialize)] pub struct TxoRef(pub TxHash, pub TxoIdx); @@ -321,48 +335,48 @@ pub enum ServeError { Internal(#[from] Box), } -#[derive(Clone)] -pub struct Genesis { - pub byron: pallas::interop::hardano::configs::byron::GenesisFile, - pub shelley: pallas::interop::hardano::configs::shelley::GenesisFile, - pub alonzo: pallas::interop::hardano::configs::alonzo::GenesisFile, - pub conway: pallas::interop::hardano::configs::conway::GenesisFile, - pub shelley_hash: Hash<32>, - pub force_protocol: Option, -} - -impl Genesis { - pub fn network_magic(&self) -> u32 { - self.shelley.network_magic.unwrap_or_default() - } - - pub fn from_file_paths( - byron: impl AsRef, - shelley: impl AsRef, - alonzo: impl AsRef, - conway: impl 
AsRef, - force_protocol: Option, - ) -> Result { - let shelley_bytes = std::fs::read(shelley.as_ref())?; - let mut hasher = Hasher::<256>::new(); - hasher.input(&shelley_bytes); - let shelley_hash = hasher.finalize(); - - let byron = pallas::ledger::configs::byron::from_file(byron.as_ref())?; - let shelley = pallas::ledger::configs::shelley::from_file(shelley.as_ref())?; - let alonzo = pallas::ledger::configs::alonzo::from_file(alonzo.as_ref())?; - let conway = pallas::ledger::configs::conway::from_file(conway.as_ref())?; - - Ok(Self { - byron, - shelley, - alonzo, - conway, - force_protocol, - shelley_hash, - }) - } -} +//#[derive(Clone)] +//pub struct GenesisCardanoCardano { +// pub byron: pallas::interop::hardano::configs::byron::GenesisFile, +// pub shelley: pallas::interop::hardano::configs::shelley::GenesisFile, +// pub alonzo: pallas::interop::hardano::configs::alonzo::GenesisFile, +// pub conway: pallas::interop::hardano::configs::conway::GenesisFile, +// pub shelley_hash: Hash<32>, +// pub force_protocol: Option, +//} + +//impl GenesisCardanoCardano { +// pub fn network_magic(&self) -> u32 { +// self.shelley.network_magic.unwrap_or_default() +// } +// +// pub fn from_file_paths( +// byron: impl AsRef, +// shelley: impl AsRef, +// alonzo: impl AsRef, +// conway: impl AsRef, +// force_protocol: Option, +// ) -> Result { +// let shelley_bytes = std::fs::read(shelley.as_ref())?; +// let mut hasher = Hasher::<256>::new(); +// hasher.input(&shelley_bytes); +// let shelley_hash = hasher.finalize(); +// +// let byron = pallas::ledger::configs::byron::from_file(byron.as_ref())?; +// let shelley = pallas::ledger::configs::shelley::from_file(shelley.as_ref())?; +// let alonzo = pallas::ledger::configs::alonzo::from_file(alonzo.as_ref())?; +// let conway = pallas::ledger::configs::conway::from_file(conway.as_ref())?; +// +// Ok(Self { +// byron, +// shelley, +// alonzo, +// conway, +// force_protocol, +// shelley_hash, +// }) +// } +//} pub trait Block: Sized + Send + 
Sync { fn depends_on(&self, loaded: &mut RawUtxoMap) -> Vec; @@ -380,66 +394,87 @@ pub trait Block: Sized + Send + Sync { pub type Phase2Log = Vec; #[derive(Debug, Error)] -pub enum ChainError { +pub enum ChainError { + // keep #[error("can't receive block until previous work is completed")] CantReceiveBlock(RawBlock), + // keep #[error(transparent)] BrokenInvariant(#[from] BrokenInvariant), - #[error("decoding error")] - DecodingError(#[from] pallas::ledger::traverse::Error), - - #[error("cbor error")] - CborDecodingError(#[from] pallas::codec::minicbor::decode::Error), + // ChainSpecific + //#[error("decoding error")] + //DecodingError(#[from] pallas::ledger::traverse::Error), + //// ChainSpecifci + //#[error("cbor error")] + //CborDecodingError(#[from] pallas::codec::minicbor::decode::Error), #[error("invalid namespace: {0}")] InvalidNamespace(Namespace), - #[error("address decoding error")] - AddressDecoding(#[from] pallas::ledger::addresses::Error), + // Chain specific + //#[error("address decoding error")] + //AddressDecoding(#[from] pallas::ledger::addresses::Error), + // chain specific + // TODO: check StateError #[error(transparent)] StateError(#[from] StateError), + // keep #[error(transparent)] IndexError(#[from] IndexError), + // keep #[error(transparent)] ArchiveError(#[from] ArchiveError), + // keep ? #[error("genesis field missing: {0}")] GenesisFieldMissing(String), + // keep ? #[error("protocol params not found: {0}")] PParamsNotFound(String), + // keep #[error("no active epoch")] NoActiveEpoch, + // keep -> maybe rename? #[error("era not found")] EraNotFound, + // keep -> maybe rename? #[error("epoch value version not found for epoch {0}")] EpochValueVersionNotFound(Epoch), - #[error("missing rewards")] - MissingRewards, + // keep? idk. too cardano + //#[error("missing rewards")] + //MissingRewards, - #[error("invalid pool params")] - InvalidPoolParams, + // keep? 
too cardano + //#[error("invalid pool params")] + //InvalidPoolParams, - #[error("invalid proposal params")] - InvalidProposalParams, + // keep? too cardano + //#[error("invalid proposal params")] + //InvalidProposalParams, + #[error(transparent)] + ChainSpecific(E), + // #[error("phase-1 script rejected the transaction: {0}")] + // Phase1ValidationRejected(#[from] pallas::ledger::validate::utils::ValidationError), - #[error("phase-1 script rejected the transaction: {0}")] - Phase1ValidationRejected(#[from] pallas::ledger::validate::utils::ValidationError), + // #[error("couldn't evaluate phase-2 script: {0}")] + // Phase2EvaluationError(String), - #[error("couldn't evaluate phase-2 script: {0}")] - Phase2EvaluationError(String), + // #[error("phase-2 script rejected the transaction")] + // Phase2ValidationRejected(Phase2Log), +} - #[error("phase-2 script rejected the transaction")] - Phase2ValidationRejected(Phase2Log), +pub trait Genesis: Clone + Send + Sync + 'static { + fn chain_id(&self) -> u32; } // Note: The WorkUnit trait is now defined in work_unit.rs @@ -460,6 +495,8 @@ pub trait ChainLogic: Sized + Send + Sync { type Entity: Entity; type Utxo: Sized + Send + Sync; type Delta: EntityDelta; + type Genesis: Genesis; + type ChainSpecificError: std::error::Error + Send + Sync; /// The concrete work unit type produced by this chain logic. type WorkUnit>: WorkUnit; @@ -468,8 +505,8 @@ pub trait ChainLogic: Sized + Send + Sync { fn initialize( config: Self::Config, state: &D::State, - genesis: &Genesis, - ) -> Result; + genesis: Self::Genesis, + ) -> Result>; /// Check if the chain logic can receive a new block. /// @@ -481,7 +518,10 @@ pub trait ChainLogic: Sized + Send + Sync { /// /// The block is queued for processing. Call `pop_work()` to get /// work units that should be executed. - fn receive_block(&mut self, raw: RawBlock) -> Result; + fn receive_block( + &mut self, + raw: RawBlock, + ) -> Result>; /// Pop the next work unit to execute. 
/// @@ -502,10 +542,13 @@ pub trait ChainLogic: Sized + Send + Sync { block: &Cbor, inputs: &HashMap>, point: ChainPoint, - ) -> Result; + ) -> Result>; // TODO: remove from the interface - this is Cardano-specific - fn decode_utxo(&self, utxo: Arc) -> Result; + fn decode_utxo( + &self, + utxo: Arc, + ) -> Result>; // TODO: remove from the interface - this is Cardano-specific fn mutable_slots(domain: &impl Domain) -> BlockSlot; @@ -521,17 +564,17 @@ pub trait ChainLogic: Sized + Send + Sync { cbor: &[u8], utxos: &MempoolAwareUtxoStore, tip: Option, - genesis: &Genesis, - ) -> Result; + genesis: &Self::Genesis, + ) -> Result>; } #[derive(Debug, Error)] -pub enum DomainError { +pub enum DomainError { #[error("wal error: {0}")] WalError(#[from] WalError), #[error("chain error: {0}")] - ChainError(#[from] ChainError), + ChainError(#[from] ChainError), #[error("state error: {0}")] StateError(#[from] StateError), @@ -574,11 +617,15 @@ pub trait TipSubscription: Send + Sync + 'static { pub trait Domain: Send + Sync + Clone + 'static { type Entity: Entity; type EntityDelta: EntityDelta + std::fmt::Debug; + type Genesis: Genesis; + type ChainSpecificError: std::error::Error + Send + Sync; type Chain: ChainLogic< Delta = Self::EntityDelta, Entity = Self::Entity, WorkUnit = Self::WorkUnit, + Genesis = Self::Genesis, + ChainSpecificError = Self::ChainSpecificError, >; /// The concrete work unit type for this domain. 
@@ -594,7 +641,7 @@ pub trait Domain: Send + Sync + Clone + 'static { fn storage_config(&self) -> &config::StorageConfig; fn sync_config(&self) -> &config::SyncConfig; - fn genesis(&self) -> Arc; + fn genesis(&self) -> Arc; fn read_chain(&self) -> std::sync::RwLockReadGuard<'_, Self::Chain>; fn write_chain(&self) -> std::sync::RwLockWriteGuard<'_, Self::Chain>; @@ -605,12 +652,15 @@ pub trait Domain: Send + Sync + Clone + 'static { fn indexes(&self) -> &Self::Indexes; fn mempool(&self) -> &Self::Mempool; - fn watch_tip(&self, from: Option) -> Result; + fn watch_tip( + &self, + from: Option, + ) -> Result>; fn notify_tip(&self, tip: TipEvent); const MAX_PRUNE_SLOTS_PER_HOUSEKEEPING: u64 = 10_000; - fn housekeeping(&self) -> Result { + fn housekeeping(&self) -> Result> { let max_ledger_slots = self .storage_config() .state @@ -659,11 +709,11 @@ pub trait Driver: Send + Sync + 'static { mod tests { use super::*; - pub fn slot_to_hash(slot: u64) -> BlockHash { - let mut hasher = pallas::crypto::hash::Hasher::<256>::new(); - hasher.input(&(slot as i32).to_le_bytes()); - hasher.finalize() - } + //pub fn slot_to_hash(slot: u64) -> BlockHash { + // let mut hasher = pallas::crypto::hash::Hasher::<256>::new(); + // hasher.input(&(slot as i32).to_le_bytes()); + // hasher.finalize() + //} #[test] fn chainpoint_partial_eq() { From 56036ed3e2e43980ff4ec59692afbdcb98c72cce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 11:43:26 -0300 Subject: [PATCH 02/85] feat: create hash struct --- crates/core/src/hash.rs | 59 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 crates/core/src/hash.rs diff --git a/crates/core/src/hash.rs b/crates/core/src/hash.rs new file mode 100644 index 000000000..db4d96d89 --- /dev/null +++ b/crates/core/src/hash.rs @@ -0,0 +1,59 @@ +use serde::{Deserialize, Serialize}; +use std::{fmt, str::FromStr}; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] 
+pub struct Hash([u8; N]); + +impl Hash { + pub fn new(bytes: [u8; N]) -> Hash { + Hash(bytes) + } + + pub fn as_slice(&self) -> &[u8] { + &self.0 + } +} + +impl fmt::Display for Hash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + +impl FromStr for Hash { + type Err = hex::FromHexError; + fn from_str(s: &str) -> Result { + let bytes = hex::decode(s)?; + if bytes.len() != N { + return Err(hex::FromHexError::InvalidStringLength); + } + let mut arr = [0u8; N]; + + arr.copy_from_slice(&bytes); + Ok(Self(arr)) + } +} + +impl Serialize for Hash { + fn serialize(&self, s: S) -> Result + where + S: serde::Serializer, + { + s.serialize_str(&hex::encode(self.0)) + } +} + +impl<'de, const N: usize> Deserialize<'de> for Hash { + fn deserialize(d: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(d)?; + s.parse().map_err(serde::de::Error::custom) + } +} + +// TODO: add more methods here. Probably will need a bunch so hash is its own file +// +// +// From a3a47145d3f6cf8fb92a6d8fb6a9ab301fee87fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 11:44:02 -0300 Subject: [PATCH 03/85] fix: config & point compiling --- crates/core/src/config.rs | 3 +-- crates/core/src/point.rs | 50 +++++++++++++++++++-------------------- 2 files changed, 26 insertions(+), 27 deletions(-) diff --git a/crates/core/src/config.rs b/crates/core/src/config.rs index c31c836c7..b1039c514 100644 --- a/crates/core/src/config.rs +++ b/crates/core/src/config.rs @@ -1,10 +1,9 @@ use std::{fmt::Display, net::SocketAddr, path::PathBuf}; -use pallas::ledger::primitives::Epoch; use serde::{Deserialize, Serialize}; use serde_with::{serde_as, DisplayFromStr}; -use crate::{Cbor, Era, TxoRef}; +use crate::{Cbor, Epoch, Era, TxoRef}; #[derive(Serialize, Deserialize)] #[serde(untagged)] diff --git a/crates/core/src/point.rs b/crates/core/src/point.rs index f6fa8972e..d6231bf2e 100644 
--- a/crates/core/src/point.rs +++ b/crates/core/src/point.rs @@ -1,11 +1,11 @@ use std::{fmt::Display, str::FromStr}; use hex; -use pallas::{crypto::hash::Hash, network::miniprotocols::Point as PallasPoint}; +//use pallas::{crypto::hash::Hash, network::miniprotocols::Point as PallasPoint}; use regex::Regex; use serde::{Deserialize, Serialize}; -use crate::{Block, BlockHash, BlockSlot}; +use crate::{hash::Hash, Block, BlockHash, BlockSlot}; #[derive(Debug, Clone, Serialize, Deserialize, Eq)] pub enum ChainPoint { @@ -81,26 +81,26 @@ impl PartialOrd for ChainPoint { } } -impl From for ChainPoint { - fn from(value: PallasPoint) -> Self { - match value { - PallasPoint::Origin => ChainPoint::Origin, - PallasPoint::Specific(s, h) => ChainPoint::Specific(s, h.as_slice().into()), - } - } -} - -impl TryFrom for PallasPoint { - type Error = (); - - fn try_from(value: ChainPoint) -> Result { - match value { - ChainPoint::Origin => Ok(PallasPoint::Origin), - ChainPoint::Specific(s, h) => Ok(PallasPoint::Specific(s, h.to_vec())), - ChainPoint::Slot(_) => Err(()), - } - } -} +//impl From for ChainPoint { +// fn from(value: PallasPoint) -> Self { +// match value { +// PallasPoint::Origin => ChainPoint::Origin, +// PallasPoint::Specific(s, h) => ChainPoint::Specific(s, h.as_slice().into()), +// } +// } +//} + +//impl TryFrom for PallasPoint { +// type Error = (); +// +// fn try_from(value: ChainPoint) -> Result { +// match value { +// ChainPoint::Origin => Ok(PallasPoint::Origin), +// ChainPoint::Specific(s, h) => Ok(PallasPoint::Specific(s, h.to_vec())), +// ChainPoint::Slot(_) => Err(()), +// } +// } +//} impl From<&T> for ChainPoint where @@ -118,8 +118,8 @@ impl ChainPoint { let slot = self.slot(); let hash = match self.hash() { - Some(hash) => *hash, - None => [0u8; 32], + Some(hash) => hash, + None => Hash::new([0u8; 32]), }; let mut out = [0u8; 40]; @@ -138,7 +138,7 @@ impl ChainPoint { let slot_half: [u8; 8] = value[0..8].try_into().unwrap(); let hash_half: [u8; 32] = 
value[8..40].try_into().unwrap(); let slot = u64::from_be_bytes(slot_half); - let hash = Hash::new(hash_half); + let hash = Hash::<32>::new(hash_half); ChainPoint::Specific(slot, hash) } } From e5fa7cf73154ef225fe2f0c9a113ca5a9915d32e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 13:23:54 -0300 Subject: [PATCH 04/85] wip: bytesvec in eracbor. restricted archive store error --- Cargo.lock | 2 +- crates/core/src/lib.rs | 37 +++-- remove-pallas-from-core.md | 201 ++++++++++++++++++++++++ src/adapters/mod.rs | 4 +- src/bin/dolos/common.rs | 6 +- src/bin/dolos/init.rs | 4 +- src/serve/o7s_unix/utils/era_history.rs | 2 +- 7 files changed, 235 insertions(+), 21 deletions(-) create mode 100644 remove-pallas-from-core.md diff --git a/Cargo.lock b/Cargo.lock index f8639eb33..af23ef027 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1373,8 +1373,8 @@ dependencies = [ "futures-util", "hex", "itertools 0.14.0", + "minicbor 0.26.4", "opentelemetry", - "pallas", "proptest", "rayon", "regex", diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 487fe4b87..0f8f74321 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -104,16 +104,29 @@ pub use point::*; pub use state::*; pub use wal::*; -#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize, Encode, Decode)] -pub struct EraCbor( - #[n(0)] pub Era, - // TODO: Rename the Cbor field later. Not necessary just yet - // We might have another way to do this. Not sure yet so let's keep it for the time being - #[cbor(n(1), with = "minicbor::bytes")] pub Cbor, -); +// TODO: ask santiago. 
Doubtful +mod cbor_bytes { + use minicbor::{Decoder, Encoder}; + + pub fn encode( + v: &Vec, + e: &mut Encoder, + _: &mut C, + ) -> Result<(), minicbor::encode::Error> { + e.bytes(v)?; + Ok(()) + } + + pub fn decode<'b, C>( + d: &mut Decoder<'b>, + _: &mut C, + ) -> Result, minicbor::decode::Error> { + d.bytes().map(|b| b.to_vec()) + } +} -#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)] -pub struct EraBody(pub Era, pub Vec); +#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize, Encode, Decode)] +pub struct EraCbor(#[n(0)] pub Era, #[cbor(n(1), with = "cbor_bytes")] pub Cbor); impl EraCbor { pub fn era(&self) -> Era { @@ -428,7 +441,7 @@ pub enum ChainError { // keep #[error(transparent)] - ArchiveError(#[from] ArchiveError), + ArchiveError(#[from] ArchiveError), // keep ? #[error("genesis field missing: {0}")] @@ -580,7 +593,7 @@ pub enum DomainError { StateError(#[from] StateError), #[error("archive error: {0}")] - ArchiveError(#[from] ArchiveError), + ArchiveError(#[from] ArchiveError), #[error("index error: {0}")] IndexError(#[from] IndexError), @@ -634,7 +647,7 @@ pub trait Domain: Send + Sync + Clone + 'static { type Wal: WalStore; type State: StateStore; - type Archive: ArchiveStore; + type Archive: ArchiveStore; type Indexes: IndexStore; type Mempool: MempoolStore; type TipSubscription: TipSubscription; diff --git a/remove-pallas-from-core.md b/remove-pallas-from-core.md new file mode 100644 index 000000000..739ac7252 --- /dev/null +++ b/remove-pallas-from-core.md @@ -0,0 +1,201 @@ +# Removing Pallas from `dolos-core` — Decision Record + +## Why this change + +`dolos-core` is the foundation of the entire dolos stack. It defines the abstract traits (`Domain`, `ChainLogic`, `WalStore`, `StateStore`, etc.) that every crate in the workspace depends on. For it to work with a second blockchain (e.g. Midnight, which uses Substrate/SCALE), it must make zero assumptions about which chain it is running. 
+ +Today it depends on the full `pallas` umbrella crate, which pulls in Cardano protocol types, genesis file schemas, and validation error types into what should be a neutral layer. The goal is to remove `pallas` entirely from `dolos-core`'s dependency tree. + +**Constraint**: no performance compromise. Every decision below was evaluated against this constraint. + +--- + +## Decision 1 — `EraCbor` renamed to `EraBody`, `minicbor` stays as a direct dep + +**What**: `EraCbor(Era, Cbor)` is renamed to `EraBody(Era, Vec)`. The name `EraCbor` implied the inner bytes are CBOR — they are not, they are opaque (Cardano stores CBOR there today; Midnight would store SCALE bytes). The type alias `Cbor = Vec` is also removed. + +**Why minicbor stays**: `EraBody` retains its `#[derive(minicbor::Encode, minicbor::Decode)]`. This is required because `redb3/mempool.rs` embeds `EraBody` inside `InflightRecord` and `FinalizedEntry`, which are stored in redb using minicbor serialization. Removing the derives would require either reimplementing the encoding in redb3 (coupling) or restructuring the storage format (a breaking change to the on-disk format). The tradeoff is accepted: `minicbor` replaces `pallas` as the direct dep for this one purpose. + +**What minicbor does NOT do**: it does not touch the inner bytes. The `#[cbor(with = "minicbor::bytes")]` annotation on the payload field means minicbor treats those bytes as a raw blob — the SCALE or CBOR content inside is completely untouched. + +**Performance**: zero impact. The encoding of `(u16, Vec)` via minicbor is trivial and was already happening. + +--- + +## Decision 2 — `Hash` becomes a custom newtype in core + +**What**: A new `crates/core/src/hash.rs` defines `Hash([u8; N])` (~60 lines) with Display/hex, FromStr, serde, Copy, Eq, Hash, Deref impls. `BlockHash = Hash<32>` and `TxHash = Hash<32>` continue as type aliases. `pallas-crypto` is removed from core. 
+ +**Why not `pallas-crypto` directly**: depending on a pallas sub-crate still ties core to the pallas release cycle and keeps pallas in `cargo tree -p dolos-core`. A custom newtype is ~60 lines of boilerplate with zero ongoing maintenance burden. + +**Why not raw `[u8; 32]`**: losing the newtype would lose hex Display/FromStr, making `ChainPoint` display and CLI parsing significantly more verbose. The type-level distinction between a hash and an arbitrary byte array is worth keeping. + +**Boundary conversions**: `dolos-cardano` adds `From> for dolos_core::Hash` and its inverse. These are zero-cost (same memory layout). + +**Performance**: zero impact. `Hash` is `repr(transparent)` over `[u8; N]`. + +--- + +## Decision 3 — `Genesis` becomes a `GenesisConfig` trait + +**What**: The `Genesis` struct (holding `byron`, `shelley`, `alonzo`, `conway` genesis files) moves to `dolos-cardano` as `CardanoGenesis`. Core defines a `GenesisConfig` trait: + +```rust +pub trait GenesisConfig: Clone + Send + Sync + 'static { + fn chain_id(&self) -> u32; +} +``` + +`ChainLogic` gets `type GenesisConfig: GenesisConfig` as an associated type. `Domain::genesis()` returns `Arc<::GenesisConfig>`. + +**Why `chain_id` and not `network_magic`**: `network_magic` is Cardano terminology. Every blockchain has some form of chain identifier — Midnight calls it something else. `chain_id()` returning `u32` is the minimal generic interface. Cardano's impl returns `shelley.network_magic`. + +**Compromise**: everything else about genesis (epoch lengths, slot durations, protocol parameters) is Cardano-specific and stays in `CardanoGenesis`. Core knows nothing about genesis structure — only that a chain has an ID. + +**Performance**: zero impact. The genesis config is read at startup and cached in an `Arc`. 
+ +--- + +## Decision 4 — `ChainTip` defined natively in core + +**What**: `ChainTip = pallas::network::miniprotocols::chainsync::Tip` is replaced with: + +```rust +pub struct ChainTip { + pub point: ChainPoint, + pub block_number: u64, +} +``` + +`dolos-cardano` adds `From for ChainTip`. + +**Why keep it in core**: every UTxO-based blockchain has a notion of "current tip" — a position in the chain plus a block height. This is not Cardano-specific. Removing it from core would force every chain integration to re-define it and lose the ability to express tip-awareness generically in sync machinery. + +**Why not remove it**: sync progress tracking, tip subscriptions (`TipEvent`), and API responses all need a tip with both a point and a block number. Working with `ChainPoint` alone would lose the block number, requiring each chain layer to re-add it redundantly. + +**Performance**: zero impact. It's a plain struct. + +--- + +## Decision 5 — `BlockEra` dropped, `Era = u16` is the single representation + +**What**: `pub type BlockEra = pallas::ledger::traverse::Era` is removed. `pub type Era = u16` (already present) is the sole era representation in core. + +**Why**: both Cardano (eras: Byron=0, Shelley=1, ..., Conway=6) and Midnight (ledger versions: integer) serialize their version identifier as an unsigned integer. The `u16` wire type is the correct shared abstraction. Named variants (`Byron`, `Shelley`) are Cardano-specific and belong in `dolos-cardano`. + +**Where named variants live**: `dolos-cardano` converts `pallas::ledger::traverse::Era ↔ u16` at its boundary. Pallas already implements this. + +**Compromise**: code that currently writes `if era == BlockEra::Conway` must become `if era == CONWAY_ERA` (a constant in dolos-cardano). Slightly less ergonomic but correctly scoped. + +**Performance**: zero impact. `Era = u16` is what was already stored. 
+
+---
+
+## Decision 6 — `EvalReport` becomes `Option<Vec<u8>>` on `MempoolTx`
+
+**What**: `pub report: Option<EvalReport>` (a pallas phase-2 evaluation result) becomes `pub report: Option<Vec<u8>>`. `dolos-cardano` serializes/deserializes the pallas `EvalReport` to bytes at its boundary.
+
+**Why opaque bytes and not an associated type**: making `MempoolTx` generic over `D::EvalReport` would propagate a new generic parameter through every struct and trait that touches `MempoolTx` — a very wide blast radius. The eval report is only ever inspected by chain-specific code (the minibf/trp API layers), not by core machinery. Opaque bytes are the minimal interface.
+
+**Compromise**: callers that want to inspect the eval report must deserialize from `Vec<u8>`. This is a one-line operation in dolos-cardano.
+
+**Performance**: one extra serialize/deserialize round-trip per mempool tx admission. Mempool admission is rare compared to block processing — negligible.
+
+---
+
+## Decision 7 — Chain-specific error types via `ChainLogic::ChainSpecificError`
+
+**What**: `ChainError` and `MempoolError` previously had variants typed directly to pallas errors (`pallas::ledger::traverse::Error`, `pallas::ledger::addresses::Error`, etc.). These are replaced with an associated type on `ChainLogic`:
+
+```rust
+pub trait ChainLogic {
+    type ChainSpecificError: std::error::Error + Send + Sync + 'static;
+}
+
+pub enum ChainError<E> {
+    // chain-agnostic variants unchanged ...
+    ChainSpecific(E), // typed, not hidden
+}
+```
+
+`dolos-cardano` defines `CardanoError` wrapping all pallas error types and sets `type ChainSpecificError = CardanoError`.
+
+**Why not string errors**: string errors destroy type information, make programmatic error handling impossible, and go against Rust idioms. A typed associated error preserves the full pallas error at the dolos-cardano boundary — callers that know they're in a Cardano context can match on `ChainError::ChainSpecific(CardanoError::Traverse(...))` and get the original pallas error.
+
+**Why not `Box<dyn std::error::Error>`**: same reason — loses type information. The associated type approach is zero-cost (monomorphized) and keeps errors inspectable.
+
+**Propagation**: `DomainError<E>` resolves `E` as `<D::Chain as ChainLogic>::ChainSpecificError`. Since `D::Chain` is already an associated type on `Domain`, no extra generics appear at call sites.
+
+**Performance**: zero impact. Associated types are resolved at compile time.
+
+---
+
+## Decision 8 — Block decoding moved behind `ChainLogic::find_tx_in_block`
+
+**What**: `async_query.rs` had two methods (`block_by_tx_hash`, `tx_cbor`) that decoded raw block bytes inline using `MultiEraBlock`. These become a new `ChainLogic` static method:
+
+```rust
+fn find_tx_in_block(block: &[u8], tx_hash: &[u8]) -> Option<(EraBody, TxOrder)>;
+```
+
+Core calls this; `dolos-cardano` implements it with `MultiEraBlock`. `AsyncQueryFacade` stays in core.
+
+**Why keep `AsyncQueryFacade` in core**: the async semaphore-limited dispatch pattern and the other query methods (`block_by_slot`, `block_by_number`, etc.) are entirely chain-agnostic. Moving the facade to dolos-cardano would force every API layer to re-implement the concurrency limiting.
+
+**Performance**: zero impact. One additional virtual dispatch per query — negligible for a query path that's already doing database I/O.
+
+---
+
+## Decision 9 — Mempool UTxO scanning moved behind `ChainLogic` methods
+
+**What**: `scan_mempool_utxos` and `exclude_inflight_stxis` in `mempool.rs` used `MultiEraTx`/`MultiEraOutput` to iterate mempool transaction inputs/outputs. These become two `ChainLogic` static methods:
+
+```rust
+fn tx_produced_utxos(era_body: &EraBody) -> Vec<(TxoRef, EraBody)>;
+fn tx_consumed_refs(era_body: &EraBody) -> Vec<TxoRef>;
+```
+
+The predicate in `get_utxos_by_tag` changes from `Fn(&MultiEraOutput<'_>) -> bool` to `Fn(&EraBody) -> bool`.
+
+**Why keep the scan logic in core**: mempool-aware UTxO queries are a generic concept for any UTxO-based chain. Midnight (or any other chain) would need the same "scan pending transactions and merge with confirmed state" logic. The only chain-specific part is how you decode a raw transaction into its inputs and outputs — which is exactly what the two `ChainLogic` methods encapsulate.
+
+**Predicate change consequence**: callers in `dolos-minibf` and `dolos-trp` that currently receive `&MultiEraOutput` in their predicate will instead receive `&EraBody` and must decode it themselves to `MultiEraOutput`. This is a one-line change per callsite in those crates.
+
+**Performance**: zero impact. The same decoding work happens — it just happens inside the predicate rather than before it.
+
+---
+
+## Summary of what stays in `dolos-core`
+
+| Thing | Before | After |
+|---|---|---|
+| `Hash<N>` | from pallas-crypto | custom newtype (~60 lines) |
+| `EraBody` (was `EraCbor`) | pallas re-export | stays in core, minicbor direct dep |
+| `Era = u16` | unchanged | unchanged |
+| `ChainTip` | pallas alias | native struct in core |
+| `GenesisConfig` | pallas `Genesis` struct | generic trait |
+| `ChainError` | pallas error variants | parameterized `ChainError<E>` |
+| `minicbor` dep | via pallas | direct dep |
+| `pallas` dep | present | **removed** |
+
+## Summary of what moves to `dolos-cardano`
+
+- `CardanoGenesis` (was `Genesis`) with all four genesis file fields
+- `CardanoError` (wraps all pallas error types)
+- `From<PallasPoint> for ChainPoint`, `TryFrom<ChainPoint> for PallasPoint`
+- `From<PallasTip> for ChainTip`
+- `From<pallas::crypto::hash::Hash<N>> for dolos_core::Hash<N>`
+- `From<MultiEraOutput> for EraBody`, `TryFrom<&EraBody> for MultiEraOutput`, etc.
+- `From<&MultiEraInput> for TxoRef` +- `ChainLogic::find_tx_in_block` implementation +- `ChainLogic::tx_produced_utxos` implementation +- `ChainLogic::tx_consumed_refs` implementation + +--- + +## Verification + +1. `cargo tree -p dolos-core | grep pallas` — must return nothing +2. `cargo check -p dolos-core` — must compile clean with zero pallas in scope +3. `cargo clippy --all-targets --all-features -- -D warnings` — zero warnings +4. `cargo test --workspace --all-targets` — all tests pass diff --git a/src/adapters/mod.rs b/src/adapters/mod.rs index 1dd0431a1..830d75322 100644 --- a/src/adapters/mod.rs +++ b/src/adapters/mod.rs @@ -35,7 +35,7 @@ impl dolos_core::TipSubscription for TipSubscription { pub struct DomainAdapter { pub storage_config: Arc, pub sync_config: Arc, - pub genesis: Arc, + pub genesis: Arc, pub wal: WalAdapter, pub chain: Arc>, pub state: StateStoreBackend, @@ -110,7 +110,7 @@ impl Domain for DomainAdapter { type Mempool = MempoolBackend; type TipSubscription = TipSubscription; - fn genesis(&self) -> Arc { + fn genesis(&self) -> Arc { self.genesis.clone() } diff --git a/src/bin/dolos/common.rs b/src/bin/dolos/common.rs index c7293a8c2..c193f72c7 100644 --- a/src/bin/dolos/common.rs +++ b/src/bin/dolos/common.rs @@ -10,7 +10,7 @@ use tracing::{debug, info}; use tracing_subscriber::{filter::Targets, prelude::*}; use dolos::adapters::DomainAdapter; -use dolos::core::Genesis; +use dolos::core::GenesisCardanoCardano; use dolos::prelude::*; use dolos::storage; @@ -204,8 +204,8 @@ pub fn setup_tracing(config: &LoggingConfig, telemetry: &TelemetryConfig) -> mie Ok(()) } -pub fn open_genesis_files(config: &GenesisConfig) -> miette::Result { - Genesis::from_file_paths( +pub fn open_genesis_files(config: &GenesisConfig) -> miette::Result { + GenesisCardanoCardano::from_file_paths( &config.byron_path, &config.shelley_path, &config.alonzo_path, diff --git a/src/bin/dolos/init.rs b/src/bin/dolos/init.rs index 44e69522a..3d84f32fe 100644 --- 
a/src/bin/dolos/init.rs +++ b/src/bin/dolos/init.rs @@ -6,7 +6,7 @@ use dolos_core::{ MithrilConfig, PeerConfig, RelayConfig, RootConfig, StorageConfig, StorageVersion, TrpConfig, UpstreamConfig, }, - Genesis, + GenesisCardanoCardano, }; use inquire::{Confirm, MultiSelect, Select, Text}; use miette::{miette, Context as _, IntoDiagnostic}; @@ -84,7 +84,7 @@ impl KnownNetwork { ] } - pub fn load_included_genesis(&self) -> Genesis { + pub fn load_included_genesis(&self) -> GenesisCardanoCardano { match self { KnownNetwork::CardanoMainnet => include::mainnet::load(), KnownNetwork::CardanoPreProd => include::preprod::load(), diff --git a/src/serve/o7s_unix/utils/era_history.rs b/src/serve/o7s_unix/utils/era_history.rs index 70f632232..e441f7ea5 100644 --- a/src/serve/o7s_unix/utils/era_history.rs +++ b/src/serve/o7s_unix/utils/era_history.rs @@ -83,7 +83,7 @@ impl<'a, C> minicbor::Encode for EraHistoryResponse<'a> { pub fn build_era_history_response( eras: &[DolosEraSummary], - genesis: &Genesis, + genesis: &GenesisCardanoCardano, ) -> Result { if eras.is_empty() { return Err(Error::server("era summary is empty")); From 1620c3fe66667ff67b327f10f060c96f1806e148 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 13:24:02 -0300 Subject: [PATCH 05/85] wip: state compiling --- crates/core/src/state.rs | 41 +++++++++++++++++++++------------------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/crates/core/src/state.rs b/crates/core/src/state.rs index d9ac286ca..605845863 100644 --- a/crates/core/src/state.rs +++ b/crates/core/src/state.rs @@ -21,11 +21,11 @@ impl From<&[u8]> for EntityKey { } } -impl From> for EntityKey { - fn from(value: pallas::crypto::hash::Hash) -> Self { - EntityKey::from(value.as_slice()) - } -} +//impl From> for EntityKey { +// fn from(value: pallas::crypto::hash::Hash) -> Self { +// EntityKey::from(value.as_slice()) +// } +//} impl From<&[u8; N]> for EntityKey { fn from(value: &[u8; N]) -> Self { 
@@ -60,14 +60,14 @@ impl EntityKey { } } -impl From for pallas::crypto::hash::Hash { - fn from(value: EntityKey) -> Self { - let mut array = [0u8; HASH_SIZE]; - let source = &value.0[..HASH_SIZE]; - array.copy_from_slice(source); - pallas::crypto::hash::Hash::::new(array) - } -} +//impl From for pallas::crypto::hash::Hash { +// fn from(value: EntityKey) -> Self { +// let mut array = [0u8; HASH_SIZE]; +// let source = &value.0[..HASH_SIZE]; +// array.copy_from_slice(source); +// pallas::crypto::hash::Hash::::new(array) +// } +//} /// A namespaced key /// @@ -125,9 +125,13 @@ impl std::ops::Deref for StateSchema { } pub trait Entity: Sized + Send { + type ChainSpecificError: std::error::Error + Send + Sync + 'static; const KEY_SIZE: usize = 32; - fn decode_entity(ns: Namespace, value: &EntityValue) -> Result; + fn decode_entity( + ns: Namespace, + value: &EntityValue, + ) -> Result>; fn encode_entity(value: &Self) -> (Namespace, EntityValue); } @@ -190,11 +194,10 @@ pub enum StateError { InvalidNamespace(Namespace), #[error(transparent)] - DecodingError(#[from] pallas::codec::minicbor::decode::Error), - - #[error(transparent)] - TraverseError(#[from] pallas::ledger::traverse::Error), - + DecodingError(#[from] minicbor::decode::Error), + //DecodingError(#[from] pallas::codec::minicbor::decode::Error), + // #[error(transparent)] + // TraverseError(#[from] pallas::ledger::traverse::Error), #[error(transparent)] InvariantViolation(#[from] InvariantViolation), From 4c47ca614041e8efc192ea376c0cfabf163feb8b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 13:24:06 -0300 Subject: [PATCH 06/85] wip: archive compiling --- crates/core/src/archive.rs | 92 ++++++++++++++++++++++++++------------ 1 file changed, 64 insertions(+), 28 deletions(-) diff --git a/crates/core/src/archive.rs b/crates/core/src/archive.rs index 4994d4d07..6340256ee 100644 --- a/crates/core/src/archive.rs +++ b/crates/core/src/archive.rs @@ -126,7 +126,7 @@ impl 
LogIterTyped { } impl Iterator for LogIterTyped { - type Item = Result<(LogKey, E), ArchiveError>; + type Item = Result<(LogKey, E), ArchiveError>; fn next(&mut self) -> Option { let next = self.inner.next()?; @@ -142,16 +142,13 @@ impl Iterator for LogIterTyped { } #[derive(Debug, Error)] -pub enum ArchiveError { +pub enum ArchiveError { #[error("broken invariant")] BrokenInvariant(#[from] BrokenInvariant), #[error("storage error")] InternalError(String), - #[error("address decoding error")] - AddressDecoding(#[from] pallas::ledger::addresses::Error), - #[error("query not supported")] QueryNotSupported, @@ -159,37 +156,53 @@ pub enum ArchiveError { InvalidStoreVersion, #[error("decoding error")] - DecodingError(#[from] pallas::codec::minicbor::decode::Error), + DecodingError(#[from] minicbor::decode::Error), - #[error("block decoding error")] - BlockDecodingError(#[from] pallas::ledger::traverse::Error), + //#[error("address decoding error")] + //AddressDecoding(#[from] pallas::ledger::addresses::Error), + //#[error("decoding error")] + //DecodingError(#[from] pallas::codec::minicbor::decode::Error), + //#[error("block decoding error")] + //BlockDecodingError(#[from] pallas::ledger::traverse::Error), #[error("entity decoding error")] EntityDecodingError(String), #[error("namespace {0} not found")] NamespaceNotFound(Namespace), + + #[error("chain-specific error: {0}")] + ChainSpecifc(E), } pub trait ArchiveWriter: Send + Sync + 'static { - fn apply(&self, point: &ChainPoint, block: &RawBlock) -> Result<(), ArchiveError>; + type ChainSpecificError: std::error::Error + Send + Sync; + fn apply( + &self, + point: &ChainPoint, + block: &RawBlock, + ) -> Result<(), ArchiveError>; fn write_log( &self, ns: Namespace, key: &LogKey, value: &EntityValue, - ) -> Result<(), ArchiveError>; + ) -> Result<(), ArchiveError>; - fn write_log_typed(&self, key: &LogKey, entity: &E) -> Result<(), ArchiveError> { + fn write_log_typed( + &self, + key: &LogKey, + entity: &E, + ) -> 
Result<(), ArchiveError> { let (ns, raw) = E::encode_entity(entity); self.write_log(ns, key, &raw) } - fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError>; + fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError>; - fn commit(self) -> Result<(), ArchiveError>; + fn commit(self) -> Result<(), ArchiveError>; } /// An iterator that supports efficient skipping without materializing items. @@ -205,30 +218,38 @@ pub trait Skippable { } pub trait ArchiveStore: Clone + Send + Sync + 'static { + type ChainSpecificError: std::error::Error + Send + Sync; type BlockIter<'a>: Iterator + DoubleEndedIterator + Skippable + 'a; type Writer: ArchiveWriter; - type LogIter: Iterator>; - type EntityValueIter: Iterator>; + type LogIter: Iterator< + Item = Result<(LogKey, EntityValue), ArchiveError>, + >; + type EntityValueIter: Iterator< + Item = Result>, + >; - fn start_writer(&self) -> Result; + fn start_writer(&self) -> Result>; fn read_logs( &self, ns: Namespace, keys: &[&LogKey], - ) -> Result>, ArchiveError>; + ) -> Result>, ArchiveError>; - fn iter_logs(&self, ns: Namespace, range: Range) - -> Result; + fn iter_logs( + &self, + ns: Namespace, + range: Range, + ) -> Result>; fn read_logs_typed( &self, ns: Namespace, keys: &[&LogKey], - ) -> Result>, ArchiveError> { + ) -> Result>, ArchiveError> { let raw = self.read_logs(ns, keys)?; let decoded = raw @@ -249,7 +270,7 @@ pub trait ArchiveStore: Clone + Send + Sync + 'static { &self, ns: Namespace, key: &LogKey, - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { let raw = self.read_logs_typed(ns, &[key])?; let first = raw.into_iter().next().unwrap(); @@ -261,7 +282,7 @@ pub trait ArchiveStore: Clone + Send + Sync + 'static { &self, ns: Namespace, range: Option>, - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { let range = range.unwrap_or_else(LogKey::full_range); let inner = self.iter_logs(ns, range)?; @@ -269,18 +290,33 @@ pub trait ArchiveStore: Clone + Send + Sync + 'static { 
Ok(LogIterTyped::::new(inner, ns)) } - fn get_block_by_slot(&self, slot: &BlockSlot) -> Result, ArchiveError>; + fn get_block_by_slot( + &self, + slot: &BlockSlot, + ) -> Result, ArchiveError>; fn get_range<'a>( &self, from: Option, to: Option, - ) -> Result, ArchiveError>; + ) -> Result, ArchiveError>; - fn find_intersect(&self, intersect: &[ChainPoint]) -> Result, ArchiveError>; + fn find_intersect( + &self, + intersect: &[ChainPoint], + ) -> Result, ArchiveError>; - fn get_tip(&self) -> Result, ArchiveError>; + fn get_tip( + &self, + ) -> Result, ArchiveError>; - fn prune_history(&self, max_slots: u64, max_prune: Option) -> Result; + fn prune_history( + &self, + max_slots: u64, + max_prune: Option, + ) -> Result>; - fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError>; + fn truncate_front( + &self, + after: &ChainPoint, + ) -> Result<(), ArchiveError>; } From fbf3113b55c12b5294b0acfa6b1ebe99af5076f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 14:11:00 -0300 Subject: [PATCH 07/85] feat: successfully removed pallas from mempool --- crates/core/src/lib.rs | 5 ++- crates/core/src/mempool.rs | 62 ++++++++++++-------------------------- 2 files changed, 24 insertions(+), 43 deletions(-) diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 0f8f74321..7475b4da8 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -571,7 +571,10 @@ pub trait ChainLogic: Sized + Send + Sync { tip.saturating_sub(Self::mutable_slots(domain)) } - /// Validate a transaction against the current ledger state. + fn tx_produced_utxos(era_body: &EraCbor) -> Vec<(TxoRef, EraCbor)>; + fn tx_consumed_ref(era_body: &EraCbor) -> Vec; + + // Validate a transaction against the current ledger state. 
fn validate_tx( &self, cbor: &[u8], diff --git a/crates/core/src/mempool.rs b/crates/core/src/mempool.rs index 61b1dfdc4..ada04df7a 100644 --- a/crates/core/src/mempool.rs +++ b/crates/core/src/mempool.rs @@ -1,12 +1,14 @@ use super::*; use crate::TagDimension; -pub use pallas::ledger::validate::phase2::EvalReport; +//pub use pallas::ledger::validate::phase2::EvalReport; use futures_core::Stream; use std::pin::Pin; use tracing::{debug, warn}; +pub type Report = Vec; + #[derive(Debug)] pub struct MempoolTx { pub hash: TxHash, @@ -17,7 +19,8 @@ pub struct MempoolTx { pub confirmed_at: Option, // this might be empty if the tx is cloned - pub report: Option, + // TODO: notify santiago there is an extra serialize/deserialize on mempool ops + pub report: Option, } impl PartialEq for MempoolTx { @@ -43,7 +46,7 @@ impl Clone for MempoolTx { } impl MempoolTx { - pub fn new(hash: TxHash, payload: EraCbor, report: EvalReport) -> Self { + pub fn new(hash: TxHash, payload: EraCbor, report: Report) -> Self { Self { hash, payload, @@ -110,11 +113,10 @@ pub enum MempoolError { #[error("internal error: {0}")] Internal(#[from] Box), - #[error("traverse error: {0}")] - TraverseError(#[from] pallas::ledger::traverse::Error), - + // #[error("traverse error: {0}")] + // TraverseError(#[from] pallas::ledger::traverse::Error), #[error("decode error: {0}")] - DecodeError(#[from] pallas::codec::minicbor::decode::Error), + DecodeError(#[from] minicbor::decode::Error), #[error(transparent)] StateError(#[from] StateError), @@ -291,24 +293,17 @@ pub struct MempoolAwareUtxoStore<'a, D: Domain> { fn scan_mempool_utxos(predicate: F, mempool: &D::Mempool) -> HashSet where - F: Fn(&MultiEraOutput<'_>) -> bool, + F: Fn(&EraCbor) -> bool, { let mut refs = HashSet::new(); let mut all_txs = mempool.peek_pending(); all_txs.extend(mempool.peek_inflight()); - for mtx in all_txs { - let era_cbor = &mtx.payload; - let Some(tx) = MultiEraTx::try_from(era_cbor).ok() else { - continue; - }; - - debug!(tx = 
%tx.hash(), "scanning mempool tx"); - - for (idx, inflight) in tx.produces() { - if predicate(&inflight) { - let txoref = TxoRef::from((tx.hash(), idx as u32)); + for mtx in all_txs.into_iter() { + debug!(mtx = %mtx.hash, "scanning mempool tx"); + for (txoref, utxo) in D::Chain::tx_produced_utxos(&mtx.payload) { + if predicate(&utxo) { debug!(txoref = %txoref, "mempool utxo matches predicate"); refs.insert(txoref); } @@ -325,16 +320,8 @@ fn exclude_inflight_stxis(refs: &mut HashSet, mempool: &D::Me all_txs.extend(mempool.peek_inflight()); for mtx in all_txs { - let era_cbor = &mtx.payload; - let Some(tx) = MultiEraTx::try_from(era_cbor).ok() else { - warn!("invalid inflight tx"); - continue; - }; - - debug!(tx = %tx.hash(), "checking inflight tx"); - - for locked in tx.consumes() { - let txoref = TxoRef::from(&locked); + debug!(tx = %mtx.hash, "checking inflight tx"); + for txoref in D::Chain::tx_consumed_ref(&mtx.payload) { if refs.remove(&txoref) { debug!(txoref = %txoref, "excluded stxi"); } @@ -349,20 +336,11 @@ fn select_mempool_utxos(refs: &mut HashSet, mempool: &D::Memp all_txs.extend(mempool.peek_inflight()); for mtx in all_txs { - let era_cbor = &mtx.payload; - let Some(tx) = MultiEraTx::try_from(era_cbor).ok() else { - continue; - }; - - debug!(tx = %tx.hash(), "checking mempool tx"); - - for (idx, inflight) in tx.produces() { - let txoref = TxoRef::from((tx.hash(), idx as u32)); + debug!(tx = %mtx.hash, "checking mempool tx"); + for (txoref, era_cbor) in D::Chain::tx_produced_utxos(&mtx.payload) { debug!(txoref = %txoref, "checking mempool utxo"); - if refs.contains(&txoref) { - let era_cbor = EraCbor::from(inflight); - debug!(txoref = %txoref, "selected utxo available inmempool tx"); + debug!(txoref = %txoref, "selected utxo available in mempool tx"); refs.remove(&txoref); map.insert(txoref, Arc::new(era_cbor)); } @@ -404,7 +382,7 @@ impl<'a, D: Domain> MempoolAwareUtxoStore<'a, D> { predicate: F, ) -> Result where - F: Fn(&MultiEraOutput<'_>) -> bool, 
+ F: Fn(&EraCbor) -> bool, { let from_mempool = scan_mempool_utxos::(predicate, self.mempool); From b5476668da72cceecb4118082ecb447817be16f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 16:14:02 -0300 Subject: [PATCH 08/85] feat: async_query compiling --- crates/core/src/async_query.rs | 64 +++++++++++++++++----------------- crates/core/src/lib.rs | 5 +++ 2 files changed, 37 insertions(+), 32 deletions(-) diff --git a/crates/core/src/async_query.rs b/crates/core/src/async_query.rs index f43a83337..a70745812 100644 --- a/crates/core/src/async_query.rs +++ b/crates/core/src/async_query.rs @@ -2,11 +2,9 @@ use std::sync::Arc; use tokio::sync::Semaphore; -use pallas::ledger::traverse::MultiEraBlock; - use crate::{ archive::ArchiveStore, indexes::IndexStore, ArchiveError, BlockBody, BlockSlot, ChainError, - ChainPoint, Domain, DomainError, EraCbor, IndexError, TagDimension, TxOrder, + ChainLogic, ChainPoint, Domain, DomainError, EraCbor, IndexError, TagDimension, TxOrder, }; #[derive(Debug, Clone)] @@ -48,10 +46,10 @@ where &self.options } - pub async fn run_blocking(&self, f: F) -> Result + pub async fn run_blocking(&self, f: F) -> Result> where T: Send + 'static, - F: FnOnce(D) -> Result + Send + 'static, + F: FnOnce(D) -> Result> + Send + 'static, { let permit = self.limiter.clone().acquire_owned().await.map_err(|_| { DomainError::ArchiveError(ArchiveError::InternalError( @@ -69,12 +67,18 @@ where .map_err(|e| DomainError::ArchiveError(ArchiveError::InternalError(e.to_string())))? 
} - pub async fn block_by_slot(&self, slot: BlockSlot) -> Result, DomainError> { + pub async fn block_by_slot( + &self, + slot: BlockSlot, + ) -> Result, DomainError> { self.run_blocking(move |domain| Ok(domain.archive().get_block_by_slot(&slot)?)) .await } - pub async fn block_by_hash(&self, hash: Vec) -> Result, DomainError> { + pub async fn block_by_hash( + &self, + hash: Vec, + ) -> Result, DomainError> { self.run_blocking(move |domain| { let slot = domain.indexes().slot_by_block_hash(&hash)?; match slot { @@ -85,7 +89,10 @@ where .await } - pub async fn block_by_number(&self, number: u64) -> Result, DomainError> { + pub async fn block_by_number( + &self, + number: u64, + ) -> Result, DomainError> { self.run_blocking(move |domain| { let slot = domain.indexes().slot_by_block_number(number)?; match slot { @@ -96,7 +103,10 @@ where .await } - pub async fn slot_by_number(&self, number: u64) -> Result, DomainError> { + pub async fn slot_by_number( + &self, + number: u64, + ) -> Result, DomainError> { self.run_blocking(move |domain| Ok(domain.indexes().slot_by_block_number(number)?)) .await } @@ -104,7 +114,7 @@ where pub async fn block_by_tx_hash( &self, tx_hash: Vec, - ) -> Result, DomainError> { + ) -> Result, DomainError> { let tx_hash_lookup = tx_hash.clone(); let Some(raw) = self .run_blocking(move |domain| { @@ -120,21 +130,15 @@ where return Ok(None); }; - let block = MultiEraBlock::decode(raw.as_slice()) - .map_err(|e| DomainError::ChainError(ChainError::DecodingError(e)))?; - if let Some((idx, _)) = block - .txs() - .iter() - .enumerate() - .find(|(_, tx)| tx.hash().to_vec() == tx_hash) - { - return Ok(Some((raw, idx))); - } - - Ok(None) + D::Chain::find_tx_in_block(&raw, &tx_hash) + .map_err(|err| DomainError::ChainError(ChainError::ChainSpecific(err))) + .map(|maybe_ix| maybe_ix.map(|(era_cbor, ix)| (era_cbor.cbor().to_vec(), ix))) } - pub async fn tx_cbor(&self, tx_hash: Vec) -> Result, DomainError> { + pub async fn tx_cbor( + &self, + tx_hash: Vec, + ) 
-> Result, DomainError> { let tx_hash_lookup = tx_hash.clone(); let Some(raw) = self .run_blocking(move |domain| { @@ -150,13 +154,9 @@ where return Ok(None); }; - let block = MultiEraBlock::decode(raw.as_slice()) - .map_err(|e| DomainError::ChainError(ChainError::DecodingError(e)))?; - if let Some(tx) = block.txs().iter().find(|x| x.hash().to_vec() == tx_hash) { - return Ok(Some(EraCbor(block.era().into(), tx.encode()))); - } - - Ok(None) + D::Chain::find_tx_in_block(&raw, &tx_hash) + .map_err(|err| DomainError::ChainError(ChainError::ChainSpecific(err))) + .map(|maybe_ix| maybe_ix.map(|(era_cbor, _)| era_cbor)) } pub async fn slots_by_tag( @@ -165,7 +165,7 @@ where key: Vec, start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result, DomainError> { + ) -> Result, DomainError> { self.run_blocking(move |domain| { let slots = domain .indexes() @@ -179,7 +179,7 @@ where pub async fn find_intersect( &self, intersect: Vec, - ) -> Result, DomainError> { + ) -> Result, DomainError> { self.run_blocking(move |domain| Ok(domain.archive().find_intersect(&intersect)?)) .await } diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 7475b4da8..393bcda35 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -574,6 +574,11 @@ pub trait ChainLogic: Sized + Send + Sync { fn tx_produced_utxos(era_body: &EraCbor) -> Vec<(TxoRef, EraCbor)>; fn tx_consumed_ref(era_body: &EraCbor) -> Vec; + fn find_tx_in_block( + block: &[u8], + tx_hash: &[u8], + ) -> Result, Self::ChainSpecificError>; + // Validate a transaction against the current ledger state. 
fn validate_tx( &self, From 773320a442934a16acaead24b593b1c13a75c3dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 16:16:21 -0300 Subject: [PATCH 09/85] feat: bootstrap compiling --- crates/core/src/archive.rs | 1 + crates/core/src/bootstrap.rs | 16 ++++++++++------ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/crates/core/src/archive.rs b/crates/core/src/archive.rs index 6340256ee..fc54e3f7c 100644 --- a/crates/core/src/archive.rs +++ b/crates/core/src/archive.rs @@ -294,6 +294,7 @@ pub trait ArchiveStore: Clone + Send + Sync + 'static { &self, slot: &BlockSlot, ) -> Result, ArchiveError>; + fn get_range<'a>( &self, from: Option, diff --git a/crates/core/src/bootstrap.rs b/crates/core/src/bootstrap.rs index 982cf7010..5e2fdf7bb 100644 --- a/crates/core/src/bootstrap.rs +++ b/crates/core/src/bootstrap.rs @@ -19,25 +19,25 @@ pub trait BootstrapExt: Domain { /// /// Ensures WAL and archive are in sync with the state store. /// This should be called at startup before processing any new blocks. - fn check_integrity(&self) -> Result<(), DomainError>; + fn check_integrity(&self) -> Result<(), DomainError>; /// Bootstrap the domain. /// /// Performs integrity checks and drains any pending initialization work. /// Uses the full sync lifecycle (WAL + tip notifications) since after /// bootstrap the node is considered "live". - fn bootstrap(&self) -> Result<(), DomainError>; + fn bootstrap(&self) -> Result<(), DomainError>; } impl BootstrapExt for D { - fn check_integrity(&self) -> Result<(), DomainError> { + fn check_integrity(&self) -> Result<(), DomainError> { ensure_wal_in_sync_with_state(self)?; check_archive_in_sync_with_state(self)?; Ok(()) } - fn bootstrap(&self) -> Result<(), DomainError> { + fn bootstrap(&self) -> Result<(), DomainError> { self.check_integrity()?; // TODO: we should probably catch up stores here @@ -55,7 +55,9 @@ impl BootstrapExt for D { /// Ensure WAL is in sync with state store. 
/// /// If the WAL tip doesn't match the state cursor, reset WAL to match state. -fn ensure_wal_in_sync_with_state(domain: &D) -> Result<(), DomainError> { +fn ensure_wal_in_sync_with_state( + domain: &D, +) -> Result<(), DomainError> { let wal = domain.wal().find_tip()?.map(|(point, _)| point); let state = domain.state().read_cursor()?; @@ -87,7 +89,9 @@ fn ensure_wal_in_sync_with_state(domain: &D) -> Result<(), DomainErro /// Check if archive is in sync with state store. /// /// Logs warnings/errors if there's a mismatch but doesn't attempt to fix it. -fn check_archive_in_sync_with_state(domain: &D) -> Result<(), DomainError> { +fn check_archive_in_sync_with_state( + domain: &D, +) -> Result<(), DomainError> { let archive = domain.archive().get_tip()?.map(|(slot, _)| slot); let state = domain.state().read_cursor()?.map(|x| x.slot()); From ca284ad54bdebf6cc22056ec92d9f4960033d13e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 16:17:39 -0300 Subject: [PATCH 10/85] feat: import compiling --- crates/core/src/import.rs | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/core/src/import.rs b/crates/core/src/import.rs index a68b754cb..6c43b11ae 100644 --- a/crates/core/src/import.rs +++ b/crates/core/src/import.rs @@ -31,11 +31,17 @@ pub trait ImportExt: Domain { /// # Returns /// /// The slot of the last imported block. - fn import_blocks(&self, raw: Vec) -> Result; + fn import_blocks( + &self, + raw: Vec, + ) -> Result>; } impl ImportExt for D { - fn import_blocks(&self, mut raw: Vec) -> Result { + fn import_blocks( + &self, + mut raw: Vec, + ) -> Result> { let mut last = 0; let mut chain = self.write_chain(); @@ -55,7 +61,10 @@ impl ImportExt for D { } /// Drain all pending work from the chain logic using import lifecycle. 
-fn drain_pending_work(chain: &mut D::Chain, domain: &D) -> Result<(), DomainError> { +fn drain_pending_work( + chain: &mut D::Chain, + domain: &D, +) -> Result<(), DomainError> { while let Some(mut work) = ::pop_work::(chain, domain) { execute_work_unit(domain, &mut work)?; } @@ -76,7 +85,10 @@ fn drain_pending_work(chain: &mut D::Chain, domain: &D) -> Result<(), /// - `commit_wal()` - Not needed for immutable data import /// - `notify_tip()` - No subscribers during bulk import #[instrument(skip_all, name = "work_unit", fields(name = %work.name()))] -fn execute_work_unit(domain: &D, work: &mut D::WorkUnit) -> Result<(), DomainError> { +fn execute_work_unit( + domain: &D, + work: &mut D::WorkUnit, +) -> Result<(), DomainError> { debug!("executing work unit (import)"); work.load(domain)?; From 6e273ade7e0235dda771b7ed85641125447fc6f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 16:19:52 -0300 Subject: [PATCH 11/85] feat: sync compilign --- crates/core/src/sync.rs | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/crates/core/src/sync.rs b/crates/core/src/sync.rs index df400c650..464954679 100644 --- a/crates/core/src/sync.rs +++ b/crates/core/src/sync.rs @@ -39,18 +39,24 @@ pub trait SyncExt: Domain { /// # Returns /// /// The slot of the processed block. - fn roll_forward(&self, block: RawBlock) -> Result; + fn roll_forward( + &self, + block: RawBlock, + ) -> Result>; /// Roll back the chain to a previous point. /// /// Iterates WAL entries after the target point in reverse order, /// undoing each block's effects on state, UTxOs, and indexes. 
- fn rollback(&self, to: &ChainPoint) -> Result<(), DomainError>; + fn rollback(&self, to: &ChainPoint) -> Result<(), DomainError>; } impl SyncExt for D { #[instrument(skip_all)] - fn roll_forward(&self, block: RawBlock) -> Result { + fn roll_forward( + &self, + block: RawBlock, + ) -> Result> { let mut chain = self.write_chain(); // Drain first in case there's previous work that needs to be applied (eg: initialization) @@ -64,7 +70,7 @@ impl SyncExt for D { } #[instrument(skip_all, fields(rollback_to = %to))] - fn rollback(&self, to: &ChainPoint) -> Result<(), DomainError> { + fn rollback(&self, to: &ChainPoint) -> Result<(), DomainError> { let undo_blocks = self.wal().iter_logs(Some(to.clone()), None)?; let writer = self.state().start_writer()?; @@ -141,7 +147,7 @@ impl SyncExt for D { pub(crate) fn drain_pending_work( chain: &mut D::Chain, domain: &D, -) -> Result<(), DomainError> { +) -> Result<(), DomainError> { while let Some(mut work) = ::pop_work::(chain, domain) { execute_work_unit(domain, &mut work)?; } @@ -163,7 +169,10 @@ pub(crate) fn drain_pending_work( /// This function is public primarily for testing scenarios where direct /// work unit execution is needed (e.g., manual genesis initialization). 
#[instrument(skip_all, name = "work_unit", fields(name = %work.name()))] -pub fn execute_work_unit(domain: &D, work: &mut D::WorkUnit) -> Result<(), DomainError> { +pub fn execute_work_unit( + domain: &D, + work: &mut D::WorkUnit, +) -> Result<(), DomainError> { debug!("executing work unit"); work.load(domain)?; From f94062c8a00449e15a6f3171de08ae4d2957ac10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 16:25:54 -0300 Subject: [PATCH 12/85] feat: crawl submit workunit all take in chain specific errors as params --- crates/core/src/crawl.rs | 15 +++++++++------ crates/core/src/submit.rs | 8 ++++++-- crates/core/src/work_unit.rs | 12 ++++++------ 3 files changed, 21 insertions(+), 14 deletions(-) diff --git a/crates/core/src/crawl.rs b/crates/core/src/crawl.rs index c62754e3f..35c063242 100644 --- a/crates/core/src/crawl.rs +++ b/crates/core/src/crawl.rs @@ -13,12 +13,12 @@ pub enum Batch { } impl Batch { - fn from_tip(point: ChainPoint, domain: &D) -> Result { + fn from_tip(point: ChainPoint, domain: &D) -> Result> { let subscription = domain.watch_tip(Some(point.clone()))?; Ok(Self::Tip(point, subscription)) } - fn from_wal(point: ChainPoint, domain: &D) -> Result { + fn from_wal(point: ChainPoint, domain: &D) -> Result> { let page = domain .wal() .iter_blocks(Some(point.clone()), None)? @@ -35,7 +35,10 @@ impl Batch { Ok(Self::WalPage(point, page)) } - fn from_archive(last_point: ChainPoint, domain: &D) -> Result { + fn from_archive( + last_point: ChainPoint, + domain: &D, + ) -> Result> { let page = domain .archive() .get_range(Some(last_point.slot()), None)? 
@@ -107,7 +110,7 @@ impl ChainCrawler { pub fn start( domain: &D, intersect: &[ChainPoint], - ) -> Result, DomainError> { + ) -> Result, DomainError> { let domain = domain.clone(); if intersect.is_empty() { @@ -138,7 +141,7 @@ impl ChainCrawler { Ok(None) } - fn load_next_batch(&mut self) -> Result<(), DomainError> { + fn load_next_batch(&mut self) -> Result<(), DomainError> { let next = match &self.batch { Batch::WalPage(point, _) => Batch::from_wal(point.clone(), &self.domain), Batch::ArchivePage(x, _) => Batch::from_archive(x.clone(), &self.domain), @@ -169,7 +172,7 @@ impl ChainCrawler { } } - pub fn find_tip(&self) -> Result, DomainError> { + pub fn find_tip(&self) -> Result, DomainError> { let point = self.domain.wal().find_tip()?; Ok(point.map(|(x, _)| x)) } diff --git a/crates/core/src/submit.rs b/crates/core/src/submit.rs index f0309f5b1..0f685ce3d 100644 --- a/crates/core/src/submit.rs +++ b/crates/core/src/submit.rs @@ -31,7 +31,11 @@ pub trait SubmitExt: Domain { /// /// The validated mempool transaction if valid. #[instrument(skip_all)] - fn validate_tx(&self, chain: &Self::Chain, cbor: &[u8]) -> Result { + fn validate_tx( + &self, + chain: &Self::Chain, + cbor: &[u8], + ) -> Result> { let tip = self.state().read_cursor()?; let utxos = @@ -62,7 +66,7 @@ pub trait SubmitExt: Domain { source: &str, chain: &Self::Chain, cbor: &[u8], - ) -> Result { + ) -> Result> { let _guard = match SUBMIT_LOCK.lock() { Ok(guard) => guard, Err(poisoned) => poisoned.into_inner(), diff --git a/crates/core/src/work_unit.rs b/crates/core/src/work_unit.rs index 2aa8a50bd..9e96abfb1 100644 --- a/crates/core/src/work_unit.rs +++ b/crates/core/src/work_unit.rs @@ -55,7 +55,7 @@ pub trait WorkUnit: Send { /// /// Returns an error if data loading fails (e.g., storage errors, /// missing required data). - fn load(&mut self, domain: &D) -> Result<(), DomainError>; + fn load(&mut self, domain: &D) -> Result<(), DomainError>; /// Execute CPU-intensive computation over loaded data. 
/// @@ -67,7 +67,7 @@ pub trait WorkUnit: Send { /// /// Returns an error if computation fails (e.g., invalid data, /// computation errors). - fn compute(&mut self) -> Result<(), DomainError>; + fn compute(&mut self) -> Result<(), DomainError>; /// Persist to write-ahead log for crash recovery. /// @@ -81,7 +81,7 @@ pub trait WorkUnit: Send { /// # Errors /// /// Returns an error if WAL persistence fails. - fn commit_wal(&mut self, _domain: &D) -> Result<(), DomainError> { + fn commit_wal(&mut self, _domain: &D) -> Result<(), DomainError> { Ok(()) } @@ -94,7 +94,7 @@ pub trait WorkUnit: Send { /// # Errors /// /// Returns an error if state persistence fails. - fn commit_state(&mut self, domain: &D) -> Result<(), DomainError>; + fn commit_state(&mut self, domain: &D) -> Result<(), DomainError>; /// Apply computed changes to the archive store. /// @@ -104,7 +104,7 @@ pub trait WorkUnit: Send { /// # Errors /// /// Returns an error if archive persistence fails. - fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError>; + fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError>; /// Apply computed changes to index stores. /// @@ -115,7 +115,7 @@ pub trait WorkUnit: Send { /// # Errors /// /// Returns an error if index persistence fails. 
- fn commit_indexes(&mut self, _domain: &D) -> Result<(), DomainError> { + fn commit_indexes(&mut self, _domain: &D) -> Result<(), DomainError> { Ok(()) } From 00c233e0535ed8211c279b826f40b65a6cdce065 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 16:38:43 -0300 Subject: [PATCH 13/85] feat(core/builtin/noop): get chain specific error here as well --- crates/core/src/archive.rs | 2 +- crates/core/src/lib.rs | 1 - crates/core/src/mempool.rs | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/core/src/archive.rs b/crates/core/src/archive.rs index fc54e3f7c..80763e1ac 100644 --- a/crates/core/src/archive.rs +++ b/crates/core/src/archive.rs @@ -223,7 +223,7 @@ pub trait ArchiveStore: Clone + Send + Sync + 'static { + DoubleEndedIterator + Skippable + 'a; - type Writer: ArchiveWriter; + type Writer: ArchiveWriter; type LogIter: Iterator< Item = Result<(LogKey, EntityValue), ArchiveError>, >; diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 393bcda35..f4317dd0e 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -22,7 +22,6 @@ use serde::{Deserialize, Serialize}; use std::{ collections::{HashMap, HashSet}, fmt::Display, - path::Path, str::FromStr, sync::Arc, }; diff --git a/crates/core/src/mempool.rs b/crates/core/src/mempool.rs index ada04df7a..1d2964808 100644 --- a/crates/core/src/mempool.rs +++ b/crates/core/src/mempool.rs @@ -5,7 +5,7 @@ use crate::TagDimension; use futures_core::Stream; use std::pin::Pin; -use tracing::{debug, warn}; +use tracing::debug; pub type Report = Vec; From 48de650720f631f12a83c13c8fed5e820c76a392 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 16:38:48 -0300 Subject: [PATCH 14/85] feat(core/builtin/noop): get chain specific error here as well --- crates/core/src/builtin/noop.rs | 35 ++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git 
a/crates/core/src/builtin/noop.rs b/crates/core/src/builtin/noop.rs index 416c944dc..15b3c0c63 100644 --- a/crates/core/src/builtin/noop.rs +++ b/crates/core/src/builtin/noop.rs @@ -121,7 +121,9 @@ impl IndexStore for NoOpIndexStore { pub struct NoOpArchiveWriter; impl ArchiveWriter for NoOpArchiveWriter { - fn apply(&self, _point: &ChainPoint, _block: &RawBlock) -> Result<(), ArchiveError> { + type ChainSpecificError = std::convert::Infallible; + + fn apply(&self, _point: &ChainPoint, _block: &RawBlock) -> Result<(), ArchiveError> { Ok(()) } @@ -130,15 +132,15 @@ impl ArchiveWriter for NoOpArchiveWriter { _ns: Namespace, _key: &LogKey, _value: &EntityValue, - ) -> Result<(), ArchiveError> { + ) -> Result<(), ArchiveError> { Ok(()) } - fn undo(&self, _point: &ChainPoint) -> Result<(), ArchiveError> { + fn undo(&self, _point: &ChainPoint) -> Result<(), ArchiveError> { Ok(()) } - fn commit(self) -> Result<(), ArchiveError> { + fn commit(self) -> Result<(), ArchiveError> { Ok(()) } } @@ -152,7 +154,7 @@ impl NoOpArchiveStore { Self } - pub fn shutdown(&self) -> Result<(), ArchiveError> { + pub fn shutdown(&self) -> Result<(), ArchiveError> { Ok(()) } } @@ -183,7 +185,7 @@ impl crate::archive::Skippable for EmptyBlockIter { pub struct EmptyLogIter; impl Iterator for EmptyLogIter { - type Item = Result<(LogKey, EntityValue), ArchiveError>; + type Item = Result<(LogKey, EntityValue), ArchiveError>; fn next(&mut self) -> Option { None @@ -194,7 +196,7 @@ impl Iterator for EmptyLogIter { pub struct EmptyEntityValueIter; impl Iterator for EmptyEntityValueIter { - type Item = Result; + type Item = Result>; fn next(&mut self) -> Option { None @@ -202,12 +204,13 @@ impl Iterator for EmptyEntityValueIter { } impl ArchiveStore for NoOpArchiveStore { + type ChainSpecificError = std::convert::Infallible; type BlockIter<'a> = EmptyBlockIter; type Writer = NoOpArchiveWriter; type LogIter = EmptyLogIter; type EntityValueIter = EmptyEntityValueIter; - fn start_writer(&self) -> Result 
{ + fn start_writer(&self) -> Result> { Ok(NoOpArchiveWriter) } @@ -215,7 +218,7 @@ impl ArchiveStore for NoOpArchiveStore { &self, _ns: Namespace, keys: &[&LogKey], - ) -> Result>, ArchiveError> { + ) -> Result>, ArchiveError> { Ok(vec![None; keys.len()]) } @@ -223,11 +226,11 @@ impl ArchiveStore for NoOpArchiveStore { &self, _ns: Namespace, _range: Range, - ) -> Result { + ) -> Result> { Ok(EmptyLogIter) } - fn get_block_by_slot(&self, _slot: &BlockSlot) -> Result, ArchiveError> { + fn get_block_by_slot(&self, _slot: &BlockSlot) -> Result, ArchiveError> { Ok(None) } @@ -235,18 +238,18 @@ impl ArchiveStore for NoOpArchiveStore { &self, _from: Option, _to: Option, - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { Ok(EmptyBlockIter) } fn find_intersect( &self, _intersect: &[ChainPoint], - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { Ok(None) } - fn get_tip(&self) -> Result, ArchiveError> { + fn get_tip(&self) -> Result, ArchiveError> { Ok(None) } @@ -254,12 +257,12 @@ impl ArchiveStore for NoOpArchiveStore { &self, _max_slots: u64, _max_prune: Option, - ) -> Result { + ) -> Result> { // Nothing to prune, always "done" Ok(true) } - fn truncate_front(&self, _after: &ChainPoint) -> Result<(), ArchiveError> { + fn truncate_front(&self, _after: &ChainPoint) -> Result<(), ArchiveError> { Ok(()) } } From 65eb2deef9c0d218b270fe8fe8bb29ed9f71efaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 16:49:50 -0300 Subject: [PATCH 15/85] chore: add minicbor to core cargo toml and remove pallas --- crates/core/Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 0ee404e82..db6ce5e88 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -19,11 +19,10 @@ rayon.workspace = true tokio.workspace = true tokio-stream.workspace = true -# TODO: we shouldn't need Pallas in core -pallas.workspace = true hex.workspace = true 
regex.workspace = true serde_with = "3.16.0" +minicbor = { version = "0.26", features = ["derive"] } [dev-dependencies] proptest = "1.7.0" From 489ad197045947cb59aef1d9bc405882ae6a1618 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 18:39:58 -0300 Subject: [PATCH 16/85] feat: lib.rs and validate migrated to new architecture --- crates/cardano/src/lib.rs | 227 ++++++++++++++++++++++++++++----- crates/cardano/src/validate.rs | 63 +++++---- 2 files changed, 239 insertions(+), 51 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 133f54f32..b36b09c1a 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -1,5 +1,8 @@ -use pallas::ledger::traverse::{MultiEraBlock, MultiEraOutput}; -use std::sync::Arc; +use pallas::{ + crypto::hash::Hasher, + ledger::traverse::{MultiEraBlock, MultiEraInput, MultiEraOutput, MultiEraTx}, +}; +use std::{path::Path, sync::Arc}; use tracing::info; // re-export pallas for version compatibility downstream @@ -7,8 +10,8 @@ pub use pallas; use dolos_core::{ config::CardanoConfig, BlockSlot, ChainError, ChainPoint, Domain, DomainError, EntityKey, - EraCbor, Genesis, MempoolAwareUtxoStore, MempoolTx, MempoolUpdate, RawBlock, StateStore, - TipEvent, WorkUnit, + EraCbor, MempoolAwareUtxoStore, MempoolTx, MempoolUpdate, RawBlock, StateStore, TipEvent, + TxoRef, WorkUnit, }; use crate::{ @@ -55,6 +58,44 @@ pub type Block<'a> = MultiEraBlock<'a>; pub type UtxoBody<'a> = MultiEraOutput<'a>; +// ============================================================================ +// Pallas ↔ dolos_core conversions (orphan rule prevents From/TryFrom impls) +// ============================================================================ + +pub fn pallas_hash_to_core( + h: pallas::crypto::hash::Hash, +) -> dolos_core::hash::Hash { + dolos_core::hash::Hash::new(*h) +} + +// Can the era integer be removed? Not sure. Santi said something about it. 
+pub(crate) fn multi_era_tx_from_era_cbor(era_body: &EraCbor) -> Result, CardanoError> { + Ok(MultiEraTx::decode(era_body.cbor())?) +} + +pub(crate) fn txo_ref_from_pallas(hash: pallas::crypto::hash::Hash<32>, idx: u32) -> TxoRef { + TxoRef(pallas_hash_to_core(hash), idx) +} + +pub(crate) fn era_cbor_from_output(output: &MultiEraOutput<'_>) -> EraCbor { + EraCbor(output.era() as u16, output.encode()) +} + +pub(crate) fn txo_ref_from_input(input: &MultiEraInput<'_>) -> TxoRef { + TxoRef(pallas_hash_to_core(*input.hash()), input.index() as u32) +} + +pub fn core_hash_to_pallas( + h: dolos_core::hash::Hash, +) -> pallas::crypto::hash::Hash { + (*h.as_ref()).into() +} + +fn multi_era_output_from_era_cbor(era_body: &EraCbor) -> Result, CardanoError> { + let era = pallas::ledger::traverse::Era::try_from(era_body.era())?; + Ok(MultiEraOutput::decode(era, era_body.cbor())?) +} + /// Cardano-specific work unit variants. /// /// This enum represents all possible work units that can be produced @@ -90,7 +131,7 @@ where } } - fn load(&mut self, domain: &D) -> Result<(), DomainError> { + fn load(&mut self, domain: &D) -> Result<(), DomainError> { match self { Self::Genesis(w) => >::load(w, domain), Self::Roll(w) => >::load(w, domain), @@ -101,7 +142,7 @@ where } } - fn compute(&mut self) -> Result<(), DomainError> { + fn compute(&mut self) -> Result<(), DomainError> { match self { Self::Genesis(w) => >::compute(w), Self::Roll(w) => >::compute(w), @@ -112,7 +153,7 @@ where } } - fn commit_wal(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_wal(&mut self, domain: &D) -> Result<(), DomainError> { match self { Self::Genesis(w) => >::commit_wal(w, domain), Self::Roll(w) => >::commit_wal(w, domain), @@ -123,7 +164,7 @@ where } } - fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { match self { Self::Genesis(w) => >::commit_state(w, domain), Self::Roll(w) => >::commit_state(w, 
domain), @@ -134,7 +175,7 @@ where } } - fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError> { match self { Self::Genesis(w) => { >::commit_archive(w, domain) @@ -147,7 +188,7 @@ where } } - fn commit_indexes(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_indexes(&mut self, domain: &D) -> Result<(), DomainError> { match self { Self::Genesis(w) => { >::commit_indexes(w, domain) @@ -197,13 +238,82 @@ pub struct CardanoLogic { impl CardanoLogic { /// Refresh the cached era summary from state. /// Called after work units that may change era information (like genesis). - pub fn refresh_cache(&mut self, state: &D::State) -> Result<(), ChainError> { + pub fn refresh_cache( + &mut self, + state: &D::State, + ) -> Result<(), ChainError> { self.cache.eras = eras::load_era_summary::(state)?; Ok(()) } } +#[derive(Debug, thiserror::Error)] +pub enum CardanoError { + #[error("traverse error: {0}")] + Traverse(#[from] pallas::ledger::traverse::Error), + + #[error("address decoding error: {0}")] + Address(#[from] pallas::ledger::addresses::Error), + + #[error("cbor decoding error: {0}")] + Cbor(#[from] pallas::codec::minicbor::decode::Error), + + #[error("validation error: {0}")] + Validation(#[from] pallas::ledger::validate::utils::ValidationError), + + #[error("couldn't evaluate phase-2 script: {0}")] + Phase2EvaluationError(String), + + #[error("phase-2 script rejected the transaction")] + Phase2ValidationRejected(Vec), +} + +#[derive(Clone)] +pub struct CardanoGenesis { + pub byron: pallas::interop::hardano::configs::byron::GenesisFile, + pub shelley: pallas::interop::hardano::configs::shelley::GenesisFile, + pub alonzo: pallas::interop::hardano::configs::alonzo::GenesisFile, + pub conway: pallas::interop::hardano::configs::conway::GenesisFile, + pub shelley_hash: pallas::ledger::primitives::Hash<32>, + pub force_protocol: Option, +} + +impl dolos_core::Genesis for 
CardanoGenesis { + fn chain_id(&self) -> u32 { + self.shelley.network_magic.unwrap_or_default() + } +} + +impl CardanoGenesis { + pub fn from_file_paths( + byron: impl AsRef, + shelley: impl AsRef, + alonzo: impl AsRef, + conway: impl AsRef, + force_protocol: Option, + ) -> Result { + let shelley_bytes = std::fs::read(shelley.as_ref())?; + let mut hasher = Hasher::<256>::new(); + hasher.input(&shelley_bytes); + let shelley_hash = hasher.finalize(); + + let byron = pallas::ledger::configs::byron::from_file(byron.as_ref())?; + let shelley = pallas::ledger::configs::shelley::from_file(shelley.as_ref())?; + let alonzo = pallas::ledger::configs::alonzo::from_file(alonzo.as_ref())?; + let conway = pallas::ledger::configs::conway::from_file(conway.as_ref())?; + + Ok(Self { + byron, + shelley, + alonzo, + conway, + force_protocol, + shelley_hash, + }) + } +} + impl dolos_core::ChainLogic for CardanoLogic { type Config = CardanoConfig; type Block = OwnedMultiEraBlock; @@ -212,12 +322,14 @@ impl dolos_core::ChainLogic for CardanoLogic { type Entity = CardanoEntity; type WorkUnit> = CardanoWorkUnit; + type ChainSpecificError = CardanoError; + type Genesis = CardanoGenesis; fn initialize( config: Self::Config, state: &D::State, - genesis: &Genesis, - ) -> Result { + genesis: Self::Genesis, + ) -> Result> { info!("initializing"); let cursor = state.read_cursor()?; @@ -252,12 +364,17 @@ impl dolos_core::ChainLogic for CardanoLogic { work.can_receive_block() } - fn receive_block(&mut self, raw: RawBlock) -> Result { + fn receive_block( + &mut self, + raw: RawBlock, + ) -> Result> { if !self.can_receive_block() { return Err(ChainError::CantReceiveBlock(raw)); } - let block = OwnedMultiEraBlock::decode(raw)?; + let block = OwnedMultiEraBlock::decode(raw) + .map_err(CardanoError::from) + .map_err(ChainError::ChainSpecific)?; let work = self.work.take().expect("work buffer is initialized"); @@ -333,16 +450,21 @@ impl dolos_core::ChainLogic for CardanoLogic { block: &dolos_core::Cbor, 
inputs: &std::collections::HashMap>, point: ChainPoint, - ) -> Result { + ) -> Result> { let block_arc = Arc::new(block.clone()); - let blockd = OwnedMultiEraBlock::decode(block_arc)?; + let blockd = OwnedMultiEraBlock::decode(block_arc) + .map_err(CardanoError::from) + .map_err(ChainError::ChainSpecific)?; let blockv = blockd.view(); let decoded_inputs: std::collections::HashMap<_, _> = inputs .iter() .map(|(k, v)| { - let out = (k.clone(), OwnedMultiEraOutput::decode(v.clone())?); - Result::<_, ChainError>::Ok(out) + let decoded = OwnedMultiEraOutput::decode(v.clone()) + .map_err(CardanoError::from) + .map_err(ChainError::ChainSpecific)?; + let out = (k.clone(), decoded); + Result::<_, ChainError>::Ok(out) }) .collect::>()?; @@ -351,7 +473,12 @@ impl dolos_core::ChainLogic for CardanoLogic { let index_delta = crate::indexes::index_delta_from_utxo_delta(point, &utxo_delta); - let tx_hashes = blockv.txs().iter().map(|tx| tx.hash()).collect(); + let tx_hashes = blockv + .txs() + .iter() + // TODO: fix this pallas_hash_to_core most likely + .map(|tx| pallas_hash_to_core(tx.hash())) + .collect(); Ok(dolos_core::UndoBlockData { utxo_delta, @@ -360,8 +487,13 @@ impl dolos_core::ChainLogic for CardanoLogic { }) } - fn decode_utxo(&self, utxo: Arc) -> Result { - let out = OwnedMultiEraOutput::decode(utxo)?; + fn decode_utxo( + &self, + utxo: Arc, + ) -> Result> { + let out = OwnedMultiEraOutput::decode(utxo) + .map_err(CardanoError::from) + .map_err(ChainError::ChainSpecific)?; Ok(out) } @@ -370,25 +502,62 @@ impl dolos_core::ChainLogic for CardanoLogic { utils::mutable_slots(&domain.genesis()) } - fn validate_tx( + fn validate_tx>( &self, cbor: &[u8], utxos: &MempoolAwareUtxoStore, tip: Option, - genesis: &Genesis, - ) -> Result { + genesis: &CardanoGenesis, + ) -> Result> { validate::validate_tx(cbor, utxos, tip, genesis) } + + fn tx_produced_utxos(era_body: &EraCbor) -> Vec<(dolos_core::TxoRef, EraCbor)> { + let Ok(tx) = multi_era_tx_from_era_cbor(era_body) else { + 
return vec![]; + }; + tx.produces() + .iter() + .map(|(idx, output)| { + let txoref = txo_ref_from_pallas(tx.hash(), *idx as u32); + let body = era_cbor_from_output(output); + (txoref, body) + }) + .collect() + } + fn tx_consumed_ref(era_body: &EraCbor) -> Vec { + let Ok(tx) = multi_era_tx_from_era_cbor(era_body) else { + return vec![]; + }; + tx.consumes().iter().map(txo_ref_from_input).collect() + } + fn find_tx_in_block( + block: &[u8], + tx_hash: &[u8], + ) -> Result, Self::ChainSpecificError> { + let block = MultiEraBlock::decode(block)?; + let result = block + .txs() + .iter() + .enumerate() + .find(|(_, tx)| tx.hash().as_slice() == tx_hash) + .map(|(idx, tx)| (EraCbor(block.era().into(), tx.encode()), idx)); + Ok(result) + } } -pub fn load_effective_pparams(state: &D::State) -> Result { +pub fn load_effective_pparams( + state: &D::State, +) -> Result> { let epoch = load_epoch::(state)?; let active = epoch.pparams.unwrap_live(); Ok(active.clone()) } -pub fn load_epoch(state: &D::State) -> Result { +pub fn load_epoch( + state: &D::State, +) -> Result> { let epoch = state .read_entity_typed::(EpochState::NS, &EntityKey::from(CURRENT_EPOCH_KEY))? 
.ok_or(ChainError::NoActiveEpoch)?; @@ -397,14 +566,14 @@ pub fn load_epoch(state: &D::State) -> Result } #[cfg(test)] -pub fn load_test_genesis(env: &str) -> Genesis { +pub fn load_test_genesis(env: &str) -> GenesisCardanoCardano { use std::path::PathBuf; let test_data = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR").unwrap()) .join("test_data") .join(env); - Genesis::from_file_paths( + GenesisCardanoCardano::from_file_paths( test_data.join("genesis/byron.json"), test_data.join("genesis/shelley.json"), test_data.join("genesis/alonzo.json"), diff --git a/crates/cardano/src/validate.rs b/crates/cardano/src/validate.rs index 579fec5d7..979458613 100644 --- a/crates/cardano/src/validate.rs +++ b/crates/cardano/src/validate.rs @@ -1,26 +1,27 @@ use std::borrow::Cow; -use dolos_core::{ - ChainError, ChainPoint, Domain, EraCbor, Genesis, MempoolAwareUtxoStore, MempoolTx, -}; +use dolos_core::{ChainError, ChainPoint, Domain, EraCbor, MempoolAwareUtxoStore, MempoolTx}; +use crate::{CardanoError, CardanoGenesis}; use pallas::ledger::{ primitives::{NetworkId, TransactionInput}, traverse::{MultiEraInput, MultiEraOutput, MultiEraTx}, }; use tracing::debug; -pub fn validate_tx( +pub fn validate_tx>( cbor: &[u8], utxos: &MempoolAwareUtxoStore, tip: Option, - genesis: &Genesis, -) -> Result { - let tx = MultiEraTx::decode(cbor)?; + genesis: &CardanoGenesis, +) -> Result> { + let tx = MultiEraTx::decode(cbor) + .map_err(CardanoError::from) + .map_err(ChainError::ChainSpecific)?; let hash = tx.hash(); - let pparams = crate::load_effective_pparams::(utxos.state())?; - let pparams = crate::utils::pparams_to_pallas(&pparams); + let raw_pparams = crate::load_effective_pparams::(utxos.state())?; + let pparams = crate::utils::pparams_to_pallas(&raw_pparams); let network_id = match genesis.shelley.network_id.as_ref() { Some(network) => match network.as_str() { @@ -40,7 +41,11 @@ pub fn validate_tx( acnt: Some(pallas::ledger::validate::utils::AccountState::default()), }; - let 
input_refs = tx.requires().iter().map(From::from).collect(); + let input_refs = tx + .requires() + .iter() + .map(crate::txo_ref_from_input) + .collect(); let utxos_matches = utxos.get_utxos(input_refs)?; @@ -48,7 +53,7 @@ pub fn validate_tx( for (txoref, eracbor) in utxos_matches.iter() { let tx_in = TransactionInput { - transaction_id: txoref.0, + transaction_id: crate::core_hash_to_pallas(txoref.0), index: txoref.1.into(), }; @@ -58,7 +63,8 @@ pub fn validate_tx( let eracbor = eracbor.as_ref(); - let output = MultiEraOutput::try_from(eracbor)?; + let output = crate::multi_era_output_from_era_cbor(eracbor) + .map_err(ChainError::ChainSpecific)?; pallas_utxos.insert(input, output); } @@ -69,13 +75,17 @@ pub fn validate_tx( &env, &pallas_utxos, &mut pallas::ledger::validate::utils::CertState::default(), - )?; + ) + .map_err(CardanoError::from) + .map_err(ChainError::ChainSpecific)?; let report = evaluate_tx::(cbor, utxos)?; for eval in report.iter() { if !eval.success { - return Err(ChainError::Phase2ValidationRejected(eval.logs.clone())); + return Err(ChainError::ChainSpecific( + CardanoError::Phase2ValidationRejected(eval.logs.clone()), + )); } } @@ -89,16 +99,20 @@ pub fn validate_tx( let era = u16::from(tx.era()); let payload = EraCbor(era, cbor.into()); - let tx = MempoolTx::new(hash, payload, report); + let tx_hash = crate::pallas_hash_to_core(hash); + let encoded_report = format!("{report:?}").into_bytes(); + let tx = MempoolTx::new(tx_hash, payload, encoded_report); Ok(tx) } -pub fn evaluate_tx( +pub fn evaluate_tx>( cbor: &[u8], utxos: &MempoolAwareUtxoStore, -) -> Result { - let tx = MultiEraTx::decode(cbor)?; +) -> Result> { + let tx = MultiEraTx::decode(cbor) + .map_err(CardanoError::from) + .map_err(ChainError::ChainSpecific)?; use dolos_core::TxoRef; @@ -114,7 +128,11 @@ pub fn evaluate_tx( zero_time: eras.edge().start.timestamp, }; - let input_refs = tx.requires().iter().map(From::from).collect(); + let input_refs = tx + .requires() + .iter() + 
.map(crate::txo_ref_from_input) + .collect(); let utxos: pallas::ledger::validate::utils::UtxoMap = utxos .get_utxos(input_refs)? @@ -123,14 +141,15 @@ pub fn evaluate_tx( let era = eracbor.era().try_into().expect("era out of range"); ( - pallas::ledger::validate::utils::TxoRef::from((a, b)), + pallas::ledger::validate::utils::TxoRef::from((crate::core_hash_to_pallas(a), b)), pallas::ledger::validate::utils::EraCbor::from((era, eracbor.cbor().into())), ) }) .collect(); - let report = pallas::ledger::validate::phase2::evaluate_tx(&tx, &pparams, &utxos, &slot_config) - .map_err(|e| ChainError::Phase2EvaluationError(e.to_string()))?; + let report = + pallas::ledger::validate::phase2::evaluate_tx(&tx, &pparams, &utxos, &slot_config) + .map_err(|e| ChainError::ChainSpecific(CardanoError::Phase2EvaluationError(e.to_string())))?; Ok(report) } From f4f4f59d1a4b33f4766d233ae3b4644dcfef9515 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 18:40:25 -0300 Subject: [PATCH 17/85] feat: add hash from methods to hash to reduce errors --- crates/core/src/hash.rs | 15 +++++++++++---- crates/core/src/lib.rs | 5 ++--- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/crates/core/src/hash.rs b/crates/core/src/hash.rs index db4d96d89..e4c71e977 100644 --- a/crates/core/src/hash.rs +++ b/crates/core/src/hash.rs @@ -53,7 +53,14 @@ impl<'de, const N: usize> Deserialize<'de> for Hash { } } -// TODO: add more methods here. 
Probably will need a bunch so hash is its own file -// -// -// +impl From<[u8; N]> for Hash { + fn from(bytes: [u8; N]) -> Self { + Self(bytes) + } +} + +impl AsRef<[u8]> for Hash { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index f4317dd0e..fe65eac66 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -16,7 +16,6 @@ // traverse::{MultiEraInput, MultiEraOutput, MultiEraTx, MultiEraUpdate}, // }, //}; -use hash::Hash; use minicbor::{Decode, Encode}; use serde::{Deserialize, Serialize}; use std::{ @@ -72,9 +71,9 @@ pub type BlockBody = Cbor; pub type RawBlock = Arc; pub type RawBlockBatch = Vec; pub type RawUtxoMap = HashMap>; -pub type BlockHash = Hash<32>; +pub type BlockHash = crate::hash::Hash<32>; pub type BlockHeader = Cbor; -pub type TxHash = Hash<32>; +pub type TxHash = crate::hash::Hash<32>; /// Data needed to undo a block during rollback. /// From 7344ac22a3796145f0f7d54c8d5a827c88119fc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 18:42:35 -0300 Subject: [PATCH 18/85] feat(cardano/utils): compiling --- crates/cardano/src/utils.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/cardano/src/utils.rs b/crates/cardano/src/utils.rs index 961d2044e..6adc0ba18 100644 --- a/crates/cardano/src/utils.rs +++ b/crates/cardano/src/utils.rs @@ -3,9 +3,9 @@ use dolos_core::*; use pallas::ledger::addresses::Network; use pallas::ledger::validate::utils::{ConwayProtParams, MultiEraProtocolParameters}; -use crate::PParamsSet; +use crate::{CardanoGenesis, PParamsSet}; -pub fn network_from_genesis(genesis: &Genesis) -> Network { +pub fn network_from_genesis(genesis: &CardanoGenesis) -> Network { match genesis.shelley.network_id.as_deref() { Some("Mainnet") => Network::Mainnet, _ => Network::Testnet, @@ -17,7 +17,7 @@ pub fn network_from_genesis(genesis: &Genesis) -> Network { /// Reads the relevant genesis 
config values and uses the security window /// guarantee formula from consensus to calculate the latest slot that can be /// considered immutable. -pub fn mutable_slots(genesis: &Genesis) -> u64 { +pub fn mutable_slots(genesis: &CardanoGenesis) -> u64 { let k = genesis.byron.protocol_consts.k as f64; let f = genesis.shelley.active_slots_coeff.unwrap() as f64; ((3.0 * k) / f).ceil() as u64 @@ -29,7 +29,7 @@ pub fn mutable_slots(genesis: &Genesis) -> u64 { /// guarantee formula from consensus to calculate the latest slot that can be /// considered immutable. Same as `mutable_slots`, added for the code to be similar in naming /// convention to other implementations. -pub fn stability_window(genesis: &Genesis) -> u64 { +pub fn stability_window(genesis: &CardanoGenesis) -> u64 { mutable_slots(genesis) } @@ -37,7 +37,7 @@ pub fn stability_window(genesis: &Genesis) -> u64 { /// /// Similar to `mutable_slots` but with 4 instead of 3 as the constant. See the following issue for /// refference: https://github.com/IntersectMBO/cardano-ledger/issues/1914 -pub fn randomness_stability_window(genesis: &Genesis) -> u64 { +pub fn randomness_stability_window(genesis: &CardanoGenesis) -> u64 { let k = genesis.byron.protocol_consts.k as f64; let f = genesis.shelley.active_slots_coeff.unwrap() as f64; ((4.0 * k) / f).ceil() as u64 @@ -47,7 +47,7 @@ pub fn randomness_stability_window(genesis: &Genesis) -> u64 { /// /// This is supposed be `randomness_stability_window` but due to a bug in the code it is dependant /// on the protocol. See https://github.com/IntersectMBO/cardano-ledger/issues/1914. 
-pub fn nonce_stability_window(protocol: u16, genesis: &Genesis) -> u64 { +pub fn nonce_stability_window(protocol: u16, genesis: &CardanoGenesis) -> u64 { if protocol >= 9 { randomness_stability_window(genesis) } else { @@ -61,7 +61,7 @@ pub fn nonce_stability_window(protocol: u16, genesis: &Genesis) -> u64 { /// uses the security window guarantee formula from consensus to calculate the /// latest slot that can be considered immutable. This is used mainly to define /// which slots can be finalized in the ledger store (aka: compaction). -pub fn lastest_immutable_slot(tip: BlockSlot, genesis: &Genesis) -> BlockSlot { +pub fn lastest_immutable_slot(tip: BlockSlot, genesis: &CardanoGenesis) -> BlockSlot { tip.saturating_sub(mutable_slots(genesis)) } @@ -84,8 +84,8 @@ pub fn gcd(mut a: u64, mut b: u64) -> u64 { a } -pub fn load_genesis(path: &std::path::Path) -> Genesis { - Genesis::from_file_paths( +pub fn load_genesis(path: &std::path::Path) -> CardanoGenesis { + CardanoGenesis::from_file_paths( path.join("byron.json"), path.join("shelley.json"), path.join("alonzo.json"), From e7b3d49e6f4f0f122810b27908ac8008e2cc8f03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 18:52:08 -0300 Subject: [PATCH 19/85] feat(cardano/utxoset): compiling --- crates/cardano/src/lib.rs | 8 +++--- crates/cardano/src/utxoset.rs | 50 ++++++++++++++++++++--------------- 2 files changed, 34 insertions(+), 24 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index b36b09c1a..9278716b4 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -69,7 +69,9 @@ pub fn pallas_hash_to_core( } // Can the era integer be removed? Not sure. Santi said something about it. -pub(crate) fn multi_era_tx_from_era_cbor(era_body: &EraCbor) -> Result, CardanoError> { +pub(crate) fn multi_era_tx_from_era_cbor( + era_body: &EraCbor, +) -> Result, CardanoError> { Ok(MultiEraTx::decode(era_body.cbor())?) 
} @@ -346,7 +348,7 @@ impl dolos_core::ChainLogic for CardanoLogic { // into the epoch, capturing addrsRew (registered accounts) for the pre-Babbage // prefilter. Using 4k/f instead of 3k/f ensures the state at RUPD time includes // all deregistrations up to the correct threshold. - let stability_window = utils::randomness_stability_window(genesis); + let stability_window = utils::randomness_stability_window(&genesis); Ok(Self { config, @@ -498,7 +500,7 @@ impl dolos_core::ChainLogic for CardanoLogic { Ok(out) } - fn mutable_slots(domain: &impl Domain) -> BlockSlot { + fn mutable_slots(domain: &(impl Domain)) -> BlockSlot { utils::mutable_slots(&domain.genesis()) } diff --git a/crates/cardano/src/utxoset.rs b/crates/cardano/src/utxoset.rs index 20b7dcae3..408918ffe 100644 --- a/crates/cardano/src/utxoset.rs +++ b/crates/cardano/src/utxoset.rs @@ -5,6 +5,7 @@ use std::collections::{HashMap, HashSet}; use std::sync::Arc; use crate::owned::OwnedMultiEraOutput; +use crate::{era_cbor_from_output, pallas_hash_to_core, txo_ref_from_input, CardanoError}; pub fn compute_block_dependencies(block: &MultiEraBlock, loaded: &mut RawUtxoMap) -> Vec { let txs: HashMap<_, _> = block.txs().into_iter().map(|tx| (tx.hash(), tx)).collect(); @@ -14,8 +15,8 @@ pub fn compute_block_dependencies(block: &MultiEraBlock, loaded: &mut RawUtxoMap // add all produced utxos to the loaded map for (tx_hash, tx) in txs.iter() { for (idx, utxo) in tx.produces() { - let utxo_ref = TxoRef(*tx_hash, idx as u32); - loaded.insert(utxo_ref, Arc::new(utxo.into())); + let utxo_ref = TxoRef(pallas_hash_to_core(*tx_hash), idx as u32); + loaded.insert(utxo_ref, Arc::new(era_cbor_from_output(&utxo))); } } @@ -23,7 +24,7 @@ pub fn compute_block_dependencies(block: &MultiEraBlock, loaded: &mut RawUtxoMap let consumed: HashSet<_> = txs .values() .flat_map(MultiEraTx::consumes) - .map(|utxo| TxoRef(*utxo.hash(), utxo.index() as u32)) + .map(|utxo| txo_ref_from_input(&utxo)) .collect(); // find all missing utxos that 
are not already in the loaded map @@ -58,14 +59,14 @@ pub fn compute_apply_delta( for (tx_hash, tx) in txs.iter() { for (idx, produced) in tx.produces() { - let uxto_ref = TxoRef(*tx_hash, idx as u32); + let uxto_ref = TxoRef(pallas_hash_to_core(*tx_hash), idx as u32); delta .produced_utxo - .insert(uxto_ref, Arc::new(produced.into())); + .insert(uxto_ref, Arc::new(era_cbor_from_output(&produced))); } for consumed in tx.consumes() { - let stxi_ref = TxoRef(*consumed.hash(), consumed.index() as u32); + let stxi_ref = txo_ref_from_input(&consumed); let stxi_body = loaded .get(&stxi_ref) @@ -90,14 +91,16 @@ pub fn compute_undo_delta( for (tx_hash, tx) in txs.iter() { for (idx, body) in tx.produces() { - let utxo_ref = TxoRef(*tx_hash, idx as u32); - delta.undone_utxo.insert(utxo_ref, Arc::new(body.into())); + let utxo_ref = TxoRef(pallas_hash_to_core(*tx_hash), idx as u32); + delta + .undone_utxo + .insert(utxo_ref, Arc::new(era_cbor_from_output(&body))); } } for (_, tx) in txs.iter() { for consumed in tx.consumes() { - let stxi_ref = TxoRef(*consumed.hash(), consumed.index() as u32); + let stxi_ref = txo_ref_from_input(&consumed); let stxi_body = context .get(&stxi_ref) @@ -112,7 +115,7 @@ pub fn compute_undo_delta( Ok(delta) } -pub fn compute_origin_delta(genesis: &Genesis) -> UtxoSetDelta { +pub fn compute_origin_delta(genesis: &crate::CardanoGenesis) -> UtxoSetDelta { let mut delta = UtxoSetDelta::default(); // byron @@ -120,7 +123,7 @@ pub fn compute_origin_delta(genesis: &Genesis) -> UtxoSetDelta { let utxos = pallas::ledger::configs::byron::genesis_utxos(&genesis.byron); for (tx, addr, amount) in utxos { - let utxo_ref = TxoRef(tx, 0); + let utxo_ref = TxoRef(pallas_hash_to_core(tx), 0); let utxo_body = pallas::ledger::primitives::byron::TxOut { address: pallas::ledger::primitives::byron::Address { payload: addr.payload, @@ -132,7 +135,7 @@ pub fn compute_origin_delta(genesis: &Genesis) -> UtxoSetDelta { let utxo_body = 
MultiEraOutput::from_byron(&utxo_body).to_owned(); delta .produced_utxo - .insert(utxo_ref, Arc::new(utxo_body.into())); + .insert(utxo_ref, Arc::new(era_cbor_from_output(&utxo_body))); } } // shelley @@ -140,7 +143,7 @@ pub fn compute_origin_delta(genesis: &Genesis) -> UtxoSetDelta { let utxos = pallas::ledger::configs::shelley::shelley_utxos(&genesis.shelley); for (tx, addr, amount) in utxos { - let utxo_ref = TxoRef(tx, 0); + let utxo_ref = TxoRef(pallas_hash_to_core(tx), 0); let utxo_body = pallas::ledger::primitives::alonzo::TransactionOutput { address: addr.to_vec().into(), amount: pallas::ledger::primitives::alonzo::Value::Coin(amount), @@ -153,14 +156,16 @@ pub fn compute_origin_delta(genesis: &Genesis) -> UtxoSetDelta { delta .produced_utxo - .insert(utxo_ref, Arc::new(utxo_body.into())); + .insert(utxo_ref, Arc::new(era_cbor_from_output(&utxo_body))); } } delta } -pub fn build_custom_utxos_delta(config: &CardanoConfig) -> Result { +pub fn build_custom_utxos_delta( + config: &CardanoConfig, +) -> Result> { let mut delta = UtxoSetDelta::default(); for utxo in config.custom_utxos.iter() { @@ -187,6 +192,7 @@ mod tests { use std::str::FromStr; use super::*; + use crate::multi_era_output_from_era_cbor; fn fake_slice_for_block(block: &MultiEraBlock) -> HashMap { let valid_utxo = block @@ -202,7 +208,7 @@ mod tests { .txs() .iter() .flat_map(MultiEraTx::consumes) - .map(|utxo| TxoRef(*utxo.hash(), utxo.index() as u32)) + .map(|utxo| txo_ref_from_input(&utxo)) .map(|key| { ( key, @@ -221,11 +227,11 @@ mod tests { fn assert_genesis_utxo_exists(db: &UtxoSetDelta, tx_hex: &str, addr_base58: &str, amount: u64) { let tx = Hash::<32>::from_str(tx_hex).unwrap(); - let utxo_body = db.produced_utxo.get(&TxoRef(tx, 0)); + let utxo_body = db.produced_utxo.get(&TxoRef(pallas_hash_to_core(tx), 0)); assert!(utxo_body.is_some(), "utxo not found"); let utxo_body = utxo_body.unwrap(); - let utxo_body = MultiEraOutput::try_from(utxo_body.as_ref()).unwrap(); + let utxo_body = 
multi_era_output_from_era_cbor(utxo_body.as_ref()).unwrap(); assert_eq!(utxo_body.era(), pallas::ledger::traverse::Era::Byron); @@ -303,15 +309,17 @@ mod tests { for input in tx.consumes() { let consumed = delta .consumed_utxo - .contains_key(&TxoRef(*input.hash(), input.index() as u32)); + .contains_key(&txo_ref_from_input(&input)); assert!(consumed); } for (idx, expected) in tx.produces() { - let utxo = delta.produced_utxo.get(&TxoRef(tx.hash(), idx as u32)); + let utxo = delta + .produced_utxo + .get(&TxoRef(pallas_hash_to_core(tx.hash()), idx as u32)); let utxo = utxo.unwrap(); - let utxo = MultiEraOutput::try_from(utxo.as_ref()).unwrap(); + let utxo = multi_era_output_from_era_cbor(utxo.as_ref()).unwrap(); assert_eq!(utxo, expected); } } From b13815c19b0b3fd3504cff33d935f71936ac5aba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 18:53:22 -0300 Subject: [PATCH 20/85] feat(forks): compiling --- crates/cardano/src/forks.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/cardano/src/forks.rs b/crates/cardano/src/forks.rs index 15bc9451b..6984381da 100644 --- a/crates/cardano/src/forks.rs +++ b/crates/cardano/src/forks.rs @@ -1,5 +1,5 @@ -use crate::{utils::float_to_rational, PParamValue, PParamsSet}; -use dolos_core::{BrokenInvariant, Genesis}; +use crate::{utils::float_to_rational, CardanoGenesis, PParamValue, PParamsSet}; +use dolos_core::BrokenInvariant; use pallas::{ crypto::hash::Hash, ledger::{ @@ -217,7 +217,7 @@ pub fn migrate_pparams_version( from: u16, to: u16, current: &PParamsSet, - genesis: &Genesis, + genesis: &CardanoGenesis, ) -> PParamsSet { debug!(from, to, "migrating pparams version"); @@ -251,7 +251,7 @@ pub fn migrate_pparams_version( pub fn force_pparams_version( initial: &PParamsSet, - genesis: &Genesis, + genesis: &CardanoGenesis, from: u16, to: u16, ) -> Result { @@ -269,7 +269,7 @@ pub struct ProtocolConstants { pub slot_length: u64, } -pub fn 
protocol_constants(version: u16, genesis: &Genesis) -> ProtocolConstants { +pub fn protocol_constants(version: u16, genesis: &CardanoGenesis) -> ProtocolConstants { match version { x if x < 2 => { let slot_length_in_secs = genesis.byron.block_version_data.slot_duration / 1000; From 56b1fa3b6ce8f2614f354ffa897aa1480f37f38e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 18:54:30 -0300 Subject: [PATCH 21/85] feat(owned): compiling --- crates/cardano/src/owned.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/cardano/src/owned.rs b/crates/cardano/src/owned.rs index 6c9e926cc..668e4b6b0 100644 --- a/crates/cardano/src/owned.rs +++ b/crates/cardano/src/owned.rs @@ -32,7 +32,7 @@ impl dolos_core::Block for OwnedMultiEraBlock { } fn hash(&self) -> BlockHash { - self.view().hash() + crate::pallas_hash_to_core(self.view().hash()) } fn raw(&self) -> RawBlock { From 4302c76af6aebe10849dda3f4d7a0d05b58d3b6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 18:58:54 -0300 Subject: [PATCH 22/85] fix(cardano/model): map errors to ChainSpecific errors --- crates/cardano/src/model.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/crates/cardano/src/model.rs b/crates/cardano/src/model.rs index b6c74586f..441d9704f 100644 --- a/crates/cardano/src/model.rs +++ b/crates/cardano/src/model.rs @@ -304,7 +304,7 @@ where } } - pub fn try_snapshot_at(&self, epoch: Epoch) -> Result<&T, ChainError> { + pub fn try_snapshot_at(&self, epoch: Epoch) -> Result<&T, ChainError> { match self.snapshot_at(epoch) { Some(value) => Ok(value), None => Err(ChainError::EpochValueVersionNotFound(epoch)), @@ -347,9 +347,12 @@ macro_rules! 
entity_boilerplate { } impl dolos_core::Entity for $type { - fn decode_entity(ns: Namespace, value: &EntityValue) -> Result { + type ChainSpecificError = crate::CardanoError; + + fn decode_entity(ns: Namespace, value: &EntityValue) -> Result> { assert_eq!(ns, $type::NS); - let value = pallas::codec::minicbor::decode(value)?; + let value = pallas::codec::minicbor::decode(value) + .map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Cbor(e)))?; Ok(value) } @@ -1227,7 +1230,7 @@ macro_rules! pgetter { macro_rules! ensure_pparam { ($kind:ident, $ty:ty) => { paste::paste! { - pub fn [](&self) -> Result<$ty, ChainError> { + pub fn [](&self) -> Result<$ty, ChainError> { self.$kind().ok_or(ChainError::PParamsNotFound(stringify!($kind).to_string())) } } @@ -1940,7 +1943,9 @@ variant_boilerplate!(PendingRewardState); variant_boilerplate!(PendingMirState); impl dolos_core::Entity for CardanoEntity { - fn decode_entity(ns: Namespace, value: &EntityValue) -> Result { + type ChainSpecificError = crate::CardanoError; + + fn decode_entity(ns: Namespace, value: &EntityValue) -> Result> { match ns { EraSummary::NS => EraSummary::decode_entity(ns, value).map(Into::into), AccountState::NS => AccountState::decode_entity(ns, value).map(Into::into), From 022f88ce4926766b62d3cc81cb446d341083d2bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 19:03:32 -0300 Subject: [PATCH 23/85] fix(core): restrict domain in specific functions --- crates/core/src/lib.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index fe65eac66..21fc8b1e4 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -562,10 +562,13 @@ pub trait ChainLogic: Sized + Send + Sync { ) -> Result>; // TODO: remove from the interface - this is Cardano-specific - fn mutable_slots(domain: &impl Domain) -> BlockSlot; + fn mutable_slots(domain: &impl Domain) -> BlockSlot; // TODO: remove from the 
interface - this is Cardano-specific - fn last_immutable_slot(domain: &impl Domain, tip: BlockSlot) -> BlockSlot { + fn last_immutable_slot( + domain: &impl Domain, + tip: BlockSlot, + ) -> BlockSlot { tip.saturating_sub(Self::mutable_slots(domain)) } @@ -578,7 +581,7 @@ pub trait ChainLogic: Sized + Send + Sync { ) -> Result, Self::ChainSpecificError>; // Validate a transaction against the current ledger state. - fn validate_tx( + fn validate_tx>( &self, cbor: &[u8], utxos: &MempoolAwareUtxoStore, From 50963688072e306a1a622405be721be51fef056c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 19:04:03 -0300 Subject: [PATCH 24/85] fix(cardano/lib): restrcit domain to build --- crates/cardano/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 9278716b4..7d28cf810 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -500,11 +500,11 @@ impl dolos_core::ChainLogic for CardanoLogic { Ok(out) } - fn mutable_slots(domain: &(impl Domain)) -> BlockSlot { + fn mutable_slots(domain: &impl Domain) -> BlockSlot { utils::mutable_slots(&domain.genesis()) } - fn validate_tx>( + fn validate_tx>( &self, cbor: &[u8], utxos: &MempoolAwareUtxoStore, From efc7c634e07c2fbd0b3e15439f2754b753b82db8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 19:08:18 -0300 Subject: [PATCH 25/85] fix(cardano/eras): fix error type --- crates/cardano/src/eras.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/cardano/src/eras.rs b/crates/cardano/src/eras.rs index e6dc86f50..0b78ee651 100644 --- a/crates/cardano/src/eras.rs +++ b/crates/cardano/src/eras.rs @@ -187,7 +187,7 @@ impl ChainSummary { } } -pub fn load_era_summary(state: &D::State) -> Result { +pub fn load_era_summary(state: &D::State) -> Result> { let eras = state.iter_entities_typed(EraSummary::NS, None)?; let mut chain = 
ChainSummary::default(); @@ -201,7 +201,7 @@ pub fn load_era_summary(state: &D::State) -> Result Result { +pub fn load_chain_summary_from_state(state: &impl StateStore) -> Result> { let eras = state.iter_entities_typed(EraSummary::NS, None)?; let mut chain = ChainSummary::default(); @@ -239,7 +239,7 @@ pub fn log_epoch_range_to_key_range( pub fn load_active_era( state: &D::State, -) -> Result<(EraProtocol, EraSummary), ChainError> { +) -> Result<(EraProtocol, EraSummary), ChainError> { let eras = state.iter_entities_typed::(EraSummary::NS, None)?; match eras.last() { From e8ab901132aa2ffb97076b9fe9ef936d2dd760f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 16 Mar 2026 19:36:41 -0300 Subject: [PATCH 26/85] remove plan --- remove-pallas-from-core.md | 201 ------------------------------------- 1 file changed, 201 deletions(-) delete mode 100644 remove-pallas-from-core.md diff --git a/remove-pallas-from-core.md b/remove-pallas-from-core.md deleted file mode 100644 index 739ac7252..000000000 --- a/remove-pallas-from-core.md +++ /dev/null @@ -1,201 +0,0 @@ -# Removing Pallas from `dolos-core` — Decision Record - -## Why this change - -`dolos-core` is the foundation of the entire dolos stack. It defines the abstract traits (`Domain`, `ChainLogic`, `WalStore`, `StateStore`, etc.) that every crate in the workspace depends on. For it to work with a second blockchain (e.g. Midnight, which uses Substrate/SCALE), it must make zero assumptions about which chain it is running. - -Today it depends on the full `pallas` umbrella crate, which pulls in Cardano protocol types, genesis file schemas, and validation error types into what should be a neutral layer. The goal is to remove `pallas` entirely from `dolos-core`'s dependency tree. - -**Constraint**: no performance compromise. Every decision below was evaluated against this constraint. 
- ---- - -## Decision 1 — `EraCbor` renamed to `EraBody`, `minicbor` stays as a direct dep - -**What**: `EraCbor(Era, Cbor)` is renamed to `EraBody(Era, Vec)`. The name `EraCbor` implied the inner bytes are CBOR — they are not, they are opaque (Cardano stores CBOR there today; Midnight would store SCALE bytes). The type alias `Cbor = Vec` is also removed. - -**Why minicbor stays**: `EraBody` retains its `#[derive(minicbor::Encode, minicbor::Decode)]`. This is required because `redb3/mempool.rs` embeds `EraBody` inside `InflightRecord` and `FinalizedEntry`, which are stored in redb using minicbor serialization. Removing the derives would require either reimplementing the encoding in redb3 (coupling) or restructuring the storage format (a breaking change to the on-disk format). The tradeoff is accepted: `minicbor` replaces `pallas` as the direct dep for this one purpose. - -**What minicbor does NOT do**: it does not touch the inner bytes. The `#[cbor(with = "minicbor::bytes")]` annotation on the payload field means minicbor treats those bytes as a raw blob — the SCALE or CBOR content inside is completely untouched. - -**Performance**: zero impact. The encoding of `(u16, Vec)` via minicbor is trivial and was already happening. - ---- - -## Decision 2 — `Hash` becomes a custom newtype in core - -**What**: A new `crates/core/src/hash.rs` defines `Hash([u8; N])` (~60 lines) with Display/hex, FromStr, serde, Copy, Eq, Hash, Deref impls. `BlockHash = Hash<32>` and `TxHash = Hash<32>` continue as type aliases. `pallas-crypto` is removed from core. - -**Why not `pallas-crypto` directly**: depending on a pallas sub-crate still ties core to the pallas release cycle and keeps pallas in `cargo tree -p dolos-core`. A custom newtype is ~60 lines of boilerplate with zero ongoing maintenance burden. - -**Why not raw `[u8; 32]`**: losing the newtype would lose hex Display/FromStr, making `ChainPoint` display and CLI parsing significantly more verbose. 
The type-level distinction between a hash and an arbitrary byte array is worth keeping. - -**Boundary conversions**: `dolos-cardano` adds `From> for dolos_core::Hash` and its inverse. These are zero-cost (same memory layout). - -**Performance**: zero impact. `Hash` is `repr(transparent)` over `[u8; N]`. - ---- - -## Decision 3 — `Genesis` becomes a `GenesisConfig` trait - -**What**: The `Genesis` struct (holding `byron`, `shelley`, `alonzo`, `conway` genesis files) moves to `dolos-cardano` as `CardanoGenesis`. Core defines a `GenesisConfig` trait: - -```rust -pub trait GenesisConfig: Clone + Send + Sync + 'static { - fn chain_id(&self) -> u32; -} -``` - -`ChainLogic` gets `type GenesisConfig: GenesisConfig` as an associated type. `Domain::genesis()` returns `Arc<::GenesisConfig>`. - -**Why `chain_id` and not `network_magic`**: `network_magic` is Cardano terminology. Every blockchain has some form of chain identifier — Midnight calls it something else. `chain_id()` returning `u32` is the minimal generic interface. Cardano's impl returns `shelley.network_magic`. - -**Compromise**: everything else about genesis (epoch lengths, slot durations, protocol parameters) is Cardano-specific and stays in `CardanoGenesis`. Core knows nothing about genesis structure — only that a chain has an ID. - -**Performance**: zero impact. The genesis config is read at startup and cached in an `Arc`. - ---- - -## Decision 4 — `ChainTip` defined natively in core - -**What**: `ChainTip = pallas::network::miniprotocols::chainsync::Tip` is replaced with: - -```rust -pub struct ChainTip { - pub point: ChainPoint, - pub block_number: u64, -} -``` - -`dolos-cardano` adds `From for ChainTip`. - -**Why keep it in core**: every UTxO-based blockchain has a notion of "current tip" — a position in the chain plus a block height. This is not Cardano-specific. 
Removing it from core would force every chain integration to re-define it and lose the ability to express tip-awareness generically in sync machinery. - -**Why not remove it**: sync progress tracking, tip subscriptions (`TipEvent`), and API responses all need a tip with both a point and a block number. Working with `ChainPoint` alone would lose the block number, requiring each chain layer to re-add it redundantly. - -**Performance**: zero impact. It's a plain struct. - ---- - -## Decision 5 — `BlockEra` dropped, `Era = u16` is the single representation - -**What**: `pub type BlockEra = pallas::ledger::traverse::Era` is removed. `pub type Era = u16` (already present) is the sole era representation in core. - -**Why**: both Cardano (eras: Byron=0, Shelley=1, ..., Conway=6) and Midnight (ledger versions: integer) serialize their version identifier as an unsigned integer. The `u16` wire type is the correct shared abstraction. Named variants (`Byron`, `Shelley`) are Cardano-specific and belong in `dolos-cardano`. - -**Where named variants live**: `dolos-cardano` converts `pallas::ledger::traverse::Era ↔ u16` at its boundary. Pallas already implements this. - -**Compromise**: code that currently writes `if era == BlockEra::Conway` must become `if era == CONWAY_ERA` (a constant in dolos-cardano). Slightly less ergonomic but correctly scoped. - -**Performance**: zero impact. `Era = u16` is what was already stored. - ---- - -## Decision 6 — `EvalReport` becomes `Option>` on `MempoolTx` - -**What**: `pub report: Option` (a pallas phase-2 evaluation result) becomes `pub report: Option>`. `dolos-cardano` serializes/deserializes the pallas `EvalReport` to bytes at its boundary. - -**Why opaque bytes and not an associated type**: making `MempoolTx` generic over `D::EvalReport` would propagate a new generic parameter through every struct and trait that touches `MempoolTx` — a very wide blast radius. 
The eval report is only ever inspected by chain-specific code (the minibf/trp API layers), not by core machinery. Opaque bytes are the minimal interface. - -**Compromise**: callers that want to inspect the eval report must deserialize from `Vec`. This is a one-line operation in dolos-cardano. - -**Performance**: one extra serialize/deserialize round-trip per mempool tx admission. Mempool admission is rare compared to block processing — negligible. - ---- - -## Decision 7 — Chain-specific error types via `ChainLogic::ChainSpecificError` - -**What**: `ChainError` and `MempoolError` previously had variants typed directly to pallas errors (`pallas::ledger::traverse::Error`, `pallas::ledger::addresses::Error`, etc.). These are replaced with an associated type on `ChainLogic`: - -```rust -pub trait ChainLogic { - type ChainSpecificError: std::error::Error + Send + Sync + 'static; -} - -pub enum ChainError { - // chain-agnostic variants unchanged ... - ChainSpecific(E), // typed, not hidden -} -``` - -`dolos-cardano` defines `CardanoError` wrapping all pallas error types and sets `type ChainSpecificError = CardanoError`. - -**Why not string errors**: string errors destroy type information, make programmatic error handling impossible, and go against Rust idioms. A typed associated error preserves the full pallas error at the dolos-cardano boundary — callers that know they're in a Cardano context can match on `ChainError::ChainSpecific(CardanoError::Traverse(...))` and get the original pallas error. - -**Why not `Box`**: same reason — loses type information. The associated type approach is zero-cost (monomorphized) and keeps errors inspectable. - -**Propagation**: `DomainError` resolves `E` as `::ChainSpecificError`. Since `D::Chain` is already an associated type on `Domain`, no extra generics appear at call sites. - -**Performance**: zero impact. Associated types are resolved at compile time. 
- ---- - -## Decision 8 — Block decoding moved behind `ChainLogic::find_tx_in_block` - -**What**: `async_query.rs` had two methods (`block_by_tx_hash`, `tx_cbor`) that decoded raw block bytes inline using `MultiEraBlock`. These become a new `ChainLogic` static method: - -```rust -fn find_tx_in_block(block: &[u8], tx_hash: &[u8]) -> Option<(EraBody, TxOrder)>; -``` - -Core calls this; `dolos-cardano` implements it with `MultiEraBlock`. `AsyncQueryFacade` stays in core. - -**Why keep `AsyncQueryFacade` in core**: the async semaphore-limited dispatch pattern and the other query methods (`block_by_slot`, `block_by_number`, etc.) are entirely chain-agnostic. Moving the facade to dolos-cardano would force every API layer to re-implement the concurrency limiting. - -**Performance**: zero impact. One additional virtual dispatch per query — negligible for a query path that's already doing database I/O. - ---- - -## Decision 9 — Mempool UTxO scanning moved behind `ChainLogic` methods - -**What**: `scan_mempool_utxos` and `exclude_inflight_stxis` in `mempool.rs` used `MultiEraTx`/`MultiEraOutput` to iterate mempool transaction inputs/outputs. These become two `ChainLogic` static methods: - -```rust -fn tx_produced_utxos(era_body: &EraBody) -> Vec<(TxoRef, EraBody)>; -fn tx_consumed_refs(era_body: &EraBody) -> Vec; -``` - -The predicate in `get_utxos_by_tag` changes from `Fn(&MultiEraOutput<'_>) -> bool` to `Fn(&EraBody) -> bool`. - -**Why keep the scan logic in core**: mempool-aware UTxO queries are a generic concept for any UTxO-based chain. Midnight (or any other chain) would need the same "scan pending transactions and merge with confirmed state" logic. The only chain-specific part is how you decode a raw transaction into its inputs and outputs — which is exactly what the two `ChainLogic` methods encapsulate. 
- -**Predicate change consequence**: callers in `dolos-minibf` and `dolos-trp` that currently receive `&MultiEraOutput` in their predicate will instead receive `&EraBody` and must decode it themselves to `MultiEraOutput`. This is a one-line change per callsite in those crates. - -**Performance**: zero impact. The same decoding work happens — it just happens inside the predicate rather than before it. - ---- - -## Summary of what stays in `dolos-core` - -| Thing | Before | After | -|---|---|---| -| `Hash` | from pallas-crypto | custom newtype (~60 lines) | -| `EraBody` (was `EraCbor`) | pallas re-export | stays in core, minicbor direct dep | -| `Era = u16` | unchanged | unchanged | -| `ChainTip` | pallas alias | native struct in core | -| `GenesisConfig` | pallas `Genesis` struct | generic trait | -| `ChainError` | pallas error variants | parameterized `ChainError` | -| `minicbor` dep | via pallas | direct dep | -| `pallas` dep | present | **removed** | - -## Summary of what moves to `dolos-cardano` - -- `CardanoGenesis` (was `Genesis`) with all four genesis file fields -- `CardanoError` (wraps all pallas error types) -- `From for ChainPoint`, `TryFrom for PallasPoint` -- `From for ChainTip` -- `From> for dolos_core::Hash` -- `From for EraBody`, `TryFrom<&EraBody> for MultiEraOutput`, etc. -- `From<&MultiEraInput> for TxoRef` -- `ChainLogic::find_tx_in_block` implementation -- `ChainLogic::tx_produced_utxos` implementation -- `ChainLogic::tx_consumed_refs` implementation - ---- - -## Verification - -1. `cargo tree -p dolos-core | grep pallas` — must return nothing -2. `cargo check -p dolos-core` — must compile clean with zero pallas in scope -3. `cargo clippy --all-targets --all-features -- -D warnings` — zero warnings -4. 
`cargo test --workspace --all-targets` — all tests pass From 11bb69fb109248e9facc2794bf5833ce843b7d0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 01:08:49 -0300 Subject: [PATCH 27/85] feat(cardano/estart): fix error constraints --- crates/cardano/src/estart/commit.rs | 10 +++++----- crates/cardano/src/estart/loading.rs | 16 ++++++++-------- crates/cardano/src/estart/mod.rs | 14 +++++++------- crates/cardano/src/estart/nonces.rs | 2 +- crates/cardano/src/estart/reset.rs | 10 +++++----- crates/cardano/src/estart/work_unit.rs | 18 +++++++++--------- 6 files changed, 35 insertions(+), 35 deletions(-) diff --git a/crates/cardano/src/estart/commit.rs b/crates/cardano/src/estart/commit.rs index 674d58918..49a85a8c3 100644 --- a/crates/cardano/src/estart/commit.rs +++ b/crates/cardano/src/estart/commit.rs @@ -31,7 +31,7 @@ impl super::WorkContext { fn collect_era_transition( &self, state: &impl StateStore, - ) -> Result, ChainError> { + ) -> Result, ChainError> { let Some(transition) = self.ended_state().pparams.era_transition() else { return Ok(None); }; @@ -75,9 +75,9 @@ impl super::WorkContext { &mut self, state: &D::State, writer: &::Writer, - ) -> Result<(), ChainError> + ) -> Result<(), ChainError> where - D: Domain, + D: Domain, E: Entity + FixedNamespace + Into, { let records = state.iter_entities_typed::(E::NS, None)?; @@ -109,12 +109,12 @@ impl super::WorkContext { } #[instrument(skip_all)] - pub fn commit( + pub fn commit>( &mut self, state: &D::State, archive: &D::Archive, slot: BlockSlot, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { debug!("committing estart changes"); // Collect era transition data first (only 1-2 entities, not a memory concern) diff --git a/crates/cardano/src/estart/loading.rs b/crates/cardano/src/estart/loading.rs index f2106b9fe..5aa417747 100644 --- a/crates/cardano/src/estart/loading.rs +++ b/crates/cardano/src/estart/loading.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use 
dolos_core::{ChainError, Domain, Genesis, StateStore, TxoRef}; +use dolos_core::{ChainError, Domain, StateStore, TxoRef}; use crate::{ estart::BoundaryVisitor, load_era_summary, roll::WorkDeltas, AccountState, DRepState, @@ -8,7 +8,7 @@ use crate::{ }; impl super::WorkContext { - pub fn compute_deltas(&mut self, state: &D::State) -> Result<(), ChainError> { + pub fn compute_deltas>(&mut self, state: &D::State) -> Result<(), ChainError> { let mut visitor_nonces = super::nonces::BoundaryVisitor; let mut visitor_reset = super::reset::BoundaryVisitor::default(); @@ -57,14 +57,14 @@ impl super::WorkContext { /// Compute the value of unredeemed AVVM UTxOs at the Shelley→Allegra /// boundary. These UTxOs are removed from the UTxO set and their value /// returned to reserves, matching the Haskell ledger's `translateEra`. - fn compute_avvm_reclamation( + fn compute_avvm_reclamation>( state: &D::State, - genesis: &Genesis, - ) -> Result { + genesis: &crate::CardanoGenesis, + ) -> Result> { let avvm_utxos = pallas::ledger::configs::byron::genesis_avvm_utxos(&genesis.byron); // Collect all Byron genesis AVVM UTxO refs (bootstrap redeemer addresses) - let refs: Vec = avvm_utxos.iter().map(|(tx, _, _)| TxoRef(*tx, 0)).collect(); + let refs: Vec = avvm_utxos.iter().map(|(tx, _, _)| TxoRef(crate::pallas_hash_to_core(*tx), 0)).collect(); // Query the UTxO set to find which are still unspent let remaining = state.get_utxos(refs)?; @@ -73,7 +73,7 @@ impl super::WorkContext { let total: u64 = remaining .values() .map(|utxo| { - pallas::ledger::traverse::MultiEraOutput::try_from(utxo.as_ref()) + crate::multi_era_output_from_era_cbor(utxo.as_ref()) .map(|o| o.value().coin()) .unwrap_or(0) }) @@ -88,7 +88,7 @@ impl super::WorkContext { Ok(total) } - pub fn load(state: &D::State, genesis: Arc) -> Result { + pub fn load>(state: &D::State, genesis: Arc) -> Result> { let ended_state = crate::load_epoch::(state)?; let chain_summary = load_era_summary::(state)?; let active_protocol = 
EraProtocol::from(chain_summary.edge().protocol); diff --git a/crates/cardano/src/estart/mod.rs b/crates/cardano/src/estart/mod.rs index c0eec69e0..637adc144 100644 --- a/crates/cardano/src/estart/mod.rs +++ b/crates/cardano/src/estart/mod.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use dolos_core::{ChainError, EntityKey, Genesis}; +use dolos_core::{ChainError, EntityKey}; use crate::{ eras::ChainSummary, roll::WorkDeltas, AccountState, CardanoDelta, CardanoEntity, DRepState, @@ -24,7 +24,7 @@ pub trait BoundaryVisitor { ctx: &mut WorkContext, id: &PoolId, pool: &PoolState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -34,7 +34,7 @@ pub trait BoundaryVisitor { ctx: &mut WorkContext, id: &AccountId, account: &AccountState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -44,7 +44,7 @@ pub trait BoundaryVisitor { ctx: &mut WorkContext, id: &DRepId, drep: &DRepState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -54,12 +54,12 @@ pub trait BoundaryVisitor { ctx: &mut WorkContext, id: &ProposalId, proposal: &ProposalState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } #[allow(unused_variables)] - fn flush(&mut self, ctx: &mut WorkContext) -> Result<(), ChainError> { + fn flush(&mut self, ctx: &mut WorkContext) -> Result<(), ChainError> { Ok(()) } } @@ -75,7 +75,7 @@ pub struct WorkContext { pub active_protocol: EraProtocol, pub chain_summary: ChainSummary, - pub genesis: Arc, + pub genesis: Arc, /// Unredeemed AVVM UTxOs reclaimed at the Shelley→Allegra boundary. 
pub avvm_reclamation: u64, diff --git a/crates/cardano/src/estart/nonces.rs b/crates/cardano/src/estart/nonces.rs index 28c512c52..9271548b6 100644 --- a/crates/cardano/src/estart/nonces.rs +++ b/crates/cardano/src/estart/nonces.rs @@ -65,7 +65,7 @@ fn next_nonce(ctx: &super::WorkContext) -> Option { pub struct BoundaryVisitor; impl super::BoundaryVisitor for BoundaryVisitor { - fn flush(&mut self, ctx: &mut super::WorkContext) -> Result<(), ChainError> { + fn flush(&mut self, ctx: &mut super::WorkContext) -> Result<(), ChainError> { let next_slot = next_largest_stable_slot(ctx); let next_nonce = next_nonce(ctx); diff --git a/crates/cardano/src/estart/reset.rs b/crates/cardano/src/estart/reset.rs index e4dd99227..dc19c1f5f 100644 --- a/crates/cardano/src/estart/reset.rs +++ b/crates/cardano/src/estart/reset.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use dolos_core::{ChainError, Genesis, NsKey}; +use dolos_core::{ChainError, NsKey}; use pallas::ledger::primitives::Epoch; use serde::{Deserialize, Serialize}; @@ -87,7 +87,7 @@ pub struct EpochTransition { era_transition: Option, #[serde(skip)] - genesis: Option>, + genesis: Option>, } impl std::fmt::Debug for EpochTransition { @@ -252,7 +252,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { ctx: &mut super::WorkContext, id: &AccountId, _: &AccountState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { self.change(AccountTransition::new(id.clone(), ctx.starting_epoch_no())); Ok(()) @@ -263,13 +263,13 @@ impl super::BoundaryVisitor for BoundaryVisitor { ctx: &mut super::WorkContext, id: &PoolId, _: &PoolState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { self.change(PoolTransition::new(id.clone(), ctx.starting_epoch_no())); Ok(()) } - fn flush(&mut self, ctx: &mut WorkContext) -> Result<(), ChainError> { + fn flush(&mut self, ctx: &mut WorkContext) -> Result<(), ChainError> { for delta in self.deltas.drain(..) 
{ ctx.add_delta(delta); } diff --git a/crates/cardano/src/estart/work_unit.rs b/crates/cardano/src/estart/work_unit.rs index 78e390712..756667ade 100644 --- a/crates/cardano/src/estart/work_unit.rs +++ b/crates/cardano/src/estart/work_unit.rs @@ -9,10 +9,10 @@ use std::sync::Arc; -use dolos_core::{config::CardanoConfig, BlockSlot, Domain, DomainError, Genesis, WorkUnit}; +use dolos_core::{config::CardanoConfig, BlockSlot, Domain, DomainError, WorkUnit}; use tracing::{debug, info}; -use crate::CardanoLogic; +use crate::{CardanoError, CardanoGenesis, CardanoLogic}; use super::WorkContext; @@ -21,7 +21,7 @@ pub struct EstartWorkUnit { slot: BlockSlot, #[allow(dead_code)] config: CardanoConfig, - genesis: Arc, + genesis: Arc, // Loaded context: Option, @@ -29,7 +29,7 @@ pub struct EstartWorkUnit { impl EstartWorkUnit { /// Create a new estart work unit. - pub fn new(slot: BlockSlot, config: CardanoConfig, genesis: Arc) -> Self { + pub fn new(slot: BlockSlot, config: CardanoConfig, genesis: Arc) -> Self { Self { slot, config, @@ -46,13 +46,13 @@ impl EstartWorkUnit { impl WorkUnit for EstartWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { "estart" } - fn load(&mut self, domain: &D) -> Result<(), DomainError> { + fn load(&mut self, domain: &D) -> Result<(), DomainError> { debug!(slot = self.slot, "loading estart work context"); let context = WorkContext::load::(domain.state(), self.genesis.clone())?; @@ -66,13 +66,13 @@ where Ok(()) } - fn compute(&mut self) -> Result<(), DomainError> { + fn compute(&mut self) -> Result<(), DomainError> { // Computation is done during load via the visitor pattern debug!("estart compute phase (deltas already computed during load)"); Ok(()) } - fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { debug!(slot = self.slot, "committing estart state changes"); let context = self.context.as_mut().ok_or_else(|| { @@ -85,7 +85,7 @@ 
where Ok(()) } - fn commit_archive(&mut self, _domain: &D) -> Result<(), DomainError> { + fn commit_archive(&mut self, _domain: &D) -> Result<(), DomainError> { // Archive writes are done in commit_state via context.commit() Ok(()) } From 063bbbcc5caad6c73e595d94ce9ace11afd4c3cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 01:09:28 -0300 Subject: [PATCH 28/85] feat(cardano/ewrap): fix error constraints --- crates/cardano/src/ewrap/commit.rs | 8 +++---- crates/cardano/src/ewrap/drops.rs | 6 ++--- crates/cardano/src/ewrap/enactment.rs | 4 ++-- crates/cardano/src/ewrap/loading.rs | 34 +++++++++++++-------------- crates/cardano/src/ewrap/mod.rs | 20 ++++++++-------- crates/cardano/src/ewrap/refunds.rs | 8 +++---- crates/cardano/src/ewrap/rewards.rs | 4 ++-- crates/cardano/src/ewrap/work_unit.rs | 18 +++++++------- crates/cardano/src/ewrap/wrapup.rs | 4 ++-- 9 files changed, 53 insertions(+), 53 deletions(-) diff --git a/crates/cardano/src/ewrap/commit.rs b/crates/cardano/src/ewrap/commit.rs index ba96ba156..9e9eee20b 100644 --- a/crates/cardano/src/ewrap/commit.rs +++ b/crates/cardano/src/ewrap/commit.rs @@ -24,9 +24,9 @@ impl BoundaryWork { &mut self, state: &D::State, writer: &::Writer, - ) -> Result<(), ChainError> + ) -> Result<(), ChainError> where - D: Domain, + D: Domain, E: Entity + FixedNamespace + Into, { let records = state.iter_entities_typed::(E::NS, None)?; @@ -58,11 +58,11 @@ impl BoundaryWork { } #[instrument(skip_all)] - pub fn commit( + pub fn commit>( &mut self, state: &D::State, archive: &D::Archive, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { debug!("committing ewrap changes (streaming mode)"); let writer = state.start_writer()?; diff --git a/crates/cardano/src/ewrap/drops.rs b/crates/cardano/src/ewrap/drops.rs index 383ebe3cb..e9d5bf322 100644 --- a/crates/cardano/src/ewrap/drops.rs +++ b/crates/cardano/src/ewrap/drops.rs @@ -157,7 +157,7 @@ impl super::BoundaryVisitor for 
BoundaryVisitor { ctx: &mut BoundaryWork, id: &AccountId, account: &AccountState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let current_epoch = ctx.ending_state.number; // Notice that instead of dropping delegators when a pool is retired, we're moving the data to a different field to be able to still track the relationsihp between the pool and the delegators. @@ -197,7 +197,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { ctx: &mut BoundaryWork, id: &DRepId, drep: &DRepState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { if ctx.expiring_dreps.contains(&drep.identifier) { self.change(DRepExpiration::new(id.clone())); } @@ -205,7 +205,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { Ok(()) } - fn flush(&mut self, ctx: &mut BoundaryWork) -> Result<(), ChainError> { + fn flush(&mut self, ctx: &mut BoundaryWork) -> Result<(), ChainError> { for delta in self.deltas.drain(..) { ctx.add_delta(delta); } diff --git a/crates/cardano/src/ewrap/enactment.rs b/crates/cardano/src/ewrap/enactment.rs index 053a6ca69..6f01a90f5 100644 --- a/crates/cardano/src/ewrap/enactment.rs +++ b/crates/cardano/src/ewrap/enactment.rs @@ -102,7 +102,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { id: &ProposalId, proposal: &ProposalState, _: Option<&AccountState>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { tracing::debug!(proposal=%id, "visiting enacted proposal"); // Apply proposal on ending state @@ -129,7 +129,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { Ok(()) } - fn flush(&mut self, ctx: &mut BoundaryWork) -> Result<(), ChainError> { + fn flush(&mut self, ctx: &mut BoundaryWork) -> Result<(), ChainError> { for delta in self.deltas.drain(..) 
{ ctx.add_delta(delta); } diff --git a/crates/cardano/src/ewrap/loading.rs b/crates/cardano/src/ewrap/loading.rs index b473208c2..b80ac5613 100644 --- a/crates/cardano/src/ewrap/loading.rs +++ b/crates/cardano/src/ewrap/loading.rs @@ -1,6 +1,6 @@ use std::{collections::HashMap, sync::Arc}; -use dolos_core::{BlockSlot, ChainError, Domain, Genesis, StateStore, TxOrder}; +use dolos_core::{BlockSlot, ChainError, Domain, StateStore, TxOrder}; use pallas::{codec::minicbor, ledger::primitives::StakeCredential}; use crate::{ @@ -24,11 +24,11 @@ impl BoundaryWork { .is_some_and(|e| e == self.starting_epoch_no()) } - fn load_pool_reward_account( + fn load_pool_reward_account>( &self, state: &D::State, pool: &PoolState, - ) -> Result, ChainError> { + ) -> Result, ChainError> { // Use scheduled (next) params if available, matching the Haskell ledger's // SNAP → POOLREAP ordering where future pool params become current before // pool reaping. This ensures the deposit refund goes to the correct reward @@ -41,7 +41,7 @@ impl BoundaryWork { let account = &snapshot.params.reward_account; let account = - pallas_extras::parse_reward_account(account).ok_or(ChainError::InvalidPoolParams)?; + pallas_extras::parse_reward_account(account).ok_or(ChainError::ChainSpecific(crate::CardanoError::InvalidPoolParams))?; let entity_key = minicbor::to_vec(account).unwrap(); @@ -50,7 +50,7 @@ impl BoundaryWork { Ok(account) } - fn load_pool_data(&mut self, state: &D::State) -> Result<(), ChainError> { + fn load_pool_data>(&mut self, state: &D::State) -> Result<(), ChainError> { let pools = state.iter_entities_typed::(PoolState::NS, None)?; for record in pools { @@ -79,7 +79,7 @@ impl BoundaryWork { self.starting_epoch_no() == unregistered_epoch + 1 } - fn should_expire_drep(&self, drep: &DRepState) -> Result { + fn should_expire_drep(&self, drep: &DRepState) -> Result> { if drep.expired { return Ok(false); } @@ -111,7 +111,7 @@ impl BoundaryWork { None } - fn load_drep_data(&mut self, state: 
&D::State) -> Result<(), ChainError> { + fn load_drep_data>(&mut self, state: &D::State) -> Result<(), ChainError> { let dreps = state.iter_entities_typed::(DRepState::NS, None)?; for record in dreps { @@ -130,11 +130,11 @@ impl BoundaryWork { Ok(()) } - fn load_proposal_reward_account( + fn load_proposal_reward_account>( &self, state: &D::State, proposal: &ProposalState, - ) -> Result, ChainError> { + ) -> Result, ChainError> { let Some(account) = proposal.reward_account.as_ref() else { return Ok(None); }; @@ -146,7 +146,7 @@ impl BoundaryWork { Ok(account) } - fn load_proposal_data(&mut self, state: &D::State) -> Result<(), ChainError> { + fn load_proposal_data>(&mut self, state: &D::State) -> Result<(), ChainError> { let proposals = state.iter_entities_typed::(ProposalState::NS, None)?; for record in proposals { @@ -172,7 +172,7 @@ impl BoundaryWork { /// Process pending MIRs: check registration status and apply to registered accounts. /// MIRs to unregistered accounts stay in their source pot (no transfer). - fn process_pending_mirs(&mut self, state: &D::State) -> Result<(), ChainError> { + fn process_pending_mirs>(&mut self, state: &D::State) -> Result<(), ChainError> { let pending_iter = state.iter_entities_typed::(PendingMirState::NS, None)?; @@ -249,7 +249,7 @@ impl BoundaryWork { Ok(()) } - pub fn compute_deltas(&mut self, state: &D::State) -> Result<(), ChainError> { + pub fn compute_deltas>(&mut self, state: &D::State) -> Result<(), ChainError> { // Process pending MIRs first (before regular rewards) self.process_pending_mirs::(state)?; @@ -353,10 +353,10 @@ impl BoundaryWork { } /// Load pending rewards from state store (persisted by RUPD). 
- fn load_pending_rewards( + fn load_pending_rewards>( state: &D::State, incentives: EpochIncentives, - ) -> Result, ChainError> { + ) -> Result, ChainError> { let pending_iter = state.iter_entities_typed::(PendingRewardState::NS, None)?; @@ -396,10 +396,10 @@ impl BoundaryWork { Ok(RewardMap::from_pending(pending, incentives)) } - pub fn load( + pub fn load>( state: &D::State, - genesis: Arc, - ) -> Result { + genesis: Arc, + ) -> Result> { let ending_state = crate::load_epoch::(state)?; let chain_summary = load_era_summary::(state)?; let active_protocol = EraProtocol::from(chain_summary.edge().protocol); diff --git a/crates/cardano/src/ewrap/mod.rs b/crates/cardano/src/ewrap/mod.rs index bb850fe48..c082dd825 100644 --- a/crates/cardano/src/ewrap/mod.rs +++ b/crates/cardano/src/ewrap/mod.rs @@ -3,7 +3,7 @@ use std::{ sync::Arc, }; -use dolos_core::{BlockSlot, ChainError, EntityKey, Genesis, TxOrder}; +use dolos_core::{BlockSlot, ChainError, EntityKey, TxOrder}; use pallas::ledger::primitives::{conway::DRep, StakeCredential}; use crate::{ @@ -42,7 +42,7 @@ pub trait BoundaryVisitor { ctx: &mut BoundaryWork, id: &PoolId, pool: &PoolState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -53,7 +53,7 @@ pub trait BoundaryVisitor { pool_hash: PoolHash, pool: &PoolState, account: Option<&AccountState>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -63,7 +63,7 @@ pub trait BoundaryVisitor { ctx: &mut BoundaryWork, id: &AccountId, account: &AccountState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -73,7 +73,7 @@ pub trait BoundaryVisitor { ctx: &mut BoundaryWork, id: &DRepId, drep: &DRepState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -83,7 +83,7 @@ pub trait BoundaryVisitor { ctx: &mut BoundaryWork, id: &ProposalId, proposal: &ProposalState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -94,7 +94,7 @@ pub trait 
BoundaryVisitor { id: &ProposalId, proposal: &ProposalState, account: Option<&AccountState>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -105,12 +105,12 @@ pub trait BoundaryVisitor { id: &ProposalId, proposal: &ProposalState, account: Option<&AccountState>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } #[allow(unused_variables)] - fn flush(&mut self, ctx: &mut BoundaryWork) -> Result<(), ChainError> { + fn flush(&mut self, ctx: &mut BoundaryWork) -> Result<(), ChainError> { Ok(()) } } @@ -125,7 +125,7 @@ pub struct BoundaryWork { ending_state: EpochState, pub active_protocol: EraProtocol, pub chain_summary: ChainSummary, - pub genesis: Arc, + pub genesis: Arc, pub rewards: RewardMap, // inferred diff --git a/crates/cardano/src/ewrap/refunds.rs b/crates/cardano/src/ewrap/refunds.rs index 5f6b4e6db..8faa4e02c 100644 --- a/crates/cardano/src/ewrap/refunds.rs +++ b/crates/cardano/src/ewrap/refunds.rs @@ -108,7 +108,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { id: &ProposalId, proposal: &ProposalState, account: Option<&AccountState>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { tracing::debug!(proposal=%id, "visiting dropped proposal"); if let Some(deposit) = proposal.deposit { @@ -131,7 +131,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { id: &ProposalId, proposal: &ProposalState, account: Option<&AccountState>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { tracing::debug!(proposal=%id, "visiting enacting proposal"); if let Some(deposit) = proposal.deposit { @@ -154,7 +154,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { pool_id: PoolHash, _: &PoolState, account: Option<&AccountState>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let deposit = ctx .ending_state() .pparams @@ -178,7 +178,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { Ok(()) } - fn flush(&mut self, ctx: &mut BoundaryWork) -> Result<(), ChainError> { + fn 
flush(&mut self, ctx: &mut BoundaryWork) -> Result<(), ChainError> { for delta in self.deltas.drain(..) { ctx.add_delta(delta); } diff --git a/crates/cardano/src/ewrap/rewards.rs b/crates/cardano/src/ewrap/rewards.rs index 09ba214c6..7a63193ff 100644 --- a/crates/cardano/src/ewrap/rewards.rs +++ b/crates/cardano/src/ewrap/rewards.rs @@ -111,7 +111,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { ctx: &mut super::BoundaryWork, id: &super::AccountId, account: &AccountState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let Some(reward) = ctx.rewards.take_for_apply(&account.credential) else { return Ok(()); }; @@ -193,7 +193,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { Ok(()) } - fn flush(&mut self, ctx: &mut super::BoundaryWork) -> Result<(), ChainError> { + fn flush(&mut self, ctx: &mut super::BoundaryWork) -> Result<(), ChainError> { let mark_protocol = ctx .ending_state() .pparams diff --git a/crates/cardano/src/ewrap/work_unit.rs b/crates/cardano/src/ewrap/work_unit.rs index d519bf8af..3ebc73d6f 100644 --- a/crates/cardano/src/ewrap/work_unit.rs +++ b/crates/cardano/src/ewrap/work_unit.rs @@ -8,10 +8,10 @@ use std::sync::Arc; -use dolos_core::{config::CardanoConfig, BlockSlot, Domain, DomainError, Genesis, WorkUnit}; +use dolos_core::{config::CardanoConfig, BlockSlot, Domain, DomainError, WorkUnit}; use tracing::{debug, info}; -use crate::CardanoLogic; +use crate::{CardanoError, CardanoGenesis, CardanoLogic}; use super::BoundaryWork; @@ -20,7 +20,7 @@ pub struct EwrapWorkUnit { slot: BlockSlot, #[allow(dead_code)] config: CardanoConfig, - genesis: Arc, + genesis: Arc, // Loaded boundary: Option, @@ -29,7 +29,7 @@ pub struct EwrapWorkUnit { impl EwrapWorkUnit { /// Create a new ewrap work unit. /// Rewards are loaded from state store during load phase. 
- pub fn new(slot: BlockSlot, config: CardanoConfig, genesis: Arc) -> Self { + pub fn new(slot: BlockSlot, config: CardanoConfig, genesis: Arc) -> Self { Self { slot, config, @@ -46,13 +46,13 @@ impl EwrapWorkUnit { impl WorkUnit for EwrapWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { "ewrap" } - fn load(&mut self, domain: &D) -> Result<(), DomainError> { + fn load(&mut self, domain: &D) -> Result<(), DomainError> { debug!(slot = self.slot, "loading ewrap boundary context"); // Load rewards from state store (persisted by RUPD) @@ -66,14 +66,14 @@ where Ok(()) } - fn compute(&mut self) -> Result<(), DomainError> { + fn compute(&mut self) -> Result<(), DomainError> { // Computation is done during load via compute_deltas // This is because the visitor pattern needs access to state debug!("ewrap compute phase (deltas already computed during load)"); Ok(()) } - fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { debug!(slot = self.slot, "committing ewrap state changes"); let boundary = self.boundary.as_mut().ok_or_else(|| { @@ -86,7 +86,7 @@ where Ok(()) } - fn commit_archive(&mut self, _domain: &D) -> Result<(), DomainError> { + fn commit_archive(&mut self, _domain: &D) -> Result<(), DomainError> { // Archive writes are done in commit_state via boundary.commit() // because they're interleaved with state commits Ok(()) diff --git a/crates/cardano/src/ewrap/wrapup.rs b/crates/cardano/src/ewrap/wrapup.rs index f3a2f1d9b..e5104ca37 100644 --- a/crates/cardano/src/ewrap/wrapup.rs +++ b/crates/cardano/src/ewrap/wrapup.rs @@ -200,13 +200,13 @@ impl super::BoundaryVisitor for BoundaryVisitor { pool_hash: PoolHash, _: &PoolState, _: Option<&AccountState>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { self.change(PoolWrapUp::new(pool_hash)); Ok(()) } - fn flush(&mut self, ctx: &mut super::BoundaryWork) -> Result<(), ChainError> { + fn flush(&mut 
self, ctx: &mut super::BoundaryWork) -> Result<(), ChainError> { for delta in self.deltas.drain(..) { ctx.add_delta(delta); } From 443084b59473064a1837f422f8f735829e7fc6e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 01:10:02 -0300 Subject: [PATCH 29/85] fix(cardano/genesis): type constraints ok --- crates/cardano/src/genesis/mod.rs | 26 ++++++++++++------------- crates/cardano/src/genesis/staking.rs | 8 ++++---- crates/cardano/src/genesis/work_unit.rs | 22 ++++++++++----------- 3 files changed, 27 insertions(+), 29 deletions(-) diff --git a/crates/cardano/src/genesis/mod.rs b/crates/cardano/src/genesis/mod.rs index ce327d206..881c3d987 100644 --- a/crates/cardano/src/genesis/mod.rs +++ b/crates/cardano/src/genesis/mod.rs @@ -1,5 +1,5 @@ use dolos_core::{ - config::CardanoConfig, ChainError, ChainPoint, Domain, EntityKey, Genesis, IndexStore as _, + config::CardanoConfig, ChainError, ChainPoint, Domain, EntityKey, IndexStore as _, IndexWriter as _, StateStore as _, StateWriter as _, }; @@ -14,7 +14,7 @@ pub mod work_unit; pub use work_unit::GenesisWorkUnit; -fn get_utxo_amount(genesis: &Genesis) -> Lovelace { +fn get_utxo_amount(genesis: &crate::CardanoGenesis) -> Lovelace { let byron_utxo = pallas::ledger::configs::byron::genesis_utxos(&genesis.byron) .iter() .fold(0, |acc, (_, _, amount)| acc + amount); @@ -26,7 +26,7 @@ fn get_utxo_amount(genesis: &Genesis) -> Lovelace { byron_utxo + shelley_utxo } -fn bootstrap_pots(pparams: &PParamsSet, genesis: &Genesis) -> Result { +fn bootstrap_pots(pparams: &PParamsSet, genesis: &crate::CardanoGenesis) -> Result> { let utxos = get_utxo_amount(genesis); let max_supply = genesis @@ -47,10 +47,10 @@ fn bootstrap_pots(pparams: &PParamsSet, genesis: &Genesis) -> Result( +pub fn bootstrap_epoch>( state: &D::State, - genesis: &Genesis, -) -> Result { + genesis: &crate::CardanoGenesis, +) -> Result> { let mut pparams = crate::forks::from_byron_genesis(&genesis.byron); let mut nonces = 
None; @@ -88,7 +88,7 @@ pub fn bootstrap_epoch( Ok(epoch) } -pub fn bootstrap_eras(state: &D::State, epoch: &EpochState) -> Result<(), ChainError> { +pub fn bootstrap_eras>(state: &D::State, epoch: &EpochState) -> Result<(), ChainError> { let pparams = epoch.pparams.unwrap_live(); let system_start = pparams.ensure_system_start()?; @@ -117,12 +117,12 @@ pub fn bootstrap_eras(state: &D::State, epoch: &EpochState) -> Result Ok(()) } -pub fn bootstrap_utxos( +pub fn bootstrap_utxos>( state: &D::State, indexes: &D::Indexes, - genesis: &Genesis, + genesis: &crate::CardanoGenesis, config: &CardanoConfig, -) -> Result<(), ChainError> { +) -> Result<(), ChainError> { let state_writer = state.start_writer()?; let index_writer = indexes.start_writer()?; @@ -146,12 +146,12 @@ pub fn bootstrap_utxos( Ok(()) } -pub fn execute( +pub fn execute>( state: &D::State, indexes: &D::Indexes, - genesis: &Genesis, + genesis: &crate::CardanoGenesis, config: &CardanoConfig, -) -> Result<(), ChainError> { +) -> Result<(), ChainError> { let epoch = bootstrap_epoch::(state, genesis)?; bootstrap_eras::(state, &epoch)?; diff --git a/crates/cardano/src/genesis/staking.rs b/crates/cardano/src/genesis/staking.rs index c71e8831e..6678be1a3 100644 --- a/crates/cardano/src/genesis/staking.rs +++ b/crates/cardano/src/genesis/staking.rs @@ -1,4 +1,4 @@ -use dolos_core::{ChainError, Domain, EntityKey, Genesis, StateStore as _, StateWriter as _}; +use dolos_core::{ChainError, Domain, EntityKey, StateStore as _, StateWriter as _}; use pallas::codec::minicbor; use pallas::ledger::addresses::{Address, Network, StakeAddress, StakePayload}; use pallas::ledger::primitives::StakeCredential; @@ -67,7 +67,7 @@ fn parse_pool(dto: &ConfigPool) -> PoolState { } } -fn find_initial_utxo_sum(credential: &StakeCredential, genesis: &Genesis) -> u64 { +fn find_initial_utxo_sum(credential: &StakeCredential, genesis: &crate::CardanoGenesis) -> u64 { let Some(initial_funds) = &genesis.shelley.initial_funds else { return 0; }; 
@@ -85,7 +85,7 @@ fn find_initial_utxo_sum(credential: &StakeCredential, genesis: &Genesis) -> u64 0 } -fn parse_delegation(account: &str, pool: &str, genesis: &Genesis) -> AccountState { +fn parse_delegation(account: &str, pool: &str, genesis: &crate::CardanoGenesis) -> AccountState { let keyhash: Hash<28> = account.parse().unwrap(); let credential = StakeCredential::AddrKeyhash(keyhash); @@ -111,7 +111,7 @@ fn parse_delegation(account: &str, pool: &str, genesis: &Genesis) -> AccountStat } } -pub fn bootstrap(state: &D::State, genesis: &Genesis) -> Result<(), ChainError> { +pub fn bootstrap>(state: &D::State, genesis: &crate::CardanoGenesis) -> Result<(), ChainError> { let writer = state.start_writer()?; let Some(staking) = &genesis.shelley.staking else { diff --git a/crates/cardano/src/genesis/work_unit.rs b/crates/cardano/src/genesis/work_unit.rs index ececcc5b4..68fb38489 100644 --- a/crates/cardano/src/genesis/work_unit.rs +++ b/crates/cardano/src/genesis/work_unit.rs @@ -4,53 +4,51 @@ use std::sync::Arc; -use dolos_core::{ - config::CardanoConfig, ChainPoint, Domain, DomainError, Genesis, WalStore as _, WorkUnit, -}; +use dolos_core::{config::CardanoConfig, ChainPoint, Domain, DomainError, WalStore as _, WorkUnit}; use tracing::{debug, info}; -use crate::CardanoLogic; +use crate::{CardanoError, CardanoGenesis, CardanoLogic}; /// Work unit for bootstrapping the chain from genesis. pub struct GenesisWorkUnit { config: CardanoConfig, - genesis: Arc, + genesis: Arc, } impl GenesisWorkUnit { /// Create a new genesis work unit. 
- pub fn new(config: CardanoConfig, genesis: Arc) -> Self { + pub fn new(config: CardanoConfig, genesis: Arc) -> Self { Self { config, genesis } } } impl WorkUnit for GenesisWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { "genesis" } - fn load(&mut self, _domain: &D) -> Result<(), DomainError> { + fn load(&mut self, _domain: &D) -> Result<(), DomainError> { // Genesis doesn't load existing state - it creates initial state debug!("genesis work unit: no loading required"); Ok(()) } - fn compute(&mut self) -> Result<(), DomainError> { + fn compute(&mut self) -> Result<(), DomainError> { // Genesis is mostly I/O-bound, minimal compute debug!("genesis work unit: no computation required"); Ok(()) } - fn commit_wal(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_wal(&mut self, domain: &D) -> Result<(), DomainError> { // Reset WAL to origin for genesis domain.wal().reset_to(&ChainPoint::Origin)?; Ok(()) } - fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { info!("bootstrapping chain from genesis"); // Execute the genesis bootstrap @@ -65,7 +63,7 @@ where Ok(()) } - fn commit_archive(&mut self, _domain: &D) -> Result<(), DomainError> { + fn commit_archive(&mut self, _domain: &D) -> Result<(), DomainError> { // Genesis doesn't write to archive Ok(()) } From 37ca6a834caff42639e4360119545a9256c45ec1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 01:10:29 -0300 Subject: [PATCH 30/85] fix(cardano/configs): genesis --- crates/cardano/src/include/devnet/mod.rs | 6 +++--- crates/cardano/src/include/mainnet/mod.rs | 6 +++--- crates/cardano/src/include/preprod/mod.rs | 5 ++--- crates/cardano/src/include/preview/mod.rs | 5 ++--- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/crates/cardano/src/include/devnet/mod.rs b/crates/cardano/src/include/devnet/mod.rs index 415d1a7d3..c4a2288c0 100644 --- 
a/crates/cardano/src/include/devnet/mod.rs +++ b/crates/cardano/src/include/devnet/mod.rs @@ -1,4 +1,4 @@ -use dolos_core::Genesis; + use pallas::crypto::hash::Hasher; use std::path::Path; @@ -7,11 +7,11 @@ pub const SHELLEY: &[u8] = include_bytes!("shelley.json"); pub const ALONZO: &[u8] = include_bytes!("alonzo.json"); pub const CONWAY: &[u8] = include_bytes!("conway.json"); -pub fn load() -> Genesis { +pub fn load() -> crate::CardanoGenesis { let mut hasher = Hasher::<256>::new(); hasher.input(SHELLEY); let shelley_hash = hasher.finalize(); - Genesis { + crate::CardanoGenesis { alonzo: serde_json::from_slice(ALONZO).unwrap(), conway: serde_json::from_slice(CONWAY).unwrap(), byron: serde_json::from_slice(BYRON).unwrap(), diff --git a/crates/cardano/src/include/mainnet/mod.rs b/crates/cardano/src/include/mainnet/mod.rs index b0f3a9817..72ef7ccb5 100644 --- a/crates/cardano/src/include/mainnet/mod.rs +++ b/crates/cardano/src/include/mainnet/mod.rs @@ -1,4 +1,4 @@ -use dolos_core::Genesis; + use pallas::crypto::hash::Hasher; use std::path::Path; @@ -7,11 +7,11 @@ pub const SHELLEY: &[u8] = include_bytes!("shelley.json"); pub const ALONZO: &[u8] = include_bytes!("alonzo.json"); pub const CONWAY: &[u8] = include_bytes!("conway.json"); -pub fn load() -> Genesis { +pub fn load() -> crate::CardanoGenesis { let mut hasher = Hasher::<256>::new(); hasher.input(SHELLEY); let shelley_hash = hasher.finalize(); - Genesis { + crate::CardanoGenesis { alonzo: serde_json::from_slice(ALONZO).unwrap(), conway: serde_json::from_slice(CONWAY).unwrap(), byron: serde_json::from_slice(BYRON).unwrap(), diff --git a/crates/cardano/src/include/preprod/mod.rs b/crates/cardano/src/include/preprod/mod.rs index 746b26609..44ba7ae98 100644 --- a/crates/cardano/src/include/preprod/mod.rs +++ b/crates/cardano/src/include/preprod/mod.rs @@ -1,6 +1,5 @@ use std::path::Path; -use dolos_core::Genesis; use pallas::crypto::hash::Hasher; pub const BYRON: &[u8] = include_bytes!("byron.json"); @@ -8,12 
+7,12 @@ pub const SHELLEY: &[u8] = include_bytes!("shelley.json"); pub const ALONZO: &[u8] = include_bytes!("alonzo.json"); pub const CONWAY: &[u8] = include_bytes!("conway.json"); -pub fn load() -> Genesis { +pub fn load() -> crate::CardanoGenesis { let mut hasher = Hasher::<256>::new(); hasher.input(SHELLEY); let shelley_hash = hasher.finalize(); - Genesis { + crate::CardanoGenesis { alonzo: serde_json::from_slice(ALONZO).unwrap(), conway: serde_json::from_slice(CONWAY).unwrap(), byron: serde_json::from_slice(BYRON).unwrap(), diff --git a/crates/cardano/src/include/preview/mod.rs b/crates/cardano/src/include/preview/mod.rs index cf919de4a..195f25ce8 100644 --- a/crates/cardano/src/include/preview/mod.rs +++ b/crates/cardano/src/include/preview/mod.rs @@ -1,4 +1,3 @@ -use dolos_core::Genesis; use pallas::crypto::hash::Hasher; use std::path::Path; @@ -7,12 +6,12 @@ pub const SHELLEY: &[u8] = include_bytes!("shelley.json"); pub const ALONZO: &[u8] = include_bytes!("alonzo.json"); pub const CONWAY: &[u8] = include_bytes!("conway.json"); -pub fn load() -> Genesis { +pub fn load() -> crate::CardanoGenesis { let mut hasher = Hasher::<256>::new(); hasher.input(SHELLEY); let shelley_hash = hasher.finalize(); - Genesis { + crate::CardanoGenesis { alonzo: serde_json::from_slice(ALONZO).unwrap(), conway: serde_json::from_slice(CONWAY).unwrap(), byron: serde_json::from_slice(BYRON).unwrap(), From d1757a7057c85c9f471a79e5f14ce6daab0f618d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 01:11:00 -0300 Subject: [PATCH 31/85] fix(indexes/delta): bytes & txo_ref --- crates/cardano/src/indexes/delta.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/cardano/src/indexes/delta.rs b/crates/cardano/src/indexes/delta.rs index babb98807..79ad0a4c4 100644 --- a/crates/cardano/src/indexes/delta.rs +++ b/crates/cardano/src/indexes/delta.rs @@ -201,8 +201,9 @@ impl CardanoIndexDeltaBuilder { /// Add a spent TxO 
reference to the current block. pub fn add_spent_input(&mut self, input: &MultiEraInput) { - let txo_ref: TxoRef = input.into(); - let bytes: Vec = txo_ref.into(); + let txo_ref = crate::txo_ref_from_input(input); + let mut bytes = txo_ref.0.as_slice().to_vec(); + bytes.extend_from_slice(txo_ref.1.to_be_bytes().as_slice()); self.current_block() .tags .push(Tag::new(archive::SPENT_TXO, bytes)); @@ -288,7 +289,7 @@ impl CardanoIndexDeltaBuilder { /// Extract UTxO filter tags from raw EraCbor. fn extract_tags_from_era_cbor(era_cbor: &EraCbor) -> Option> { - let output = MultiEraOutput::try_from(era_cbor).ok()?; + let output = crate::multi_era_output_from_era_cbor(era_cbor).ok()?; Some(Self::extract_utxo_tags(&output)) } } From 75f2de0f33d70c8a1c74c7114a75aabbcf0e947f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 01:11:27 -0300 Subject: [PATCH 32/85] fix(cardano/indexes): query fix --- crates/cardano/src/indexes/query.rs | 92 +++++++++++++++-------------- 1 file changed, 47 insertions(+), 45 deletions(-) diff --git a/crates/cardano/src/indexes/query.rs b/crates/cardano/src/indexes/query.rs index df3224ca0..a2e608e61 100644 --- a/crates/cardano/src/indexes/query.rs +++ b/crates/cardano/src/indexes/query.rs @@ -11,7 +11,7 @@ use pallas::{ use dolos_core::{ archive::ArchiveStore, AsyncQueryFacade, BlockBody, BlockSlot, ChainError, Domain, DomainError, - EntityKey, IndexStore, StateStore as _, TagDimension, TxHash, TxoRef, + EntityKey, IndexStore, StateStore as _, TagDimension, TxHash, }; use crate::indexes::dimensions::archive; @@ -43,14 +43,14 @@ pub struct ScriptData { } #[async_trait::async_trait] -pub trait AsyncCardanoQueryExt { +pub trait AsyncCardanoQueryExt> { fn blocks_by_address_stream( &self, address: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + Send + 'static; fn blocks_by_payment_stream( &self, @@ -58,7 
+58,7 @@ pub trait AsyncCardanoQueryExt { start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + Send + 'static; fn blocks_by_stake_stream( &self, @@ -66,7 +66,7 @@ pub trait AsyncCardanoQueryExt { start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + Send + 'static; fn blocks_by_asset_stream( &self, @@ -74,7 +74,7 @@ pub trait AsyncCardanoQueryExt { start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + Send + 'static; fn blocks_by_account_certs_stream( &self, @@ -82,7 +82,7 @@ pub trait AsyncCardanoQueryExt { start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + Send + 'static; fn blocks_by_metadata_stream( &self, @@ -90,64 +90,64 @@ pub trait AsyncCardanoQueryExt { start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + Send + 'static; async fn blocks_by_address( &self, address: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError>; + ) -> Result)>, DomainError>; async fn blocks_by_payment( &self, payment: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError>; + ) -> Result)>, DomainError>; async fn blocks_by_stake( &self, stake: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError>; + ) -> Result)>, DomainError>; async fn blocks_by_asset( &self, asset: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError>; + ) -> Result)>, DomainError>; async fn blocks_by_account_certs( &self, account: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError>; + ) -> 
Result)>, DomainError>; async fn blocks_by_metadata( &self, label: u64, start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError>; + ) -> Result)>, DomainError>; - async fn plutus_data(&self, datum_hash: &Hash<32>) -> Result, DomainError>; + async fn plutus_data(&self, datum_hash: &Hash<32>) -> Result, DomainError>; - async fn get_datum(&self, datum_hash: &Hash<32>) -> Result>, DomainError>; + async fn get_datum(&self, datum_hash: &Hash<32>) -> Result>, DomainError>; async fn script_by_hash( &self, script_hash: &Hash<28>, - ) -> Result, DomainError>; + ) -> Result, DomainError>; - async fn tx_by_spent_txo(&self, spent_txo: &[u8]) -> Result, DomainError>; + async fn tx_by_spent_txo(&self, spent_txo: &[u8]) -> Result, DomainError>; } #[async_trait::async_trait] -impl AsyncCardanoQueryExt for AsyncQueryFacade +impl> AsyncCardanoQueryExt for AsyncQueryFacade where D: Clone + Send + Sync + 'static, { @@ -157,7 +157,7 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static + ) -> impl Stream), DomainError>> + Send + 'static { blocks_by_tag_stream( (*self).clone(), @@ -175,7 +175,7 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static + ) -> impl Stream), DomainError>> + Send + 'static { blocks_by_tag_stream( (*self).clone(), @@ -193,7 +193,7 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static + ) -> impl Stream), DomainError>> + Send + 'static { blocks_by_tag_stream( (*self).clone(), @@ -211,7 +211,7 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static + ) -> impl Stream), DomainError>> + Send + 'static { blocks_by_tag_stream( (*self).clone(), @@ -229,7 +229,7 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static + 
) -> impl Stream), DomainError>> + Send + 'static { blocks_by_tag_stream( (*self).clone(), @@ -247,7 +247,7 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static + ) -> impl Stream), DomainError>> + Send + 'static { blocks_by_tag_stream( (*self).clone(), @@ -264,7 +264,7 @@ where address: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError> { + ) -> Result)>, DomainError> { blocks_by_tag(self, archive::ADDRESS, address, start_slot, end_slot).await } @@ -273,7 +273,7 @@ where payment: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError> { + ) -> Result)>, DomainError> { blocks_by_tag(self, archive::PAYMENT, payment, start_slot, end_slot).await } @@ -282,7 +282,7 @@ where stake: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError> { + ) -> Result)>, DomainError> { blocks_by_tag(self, archive::STAKE, stake, start_slot, end_slot).await } @@ -291,7 +291,7 @@ where asset: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError> { + ) -> Result)>, DomainError> { blocks_by_tag(self, archive::ASSET, asset, start_slot, end_slot).await } @@ -300,7 +300,7 @@ where account: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError> { + ) -> Result)>, DomainError> { blocks_by_tag(self, archive::ACCOUNT_CERTS, account, start_slot, end_slot).await } @@ -309,7 +309,7 @@ where label: u64, start_slot: BlockSlot, end_slot: BlockSlot, - ) -> Result)>, DomainError> { + ) -> Result)>, DomainError> { blocks_by_tag( self, archive::METADATA, @@ -320,7 +320,7 @@ where .await } - async fn plutus_data(&self, datum_hash: &Hash<32>) -> Result, DomainError> { + async fn plutus_data(&self, datum_hash: &Hash<32>) -> Result, DomainError> { let end_slot = self .run_blocking(move |domain| { Ok(domain @@ -365,8 +365,8 @@ where .await } - async fn get_datum(&self, datum_hash: &Hash<32>) -> Result>, 
DomainError> { - let key = EntityKey::from(*datum_hash); + async fn get_datum(&self, datum_hash: &Hash<32>) -> Result>, DomainError> { + let key = EntityKey::from(datum_hash.as_slice()); self.run_blocking(move |domain| { let datum_state: Option = domain.state().read_entity_typed(DATUM_NS, &key)?; @@ -378,7 +378,7 @@ where async fn script_by_hash( &self, script_hash: &Hash<28>, - ) -> Result, DomainError> { + ) -> Result, DomainError> { let end_slot = self .run_blocking(move |domain| { Ok(domain @@ -479,7 +479,7 @@ where .await } - async fn tx_by_spent_txo(&self, spent_txo: &[u8]) -> Result, DomainError> { + async fn tx_by_spent_txo(&self, spent_txo: &[u8]) -> Result, DomainError> { let spent = spent_txo.to_vec(); let end_slot = self @@ -501,9 +501,11 @@ where |block| { for tx in block.txs().iter() { for input in tx.inputs() { - let bytes: Vec = TxoRef::from(&input).into(); + let txo_ref = crate::txo_ref_from_input(&input); + let mut bytes = txo_ref.0.as_slice().to_vec(); + bytes.extend_from_slice(txo_ref.1.to_be_bytes().as_slice()); if bytes.as_slice() == spent.as_slice() { - return Some(tx.hash()); + return Some(crate::pallas_hash_to_core(tx.hash())); } } } @@ -520,9 +522,9 @@ async fn blocks_by_tag( key: &[u8], start_slot: BlockSlot, end_slot: BlockSlot, -) -> Result)>, DomainError> +) -> Result)>, DomainError> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let slots = facade .slots_by_tag(dimension, key.to_vec(), start_slot, end_slot) @@ -546,9 +548,9 @@ async fn find_first_by_tag( start_slot: BlockSlot, end_slot: BlockSlot, mut predicate: F, -) -> Result, DomainError> +) -> Result, DomainError> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, F: FnMut(&MultiEraBlock) -> Option, { let mut current_start = start_slot; @@ -577,7 +579,7 @@ where }; let block = MultiEraBlock::decode(raw.as_slice()) - .map_err(|e| DomainError::ChainError(ChainError::DecodingError(e)))?; + 
.map_err(|e| DomainError::ChainError(ChainError::ChainSpecific(crate::CardanoError::Traverse(e))))?; if let Some(result) = predicate(&block) { return Ok(Some(result)); @@ -603,9 +605,9 @@ fn blocks_by_tag_stream( mut start_slot: BlockSlot, mut end_slot: BlockSlot, order: SlotOrder, -) -> impl Stream), DomainError>> + Send + 'static +) -> impl Stream), DomainError>> + Send + 'static where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { async_stream::try_stream! { loop { From 2eae04fb5b30cb5450d4422d0dafaabab51991d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 01:13:25 -0300 Subject: [PATCH 33/85] fix(cardano): same strategy as before --- crates/cardano/src/lib.rs | 14 ++++++--- crates/cardano/src/rewards/mod.rs | 2 +- crates/cardano/src/roll/accounts.rs | 16 +++++----- crates/cardano/src/roll/assets.rs | 4 +-- crates/cardano/src/roll/batch.rs | 36 +++++++++++------------ crates/cardano/src/roll/datums.rs | 10 +++---- crates/cardano/src/roll/dreps.rs | 4 +-- crates/cardano/src/roll/epochs.rs | 30 +++++++++---------- crates/cardano/src/roll/mod.rs | 44 ++++++++++++++-------------- crates/cardano/src/roll/pools.rs | 8 ++--- crates/cardano/src/roll/proposals.rs | 14 ++++----- crates/cardano/src/roll/txs.rs | 18 ++++++------ crates/cardano/src/roll/work_unit.rs | 24 +++++++-------- crates/cardano/src/rupd/loading.rs | 18 ++++++------ crates/cardano/src/rupd/mod.rs | 20 ++++++------- crates/cardano/src/rupd/work_unit.rs | 18 ++++++------ crates/cardano/src/validate.rs | 2 +- crates/core/src/lib.rs | 4 +-- 18 files changed, 146 insertions(+), 140 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 7d28cf810..62f82c759 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -120,7 +120,7 @@ pub enum CardanoWorkUnit { impl WorkUnit for CardanoWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { match self { @@ 
-269,6 +269,12 @@ pub enum CardanoError { #[error("phase-2 script rejected the transaction")] Phase2ValidationRejected(Vec), + + #[error("invalid pool registration params")] + InvalidPoolParams, + + #[error("invalid governance proposal params")] + InvalidProposalParams, } #[derive(Clone)] @@ -322,7 +328,7 @@ impl dolos_core::ChainLogic for CardanoLogic { type Utxo = OwnedMultiEraOutput; type Delta = CardanoDelta; type Entity = CardanoEntity; - type WorkUnit> = + type WorkUnit> = CardanoWorkUnit; type ChainSpecificError = CardanoError; type Genesis = CardanoGenesis; @@ -341,7 +347,7 @@ impl dolos_core::ChainLogic for CardanoLogic { None => WorkBuffer::Empty, }; - let eras = eras::load_era_summary::(state)?; + let eras = eras::load_chain_summary_from_state(state)?; // Use randomness_stability_window (4k/f) for the RUPD trigger boundary. // The Haskell ledger's startStep fires at randomnessStabilisationWindow @@ -391,7 +397,7 @@ impl dolos_core::ChainLogic for CardanoLogic { fn pop_work(&mut self, domain: &D) -> Option where - D: Domain, + D: Domain, { // Refresh cache if needed (after previous genesis or estart execution) if self.needs_cache_refresh { diff --git a/crates/cardano/src/rewards/mod.rs b/crates/cardano/src/rewards/mod.rs index b5197faf4..bebe998ba 100644 --- a/crates/cardano/src/rewards/mod.rs +++ b/crates/cardano/src/rewards/mod.rs @@ -506,7 +506,7 @@ fn compute_delegator_chunk( .collect() } -pub fn define_rewards(ctx: &C) -> Result, ChainError> { +pub fn define_rewards(ctx: &C) -> Result, ChainError> { let mut map = RewardMap::::new(ctx.incentives().clone()); // Sequential pool iteration with parallel delegator processing diff --git a/crates/cardano/src/roll/accounts.rs b/crates/cardano/src/roll/accounts.rs index 3cbf0714e..624afef12 100644 --- a/crates/cardano/src/roll/accounts.rs +++ b/crates/cardano/src/roll/accounts.rs @@ -1,4 +1,4 @@ -use dolos_core::{BlockSlot, ChainError, Genesis, NsKey, TxOrder}; +use dolos_core::{BlockSlot, ChainError, NsKey, 
TxOrder}; use super::WorkDeltas; use pallas::codec::minicbor; @@ -400,12 +400,12 @@ impl BlockVisitor for AccountVisitor { &mut self, _: &mut WorkDeltas, _: &MultiEraBlock, - _: &Genesis, + _: &crate::CardanoGenesis, pparams: &PParamsSet, epoch: Epoch, _: u64, _: u16, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { self.deposit = pparams.ensure_key_deposit().ok(); self.epoch = Some(epoch); self.protocol_version = pparams.protocol_major(); @@ -419,7 +419,7 @@ impl BlockVisitor for AccountVisitor { _: &MultiEraTx, _: &MultiEraInput, resolved: &MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let address = resolved.address().unwrap(); let Some((cred, is_pointer)) = pallas_extras::address_as_stake_cred(&address) else { @@ -442,7 +442,7 @@ impl BlockVisitor for AccountVisitor { _: &MultiEraTx, _: u32, output: &MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let address = output.address().expect("valid address"); let epoch = self.epoch.expect("value set in root"); @@ -467,7 +467,7 @@ impl BlockVisitor for AccountVisitor { _: &MultiEraTx, order: &TxOrder, cert: &MultiEraCert, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let epoch = self.epoch.expect("value set in root"); if let Some(cred) = pallas_extras::cert_as_stake_registration(cert) { @@ -532,8 +532,8 @@ impl BlockVisitor for AccountVisitor { _: &MultiEraTx, account: &[u8], amount: u64, - ) -> Result<(), ChainError> { - let address = Address::from_bytes(account)?; + ) -> Result<(), ChainError> { + let address = Address::from_bytes(account).map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Address(e)))?; let Some((cred, _)) = pallas_extras::address_as_stake_cred(&address) else { return Ok(()); diff --git a/crates/cardano/src/roll/assets.rs b/crates/cardano/src/roll/assets.rs index 92fa66a24..838b9241d 100644 --- a/crates/cardano/src/roll/assets.rs +++ b/crates/cardano/src/roll/assets.rs @@ -115,7 +115,7 @@ impl 
BlockVisitor for AssetStateVisitor { block: &MultiEraBlock, tx: &MultiEraTx, mint: &MultiEraPolicyAssets, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let policy = mint.policy(); let has_metadata = has_cip25_metadata(tx); let cip25_metadata = if has_metadata { @@ -161,7 +161,7 @@ impl BlockVisitor for AssetStateVisitor { tx: &MultiEraTx, _index: u32, output: &MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let Some(_datum_option) = output.datum() else { return Ok(()); }; diff --git a/crates/cardano/src/roll/batch.rs b/crates/cardano/src/roll/batch.rs index ba96df8bd..a74eb840a 100644 --- a/crates/cardano/src/roll/batch.rs +++ b/crates/cardano/src/roll/batch.rs @@ -148,9 +148,9 @@ impl WorkBatch { start..=end } - pub fn load_utxos(&mut self, domain: &D) -> Result<(), DomainError> + pub fn load_utxos(&mut self, domain: &D) -> Result<(), DomainError> where - D: Domain, + D: Domain, { // TODO: paralelize in chunks @@ -168,7 +168,7 @@ impl WorkBatch { Ok(()) } - pub fn decode_utxos(&mut self) -> Result<(), DomainError> { + pub fn decode_utxos(&mut self) -> Result<(), DomainError> { let pairs: Vec<_> = self .utxos .iter() @@ -181,16 +181,16 @@ impl WorkBatch { .map(|(k, v)| { OwnedMultiEraOutput::decode(v.clone()) .map(|x| (k.clone(), x)) - .map_err(ChainError::from) + .map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Traverse(e))) }) - .collect::>()?; + .collect::>>()?; self.utxos_decoded = decoded; Ok(()) } - pub fn commit_wal(&self, domain: &D) -> Result<(), DomainError> + pub fn commit_wal(&self, domain: &D) -> Result<(), DomainError> where D: Domain, { @@ -271,9 +271,9 @@ impl WorkBatch { Ok(()) } - pub fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> + pub fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> where - D: Domain, + D: Domain, { let writer = domain.state().start_writer()?; @@ -297,9 +297,9 @@ impl WorkBatch { Ok(()) } - pub fn commit_archive(&mut self, domain: 
&D) -> Result<(), DomainError> + pub fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError> where - D: Domain, + D: Domain, { let writer = domain.archive().start_writer()?; @@ -320,7 +320,7 @@ impl WorkBatch { /// This traverses all blocks and extracts index tags using the /// CardanoIndexDeltaBuilder. pub fn build_index_delta(&self) -> IndexDelta { - use pallas::ledger::traverse::{MultiEraBlock, MultiEraOutput}; + use pallas::ledger::traverse::MultiEraBlock; let mut builder = CardanoIndexDeltaBuilder::new(self.last_point()); @@ -340,28 +340,28 @@ impl WorkBatch { if let Some(utxo_delta) = &work_block.utxo_delta { // Produced UTxOs for (txo_ref, body) in &utxo_delta.produced_utxo { - if let Ok(output) = MultiEraOutput::try_from(body.as_ref()) { + if let Ok(output) = crate::multi_era_output_from_era_cbor(body.as_ref()) { builder.add_produced_utxo(txo_ref.clone(), &output); } } // Consumed UTxOs for (txo_ref, body) in &utxo_delta.consumed_utxo { - if let Ok(output) = MultiEraOutput::try_from(body.as_ref()) { + if let Ok(output) = crate::multi_era_output_from_era_cbor(body.as_ref()) { builder.add_consumed_utxo(txo_ref.clone(), &output); } } // Recovered stxis (for rollback support) for (txo_ref, body) in &utxo_delta.recovered_stxi { - if let Ok(output) = MultiEraOutput::try_from(body.as_ref()) { + if let Ok(output) = crate::multi_era_output_from_era_cbor(body.as_ref()) { builder.add_produced_utxo(txo_ref.clone(), &output); } } // Undone UTxOs (for rollback support) for (txo_ref, body) in &utxo_delta.undone_utxo { - if let Ok(output) = MultiEraOutput::try_from(body.as_ref()) { + if let Ok(output) = crate::multi_era_output_from_era_cbor(body.as_ref()) { builder.add_consumed_utxo(txo_ref.clone(), &output); } } @@ -381,7 +381,7 @@ impl WorkBatch { builder.add_spent_input(&input); // Try to get resolved input for address/asset tags - let txo_ref: TxoRef = (&input).into(); + let txo_ref = crate::txo_ref_from_input(&input); if let Some(resolved) = 
self.utxos_decoded.get(&txo_ref) { resolved.with_dependent(|_, output| { if let Ok(addr) = output.address() { @@ -459,9 +459,9 @@ impl WorkBatch { builder.build() } - pub fn commit_indexes(&mut self, domain: &D) -> Result<(), DomainError> + pub fn commit_indexes(&mut self, domain: &D) -> Result<(), DomainError> where - D: Domain, + D: Domain, { let delta = self.build_index_delta(); diff --git a/crates/cardano/src/roll/datums.rs b/crates/cardano/src/roll/datums.rs index e143ff090..e535cb406 100644 --- a/crates/cardano/src/roll/datums.rs +++ b/crates/cardano/src/roll/datums.rs @@ -51,7 +51,7 @@ impl dolos_core::EntityDelta for DatumRefIncrement { type Entity = DatumState; fn key(&self) -> NsKey { - NsKey::from((DATUM_NS, self.datum_hash)) + NsKey::from((DATUM_NS, dolos_core::EntityKey::from(self.datum_hash.as_slice()))) } fn apply(&mut self, entity: &mut Option) { @@ -105,7 +105,7 @@ impl dolos_core::EntityDelta for DatumRefDecrement { type Entity = DatumState; fn key(&self) -> NsKey { - NsKey::from((DATUM_NS, self.datum_hash)) + NsKey::from((DATUM_NS, dolos_core::EntityKey::from(self.datum_hash.as_slice()))) } fn apply(&mut self, entity: &mut Option) { @@ -148,7 +148,7 @@ impl BlockVisitor for DatumVisitor { _block: &MultiEraBlock, tx: &MultiEraTx, _utxos: &HashMap, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Clear and collect witness datums for this transaction self.witness_datums.clear(); @@ -168,7 +168,7 @@ impl BlockVisitor for DatumVisitor { _tx: &MultiEraTx, _index: u32, output: &MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Check if this output references a datum by hash if let Some(DatumOption::Hash(datum_hash)) = output.datum() { // Only emit increment if the datum is in the witness set @@ -187,7 +187,7 @@ impl BlockVisitor for DatumVisitor { _tx: &MultiEraTx, _input: &MultiEraInput, resolved: &MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Check if the consumed 
UTxO had a datum hash if let Some(DatumOption::Hash(datum_hash)) = resolved.datum() { deltas.add_for_entity(DatumRefDecrement::new(datum_hash)); diff --git a/crates/cardano/src/roll/dreps.rs b/crates/cardano/src/roll/dreps.rs index bf0e6e7eb..5bfface6b 100644 --- a/crates/cardano/src/roll/dreps.rs +++ b/crates/cardano/src/roll/dreps.rs @@ -187,7 +187,7 @@ impl BlockVisitor for DRepStateVisitor { block: &MultiEraBlock, tx: &MultiEraTx, _: &HashMap, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let MultiEraTx::Conway(conway_tx) = tx else { return Ok(()); }; @@ -216,7 +216,7 @@ impl BlockVisitor for DRepStateVisitor { _: &MultiEraTx, order: &TxOrder, cert: &MultiEraCert, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let Some(drep) = cert_drep(cert) else { return Ok(()); }; diff --git a/crates/cardano/src/roll/epochs.rs b/crates/cardano/src/roll/epochs.rs index eee39fb12..854c85dc8 100644 --- a/crates/cardano/src/roll/epochs.rs +++ b/crates/cardano/src/roll/epochs.rs @@ -1,6 +1,6 @@ use std::collections::{HashMap, HashSet}; -use dolos_core::{BrokenInvariant, ChainError, Genesis, NsKey, TxOrder, TxoRef}; +use dolos_core::{BrokenInvariant, ChainError, NsKey, TxOrder, TxoRef}; use pallas::{ crypto::hash::Hash, ledger::{ @@ -132,16 +132,16 @@ impl dolos_core::EntityDelta for NoncesUpdate { fn compute_collateral_value( tx: &MultiEraTx, utxos: &HashMap, -) -> Result { +) -> Result> { debug_assert!(!tx.is_valid()); let mut total = 0; for input in tx.consumes() { let utxo = utxos - .get(&TxoRef::from(&input)) + .get(&crate::txo_ref_from_input(&input)) .ok_or(ChainError::BrokenInvariant(BrokenInvariant::MissingUtxo( - TxoRef::from(&input), + crate::txo_ref_from_input(&input), )))?; utxo.with_dependent(|_, utxo| { total += utxo.value().coin(); @@ -158,7 +158,7 @@ fn compute_collateral_value( fn define_tx_fees( tx: &MultiEraTx, utxos: &HashMap, -) -> Result { +) -> Result> { if let Some(byron) = tx.as_byron() { let fee = 
compute_byron_fee(byron, None); Ok(fee) @@ -199,12 +199,12 @@ impl BlockVisitor for EpochStateVisitor { &mut self, _: &mut WorkDeltas, block: &MultiEraBlock, - _: &Genesis, + _: &crate::CardanoGenesis, pparams: &PParamsSet, epoch: Epoch, epoch_start: u64, _: u16, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { self.stats_delta = Some(EpochStatsUpdate { epoch, ..Default::default() @@ -224,7 +224,7 @@ impl BlockVisitor for EpochStateVisitor { self.nonces_delta = Some(NoncesUpdate { slot: block.header().slot(), tail: block.header().previous_hash(), - nonce_vrf_output: block.header().nonce_vrf_output()?, + nonce_vrf_output: block.header().nonce_vrf_output().map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Traverse(e)))?, previous: None, }); } @@ -238,7 +238,7 @@ impl BlockVisitor for EpochStateVisitor { _: &MultiEraBlock, tx: &MultiEraTx, utxos: &HashMap, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let fees = define_tx_fees(tx, utxos)?; self.stats_delta.as_mut().unwrap().block_fees += fees; @@ -257,7 +257,7 @@ impl BlockVisitor for EpochStateVisitor { _: &MultiEraTx, _: &pallas::ledger::traverse::MultiEraInput, resolved: &pallas::ledger::traverse::MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let amount = resolved.value().coin(); self.stats_delta.as_mut().unwrap().utxo_delta -= amount as i64; @@ -271,7 +271,7 @@ impl BlockVisitor for EpochStateVisitor { _: &MultiEraTx, _: u32, output: &pallas::ledger::traverse::MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let amount = output.value().coin(); self.stats_delta.as_mut().unwrap().utxo_delta += amount as i64; @@ -285,7 +285,7 @@ impl BlockVisitor for EpochStateVisitor { _: &MultiEraTx, _: &TxOrder, cert: &MultiEraCert, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { if pallas_extras::cert_as_stake_registration(cert).is_some() { self.stats_delta.as_mut().unwrap().new_accounts += 1; } @@ -371,7 
+371,7 @@ impl BlockVisitor for EpochStateVisitor { _: &MultiEraTx, proposal: &pallas::ledger::traverse::MultiEraProposal, _: usize, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { tracing::debug!(proposal=?proposal.gov_action(), deposit=proposal.deposit(), "proposal deposit"); self.stats_delta.as_mut().unwrap().proposal_deposits += proposal.deposit(); @@ -386,12 +386,12 @@ impl BlockVisitor for EpochStateVisitor { _: &MultiEraTx, _: &[u8], amount: u64, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { self.stats_delta.as_mut().unwrap().withdrawals += amount; Ok(()) } - fn flush(&mut self, deltas: &mut WorkDeltas) -> Result<(), ChainError> { + fn flush(&mut self, deltas: &mut WorkDeltas) -> Result<(), ChainError> { if let Some(delta) = self.stats_delta.take() { deltas.add_for_entity(delta); } diff --git a/crates/cardano/src/roll/mod.rs b/crates/cardano/src/roll/mod.rs index d1671200a..f3cd92ba8 100644 --- a/crates/cardano/src/roll/mod.rs +++ b/crates/cardano/src/roll/mod.rs @@ -1,6 +1,6 @@ use std::{collections::HashMap, sync::Arc}; -use dolos_core::{ChainError, Domain, Genesis, InvariantViolation, StateError, TxOrder, TxoRef}; +use dolos_core::{ChainError, Domain, InvariantViolation, StateError, TxOrder, TxoRef}; use pallas::{ codec::utils::KeepRaw, ledger::{ @@ -49,12 +49,12 @@ pub trait BlockVisitor { &mut self, deltas: &mut WorkDeltas, block: &MultiEraBlock, - genesis: &Genesis, + genesis: &crate::CardanoGenesis, pparams: &PParamsSet, epoch: Epoch, epoch_start: u64, protocol: u16, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -65,7 +65,7 @@ pub trait BlockVisitor { block: &MultiEraBlock, tx: &MultiEraTx, utxos: &HashMap, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -77,7 +77,7 @@ pub trait BlockVisitor { tx: &MultiEraTx, input: &MultiEraInput, resolved: &MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -89,7 +89,7 @@ pub trait 
BlockVisitor { tx: &MultiEraTx, index: u32, output: &MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -100,7 +100,7 @@ pub trait BlockVisitor { block: &MultiEraBlock, tx: &MultiEraTx, mint: &MultiEraPolicyAssets, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -112,7 +112,7 @@ pub trait BlockVisitor { tx: &MultiEraTx, order: &TxOrder, cert: &MultiEraCert, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -124,7 +124,7 @@ pub trait BlockVisitor { tx: &MultiEraTx, account: &[u8], amount: u64, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -135,7 +135,7 @@ pub trait BlockVisitor { block: &MultiEraBlock, tx: Option<&MultiEraTx>, update: &MultiEraUpdate, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -148,7 +148,7 @@ pub trait BlockVisitor { block: &MultiEraBlock, tx: &MultiEraTx, data: &KeepRaw<'_, PlutusData>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -160,7 +160,7 @@ pub trait BlockVisitor { tx: &MultiEraTx, proposal: &MultiEraProposal, idx: usize, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -171,18 +171,18 @@ pub trait BlockVisitor { block: &MultiEraBlock, tx: &MultiEraTx, proposal: &MultiEraRedeemer, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } #[allow(unused_variables)] - fn flush(&mut self, deltas: &mut WorkDeltas) -> Result<(), ChainError> { + fn flush(&mut self, deltas: &mut WorkDeltas) -> Result<(), ChainError> { Ok(()) } } pub struct DeltaBuilder<'a> { - genesis: Arc, + genesis: Arc, work: &'a mut WorkBlock, active_params: &'a PParamsSet, epoch: Epoch, @@ -203,7 +203,7 @@ pub struct DeltaBuilder<'a> { impl<'a> DeltaBuilder<'a> { #[allow(clippy::too_many_arguments)] pub fn new( - genesis: Arc, + genesis: Arc, protocol: u16, active_params: &'a PParamsSet, epoch: Epoch, @@ -230,7 +230,7 @@ impl<'a> DeltaBuilder<'a> { 
} } - pub fn crawl(&mut self) -> Result<(), ChainError> { + pub fn crawl(&mut self) -> Result<(), ChainError> { let block = self.work.decoded(); let block = block.view(); let mut deltas = WorkDeltas::default(); @@ -326,7 +326,7 @@ impl<'a> DeltaBuilder<'a> { .visit_tx(&mut deltas, block, tx, self.utxos)?; for input in tx.consumes() { - let txoref = TxoRef::from(&input); + let txoref = crate::txo_ref_from_input(&input); let resolved = self.utxos.get(&txoref).ok_or_else(|| { StateError::InvariantViolation(InvariantViolation::InputNotFound(txoref)) @@ -349,7 +349,7 @@ impl<'a> DeltaBuilder<'a> { .visit_input(&mut deltas, block, tx, &input, resolved)?; self.proposal_logs .visit_input(&mut deltas, block, tx, &input, resolved)?; - Result::<_, ChainError>::Ok(()) + Result::<_, ChainError>::Ok(()) })?; } @@ -534,12 +534,12 @@ impl<'a> DeltaBuilder<'a> { } #[instrument(name = "roll", skip_all)] -pub fn compute_delta( - genesis: Arc, +pub fn compute_delta>( + genesis: Arc, cache: &Cache, state: &D::State, batch: &mut WorkBatch, -) -> Result<(), ChainError> { +) -> Result<(), ChainError> { let (epoch, _) = cache.eras.slot_epoch(batch.first_slot()); let (protocol, _) = cache.eras.protocol_and_era_for_epoch(epoch); diff --git a/crates/cardano/src/roll/pools.rs b/crates/cardano/src/roll/pools.rs index 19d39d3f9..ded7513d7 100644 --- a/crates/cardano/src/roll/pools.rs +++ b/crates/cardano/src/roll/pools.rs @@ -1,6 +1,6 @@ use std::ops::Deref; -use dolos_core::{BlockSlot, ChainError, Genesis, NsKey, TxOrder}; +use dolos_core::{BlockSlot, ChainError, NsKey, TxOrder}; use pallas::crypto::hash::{Hash, Hasher}; use pallas::ledger::primitives::Epoch; use pallas::ledger::traverse::{MultiEraBlock, MultiEraCert, MultiEraTx}; @@ -235,12 +235,12 @@ impl BlockVisitor for PoolStateVisitor { &mut self, deltas: &mut WorkDeltas, block: &MultiEraBlock, - _: &Genesis, + _: &crate::CardanoGenesis, pparams: &PParamsSet, epoch: Epoch, _: u64, _: u16, - ) -> Result<(), ChainError> { + ) -> Result<(), 
ChainError> { self.epoch = Some(epoch); self.deposit = pparams.ensure_pool_deposit().ok(); @@ -259,7 +259,7 @@ impl BlockVisitor for PoolStateVisitor { _: &MultiEraTx, _: &TxOrder, cert: &MultiEraCert, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { if let Some(cert) = pallas_extras::cert_as_pool_registration(cert) { let epoch = self.epoch.expect("value set in root"); let deposit = self.deposit.expect("value set in root"); diff --git a/crates/cardano/src/roll/proposals.rs b/crates/cardano/src/roll/proposals.rs index bb4860778..f2a5c67b4 100644 --- a/crates/cardano/src/roll/proposals.rs +++ b/crates/cardano/src/roll/proposals.rs @@ -1,6 +1,6 @@ use std::collections::BTreeMap; -use dolos_core::{BlockSlot, ChainError, Genesis, NsKey}; +use dolos_core::{BlockSlot, ChainError, NsKey}; use pallas::{ codec::utils::Bytes, crypto::hash::Hash, @@ -328,15 +328,15 @@ impl BlockVisitor for ProposalVisitor { &mut self, _: &mut WorkDeltas, _: &MultiEraBlock, - genesis: &Genesis, + genesis: &crate::CardanoGenesis, pparams: &PParamsSet, epoch: Epoch, _: u64, protocol: u16, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { self.validity_period = pparams.governance_action_validity_period(); self.current_epoch = Some(epoch); - self.network_magic = Some(genesis.network_magic()); + self.network_magic = Some(genesis.shelley.network_magic.unwrap_or_default()); self.protocol = Some(protocol); Ok(()) @@ -348,7 +348,7 @@ impl BlockVisitor for ProposalVisitor { block: &MultiEraBlock, tx: Option<&MultiEraTx>, update: &MultiEraUpdate, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let action = pre_conway_to_pparamset(update); deltas.add_for_entity(NewProposal { @@ -374,7 +374,7 @@ impl BlockVisitor for ProposalVisitor { tx: &MultiEraTx, proposal: &pallas::ledger::traverse::MultiEraProposal, idx: usize, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { let Some(proposal) = proposal.as_conway() else { return Ok(()); }; @@ -392,7 +392,7 
@@ impl BlockVisitor for ProposalVisitor { }; let reward_account = pallas_extras::parse_reward_account(&proposal.reward_account) - .ok_or(ChainError::InvalidProposalParams)?; + .ok_or(ChainError::ChainSpecific(crate::CardanoError::InvalidProposalParams))?; deltas.add_for_entity(NewProposal { slot: block.slot(), diff --git a/crates/cardano/src/roll/txs.rs b/crates/cardano/src/roll/txs.rs index 8ece2522a..18c1a6fa6 100644 --- a/crates/cardano/src/roll/txs.rs +++ b/crates/cardano/src/roll/txs.rs @@ -6,7 +6,7 @@ use std::collections::HashMap; -use dolos_core::{ChainError, Genesis, TxOrder, TxoRef}; +use dolos_core::{ChainError, TxOrder, TxoRef}; use pallas::{ codec::utils::KeepRaw, ledger::{ @@ -30,12 +30,12 @@ impl BlockVisitor for TxLogVisitor { &mut self, _deltas: &mut WorkDeltas, _block: &MultiEraBlock, - _: &Genesis, + _: &crate::CardanoGenesis, _: &PParamsSet, _: Epoch, _: u64, _: u16, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Index tag extraction moved to CardanoIndexDeltaBuilder Ok(()) } @@ -46,7 +46,7 @@ impl BlockVisitor for TxLogVisitor { _: &MultiEraBlock, _tx: &MultiEraTx, _: &HashMap, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Index tag extraction moved to CardanoIndexDeltaBuilder Ok(()) } @@ -58,7 +58,7 @@ impl BlockVisitor for TxLogVisitor { _: &MultiEraTx, _input: &MultiEraInput, _resolved: &pallas::ledger::traverse::MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Index tag extraction moved to CardanoIndexDeltaBuilder Ok(()) } @@ -70,7 +70,7 @@ impl BlockVisitor for TxLogVisitor { _: &MultiEraTx, _: u32, _output: &pallas::ledger::traverse::MultiEraOutput, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Index tag extraction moved to CardanoIndexDeltaBuilder Ok(()) } @@ -81,7 +81,7 @@ impl BlockVisitor for TxLogVisitor { _: &MultiEraBlock, _: &MultiEraTx, _datum: &KeepRaw<'_, PlutusData>, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { 
// Index tag extraction moved to CardanoIndexDeltaBuilder Ok(()) } @@ -93,7 +93,7 @@ impl BlockVisitor for TxLogVisitor { _: &MultiEraTx, _: &TxOrder, _cert: &MultiEraCert, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Index tag extraction moved to CardanoIndexDeltaBuilder Ok(()) } @@ -104,7 +104,7 @@ impl BlockVisitor for TxLogVisitor { _: &MultiEraBlock, _: &MultiEraTx, _redeemer: &MultiEraRedeemer, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { // Index tag extraction moved to CardanoIndexDeltaBuilder Ok(()) } diff --git a/crates/cardano/src/roll/work_unit.rs b/crates/cardano/src/roll/work_unit.rs index 8f9f839dd..df89ae601 100644 --- a/crates/cardano/src/roll/work_unit.rs +++ b/crates/cardano/src/roll/work_unit.rs @@ -5,11 +5,11 @@ use std::sync::Arc; -use dolos_core::{Domain, DomainError, Genesis, MempoolUpdate, RawBlock, TipEvent, WorkUnit}; +use dolos_core::{Domain, DomainError, MempoolUpdate, RawBlock, TipEvent, WorkUnit}; use tracing::{debug, info}; use crate::roll::batch::WorkBatch; -use crate::{roll, Cache, CardanoDelta, CardanoEntity, CardanoLogic}; +use crate::{roll, Cache, CardanoDelta, CardanoEntity, CardanoError, CardanoGenesis, CardanoLogic}; /// Work unit for processing a batch of blocks ("rolling" the chain forward). pub struct RollWorkUnit { @@ -17,7 +17,7 @@ pub struct RollWorkUnit { batch: WorkBatch, /// Genesis configuration - genesis: Arc, + genesis: Arc, /// Whether this is live mode (emit tip notifications) live_mode: bool, @@ -30,7 +30,7 @@ impl RollWorkUnit { /// Create a new roll work unit. 
pub(crate) fn new( batch: WorkBatch, - genesis: Arc, + genesis: Arc, live_mode: bool, cache: Cache, ) -> Self { @@ -45,13 +45,13 @@ impl RollWorkUnit { impl WorkUnit for RollWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { "roll" } - fn load(&mut self, domain: &D) -> Result<(), DomainError> { + fn load(&mut self, domain: &D) -> Result<(), DomainError> { debug!(blocks = self.batch.blocks.len(), "loading roll batch UTxOs"); self.batch.load_utxos(domain)?; @@ -68,12 +68,12 @@ where Ok(()) } - fn compute(&mut self) -> Result<(), DomainError> { + fn compute(&mut self) -> Result<(), DomainError> { // Deltas are computed during load() since they require state access. Ok(()) } - fn commit_wal(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_wal(&mut self, domain: &D) -> Result<(), DomainError> { debug!("committing roll batch to WAL"); // Ensure blocks are sorted before WAL commit @@ -84,7 +84,7 @@ where Ok(()) } - fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { debug!("loading entities for roll batch"); // Load entities that will be modified @@ -106,7 +106,7 @@ where Ok(()) } - fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError> { debug!("committing roll batch to archive"); self.batch.commit_archive(domain)?; @@ -114,7 +114,7 @@ where Ok(()) } - fn commit_indexes(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_indexes(&mut self, domain: &D) -> Result<(), DomainError> { debug!("committing roll batch to indexes"); self.batch.commit_indexes(domain)?; @@ -154,7 +154,7 @@ where .view() .txs() .iter() - .map(|tx| tx.hash()) + .map(|tx| crate::pallas_hash_to_core(tx.hash())) .collect(), }) .collect() diff --git a/crates/cardano/src/rupd/loading.rs b/crates/cardano/src/rupd/loading.rs index 41d6c3cf9..ac372e6a0 100644 --- 
a/crates/cardano/src/rupd/loading.rs +++ b/crates/cardano/src/rupd/loading.rs @@ -1,4 +1,4 @@ -use dolos_core::{ChainError, Domain, Genesis, StateStore}; +use dolos_core::{ChainError, Domain, StateStore}; use pallas::ledger::primitives::StakeCredential; use tracing::{debug, trace}; @@ -17,10 +17,10 @@ use crate::{ /// excluding federated/overlay blocks. The eta calculation uses this count /// to determine if pools are producing blocks at the expected rate. fn define_eta( - genesis: &Genesis, + genesis: &crate::CardanoGenesis, epoch: &EpochState, pool_blocks: Option, -) -> Result { +) -> Result> { if epoch.pparams.mark().is_none_or(|x| x.is_byron()) { return Ok(ratio!(1)); } @@ -55,11 +55,11 @@ fn neutral_incentives() -> EpochIncentives { } fn define_epoch_incentives( - genesis: &Genesis, + genesis: &crate::CardanoGenesis, state: &EpochState, reserves: u64, pool_blocks: Option, -) -> Result { +) -> Result> { let pparams = state.pparams.unwrap_live(); if pparams.is_byron() { @@ -96,7 +96,7 @@ impl StakeSnapshot { account: &StakeCredential, pool_id: PoolHash, stake: u64, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { self.accounts_by_pool .insert(pool_id, account.clone(), stake); @@ -122,12 +122,12 @@ impl StakeSnapshot { /// * `protocol` - Era protocol for stake calculation /// * `rupd_slot` - The RUPD boundary slot, used to determine registration status. /// Pre-Babbage filtering requires knowing which accounts were registered at RUPD time. 
- pub fn load( + pub fn load>( state: &D::State, stake_epoch: u64, protocol: EraProtocol, _rupd_slot: u64, - ) -> Result { + ) -> Result> { let mut snapshot = Self::default(); let pools = state.iter_entities_typed::(PoolState::NS, None)?; @@ -243,7 +243,7 @@ impl RupdWork { self.snapshot.performance_epoch_pool_blocks } - pub fn load(state: &D::State, genesis: &Genesis) -> Result { + pub fn load>(state: &D::State, genesis: &crate::CardanoGenesis) -> Result> { let epoch = crate::load_epoch::(state)?; let current_epoch = epoch.number; diff --git a/crates/cardano/src/rupd/mod.rs b/crates/cardano/src/rupd/mod.rs index ec874613a..f988aeef4 100644 --- a/crates/cardano/src/rupd/mod.rs +++ b/crates/cardano/src/rupd/mod.rs @@ -1,8 +1,8 @@ use std::collections::{HashMap, HashSet}; use dolos_core::{ - ArchiveStore, ArchiveWriter, BlockSlot, ChainError, ChainPoint, Domain, EntityKey, Genesis, - LogKey, TemporalKey, + ArchiveStore, ArchiveWriter, BlockSlot, ChainError, ChainPoint, Domain, EntityKey, LogKey, + TemporalKey, }; use pallas::ledger::primitives::StakeCredential; use tracing::{debug, instrument}; @@ -28,7 +28,7 @@ pub trait RupdVisitor: Default { ctx: &mut RupdWork, id: &PoolId, pool: &PoolState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } @@ -38,12 +38,12 @@ pub trait RupdVisitor: Default { ctx: &mut RupdWork, id: &AccountId, account: &AccountState, - ) -> Result<(), ChainError> { + ) -> Result<(), ChainError> { Ok(()) } #[allow(unused_variables)] - fn flush(&mut self, ctx: &mut RupdWork) -> Result<(), ChainError> { + fn flush(&mut self, ctx: &mut RupdWork) -> Result<(), ChainError> { Ok(()) } } @@ -128,11 +128,11 @@ pub struct RupdWork { pub pparams: Option, } -fn log_work( +fn log_work>( work: &RupdWork, rewards: &RewardMap, archive: &D::Archive, -) -> Result<(), ChainError> { +) -> Result<(), ChainError> { let Some((_, epoch)) = work.relevant_epochs() else { return Ok(()); }; @@ -186,12 +186,12 @@ fn log_work( } #[instrument("rupd", 
skip_all, fields(slot = %slot))] -pub fn execute( +pub fn execute>( state: &D::State, archive: &D::Archive, slot: BlockSlot, - genesis: &Genesis, -) -> Result, ChainError> { + genesis: &crate::CardanoGenesis, +) -> Result, ChainError> { debug!(slot, "executing rupd work unit"); let work = RupdWork::load::(state, genesis)?; diff --git a/crates/cardano/src/rupd/work_unit.rs b/crates/cardano/src/rupd/work_unit.rs index 40c87ebfb..4b2e9938d 100644 --- a/crates/cardano/src/rupd/work_unit.rs +++ b/crates/cardano/src/rupd/work_unit.rs @@ -6,12 +6,12 @@ use std::sync::Arc; -use dolos_core::{BlockSlot, Domain, DomainError, Genesis, StateStore, StateWriter, WorkUnit}; +use dolos_core::{BlockSlot, Domain, DomainError, StateStore, StateWriter, WorkUnit}; use tracing::debug; use crate::{ rewards::{Reward, RewardMap}, - CardanoLogic, FixedNamespace, PendingRewardState, + CardanoError, CardanoGenesis, CardanoLogic, FixedNamespace, PendingRewardState, }; use super::{credential_to_key, RupdWork}; @@ -19,7 +19,7 @@ use super::{credential_to_key, RupdWork}; /// Work unit for computing rewards at the stability window. pub struct RupdWorkUnit { slot: BlockSlot, - genesis: Arc, + genesis: Arc, // Loaded work: Option, @@ -30,7 +30,7 @@ pub struct RupdWorkUnit { impl RupdWorkUnit { /// Create a new rupd work unit. 
- pub fn new(slot: BlockSlot, genesis: Arc) -> Self { + pub fn new(slot: BlockSlot, genesis: Arc) -> Self { Self { slot, genesis, @@ -52,13 +52,13 @@ impl RupdWorkUnit { impl WorkUnit for RupdWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { "rupd" } - fn load(&mut self, domain: &D) -> Result<(), DomainError> { + fn load(&mut self, domain: &D) -> Result<(), DomainError> { debug!(slot = self.slot, "loading rupd work context"); self.work = Some(RupdWork::load::(domain.state(), &self.genesis)?); @@ -67,7 +67,7 @@ where Ok(()) } - fn compute(&mut self) -> Result<(), DomainError> { + fn compute(&mut self) -> Result<(), DomainError> { debug!(slot = self.slot, "computing rewards"); let work = self @@ -85,7 +85,7 @@ where Ok(()) } - fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { let _work = self .work .as_ref() @@ -155,7 +155,7 @@ where Ok(()) } - fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError> { + fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError> { let work = self .work .as_ref() diff --git a/crates/cardano/src/validate.rs b/crates/cardano/src/validate.rs index 979458613..179125b41 100644 --- a/crates/cardano/src/validate.rs +++ b/crates/cardano/src/validate.rs @@ -5,7 +5,7 @@ use dolos_core::{ChainError, ChainPoint, Domain, EraCbor, MempoolAwareUtxoStore, use crate::{CardanoError, CardanoGenesis}; use pallas::ledger::{ primitives::{NetworkId, TransactionInput}, - traverse::{MultiEraInput, MultiEraOutput, MultiEraTx}, + traverse::{MultiEraInput, MultiEraTx}, }; use tracing::debug; diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 21fc8b1e4..dd9923d5c 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -510,7 +510,7 @@ pub trait ChainLogic: Sized + Send + Sync { type ChainSpecificError: std::error::Error + Send + Sync; /// The concrete work unit type produced by this chain logic. 
- type WorkUnit>: WorkUnit; + type WorkUnit>: WorkUnit; /// Initialize the chain logic with configuration and state. fn initialize( @@ -542,7 +542,7 @@ pub trait ChainLogic: Sized + Send + Sync { /// The returned work unit should be executed using `executor::execute_work_unit()`. fn pop_work(&mut self, domain: &D) -> Option> where - D: Domain; + D: Domain; /// Compute undo data for a block during rollback. /// From 652c30e7f8ac0b10d90138fc2e0061f7b4e9ec5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 01:34:46 -0300 Subject: [PATCH 34/85] fix(crates/redb3): compile redb3 --- crates/core/src/hash.rs | 4 ++ crates/redb3/src/archive/mod.rs | 90 +++++++++++++++++++------------ crates/redb3/src/indexes/mod.rs | 8 +-- crates/redb3/src/mempool.rs | 4 +- crates/redb3/src/state/utxoset.rs | 10 ++-- 5 files changed, 70 insertions(+), 46 deletions(-) diff --git a/crates/core/src/hash.rs b/crates/core/src/hash.rs index e4c71e977..d606131f2 100644 --- a/crates/core/src/hash.rs +++ b/crates/core/src/hash.rs @@ -12,6 +12,10 @@ impl Hash { pub fn as_slice(&self) -> &[u8] { &self.0 } + + pub fn as_array(&self) -> &[u8; N] { + &self.0 + } } impl fmt::Display for Hash { diff --git a/crates/redb3/src/archive/mod.rs b/crates/redb3/src/archive/mod.rs index bde547df9..f29d1f660 100644 --- a/crates/redb3/src/archive/mod.rs +++ b/crates/redb3/src/archive/mod.rs @@ -7,9 +7,11 @@ use std::{ }; use tracing::{debug, info, warn}; +use std::convert::Infallible; + use dolos_core::{ config::RedbArchiveConfig, ArchiveError, BlockBody, BlockSlot, ChainPoint, EntityValue, - EraCbor, LogKey, Namespace, RawBlock, StateSchema, TxHash, TxOrder, TxoRef, + EraCbor, LogKey, Namespace, RawBlock, StateSchema, TxHash, TxOrder, }; use ::redb::Durability; @@ -33,7 +35,7 @@ pub(crate) mod tables; mod tests; #[derive(Debug)] -pub struct RedbArchiveError(ArchiveError); +pub struct RedbArchiveError(ArchiveError); impl std::fmt::Display for RedbArchiveError { fn fmt(&self, f: 
&mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -55,13 +57,13 @@ impl From for RedbArchiveError { } } -impl From for RedbArchiveError { - fn from(value: ArchiveError) -> Self { +impl From> for RedbArchiveError { + fn from(value: ArchiveError) -> Self { Self(value) } } -impl From for ArchiveError { +impl From for ArchiveError { fn from(value: RedbArchiveError) -> Self { value.0 } @@ -252,11 +254,13 @@ impl ArchiveStore { }; if let Some(body) = tables::BlocksTable::get_by_slot(&rx, &self.flatfiles, *slot)? { - let decoded = - MultiEraBlock::decode(&body).map_err(ArchiveError::BlockDecodingError)?; + let decoded = MultiEraBlock::decode(&body) + .map_err(|e| RedbArchiveError(ArchiveError::InternalError(e.to_string())))?; - if decoded.hash().eq(hash) { - return Ok(Some(ChainPoint::Specific(decoded.slot(), decoded.hash()))); + let decoded_hash: [u8; 32] = (*decoded.hash()).into(); + let decoded_hash = dolos_core::hash::Hash::from(decoded_hash); + if decoded_hash.eq(hash) { + return Ok(Some(ChainPoint::Specific(decoded.slot(), decoded_hash))); } } } @@ -498,8 +502,8 @@ impl ArchiveStore { return Ok(None); }; - let block = - MultiEraBlock::decode(raw.as_slice()).map_err(ArchiveError::BlockDecodingError)?; + let block = MultiEraBlock::decode(raw.as_slice()) + .map_err(|e| RedbArchiveError(ArchiveError::InternalError(e.to_string())))?; for (idx, tx) in block.txs().iter().enumerate() { if tx.hash().to_vec() == tx_hash { return Ok(Some((raw, idx))); @@ -619,14 +623,17 @@ impl ArchiveStore { self.get_possible_blocks_by_spent_txo(spent_txo, start_slot, end_slot)?; for raw in possible { - let block = - MultiEraBlock::decode(raw.as_slice()).map_err(ArchiveError::BlockDecodingError)?; + let block = MultiEraBlock::decode(raw.as_slice()) + .map_err(|e| RedbArchiveError(ArchiveError::InternalError(e.to_string())))?; for tx in block.txs().iter() { for input in tx.inputs() { - let bytes: Vec = TxoRef::from(&input).into(); + // TODO: dudoso, ask Santiago + let mut bytes = 
input.hash().to_vec(); + bytes.extend_from_slice(u32::to_be_bytes(input.index() as u32).as_slice()); if bytes.as_slice() == spent_txo { - return Ok(Some(tx.hash())); + let hash_bytes: [u8; 32] = (*tx.hash()).into(); + return Ok(Some(dolos_core::hash::Hash::from(hash_bytes))); } } } @@ -645,8 +652,8 @@ impl ArchiveStore { return Ok(None); }; - let block = - MultiEraBlock::decode(raw.as_slice()).map_err(ArchiveError::BlockDecodingError)?; + let block = MultiEraBlock::decode(raw.as_slice()) + .map_err(|e| RedbArchiveError(ArchiveError::InternalError(e.to_string())))?; if let Some(tx) = block.txs().iter().find(|x| x.hash().to_vec() == tx_hash) { return Ok(Some(EraCbor(block.era().into(), tx.encode()))); } @@ -663,8 +670,8 @@ impl ArchiveStore { self.get_possible_blocks_by_datum_hash(datum_hash.as_slice(), start_slot, end_slot)?; for raw in possible { - let block = - MultiEraBlock::decode(raw.as_slice()).map_err(ArchiveError::BlockDecodingError)?; + let block = MultiEraBlock::decode(raw.as_slice()) + .map_err(|e| RedbArchiveError(ArchiveError::InternalError(e.to_string())))?; for tx in block.txs() { // Check witnesses if let Some(plutus_data) = tx.find_plutus_data(datum_hash) { @@ -790,7 +797,9 @@ pub struct ArchiveStoreWriter { } impl dolos_core::ArchiveWriter for ArchiveStoreWriter { - fn apply(&self, point: &ChainPoint, block: &RawBlock) -> Result<(), ArchiveError> { + type ChainSpecificError = Infallible; + + fn apply(&self, point: &ChainPoint, block: &RawBlock) -> Result<(), ArchiveError> { self.pending_blocks .lock() .unwrap() @@ -798,12 +807,12 @@ impl dolos_core::ArchiveWriter for ArchiveStoreWriter { Ok(()) } - fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError> { + fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError> { tables::BlocksTable::undo(&self.wx, &self.flatfiles, point)?; Ok(()) } - fn commit(self) -> Result<(), ArchiveError> { + fn commit(self) -> Result<(), ArchiveError> { // 1. 
Batch-append all pending blocks to flat files (fsync inside). // 2. Insert all index entries into redb. // 3. Commit redb transaction. @@ -821,7 +830,7 @@ impl dolos_core::ArchiveWriter for ArchiveStoreWriter { ns: Namespace, key: &dolos_core::LogKey, value: &dolos_core::EntityValue, - ) -> Result<(), ArchiveError> { + ) -> Result<(), ArchiveError> { let table = self .tables .get(&ns) @@ -838,7 +847,7 @@ impl dolos_core::ArchiveWriter for ArchiveStoreWriter { pub struct LogIter(pub(crate) ::redb::Range<'static, &'static [u8], &'static [u8]>); impl Iterator for LogIter { - type Item = Result<(LogKey, EntityValue), ArchiveError>; + type Item = Result<(LogKey, EntityValue), ArchiveError>; fn next(&mut self) -> Option { let next = self.0.next()?; @@ -856,7 +865,7 @@ impl Iterator for LogIter { pub struct EntityValueIter(pub(crate) ::redb::MultimapValue<'static, &'static [u8]>); impl Iterator for EntityValueIter { - type Item = Result; + type Item = Result>; fn next(&mut self) -> Option { let next = self.0.next()?; @@ -875,35 +884,46 @@ impl dolos_core::ArchiveStore for ArchiveStore { type Writer = ArchiveStoreWriter; type LogIter = LogIter; type EntityValueIter = EntityValueIter; + type ChainSpecificError = Infallible; - fn start_writer(&self) -> Result { + fn start_writer(&self) -> Result> { Ok(Self::start_writer(self)?) } - fn get_block_by_slot(&self, slot: &BlockSlot) -> Result, ArchiveError> { + fn get_block_by_slot( + &self, + slot: &BlockSlot, + ) -> Result, ArchiveError> { Ok(Self::get_block_by_slot(self, slot)?) } fn get_range<'a>( &self, from: Option, to: Option, - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { Ok(Self::get_range(self, from, to)?) } - fn find_intersect(&self, intersect: &[ChainPoint]) -> Result, ArchiveError> { + fn find_intersect( + &self, + intersect: &[ChainPoint], + ) -> Result, ArchiveError> { Ok(Self::find_intersect(self, intersect)?) 
} - fn get_tip(&self) -> Result, ArchiveError> { + fn get_tip(&self) -> Result, ArchiveError> { Ok(Self::get_tip(self)?) } - fn prune_history(&self, max_slots: u64, max_prune: Option) -> Result { + fn prune_history( + &self, + max_slots: u64, + max_prune: Option, + ) -> Result> { Ok(Self::prune_history(self, max_slots, max_prune)?) } - fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { + fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { Ok(Self::truncate_front(self, after)?) } @@ -911,7 +931,7 @@ impl dolos_core::ArchiveStore for ArchiveStore { &self, ns: Namespace, keys: &[&dolos_core::LogKey], - ) -> Result>, ArchiveError> { + ) -> Result>, ArchiveError> { let mut rx = self.db().begin_read().map_err(RedbArchiveError::from)?; let table = self @@ -935,7 +955,7 @@ impl dolos_core::ArchiveStore for ArchiveStore { &self, ns: Namespace, range: std::ops::Range, - ) -> Result { + ) -> Result> { let mut rx = self.db().begin_read().map_err(RedbArchiveError::from)?; let range = std::ops::Range { @@ -1015,7 +1035,7 @@ pub struct ArchiveSparseIter( ); impl Iterator for ArchiveSparseIter { - type Item = Result<(BlockSlot, Option), ArchiveError>; + type Item = Result<(BlockSlot, Option), ArchiveError>; fn next(&mut self) -> Option { let next = self.1.next()?; diff --git a/crates/redb3/src/indexes/mod.rs b/crates/redb3/src/indexes/mod.rs index 276b69ffa..212e81482 100644 --- a/crates/redb3/src/indexes/mod.rs +++ b/crates/redb3/src/indexes/mod.rs @@ -204,7 +204,7 @@ impl FilterIndexes { // Insert produced UTxOs for (txo_ref, tags) in &delta.utxo.produced { - let v: (&[u8; 32], u32) = (&txo_ref.0, txo_ref.1); + let v: (&[u8; 32], u32) = (txo_ref.0.as_array(), txo_ref.1); for tag in tags { match tag.dimension { @@ -230,7 +230,7 @@ impl FilterIndexes { // Remove consumed UTxOs for (txo_ref, tags) in &delta.utxo.consumed { - let v: (&[u8; 32], u32) = (&txo_ref.0, txo_ref.1); + let v: (&[u8; 32], u32) = (txo_ref.0.as_array(), 
txo_ref.1); for tag in tags { match tag.dimension { @@ -269,7 +269,7 @@ impl FilterIndexes { // Remove produced UTxOs (undo insertion) for (txo_ref, tags) in &delta.utxo.produced { - let v: (&[u8; 32], u32) = (&txo_ref.0, txo_ref.1); + let v: (&[u8; 32], u32) = (txo_ref.0.as_array(), txo_ref.1); for tag in tags { match tag.dimension { @@ -295,7 +295,7 @@ impl FilterIndexes { // Restore consumed UTxOs (undo removal) for (txo_ref, tags) in &delta.utxo.consumed { - let v: (&[u8; 32], u32) = (&txo_ref.0, txo_ref.1); + let v: (&[u8; 32], u32) = (txo_ref.0.as_array(), txo_ref.1); for tag in tags { match tag.dimension { diff --git a/crates/redb3/src/mempool.rs b/crates/redb3/src/mempool.rs index 9ab735323..bc380c1cf 100644 --- a/crates/redb3/src/mempool.rs +++ b/crates/redb3/src/mempool.rs @@ -415,7 +415,7 @@ impl InflightRecord { fn into_finalized_entry(self, hash: TxHash) -> FinalizedEntry { FinalizedEntry { - hash: hash.to_vec(), + hash: hash.as_slice().to_vec(), confirmations: self.confirmations, confirmed_at: self.confirmed_at, payload: Some(self.payload), @@ -425,7 +425,7 @@ impl InflightRecord { fn into_dropped_entry(self, hash: TxHash) -> FinalizedEntry { FinalizedEntry { - hash: hash.to_vec(), + hash: hash.as_slice().to_vec(), confirmations: self.confirmations, confirmed_at: self.confirmed_at, payload: Some(self.payload), diff --git a/crates/redb3/src/state/utxoset.rs b/crates/redb3/src/state/utxoset.rs index 05329b539..30afb6231 100644 --- a/crates/redb3/src/state/utxoset.rs +++ b/crates/redb3/src/state/utxoset.rs @@ -58,7 +58,7 @@ impl UtxosTable { let mut out = HashMap::new(); for key in refs { - if let Some(body) = table.get(&(&key.0 as &[u8; 32], key.1))? { + if let Some(body) = table.get(&(key.0.as_array(), key.1))? 
{ let (era, cbor) = body.value(); let cbor = cbor.to_owned(); let value = Arc::new(EraCbor(era, cbor)); @@ -74,24 +74,24 @@ impl UtxosTable { let mut table = wx.open_table(Self::DEF)?; for (k, v) in delta.produced_utxo.iter() { - let k: (&[u8; 32], u32) = (&k.0, k.1); + let k: (&[u8; 32], u32) = (k.0.as_array(), k.1); let v: (u16, &[u8]) = (v.0, &v.1); table.insert(k, v)?; } for (k, v) in delta.recovered_stxi.iter() { - let k: (&[u8; 32], u32) = (&k.0, k.1); + let k: (&[u8; 32], u32) = (k.0.as_array(), k.1); let v: (u16, &[u8]) = (v.0, &v.1); table.insert(k, v)?; } for (k, _) in delta.undone_utxo.iter() { - let k: (&[u8; 32], u32) = (&k.0, k.1); + let k: (&[u8; 32], u32) = (k.0.as_array(), k.1); table.remove(k)?; } for (k, _) in delta.consumed_utxo.iter() { - let k: (&[u8; 32], u32) = (&k.0, k.1); + let k: (&[u8; 32], u32) = (k.0.as_array(), k.1); table.remove(k)?; } From cb74c6faa125d75398c22c0e5bd378e7b49aa2ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 02:30:58 -0300 Subject: [PATCH 35/85] fix(minibf): compiling with the new setup --- crates/minibf/src/hacks.rs | 38 ++++++++++++---------- crates/minibf/src/lib.rs | 30 +++++++++++++---- crates/minibf/src/mapping.rs | 19 ++++++----- crates/minibf/src/routes/accounts.rs | 29 +++++++++++------ crates/minibf/src/routes/addresses.rs | 28 +++++++++------- crates/minibf/src/routes/assets.rs | 20 ++++++------ crates/minibf/src/routes/blocks.rs | 12 +++---- crates/minibf/src/routes/epochs/mapping.rs | 5 ++- crates/minibf/src/routes/epochs/mod.rs | 11 ++++--- crates/minibf/src/routes/genesis.rs | 10 +++--- crates/minibf/src/routes/metadata.rs | 8 ++--- crates/minibf/src/routes/network.rs | 15 +++++---- crates/minibf/src/routes/pools.rs | 4 +-- crates/minibf/src/routes/scripts.rs | 12 +++---- crates/minibf/src/routes/tx/submit/mod.rs | 10 ++---- crates/minibf/src/routes/txs.rs | 31 +++++++++++------- crates/minibf/src/routes/utxos.rs | 24 ++++++++++---- 17 files changed, 180 
insertions(+), 126 deletions(-) diff --git a/crates/minibf/src/hacks.rs b/crates/minibf/src/hacks.rs index eb4b004ca..ae0de44ee 100644 --- a/crates/minibf/src/hacks.rs +++ b/crates/minibf/src/hacks.rs @@ -1,8 +1,8 @@ use axum::http::StatusCode; use blockfrost_openapi::models::tx_content_utxo::TxContentUtxo; use blockfrost_openapi::models::{block_content::BlockContent, tx_content::TxContent}; -use dolos_cardano::indexes::AsyncCardanoQueryExt; -use dolos_core::{ArchiveStore as _, Domain, TxoRef}; +use dolos_cardano::{indexes::AsyncCardanoQueryExt, pallas_hash_to_core, core_hash_to_pallas, CardanoError, CardanoGenesis}; +use dolos_core::{ArchiveStore as _, Domain}; use pallas::crypto::hash::Hash; use pallas::ledger::{ configs::{byron, shelley}, @@ -57,7 +57,7 @@ struct GenesisTxModel<'a> { consumed_by: Option>, } -pub fn genesis_hash_for_domain(domain: &Facade) -> Option<&'static str> { +pub fn genesis_hash_for_domain>(domain: &Facade) -> Option<&'static str> { match domain.genesis().shelley.network_magic { Some(1) => Some(GENESIS_HASH_PREPROD), Some(2) => Some(GENESIS_HASH_PREVIEW), @@ -66,7 +66,7 @@ pub fn genesis_hash_for_domain(domain: &Facade) -> Option<&'static } } -pub fn genesis_block_metadata_for_domain( +pub fn genesis_block_metadata_for_domain>( domain: &Facade, ) -> Option { match domain.genesis().shelley.network_magic { @@ -86,7 +86,7 @@ pub fn genesis_block_metadata_for_domain( } } -fn genesis_tx_output_by_hash( +fn genesis_tx_output_by_hash>( domain: &Facade, hash: &[u8], ) -> Result>, StatusCode> { @@ -182,9 +182,9 @@ impl<'a> IntoModel for GenesisTxModel<'a> { fn into_model(self) -> Result { let output = self.output.as_output(); - let builder = UtxoOutputModelBuilder::from_output(self.output.tx_hash, 0, output); + let builder = UtxoOutputModelBuilder::from_output(pallas_hash_to_core(self.output.tx_hash), 0, output); let builder = if let Some(consumed_by) = self.consumed_by { - builder.with_consumed_by(consumed_by) + 
builder.with_consumed_by(pallas_hash_to_core(consumed_by)) } else { builder }; @@ -198,7 +198,7 @@ impl<'a> IntoModel for GenesisTxModel<'a> { } } -pub fn genesis_tx_content_for_hash( +pub fn genesis_tx_content_for_hash>( domain: &Facade, hash: &[u8], ) -> Result { @@ -218,7 +218,7 @@ pub async fn genesis_tx_utxos_for_hash( hash: &[u8], ) -> Result where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let Some(block_meta) = genesis_block_metadata_for_domain(domain) else { return Err(StatusCode::NOT_FOUND); @@ -228,7 +228,11 @@ where return Err(StatusCode::NOT_FOUND); }; - let key: Vec = TxoRef(output.tx_hash, 0).into(); + let key = { + let mut v = output.tx_hash.as_slice().to_vec(); + v.extend_from_slice(&0u32.to_be_bytes()); + v + }; let consumed_by = domain .query() .tx_by_spent_txo(&key) @@ -236,7 +240,7 @@ where .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; let model = if let Some(consumed_by) = consumed_by { - GenesisTxModel::new(block_meta, output).with_consumed_by(consumed_by) + GenesisTxModel::new(block_meta, output).with_consumed_by(core_hash_to_pallas(consumed_by)) } else { GenesisTxModel::new(block_meta, output) }; @@ -244,7 +248,7 @@ where model.into_model() } -pub fn is_genesis_hash_for_domain( +pub fn is_genesis_hash_for_domain>( domain: &Facade, hash: &[u8], ) -> Result { @@ -257,7 +261,7 @@ pub fn is_genesis_hash_for_domain( Ok(hash == genesis_hash.as_slice()) } -pub fn genesis_block_for_domain( +pub fn genesis_block_for_domain>( domain: &Facade, ) -> Result, StatusCode> { match domain.genesis().shelley.network_magic { @@ -268,7 +272,7 @@ pub fn genesis_block_for_domain( } } -pub fn genesis_block_preview(domain: &Facade) -> Result { +pub fn genesis_block_preview>(domain: &Facade) -> Result { let confirmations = MultiEraBlock::decode( &domain .archive() @@ -311,7 +315,7 @@ pub fn genesis_block_preview(domain: &Facade) -> Result(domain: &Facade) -> Result { +pub fn genesis_block_preprod>(domain: &Facade) 
-> Result { let confirmations = MultiEraBlock::decode( &domain .archive() @@ -354,7 +358,7 @@ pub fn genesis_block_preprod(domain: &Facade) -> Result(domain: &Facade) -> Result { +pub fn genesis_block_mainnet>(domain: &Facade) -> Result { let confirmations = MultiEraBlock::decode( &domain .archive() @@ -397,7 +401,7 @@ pub fn genesis_block_mainnet(domain: &Facade) -> Result(domain: &Facade, block: &mut BlockContent) { +pub fn maybe_set_genesis_previous_block>(domain: &Facade, block: &mut BlockContent) { if block.height.is_some_and(|x| x > 1) { return; } diff --git a/crates/minibf/src/lib.rs b/crates/minibf/src/lib.rs index a286f2c70..712d28ffc 100644 --- a/crates/minibf/src/lib.rs +++ b/crates/minibf/src/lib.rs @@ -6,7 +6,7 @@ use axum::{ }; use dolos_cardano::{ model::{AccountState, AssetState, DRepState, EpochState, FixedNamespace, PoolState}, - ChainSummary, PParamsSet, + CardanoError, CardanoGenesis, ChainSummary, PParamsSet, }; use pallas::{ crypto::hash::Hash, @@ -92,7 +92,10 @@ impl Facade { Ok(tip.slot()) } - pub fn get_network_id(&self) -> Result { + pub fn get_network_id(&self) -> Result + where + D: Domain, + { match self.genesis().shelley.network_id.as_ref() { Some(x) if x == "Mainnet" => Ok(Network::Mainnet), Some(x) if x == "Testnet" => Ok(Network::Testnet), @@ -271,7 +274,12 @@ pub struct Driver; pub fn build_router(cfg: MinibfConfig, domain: D) -> Router where - D: Domain + SubmitExt + Clone + Send + Sync + 'static, + D: Domain + + SubmitExt + + Clone + + Send + + Sync + + 'static, Option: From, Option: From, Option: From, @@ -287,7 +295,12 @@ where pub(crate) fn build_router_with_facade(facade: Facade) -> Router where - D: Domain + SubmitExt + Clone + Send + Sync + 'static, + D: Domain + + SubmitExt + + Clone + + Send + + Sync + + 'static, Option: From, Option: From, Option: From, @@ -501,9 +514,14 @@ where app.layer(NormalizePathLayer::trim_trailing_slash()) } -impl dolos_core::Driver for Driver +impl dolos_core::Driver for Driver where - D: Clone 
+ Send + Sync + 'static, + D: Domain + + SubmitExt + + Clone + + Send + + Sync + + 'static, Option: From, Option: From, Option: From, diff --git a/crates/minibf/src/mapping.rs b/crates/minibf/src/mapping.rs index ed67d2b31..1ef5e7cb8 100644 --- a/crates/minibf/src/mapping.rs +++ b/crates/minibf/src/mapping.rs @@ -54,7 +54,8 @@ use blockfrost_openapi::models::{ }; use dolos_cardano::{ - pallas_extras, AccountState, ChainSummary, DRepState, PParamsSet, PoolHash, PoolState, + pallas_extras, pallas_hash_to_core, AccountState, ChainSummary, DRepState, + PParamsSet, PoolHash, PoolState, }; use dolos_core::{BlockSlot, Domain, EraCbor, TxHash, TxOrder, TxoIdx, TxoRef}; @@ -841,15 +842,15 @@ impl<'a> TxModelBuilder<'a> { let mut deps = vec![]; for i in tx.inputs() { - deps.push(*i.hash()); + deps.push(pallas_hash_to_core(*i.hash())); } for i in tx.collateral() { - deps.push(*i.hash()); + deps.push(pallas_hash_to_core(*i.hash())); } for i in tx.reference_inputs() { - deps.push(*i.hash()); + deps.push(pallas_hash_to_core(*i.hash())); } let unique = deps.into_iter().unique().collect(); @@ -863,7 +864,7 @@ impl<'a> TxModelBuilder<'a> { let mut deps = vec![]; for (i, _) in tx.produces() { - deps.push(TxoRef(tx.hash(), i as u32)); + deps.push(TxoRef(pallas_hash_to_core(tx.hash()), i as u32)); } Ok(deps) @@ -889,7 +890,7 @@ impl<'a> TxModelBuilder<'a> { where 'b: 'c, { - let tx = self.deps.get(input.hash()); + let tx = self.deps.get(&pallas_hash_to_core(*input.hash())); let as_output = tx.and_then(|tx| tx.output_at(input.index() as usize)); UtxoInputModelBuilder { @@ -944,7 +945,7 @@ impl<'a> IntoModel for TxModelBuilder<'a> { .outputs() .into_iter() .enumerate() - .map(|(i, o)| UtxoOutputModelBuilder::from_output(tx.hash(), i as u32, o)) + .map(|(i, o)| UtxoOutputModelBuilder::from_output(pallas_hash_to_core(tx.hash()), i as u32, o)) .map(|b| { let builder = if let Some(consumed_by) = self.consumed_deps.get(&b.txo_ref()) { b.with_consumed_by(*consumed_by) @@ -960,7 +961,7 @@ 
impl<'a> IntoModel for TxModelBuilder<'a> { .into_iter() .enumerate() .map(|(i, o)| { - UtxoOutputModelBuilder::from_collateral(tx.hash(), outputs.len(), i as u32, o) + UtxoOutputModelBuilder::from_collateral(pallas_hash_to_core(tx.hash()), outputs.len(), i as u32, o) }) .map(|b| { let builder = if let Some(consumed_by) = self.consumed_deps.get(&b.txo_ref()) { @@ -1224,7 +1225,7 @@ impl TxModelBuilder<'_> { let tx_hash = input.hash(); let index = input.index() as usize; - let source = self.deps.get(tx_hash)?; + let source = self.deps.get(&pallas_hash_to_core(*tx_hash))?; let outputs = source.outputs(); let output = outputs.get(index)?; diff --git a/crates/minibf/src/routes/accounts.rs b/crates/minibf/src/routes/accounts.rs index 08ff98122..f83bb42b2 100644 --- a/crates/minibf/src/routes/accounts.rs +++ b/crates/minibf/src/routes/accounts.rs @@ -17,8 +17,8 @@ use blockfrost_openapi::models::{ use dolos_cardano::{ indexes::{AsyncCardanoQueryExt, CardanoIndexExt, SlotOrder}, model::{AccountState, DRepState}, - pallas_extras, ChainSummary, FixedNamespace, LeaderRewardLog, MemberRewardLog, - PoolDepositRefundLog, + pallas_extras, CardanoError, CardanoGenesis, ChainSummary, FixedNamespace, LeaderRewardLog, + MemberRewardLog, PoolDepositRefundLog, }; use dolos_core::{ArchiveStore as _, Domain, EntityKey, LogKey, TemporalKey}; use futures_util::StreamExt; @@ -193,7 +193,7 @@ pub async fn by_stake_addresses( ) -> Result>, Error> where Option: From, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; pagination.enforce_max_scan_limit(domain.config.max_scan_items())?; @@ -268,7 +268,7 @@ pub async fn by_stake_utxos( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; @@ -484,7 +484,7 @@ where Epoch, Network, ) -> Result, StatusCode>, - D: Domain + Clone 
+ Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let account_key = parse_account_key_param(stake_address)?; @@ -545,7 +545,7 @@ pub async fn by_stake_delegations( ) -> Result>, Error> where Option: From, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; pagination.enforce_max_scan_limit(domain.config.max_scan_items())?; @@ -568,7 +568,7 @@ pub async fn by_stake_registrations( ) -> Result>, Error> where Option: From, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; pagination.enforce_max_scan_limit(domain.config.max_scan_items())?; @@ -614,7 +614,10 @@ impl TryFrom for AccountRewardContentInner { fn try_from(value: AccountRewardWrapper) -> Result { match value { AccountRewardWrapper::Leader((epoch, x)) => { - let operator = Hash::<28>::from(EntityKey::from(x.pool_id)); + let key = EntityKey::from(x.pool_id); + let arr: [u8; 28] = key.as_ref().try_into() + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + let operator = Hash::<28>::from(arr); let pool_id = mapping::bech32_pool(operator)?; Ok(AccountRewardContentInner { @@ -625,7 +628,10 @@ impl TryFrom for AccountRewardContentInner { }) } AccountRewardWrapper::Member((epoch, x)) => { - let operator = Hash::<28>::from(EntityKey::from(x.pool_id)); + let key = EntityKey::from(x.pool_id); + let arr: [u8; 28] = key.as_ref().try_into() + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + let operator = Hash::<28>::from(arr); let pool_id = mapping::bech32_pool(operator)?; Ok(AccountRewardContentInner { @@ -636,7 +642,10 @@ impl TryFrom for AccountRewardContentInner { }) } AccountRewardWrapper::PoolDepositRefund((epoch, x)) => { - let operator = Hash::<28>::from(EntityKey::from(x.pool_id)); + let key = EntityKey::from(x.pool_id); + let arr: [u8; 28] = key.as_ref().try_into() + .map_err(|_| 
StatusCode::INTERNAL_SERVER_ERROR)?; + let operator = Hash::<28>::from(arr); let pool_id = mapping::bech32_pool(operator)?; Ok(AccountRewardContentInner { diff --git a/crates/minibf/src/routes/addresses.rs b/crates/minibf/src/routes/addresses.rs index d86237895..f25be2914 100644 --- a/crates/minibf/src/routes/addresses.rs +++ b/crates/minibf/src/routes/addresses.rs @@ -20,9 +20,10 @@ use pallas::ledger::{ use dolos_cardano::{ indexes::{AsyncCardanoQueryExt, CardanoIndexExt, SlotOrder}, - pallas_extras, ChainSummary, + pallas_extras, CardanoError, ChainSummary, }; use dolos_core::{BlockBody, BlockSlot, Domain, EraCbor, StateStore as _, TxoRef}; +use pallas::ledger::traverse::Era; use crate::{ error::Error, @@ -45,7 +46,7 @@ type VKeyOrAddress = Either, Vec>; /// Stream of blocks returned by address queries type BlockStream = std::pin::Pin< - Box), dolos_core::DomainError>> + Send>, + Box), dolos_core::DomainError>> + Send>, >; enum ParsedAddress { @@ -158,7 +159,7 @@ fn blocks_for_address_stream( order: SlotOrder, ) -> Result<(BlockStream, VKeyOrAddress), Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { match parse_address(address)? { ParsedAddress::Payment { key, .. 
} => Ok(( @@ -211,9 +212,12 @@ fn amount_for_refs( let outputs: Vec> = utxos .values() - .map(|x| MultiEraOutput::try_from(x.as_ref())) + .map(|x| { + let era = Era::try_from(x.0).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + MultiEraOutput::decode(era, &x.1).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR) + }) .collect::>() - .map_err(|err| { + .map_err(|err: StatusCode| { tracing::error!(?err); StatusCode::INTERNAL_SERVER_ERROR })?; @@ -226,7 +230,7 @@ pub async fn by_address( State(domain): State>, ) -> Result, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let parsed = parse_address(&address)?; let refs = refs_for_parsed_address(&domain, &parsed)?; @@ -250,7 +254,7 @@ where async fn is_address_in_chain(domain: &Facade, address: &str) -> Result where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let end_slot = domain.get_tip_slot()?; let start_slot = 0; @@ -274,7 +278,7 @@ where async fn is_asset_in_chain(domain: &Facade, asset: &[u8]) -> Result where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let end_slot = domain.get_tip_slot()?; let start_slot = 0; @@ -297,7 +301,7 @@ pub async fn utxos( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; @@ -322,7 +326,7 @@ pub async fn utxos_with_asset( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; @@ -464,7 +468,7 @@ pub async fn transactions( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; 
pagination.enforce_max_scan_limit(domain.config.max_scan_items())?; @@ -528,7 +532,7 @@ pub async fn txs( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; pagination.enforce_max_scan_limit(domain.config.max_scan_items())?; diff --git a/crates/minibf/src/routes/assets.rs b/crates/minibf/src/routes/assets.rs index 45661b275..62c8629cd 100644 --- a/crates/minibf/src/routes/assets.rs +++ b/crates/minibf/src/routes/assets.rs @@ -16,7 +16,7 @@ use dolos_cardano::{ cip68::{cip_68_reference_asset, encode_to_hex, parse_cip68_metadata_map, Cip68TokenStandard}, indexes::{AsyncCardanoQueryExt, CardanoIndexExt, SlotOrder}, model::AssetState, - ChainSummary, + CardanoError, ChainSummary, }; use dolos_core::{BlockSlot, Domain, EraCbor, IndexStore as _, StateStore as _}; use futures_util::StreamExt; @@ -226,7 +226,7 @@ async fn datum_from_hash( hash: Hash<32>, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let Some(bytes) = domain .query() @@ -265,7 +265,7 @@ async fn metadata_from_datum_option( standard: Cip68TokenStandard, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { match datum_option { pallas::ledger::primitives::conway::DatumOption::Hash(hash) => { @@ -287,7 +287,7 @@ async fn last_cip68_metadata_from_tx( standard: Cip68TokenStandard, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let mut last_metadata = None; @@ -320,7 +320,7 @@ impl AssetModelBuilder { domain: &Facade, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, Option: From, { let cip68_reference = match cip68_reference_from_unit(&self.unit)? 
{ @@ -429,7 +429,7 @@ impl AssetModelBuilder { async fn into_model(self, domain: &Facade) -> Result where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, Option: From, { let policy = self.subject[..28].to_vec(); @@ -474,7 +474,7 @@ pub async fn by_subject( ) -> Result, StatusCode> where Option: From, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let subject = hex::decode(&unit).map_err(|_| StatusCode::BAD_REQUEST)?; let entity_key = pallas::crypto::hash::Hasher::<256>::hash(subject.as_slice()); @@ -611,7 +611,7 @@ async fn tx_has_subject( tx: &MultiEraTx<'_>, ) -> Result where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { for (_, output) in tx.produces() { if output_has_subject(subject, &output) { @@ -651,7 +651,7 @@ async fn find_txs( block: &[u8], ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let block = MultiEraBlock::decode(block).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; @@ -685,7 +685,7 @@ pub async fn by_subject_transactions( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; pagination.enforce_max_scan_limit(domain.config.max_scan_items())?; diff --git a/crates/minibf/src/routes/blocks.rs b/crates/minibf/src/routes/blocks.rs index 8fe550110..b65387d9f 100644 --- a/crates/minibf/src/routes/blocks.rs +++ b/crates/minibf/src/routes/blocks.rs @@ -4,7 +4,7 @@ use axum::{ Json, }; use blockfrost_openapi::models::block_content::BlockContent; -use dolos_cardano::ChainSummary; +use dolos_cardano::{CardanoGenesis, ChainSummary}; use dolos_core::{archive::Skippable as _, ArchiveStore as _, BlockBody, Domain}; use futures::future::try_join_all; use itertools::Either; @@ -122,7 +122,7 @@ pub async fn by_hash_or_number( 
State(domain): State>, ) -> Result, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let hash_or_number = parse_hash_or_number(&hash_or_number)?; @@ -172,7 +172,7 @@ pub async fn by_hash_or_number_previous( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; @@ -250,7 +250,7 @@ pub async fn by_hash_or_number_next( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let pagination = Pagination::try_from(params)?; @@ -330,7 +330,7 @@ where pub async fn latest(State(domain): State>) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let (_, tip) = domain .archive() @@ -351,7 +351,7 @@ pub async fn by_slot( State(domain): State>, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let block = domain .archive() diff --git a/crates/minibf/src/routes/epochs/mapping.rs b/crates/minibf/src/routes/epochs/mapping.rs index df49ffaaa..26ab1d5b5 100644 --- a/crates/minibf/src/routes/epochs/mapping.rs +++ b/crates/minibf/src/routes/epochs/mapping.rs @@ -5,8 +5,7 @@ use crate::{ routes::epochs::cost_models::get_named_cost_model, }; use blockfrost_openapi::models::epoch_param_content::EpochParamContent; -use dolos_cardano::PParamsSet; -use dolos_core::Genesis; +use dolos_cardano::{CardanoGenesis, PParamsSet}; use pallas::ledger::primitives::{conway::CostModels, Epoch}; fn cost_models_to_key_value(cost_models: &CostModels) -> Vec<(&'static str, &[i64])> { @@ -68,7 +67,7 @@ fn map_cost_models_named(cost_models: &CostModels) -> Option { pub epoch: Epoch, pub params: PParamsSet, - pub genesis: &'a Genesis, + pub genesis: &'a CardanoGenesis, pub nonce: Option, } diff 
--git a/crates/minibf/src/routes/epochs/mod.rs b/crates/minibf/src/routes/epochs/mod.rs index 8e009c1e4..cc6f58efc 100644 --- a/crates/minibf/src/routes/epochs/mod.rs +++ b/crates/minibf/src/routes/epochs/mod.rs @@ -6,6 +6,7 @@ use axum::{ use blockfrost_openapi::models::epoch_param_content::EpochParamContent; use pallas::ledger::{primitives::Epoch, traverse::MultiEraBlock}; +use dolos_cardano::CardanoGenesis; use dolos_core::{archive::Skippable as _, ArchiveStore, Domain}; use crate::{ @@ -18,7 +19,7 @@ use crate::{ pub mod cost_models; pub mod mapping; -pub async fn latest_parameters( +pub async fn latest_parameters>( State(domain): State>, ) -> Result, Error> { let tip = domain.get_tip_slot()?; @@ -30,17 +31,18 @@ pub async fn latest_parameters( let state = dolos_cardano::load_epoch::(domain.state()) .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + let genesis = domain.genesis(); let model = mapping::ParametersModelBuilder { epoch, params: state.pparams.live().cloned().unwrap_or_default(), - genesis: &domain.genesis(), + genesis: &genesis, nonce: state.nonces.map(|x| x.active.to_string()), }; Ok(model.into_response()?) } -pub async fn by_number_parameters( +pub async fn by_number_parameters>( State(domain): State>, Path(epoch): Path, ) -> Result, Error> { @@ -57,10 +59,11 @@ pub async fn by_number_parameters( .ok_or(StatusCode::NOT_FOUND)? 
}; + let genesis = domain.genesis(); let model = mapping::ParametersModelBuilder { epoch: epoch.number, params: epoch.pparams.live().cloned().unwrap_or_default(), - genesis: &domain.genesis(), + genesis: &genesis, nonce: epoch.nonces.map(|x| x.active.to_string()), }; diff --git a/crates/minibf/src/routes/genesis.rs b/crates/minibf/src/routes/genesis.rs index 9db32fc59..c8ba30172 100644 --- a/crates/minibf/src/routes/genesis.rs +++ b/crates/minibf/src/routes/genesis.rs @@ -1,6 +1,7 @@ use axum::{extract::State, http::StatusCode, Json}; use blockfrost_openapi::models::genesis_content::GenesisContent; -use dolos_core::{Domain, Genesis}; +use dolos_cardano::CardanoGenesis; +use dolos_core::Domain; use crate::{ mapping::{round_f64, IntoModel}, @@ -14,7 +15,7 @@ pub fn parse_datetime_into_timestamp(s: &str) -> Result { - pub genesis: &'a Genesis, + pub genesis: &'a CardanoGenesis, } impl<'a> IntoModel for GenesisModelBuilder<'a> { @@ -54,11 +55,12 @@ impl<'a> IntoModel for GenesisModelBuilder<'a> { } } -pub async fn naked( +pub async fn naked>( State(domain): State>, ) -> Result, StatusCode> { + let genesis = domain.genesis(); let model = GenesisModelBuilder { - genesis: &domain.genesis(), + genesis: &genesis, }; model.into_response() diff --git a/crates/minibf/src/routes/metadata.rs b/crates/minibf/src/routes/metadata.rs index bb2591b80..1ee400914 100644 --- a/crates/minibf/src/routes/metadata.rs +++ b/crates/minibf/src/routes/metadata.rs @@ -7,7 +7,7 @@ use blockfrost_openapi::models::{ tx_metadata_label_cbor_inner::TxMetadataLabelCborInner, tx_metadata_label_json_inner::TxMetadataLabelJsonInner, }; -use dolos_cardano::indexes::{AsyncCardanoQueryExt, SlotOrder}; +use dolos_cardano::{indexes::{AsyncCardanoQueryExt, SlotOrder}, CardanoError}; use dolos_core::Domain; use futures_util::StreamExt; use pallas::{ @@ -128,7 +128,7 @@ async fn by_label( domain: &Facade, ) -> Result where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { 
let label: u64 = label.parse().map_err(|_| StatusCode::BAD_REQUEST)?; let pagination = Pagination::try_from(pagination)?; @@ -167,7 +167,7 @@ pub async fn by_label_json( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let builder = by_label(&label, params, &domain).await?; @@ -185,7 +185,7 @@ pub async fn by_label_cbor( State(domain): State>, ) -> Result>, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let builder = by_label(&label, params, &domain).await?; diff --git a/crates/minibf/src/routes/network.rs b/crates/minibf/src/routes/network.rs index 7ffef6b0a..82029f909 100644 --- a/crates/minibf/src/routes/network.rs +++ b/crates/minibf/src/routes/network.rs @@ -7,16 +7,17 @@ use blockfrost_openapi::models::{ network_supply::NetworkSupply, }; use dolos_cardano::{ - model::EpochState, mutable_slots, AccountState, EraProtocol, EraSummary, FixedNamespace, + model::EpochState, mutable_slots, AccountState, CardanoGenesis, EraProtocol, EraSummary, + FixedNamespace, }; -use dolos_core::{BlockSlot, Domain, Genesis, StateStore}; +use dolos_core::{BlockSlot, Domain, StateStore}; use crate::{mapping::IntoModel, routes::genesis::parse_datetime_into_timestamp, Facade}; struct ChainModelBuilder<'a> { tip: BlockSlot, eras: Vec<(u16, EraSummary)>, - genesis: &'a Genesis, + genesis: &'a CardanoGenesis, } impl<'a> IntoModel> for ChainModelBuilder<'a> { @@ -182,7 +183,7 @@ impl<'a> IntoModel> for ChainModelBuilder<'a> { } } -pub async fn eras( +pub async fn eras>( State(domain): State>, ) -> Result>, StatusCode> { let genesis = domain.genesis(); @@ -212,7 +213,7 @@ pub async fn eras( } struct NetworkModelBuilder<'a> { - genesis: &'a Genesis, + genesis: &'a CardanoGenesis, active: EpochState, live_stake: u64, active_stake: u64, @@ -248,7 +249,7 @@ impl<'a> IntoModel for NetworkModelBuilder<'a> { } } -fn compute_network_sync(domain: 
Facade) -> Result +fn compute_network_sync>(domain: Facade) -> Result where Option: From, { @@ -285,7 +286,7 @@ where pub async fn naked(State(domain): State>) -> Result, StatusCode> where Option: From, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { const TTL: std::time::Duration = std::time::Duration::from_secs(30); diff --git a/crates/minibf/src/routes/pools.rs b/crates/minibf/src/routes/pools.rs index acef15d54..86f69cb16 100644 --- a/crates/minibf/src/routes/pools.rs +++ b/crates/minibf/src/routes/pools.rs @@ -11,7 +11,7 @@ use blockfrost_openapi::models::{ }; use dolos_cardano::{ model::{AccountState, PoolState}, - FixedNamespace, PoolDelegation, StakeLog, + CardanoGenesis, FixedNamespace, PoolDelegation, StakeLog, }; use dolos_core::{ArchiveStore, BlockSlot, Domain, EntityKey, TemporalKey}; use futures::future::join_all; @@ -218,7 +218,7 @@ impl IntoModel for PoolDelegatorModelBuilder { } } -pub async fn by_id_delegators( +pub async fn by_id_delegators>( Path(id): Path, Query(params): Query, State(domain): State>, diff --git a/crates/minibf/src/routes/scripts.rs b/crates/minibf/src/routes/scripts.rs index 523f22a74..ec6e023f3 100644 --- a/crates/minibf/src/routes/scripts.rs +++ b/crates/minibf/src/routes/scripts.rs @@ -9,7 +9,7 @@ use blockfrost_openapi::models::{ script_datum_cbor::ScriptDatumCbor, script_json::ScriptJson, }; -use dolos_cardano::indexes::{AsyncCardanoQueryExt, ScriptLanguage}; +use dolos_cardano::{indexes::{AsyncCardanoQueryExt, ScriptLanguage}, CardanoError}; use dolos_core::Domain; use pallas::crypto::hash::Hash; use pallas::ledger::primitives::alonzo::NativeScript; @@ -49,7 +49,7 @@ pub async fn by_hash( State(domain): State>, ) -> Result, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let script = domain .query() @@ -78,7 +78,7 @@ pub async fn by_hash_json( State(domain): State>, ) -> Result, Error> where - D: Domain + Clone + Send + 
Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let script = domain .query() @@ -105,7 +105,7 @@ pub async fn by_hash_cbor( State(domain): State>, ) -> Result, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let script = domain .query() @@ -127,7 +127,7 @@ pub async fn by_datum_hash( State(domain): State>, ) -> Result, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let datum = domain .query() @@ -146,7 +146,7 @@ pub async fn by_datum_hash_cbor( State(domain): State>, ) -> Result, Error> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let datum = domain .query() diff --git a/crates/minibf/src/routes/tx/submit/mod.rs b/crates/minibf/src/routes/tx/submit/mod.rs index 87427330d..0a2b30be0 100644 --- a/crates/minibf/src/routes/tx/submit/mod.rs +++ b/crates/minibf/src/routes/tx/submit/mod.rs @@ -3,6 +3,7 @@ use axum::{ extract::State, http::{header, HeaderMap, StatusCode}, }; +use dolos_cardano::CardanoError; use dolos_core::{ChainError, Domain, DomainError, MempoolError, SubmitExt}; use crate::Facade; @@ -19,7 +20,7 @@ fn is_valid_cbor_content_type(headers: &HeaderMap) -> bool { content_type == "application/cbor" } -pub async fn route( +pub async fn route + SubmitExt>( State(domain): State>, headers: HeaderMap, cbor: Bytes, @@ -34,15 +35,10 @@ pub async fn route( let hash = result.map_err(|e| match e { DomainError::ChainError(x) => match x { ChainError::BrokenInvariant(_) => StatusCode::BAD_REQUEST, - ChainError::DecodingError(_) => StatusCode::BAD_REQUEST, - ChainError::CborDecodingError(_) => StatusCode::BAD_REQUEST, - ChainError::AddressDecoding(_) => StatusCode::BAD_REQUEST, - ChainError::Phase1ValidationRejected(_) => StatusCode::BAD_REQUEST, - ChainError::Phase2ValidationRejected(_) => StatusCode::BAD_REQUEST, + ChainError::ChainSpecific(_) => StatusCode::BAD_REQUEST, _ => 
StatusCode::INTERNAL_SERVER_ERROR, }, DomainError::MempoolError(x) => match x { - MempoolError::TraverseError(_) => StatusCode::BAD_REQUEST, MempoolError::InvalidTx(_) => StatusCode::BAD_REQUEST, MempoolError::DecodeError(_) => StatusCode::BAD_REQUEST, MempoolError::PlutusNotSupported => StatusCode::BAD_REQUEST, diff --git a/crates/minibf/src/routes/txs.rs b/crates/minibf/src/routes/txs.rs index 68f1a0fb7..a806ccc0b 100644 --- a/crates/minibf/src/routes/txs.rs +++ b/crates/minibf/src/routes/txs.rs @@ -15,7 +15,10 @@ use blockfrost_openapi::models::{ tx_content_withdrawals_inner::TxContentWithdrawalsInner, }; -use dolos_cardano::{indexes::AsyncCardanoQueryExt, AccountState, DRepState, PoolState}; +use dolos_cardano::{ + core_hash_to_pallas, indexes::AsyncCardanoQueryExt, pallas_hash_to_core, AccountState, + CardanoError, CardanoGenesis, DRepState, PoolState, +}; use dolos_core::Domain; use crate::{ @@ -29,7 +32,7 @@ pub async fn by_hash( State(domain): State>, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, Option: From, Option: From, Option: From, @@ -76,7 +79,7 @@ pub async fn by_hash_utxos( State(domain): State>, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; @@ -94,7 +97,11 @@ where let mut consumed_deps = std::collections::HashMap::new(); for x in builder.required_consumed_deps()? 
{ - let bytes: Vec = x.clone().into(); + let bytes = { + let mut v = x.0.as_slice().to_vec(); + v.extend_from_slice(&x.1.to_be_bytes()); + v + }; let maybe = domain .query() .tx_by_spent_txo(&bytes) @@ -107,11 +114,11 @@ where builder = builder.with_consumed_deps(consumed_deps); let deps = builder.required_deps()?; - let deps = domain.get_tx_batch(deps).await?; + let deps = domain.get_tx_batch(deps.into_iter().map(core_hash_to_pallas)).await?; for (key, cbor) in deps.iter() { if let Some(cbor) = cbor { - builder.load_dep(*key, cbor)?; + builder.load_dep(pallas_hash_to_core(*key), cbor)?; } } @@ -168,11 +175,11 @@ where .with_historical_pparams::(&domain)?; let deps = builder.required_deps()?; - let deps = domain.get_tx_batch(deps).await?; + let deps = domain.get_tx_batch(deps.into_iter().map(core_hash_to_pallas)).await?; for (key, cbor) in deps.iter() { if let Some(cbor) = cbor { - builder.load_dep(*key, cbor)?; + builder.load_dep(pallas_hash_to_core(*key), cbor)?; } } @@ -200,7 +207,7 @@ pub async fn by_hash_delegations( State(domain): State>, ) -> Result>, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; @@ -221,7 +228,7 @@ pub async fn by_hash_mirs( State(domain): State>, ) -> Result>, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; @@ -255,7 +262,7 @@ pub async fn by_hash_pool_updates( State(domain): State>, ) -> Result>, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, Option: From, { let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; @@ -280,7 +287,7 @@ pub async fn by_hash_stakes( State(domain): State>, ) -> Result>, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 
'static, { let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; diff --git a/crates/minibf/src/routes/utxos.rs b/crates/minibf/src/routes/utxos.rs index 824339f56..d6e44e82d 100644 --- a/crates/minibf/src/routes/utxos.rs +++ b/crates/minibf/src/routes/utxos.rs @@ -2,10 +2,10 @@ use axum::http::StatusCode; use blockfrost_openapi::models::address_utxo_content_inner::AddressUtxoContentInner; use futures::future::join_all; use itertools::Itertools; -use pallas::ledger::traverse::{MultiEraBlock, MultiEraOutput}; +use pallas::ledger::traverse::{Era, MultiEraBlock, MultiEraOutput}; use std::collections::{HashMap, HashSet}; -use dolos_cardano::indexes::AsyncCardanoQueryExt; +use dolos_cardano::{indexes::AsyncCardanoQueryExt, CardanoError}; use dolos_core::{Domain, StateStore as _, TxHash, TxoRef}; use crate::{ @@ -20,7 +20,7 @@ pub async fn load_utxo_models( pagination: Pagination, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let utxos = domain .state() @@ -30,15 +30,20 @@ where // decoded let utxos: HashMap<_, _> = utxos .iter() - .map(|(k, v)| MultiEraOutput::try_from(v.as_ref()).map(|x| (k, x))) + .map(|(k, v)| { + let era = Era::try_from(v.0).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + let output = + MultiEraOutput::decode(era, &v.1).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + Ok::<_, StatusCode>((k, output)) + }) .try_collect() - .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + .map_err(|e: StatusCode| e)?; let tx_deps: Vec<_> = utxos.keys().map(|txoref| txoref.0).unique().collect(); let block_deps: HashMap = join_all(tx_deps.iter().map(|tx| { let tx = *tx; async move { - match domain.query().block_by_tx_hash(tx.to_vec()).await { + match domain.query().block_by_tx_hash(tx.as_slice().to_vec()).await { Ok(Some((cbor, txorder))) => { let Ok(block) = MultiEraBlock::decode(&cbor) else { return Some(Err(StatusCode::INTERNAL_SERVER_ERROR)); @@ -95,7 +100,12 @@ 
where continue; }; - let key: Vec = builder.txo_ref().into(); + let txo_ref = builder.txo_ref(); + let key = { + let mut v = txo_ref.0.as_slice().to_vec(); + v.extend_from_slice(&txo_ref.1.to_be_bytes()); + v + }; let consumed_by = domain .query() .tx_by_spent_txo(&key) From 0a561b3fe22a026eb3c4fc9950216f65783edd8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 02:31:05 -0300 Subject: [PATCH 36/85] fix: clippy --- crates/core/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index dd9923d5c..0a1d46b08 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -107,7 +107,7 @@ mod cbor_bytes { use minicbor::{Decoder, Encoder}; pub fn encode( - v: &Vec, + v: &[u8], e: &mut Encoder, _: &mut C, ) -> Result<(), minicbor::encode::Error> { From efedd9a725664238a7db02aa251a66b390de535a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 02:44:09 -0300 Subject: [PATCH 37/85] fix(minikupo): compiling --- crates/minikupo/src/lib.rs | 14 +++++++------- crates/minikupo/src/routes/datums.rs | 3 ++- crates/minikupo/src/routes/matches.rs | 23 ++++++++++++----------- crates/minikupo/src/routes/scripts.rs | 3 ++- 4 files changed, 23 insertions(+), 20 deletions(-) diff --git a/crates/minikupo/src/lib.rs b/crates/minikupo/src/lib.rs index e5d793b12..f83440647 100644 --- a/crates/minikupo/src/lib.rs +++ b/crates/minikupo/src/lib.rs @@ -5,7 +5,7 @@ use axum::{ routing::get, Json, Router, ServiceExt, }; -use dolos_cardano::indexes::{AsyncCardanoQueryExt, ScriptLanguage as CardanoLanguage}; +use dolos_cardano::{indexes::{AsyncCardanoQueryExt, ScriptLanguage as CardanoLanguage}, CardanoError, CardanoGenesis}; use dolos_core::{config::MinikupoConfig, AsyncQueryFacade, CancelToken, Domain, ServeError}; use pallas::{codec::minicbor, crypto::hash::Hash}; use std::ops::Deref; @@ -45,7 +45,7 @@ impl Facade { script_hash: &Hash<28>, ) 
-> Result, StatusCode> where - D: Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let script = self .query() @@ -73,7 +73,7 @@ impl Facade { datum_hash: &Hash<32>, ) -> Result, StatusCode> where - D: Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let datum = self .query() @@ -97,7 +97,7 @@ pub struct Driver; pub fn build_router(cfg: MinikupoConfig, domain: D) -> Router where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { build_router_with_facade(Facade { inner: domain, @@ -107,7 +107,7 @@ where pub(crate) fn build_router_with_facade(facade: Facade) -> Router where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let permissive_cors = facade.config.permissive_cors(); let app = Router::new() @@ -151,7 +151,7 @@ where fn api_router() -> Router> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { Router::new() .route("/matches/{*pattern}", get(routes::matches::by_pattern::)) @@ -161,7 +161,7 @@ where .route("/health", get(routes::health::health::)) } -impl dolos_core::Driver for Driver +impl, C: CancelToken> dolos_core::Driver for Driver where D: Clone + Send + Sync + 'static, { diff --git a/crates/minikupo/src/routes/datums.rs b/crates/minikupo/src/routes/datums.rs index 94f900d64..88efbfe69 100644 --- a/crates/minikupo/src/routes/datums.rs +++ b/crates/minikupo/src/routes/datums.rs @@ -4,12 +4,13 @@ use axum::{ response::{IntoResponse, Response}, Json, }; +use dolos_cardano::CardanoError; use dolos_core::Domain; use pallas::crypto::hash::Hash; use crate::{bad_request, Facade}; -pub async fn by_hash( +pub async fn by_hash + Clone + Send + Sync + 'static>( State(facade): State>, Path(datum_hash): Path, ) -> Response { diff --git a/crates/minikupo/src/routes/matches.rs b/crates/minikupo/src/routes/matches.rs index 535b33e4b..269e25d18 100644 --- 
a/crates/minikupo/src/routes/matches.rs +++ b/crates/minikupo/src/routes/matches.rs @@ -4,7 +4,7 @@ use axum::{ response::{IntoResponse, Response}, Json, }; -use dolos_cardano::{indexes::CardanoIndexExt, network_from_genesis, pallas_extras}; +use dolos_cardano::{indexes::CardanoIndexExt, network_from_genesis, pallas_extras, pallas_hash_to_core, CardanoError, CardanoGenesis}; use dolos_core::{Domain, EraCbor, IndexStore as _, StateStore as _, TxoRef, UtxoSet}; use pallas::codec::minicbor; use pallas::ledger::{ @@ -19,7 +19,7 @@ use std::collections::HashMap; use crate::{bad_request, patterns, Facade}; -pub async fn by_pattern( +pub async fn by_pattern>( State(facade): State>, Path(pattern): Path, Query(query): Query, @@ -235,7 +235,7 @@ struct BlockInfo { tx_index: usize, } -fn refs_for_address_pattern( +fn refs_for_address_pattern>( facade: &Facade, pattern: &patterns::AddressPattern, ) -> Result<(UtxoSet, OutputFilter), MatchError> { @@ -352,7 +352,7 @@ async fn refs_for_output_ref_pattern( let refs = match pattern.index() { patterns::OutputIndexPattern::Exact(index) => { let mut refs = UtxoSet::new(); - refs.insert(TxoRef(tx_hash, *index)); + refs.insert(TxoRef(pallas_hash_to_core(tx_hash), *index)); refs } patterns::OutputIndexPattern::Any => { @@ -369,7 +369,7 @@ async fn refs_for_output_ref_pattern( let tx = MultiEraTx::decode_for_era(era, &cbor).map_err(|_| MatchError::Internal)?; let mut refs = UtxoSet::new(); for (index, _) in tx.outputs().iter().enumerate() { - refs.insert(TxoRef(tx_hash, index as u32)); + refs.insert(TxoRef(pallas_hash_to_core(tx_hash), index as u32)); } refs } @@ -394,7 +394,7 @@ fn stake_credential_pattern(stake: &StakeAddress) -> patterns::CredentialPattern } } -fn stake_keys_for_credential( +fn stake_keys_for_credential>( facade: &Facade, credential: &patterns::CredentialPattern, ) -> Result>>, MatchError> { @@ -421,7 +421,7 @@ fn stake_keys_for_credential( Ok(Some(keys)) } -async fn build_matches( +async fn build_matches>( facade: 
&Facade, refs: UtxoSet, filter: OutputFilter, @@ -432,12 +432,13 @@ async fn build_matches( .get_utxos(refs.into_iter().collect()) .map_err(|_| MatchError::Internal)?; - let mut block_cache: HashMap, BlockInfo> = HashMap::new(); + let mut block_cache: HashMap, BlockInfo> = HashMap::new(); let mut out = Vec::new(); for (txo_ref, cbor) in utxos { let cbor: &dolos_core::EraCbor = cbor.as_ref(); - let output = MultiEraOutput::try_from(cbor).map_err(|_| MatchError::Internal)?; + let era = Era::try_from(cbor.0).map_err(|_| MatchError::Internal)?; + let output = MultiEraOutput::decode(era, &cbor.1).map_err(|_| MatchError::Internal)?; let address = output.address().map_err(|_| MatchError::Internal)?; if !matches_output_filter(&output, &address, &filter) { @@ -450,7 +451,7 @@ async fn build_matches( None => { let Some((raw_block, tx_index)) = facade .query() - .block_by_tx_hash(tx_hash.to_vec()) + .block_by_tx_hash(tx_hash.as_slice().to_vec()) .await .map_err(|_| MatchError::Internal)? else { @@ -553,7 +554,7 @@ fn output_script_hash(output: &MultiEraOutput<'_>) -> Option( +async fn resolve_output_extras>( facade: &Facade, output: &MultiEraOutput<'_>, script_hash: Option>, diff --git a/crates/minikupo/src/routes/scripts.rs b/crates/minikupo/src/routes/scripts.rs index f23419546..9b617edb1 100644 --- a/crates/minikupo/src/routes/scripts.rs +++ b/crates/minikupo/src/routes/scripts.rs @@ -4,12 +4,13 @@ use axum::{ response::{IntoResponse, Response}, Json, }; +use dolos_cardano::CardanoError; use dolos_core::Domain; use pallas::crypto::hash::Hash; use crate::{bad_request, Facade}; -pub async fn by_hash( +pub async fn by_hash + Clone + Send + Sync + 'static>( State(facade): State>, Path(script_hash): Path, ) -> Response { From 60a3da7daa177b5dc54cb37a5b15115c21dc167b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 02:44:55 -0300 Subject: [PATCH 38/85] fix(trp): compiling --- crates/trp/src/compiler.rs | 15 ++++++++------- 
crates/trp/src/error.rs | 18 ++++++------------ crates/trp/src/lib.rs | 3 ++- crates/trp/src/mapping.rs | 5 +++-- crates/trp/src/methods.rs | 5 +++-- crates/trp/src/utxos.rs | 13 ++++++------- 6 files changed, 28 insertions(+), 31 deletions(-) diff --git a/crates/trp/src/compiler.rs b/crates/trp/src/compiler.rs index 6aa6445f9..38930195b 100644 --- a/crates/trp/src/compiler.rs +++ b/crates/trp/src/compiler.rs @@ -1,11 +1,12 @@ use pallas::ledger::primitives::conway::CostModels; use std::collections::HashMap; -use dolos_core::{config::TrpConfig, Domain, Genesis, StateStore}; +use dolos_cardano::CardanoGenesis; +use dolos_core::{config::TrpConfig, Domain, StateStore}; use crate::Error; -pub fn network_id_from_genesis(genesis: &Genesis) -> Option { +pub fn network_id_from_genesis(genesis: &CardanoGenesis) -> Option { match genesis.shelley.network_id.as_ref() { Some(network) => match network.as_str() { "Mainnet" => Some(tx3_cardano::Network::Mainnet), @@ -29,8 +30,8 @@ fn map_cost_models(original: CostModels) -> HashMap HashMap::from_iter(present) } -fn build_pparams(domain: &D) -> Result { - let network = network_id_from_genesis(&domain.genesis()).unwrap(); +fn build_pparams>(domain: &D) -> Result { + let network = network_id_from_genesis(domain.genesis().as_ref()).unwrap(); let pparams = dolos_cardano::load_effective_pparams::(domain.state())?; @@ -47,7 +48,7 @@ fn build_pparams(domain: &D) -> Result { Ok(out) } -pub fn find_cursor(domain: &D) -> Result { +pub fn find_cursor>(domain: &D) -> Result { let cursor = domain .state() .read_cursor() @@ -58,12 +59,12 @@ pub fn find_cursor(domain: &D) -> Result( +pub fn load_compiler>( domain: &D, config: &TrpConfig, ) -> Result { diff --git a/crates/trp/src/error.rs b/crates/trp/src/error.rs index 1ae577f28..ce17a43ac 100644 --- a/crates/trp/src/error.rs +++ b/crates/trp/src/error.rs @@ -1,3 +1,4 @@ +use dolos_cardano::CardanoError; use dolos_core::DomainError; use jsonrpsee::types::ErrorCode; use serde_json::Value; @@ 
-66,10 +67,6 @@ impl Error { ))) } - fn tx_script_failure(logs: Vec) -> Self { - Error::Tx3Error(Box::new(tx3_resolver::Error::TxScriptFailure(logs))) - } - fn internal(error: impl std::fmt::Display) -> Self { Error::InternalError(error.to_string()) } @@ -87,21 +84,18 @@ impl From for Error { } } -impl From for Error { - fn from(error: dolos_core::ChainError) -> Self { +impl From> for Error { + fn from(error: dolos_core::ChainError) -> Self { match error { dolos_core::ChainError::BrokenInvariant(x) => Error::tx_not_accepted(x), - dolos_core::ChainError::DecodingError(x) => Error::tx_not_accepted(x), - dolos_core::ChainError::CborDecodingError(x) => Error::tx_not_accepted(x), - dolos_core::ChainError::Phase1ValidationRejected(x) => Error::tx_not_accepted(x), - dolos_core::ChainError::Phase2ValidationRejected(x) => Error::tx_script_failure(x), + dolos_core::ChainError::ChainSpecific(x) => Error::tx_not_accepted(x.to_string()), x => Error::internal(x), } } } -impl From for Error { - fn from(error: DomainError) -> Self { +impl From> for Error { + fn from(error: DomainError) -> Self { match error { dolos_core::DomainError::ChainError(e) => Error::from(e), dolos_core::DomainError::MempoolError(e) => Error::from(e), diff --git a/crates/trp/src/lib.rs b/crates/trp/src/lib.rs index 34b54bdd9..74db7281a 100644 --- a/crates/trp/src/lib.rs +++ b/crates/trp/src/lib.rs @@ -5,6 +5,7 @@ use tower::ServiceBuilder; use tower_http::cors::CorsLayer; use tracing::info; +use dolos_cardano::{CardanoError, CardanoGenesis}; use dolos_core::{config::TrpConfig, CancelToken, Domain, ServeError, SubmitExt}; mod compiler; @@ -25,7 +26,7 @@ pub struct Context { pub struct Driver; -impl dolos_core::Driver for Driver { +impl + SubmitExt, C: CancelToken> dolos_core::Driver for Driver { type Config = TrpConfig; async fn run(cfg: Self::Config, domain: D, cancel: C) -> Result<(), ServeError> { diff --git a/crates/trp/src/mapping.rs b/crates/trp/src/mapping.rs index 2ff1a1bde..b5e8e0d49 100644 --- 
a/crates/trp/src/mapping.rs +++ b/crates/trp/src/mapping.rs @@ -2,6 +2,7 @@ use std::sync::Arc; use tx3_resolver::{Expression, StructExpr}; +use dolos_cardano::pallas_hash_to_core; use dolos_core::{EraCbor, TxoRef}; use pallas::{ codec::utils::KeyValuePairs, @@ -97,12 +98,12 @@ fn map_datum(datum: &PlutusData) -> Expression { pub fn from_tx3_utxoref(r#ref: tx3_resolver::UtxoRef) -> TxoRef { let txid = dolos_cardano::pallas::crypto::hash::Hash::from(r#ref.txid.as_slice()); - TxoRef(txid, r#ref.index) + TxoRef(pallas_hash_to_core(txid), r#ref.index) } pub fn into_tx3_utxoref(txoref: TxoRef) -> tx3_resolver::UtxoRef { tx3_resolver::UtxoRef { - txid: txoref.0.to_vec(), + txid: txoref.0.as_slice().to_vec(), index: txoref.1, } } diff --git a/crates/trp/src/methods.rs b/crates/trp/src/methods.rs index 16988898f..8fad1394b 100644 --- a/crates/trp/src/methods.rs +++ b/crates/trp/src/methods.rs @@ -13,13 +13,14 @@ use tx3_resolver::trp::{ TxStatus, TxWitness, }; +use dolos_cardano::{CardanoError, CardanoGenesis}; use dolos_core::{Domain, MempoolAwareUtxoStore, MempoolStore as _, StateStore as _, SubmitExt}; use crate::{compiler::load_compiler, utxos::UtxoStoreAdapter}; use super::{Context, Error}; -pub async fn trp_resolve( +pub async fn trp_resolve>( params: Params<'_>, context: Arc>, ) -> Result { @@ -96,7 +97,7 @@ fn apply_witnesses(original: &[u8], witnesses: &[TxWitness]) -> Result, Ok(pallas::codec::minicbor::to_vec(&tx).unwrap()) } -pub async fn trp_submit( +pub async fn trp_submit + SubmitExt>( params: Params<'_>, context: Arc>, ) -> Result { diff --git a/crates/trp/src/utxos.rs b/crates/trp/src/utxos.rs index 4c7516612..9b884876e 100644 --- a/crates/trp/src/utxos.rs +++ b/crates/trp/src/utxos.rs @@ -1,8 +1,7 @@ use std::collections::HashSet; -use dolos_cardano::indexes::utxo_dimensions; +use dolos_cardano::{indexes::utxo_dimensions, CardanoError}; use dolos_core::{Domain, IndexError, MempoolAwareUtxoStore, TxoRef}; -use pallas::ledger::traverse::MultiEraOutput; use 
tx3_resolver::{Error as Tx3Error, UtxoPattern, UtxoRef, UtxoSet, UtxoStore}; use crate::{ @@ -10,12 +9,12 @@ use crate::{ Error, }; -fn search_state_utxos( +fn search_state_utxos>( pattern: &UtxoPattern<'_>, store: &MempoolAwareUtxoStore, ) -> Result, IndexError> { // Dummy filter that always returns true (we want all UTxOs matching the index) - let no_filter = |_: &MultiEraOutput<'_>| true; + let no_filter = |_: &dolos_core::EraCbor| true; let refs = match pattern { UtxoPattern::ByAddress(address) => { @@ -33,11 +32,11 @@ fn search_state_utxos( Ok(refs) } -pub struct UtxoStoreAdapter<'a, D: Domain> { +pub struct UtxoStoreAdapter<'a, D: Domain> { inner: MempoolAwareUtxoStore<'a, D>, } -impl<'a, D: Domain> UtxoStoreAdapter<'a, D> { +impl<'a, D: Domain> UtxoStoreAdapter<'a, D> { pub fn new(inner: MempoolAwareUtxoStore<'a, D>) -> Self { Self { inner } } @@ -64,7 +63,7 @@ impl<'a, D: Domain> UtxoStoreAdapter<'a, D> { } } -impl<'a, D: Domain> UtxoStore for UtxoStoreAdapter<'a, D> { +impl<'a, D: Domain> UtxoStore for UtxoStoreAdapter<'a, D> { async fn narrow_refs(&self, pattern: UtxoPattern<'_>) -> Result, Tx3Error> { self.narrow_refs(pattern) .await From bc407a0ece335ec7ade9f320196aca5dba782ca8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 14:06:11 -0300 Subject: [PATCH 39/85] fix: prelude cardano error --- src/prelude.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/prelude.rs b/src/prelude.rs index 767ab025a..a909c4363 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -28,7 +28,7 @@ pub enum Error { WalError(#[from] WalError), #[error("chain error: {0}")] - ArchiveError(#[from] ArchiveError), + ArchiveError(#[from] ArchiveError), #[error("state error: {0}")] StateError(#[from] StateError), From 7c1bd0c5cb4b51b3c2ee3e19d4fe16135b6aadec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 14:20:00 -0300 Subject: [PATCH 40/85] fix: era_cbor_from_output use pallas From 
--- crates/cardano/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 62f82c759..60ef7123a 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -80,7 +80,7 @@ pub(crate) fn txo_ref_from_pallas(hash: pallas::crypto::hash::Hash<32>, idx: u32 } pub(crate) fn era_cbor_from_output(output: &MultiEraOutput<'_>) -> EraCbor { - EraCbor(output.era() as u16, output.encode()) + EraCbor(output.era().into(), output.encode()) } pub(crate) fn txo_ref_from_input(input: &MultiEraInput<'_>) -> TxoRef { From 03c2ea1dd77b3d79dfff4ddffd1a78e88b4572d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 14:28:13 -0300 Subject: [PATCH 41/85] feat(adapters): mapping errors and types to core --- crates/redb3/src/archive/mod.rs | 4 +- src/adapters/mod.rs | 28 ++++-- src/adapters/storage.rs | 173 +++++++++++++++++++++----------- 3 files changed, 133 insertions(+), 72 deletions(-) diff --git a/crates/redb3/src/archive/mod.rs b/crates/redb3/src/archive/mod.rs index f29d1f660..dc9d612a5 100644 --- a/crates/redb3/src/archive/mod.rs +++ b/crates/redb3/src/archive/mod.rs @@ -257,7 +257,7 @@ impl ArchiveStore { let decoded = MultiEraBlock::decode(&body) .map_err(|e| RedbArchiveError(ArchiveError::InternalError(e.to_string())))?; - let decoded_hash: [u8; 32] = (*decoded.hash()).into(); + let decoded_hash: [u8; 32] = *decoded.hash(); let decoded_hash = dolos_core::hash::Hash::from(decoded_hash); if decoded_hash.eq(hash) { return Ok(Some(ChainPoint::Specific(decoded.slot(), decoded_hash))); @@ -632,7 +632,7 @@ impl ArchiveStore { let mut bytes = input.hash().to_vec(); bytes.extend_from_slice(u32::to_be_bytes(input.index() as u32).as_slice()); if bytes.as_slice() == spent_txo { - let hash_bytes: [u8; 32] = (*tx.hash()).into(); + let hash_bytes: [u8; 32] = *tx.hash(); return Ok(Some(dolos_core::hash::Hash::from(hash_bytes))); } } diff --git a/src/adapters/mod.rs 
b/src/adapters/mod.rs index 830d75322..200b1f4b0 100644 --- a/src/adapters/mod.rs +++ b/src/adapters/mod.rs @@ -2,7 +2,7 @@ pub mod storage; use std::sync::Arc; -use dolos_cardano::CardanoLogic; +use dolos_cardano::{CardanoGenesis, CardanoLogic, core_hash_to_pallas, pallas_hash_to_core}; use dolos_core::{ config::{StorageConfig, SyncConfig}, *, @@ -35,7 +35,7 @@ impl dolos_core::TipSubscription for TipSubscription { pub struct DomainAdapter { pub storage_config: Arc, pub sync_config: Arc, - pub genesis: Arc, + pub genesis: Arc, pub wal: WalAdapter, pub chain: Arc>, pub state: StateStoreBackend, @@ -52,7 +52,7 @@ impl DomainAdapter { /// especially after heavy write operations like bulk imports. This ensures /// that storage backends complete any pending background work before being /// dropped. - pub fn shutdown(&self) -> Result<(), DomainError> { + pub fn shutdown(&self) -> Result<(), DomainError> { tracing::info!("domain adapter: starting graceful shutdown"); self.wal.shutdown().map_err(DomainError::WalError)?; @@ -73,15 +73,18 @@ impl DomainAdapter { } let mut result = std::collections::HashMap::new(); - let refs_set: std::collections::HashSet<_> = - refs.iter().copied().map(TxoRef::from).collect(); + let refs_set: std::collections::HashSet<_> = refs + .iter() + .copied() + .map(|(h, i)| TxoRef(pallas_hash_to_core(h), i)) + .collect(); let iter = self.wal().iter_logs(None, None).ok()?; for (_, log) in iter.rev() { for (txo_ref, era_cbor) in &log.inputs { if refs_set.contains(txo_ref) { let era = era_cbor.0.try_into().expect("era out of range"); - result.insert(txo_ref.clone().into(), (era, era_cbor.1.clone())); + result.insert((core_hash_to_pallas(txo_ref.0), txo_ref.1), (era, era_cbor.1.clone())); } } @@ -99,6 +102,8 @@ impl DomainAdapter { } impl Domain for DomainAdapter { + type Genesis = CardanoGenesis; + type ChainSpecificError = dolos_cardano::CardanoError; type Entity = dolos_cardano::CardanoEntity; type EntityDelta = dolos_cardano::CardanoDelta; type 
Chain = CardanoLogic; @@ -110,7 +115,7 @@ impl Domain for DomainAdapter { type Mempool = MempoolBackend; type TipSubscription = TipSubscription; - fn genesis(&self) -> Arc { + fn genesis(&self) -> Arc { self.genesis.clone() } @@ -150,7 +155,7 @@ impl Domain for DomainAdapter { &self.sync_config } - fn watch_tip(&self, from: Option) -> Result { + fn watch_tip(&self, from: Option) -> Result> { // TODO: do a more thorough analysis to understand if this approach is // susceptible to race conditions. Things to explore: // - a mutex to block the sending of events while gathering the replay. @@ -180,14 +185,17 @@ impl pallas::interop::utxorpc::LedgerContext for DomainAdapter { &self, refs: &[pallas::interop::utxorpc::TxoRef], ) -> Option { - let refs: Vec<_> = refs.iter().map(|x| TxoRef::from(*x)).collect(); + let refs: Vec<_> = refs + .iter() + .map(|(h, i)| TxoRef(pallas_hash_to_core(*h), *i)) + .collect(); let some = dolos_core::StateStore::get_utxos(self.state(), refs) .ok()? .into_iter() .map(|(k, v)| { let era = v.0.try_into().expect("era out of range"); - (k.into(), (era, v.1.clone())) + ((core_hash_to_pallas(k.0), k.1), (era, v.1.clone())) }) .collect(); diff --git a/src/adapters/storage.rs b/src/adapters/storage.rs index d7eadf483..60a11ecfe 100644 --- a/src/adapters/storage.rs +++ b/src/adapters/storage.rs @@ -10,6 +10,7 @@ use std::{ops::Range, path::Path, path::PathBuf}; +use dolos_cardano::CardanoError; use dolos_core::{ archive::{ ArchiveError, ArchiveStore as CoreArchiveStore, ArchiveWriter as CoreArchiveWriter, LogKey, @@ -538,6 +539,20 @@ impl CoreStateStore for StateStoreBackend { // Archive Store Backend // ============================================================================ +fn convert_archive_err(e: ArchiveError) -> ArchiveError { + match e { + ArchiveError::BrokenInvariant(e) => ArchiveError::BrokenInvariant(e), + ArchiveError::InternalError(s) => ArchiveError::InternalError(s), + ArchiveError::QueryNotSupported => 
ArchiveError::QueryNotSupported, + ArchiveError::InvalidStoreVersion => ArchiveError::InvalidStoreVersion, + ArchiveError::DecodingError(e) => ArchiveError::DecodingError(e), + ArchiveError::EntityDecodingError(s) => ArchiveError::EntityDecodingError(s), + ArchiveError::NamespaceNotFound(ns) => ArchiveError::NamespaceNotFound(ns), + // TODO: what + ArchiveError::ChainSpecifc(inf) => match inf {}, + } +} + /// Enum wrapper for archive store backends. #[derive(Clone)] pub enum ArchiveStoreBackend { @@ -551,10 +566,11 @@ impl ArchiveStoreBackend { path: impl AsRef, schema: StateSchema, config: &RedbArchiveConfig, - ) -> Result { - Ok(Self::Redb(dolos_redb3::archive::ArchiveStore::open( - schema, path, config, - )?)) + ) -> Result> { + Ok(Self::Redb( + dolos_redb3::archive::ArchiveStore::open(schema, path, config) + .map_err(|e| ArchiveError::InternalError(e.to_string()))?, + )) } /// Create a no-op archive store that discards all writes. @@ -563,10 +579,11 @@ impl ArchiveStoreBackend { } /// Create an in-memory archive store. - pub fn in_memory(schema: StateSchema) -> Result { - Ok(Self::Redb(dolos_redb3::archive::ArchiveStore::in_memory( - schema, - )?)) + pub fn in_memory(schema: StateSchema) -> Result> { + Ok(Self::Redb( + dolos_redb3::archive::ArchiveStore::in_memory(schema) + .map_err(|e| ArchiveError::InternalError(e.to_string()))?, + )) } /// Open an archive store based on the config variant. 
@@ -578,7 +595,7 @@ impl ArchiveStoreBackend { path: impl AsRef, schema: StateSchema, config: &ArchiveStoreConfig, - ) -> Result { + ) -> Result> { match config { ArchiveStoreConfig::Redb(cfg) => Self::open_redb(path, schema, cfg), ArchiveStoreConfig::InMemory => Self::in_memory(schema), @@ -586,12 +603,12 @@ impl ArchiveStoreBackend { } } - pub fn shutdown(&self) -> Result<(), ArchiveError> { + pub fn shutdown(&self) -> Result<(), ArchiveError> { match self { Self::Redb(s) => s .shutdown() .map_err(|e| ArchiveError::InternalError(e.to_string())), - Self::NoOp(s) => s.shutdown(), + Self::NoOp(s) => s.shutdown().map_err(convert_archive_err), } } } @@ -602,10 +619,16 @@ pub enum ArchiveWriterBackend { } impl CoreArchiveWriter for ArchiveWriterBackend { - fn apply(&self, point: &ChainPoint, block: &RawBlock) -> Result<(), ArchiveError> { + type ChainSpecificError = CardanoError; + + fn apply( + &self, + point: &ChainPoint, + block: &RawBlock, + ) -> Result<(), ArchiveError> { match self { - Self::Redb(w) => w.apply(point, block), - Self::NoOp(w) => w.apply(point, block), + Self::Redb(w) => w.apply(point, block).map_err(convert_archive_err), + Self::NoOp(w) => w.apply(point, block).map_err(convert_archive_err), } } @@ -614,24 +637,24 @@ impl CoreArchiveWriter for ArchiveWriterBackend { ns: Namespace, key: &LogKey, value: &EntityValue, - ) -> Result<(), ArchiveError> { + ) -> Result<(), ArchiveError> { match self { - Self::Redb(w) => w.write_log(ns, key, value), - Self::NoOp(w) => w.write_log(ns, key, value), + Self::Redb(w) => w.write_log(ns, key, value).map_err(convert_archive_err), + Self::NoOp(w) => w.write_log(ns, key, value).map_err(convert_archive_err), } } - fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError> { + fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError> { match self { - Self::Redb(w) => w.undo(point), - Self::NoOp(w) => w.undo(point), + Self::Redb(w) => w.undo(point).map_err(convert_archive_err), + Self::NoOp(w) => 
w.undo(point).map_err(convert_archive_err), } } - fn commit(self) -> Result<(), ArchiveError> { + fn commit(self) -> Result<(), ArchiveError> { match self { - Self::Redb(w) => (*w).commit(), - Self::NoOp(w) => w.commit(), + Self::Redb(w) => (*w).commit().map_err(convert_archive_err), + Self::NoOp(w) => w.commit().map_err(convert_archive_err), } } } @@ -682,11 +705,11 @@ pub enum ArchiveLogIterBackend { } impl Iterator for ArchiveLogIterBackend { - type Item = Result<(LogKey, EntityValue), ArchiveError>; + type Item = Result<(LogKey, EntityValue), ArchiveError>; fn next(&mut self) -> Option { match self { - Self::Redb(iter) => iter.next(), - Self::NoOp(iter) => iter.next(), + Self::Redb(iter) => iter.next().map(|r| r.map_err(convert_archive_err)), + Self::NoOp(iter) => iter.next().map(|r| r.map_err(convert_archive_err)), } } } @@ -697,26 +720,30 @@ pub enum ArchiveEntityValueIterBackend { } impl Iterator for ArchiveEntityValueIterBackend { - type Item = Result; + type Item = Result>; fn next(&mut self) -> Option { match self { - Self::Redb(iter) => iter.next(), - Self::NoOp(iter) => iter.next(), + Self::Redb(iter) => iter.next().map(|r| r.map_err(convert_archive_err)), + Self::NoOp(iter) => iter.next().map(|r| r.map_err(convert_archive_err)), } } } impl CoreArchiveStore for ArchiveStoreBackend { + type ChainSpecificError = CardanoError; type BlockIter<'a> = ArchiveBlockIterBackend; type Writer = ArchiveWriterBackend; type LogIter = ArchiveLogIterBackend; type EntityValueIter = ArchiveEntityValueIterBackend; - fn start_writer(&self) -> Result { + fn start_writer(&self) -> Result> { match self { Self::Redb(s) => CoreArchiveStore::start_writer(s) - .map(|writer| ArchiveWriterBackend::Redb(Box::new(writer))), - Self::NoOp(s) => CoreArchiveStore::start_writer(s).map(ArchiveWriterBackend::NoOp), + .map(|writer| ArchiveWriterBackend::Redb(Box::new(writer))) + .map_err(convert_archive_err), + Self::NoOp(s) => CoreArchiveStore::start_writer(s) + 
.map(ArchiveWriterBackend::NoOp) + .map_err(convert_archive_err), } } @@ -724,10 +751,10 @@ impl CoreArchiveStore for ArchiveStoreBackend { &self, ns: Namespace, keys: &[&LogKey], - ) -> Result>, ArchiveError> { + ) -> Result>, ArchiveError> { match self { - Self::Redb(s) => CoreArchiveStore::read_logs(s, ns, keys), - Self::NoOp(s) => CoreArchiveStore::read_logs(s, ns, keys), + Self::Redb(s) => CoreArchiveStore::read_logs(s, ns, keys).map_err(convert_archive_err), + Self::NoOp(s) => CoreArchiveStore::read_logs(s, ns, keys).map_err(convert_archive_err), } } @@ -735,20 +762,28 @@ impl CoreArchiveStore for ArchiveStoreBackend { &self, ns: Namespace, range: Range, - ) -> Result { + ) -> Result> { match self { Self::Redb(s) => CoreArchiveStore::iter_logs(s, ns, range) - .map(|iter| ArchiveLogIterBackend::Redb(Box::new(iter))), - Self::NoOp(s) => { - CoreArchiveStore::iter_logs(s, ns, range).map(ArchiveLogIterBackend::NoOp) - } + .map(|iter| ArchiveLogIterBackend::Redb(Box::new(iter))) + .map_err(convert_archive_err), + Self::NoOp(s) => CoreArchiveStore::iter_logs(s, ns, range) + .map(ArchiveLogIterBackend::NoOp) + .map_err(convert_archive_err), } } - fn get_block_by_slot(&self, slot: &BlockSlot) -> Result, ArchiveError> { + fn get_block_by_slot( + &self, + slot: &BlockSlot, + ) -> Result, ArchiveError> { match self { - Self::Redb(s) => CoreArchiveStore::get_block_by_slot(s, slot), - Self::NoOp(s) => CoreArchiveStore::get_block_by_slot(s, slot), + Self::Redb(s) => { + CoreArchiveStore::get_block_by_slot(s, slot).map_err(convert_archive_err) + } + Self::NoOp(s) => { + CoreArchiveStore::get_block_by_slot(s, slot).map_err(convert_archive_err) + } } } @@ -756,41 +791,59 @@ impl CoreArchiveStore for ArchiveStoreBackend { &self, from: Option, to: Option, - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { match self { Self::Redb(s) => CoreArchiveStore::get_range(s, from, to) - .map(|iter| ArchiveBlockIterBackend::Redb(Box::new(iter))), - Self::NoOp(s) => { - 
CoreArchiveStore::get_range(s, from, to).map(ArchiveBlockIterBackend::NoOp) - } + .map(|iter| ArchiveBlockIterBackend::Redb(Box::new(iter))) + .map_err(convert_archive_err), + Self::NoOp(s) => CoreArchiveStore::get_range(s, from, to) + .map(ArchiveBlockIterBackend::NoOp) + .map_err(convert_archive_err), } } - fn find_intersect(&self, intersect: &[ChainPoint]) -> Result, ArchiveError> { + fn find_intersect( + &self, + intersect: &[ChainPoint], + ) -> Result, ArchiveError> { match self { - Self::Redb(s) => CoreArchiveStore::find_intersect(s, intersect), - Self::NoOp(s) => CoreArchiveStore::find_intersect(s, intersect), + Self::Redb(s) => { + CoreArchiveStore::find_intersect(s, intersect).map_err(convert_archive_err) + } + Self::NoOp(s) => { + CoreArchiveStore::find_intersect(s, intersect).map_err(convert_archive_err) + } } } - fn get_tip(&self) -> Result, ArchiveError> { + fn get_tip(&self) -> Result, ArchiveError> { match self { - Self::Redb(s) => CoreArchiveStore::get_tip(s), - Self::NoOp(s) => CoreArchiveStore::get_tip(s), + Self::Redb(s) => CoreArchiveStore::get_tip(s).map_err(convert_archive_err), + Self::NoOp(s) => CoreArchiveStore::get_tip(s).map_err(convert_archive_err), } } - fn prune_history(&self, max_slots: u64, max_prune: Option) -> Result { + fn prune_history( + &self, + max_slots: u64, + max_prune: Option, + ) -> Result> { match self { - Self::Redb(s) => CoreArchiveStore::prune_history(s, max_slots, max_prune), - Self::NoOp(s) => CoreArchiveStore::prune_history(s, max_slots, max_prune), + Self::Redb(s) => CoreArchiveStore::prune_history(s, max_slots, max_prune) + .map_err(convert_archive_err), + Self::NoOp(s) => CoreArchiveStore::prune_history(s, max_slots, max_prune) + .map_err(convert_archive_err), } } - fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { + fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { match self { - Self::Redb(s) => CoreArchiveStore::truncate_front(s, after), - Self::NoOp(s) => 
CoreArchiveStore::truncate_front(s, after), + Self::Redb(s) => { + CoreArchiveStore::truncate_front(s, after).map_err(convert_archive_err) + } + Self::NoOp(s) => { + CoreArchiveStore::truncate_front(s, after).map_err(convert_archive_err) + } } } } From d905e40eed9d231fdd422ad2b53bcb8c1b81ee64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 14:29:07 -0300 Subject: [PATCH 42/85] fix: rest of compilation issues on main solved by claude (they are all similar) --- src/bin/dolos/bootstrap/mithril.rs | 16 +++++++- src/bin/dolos/common.rs | 8 ++-- src/bin/dolos/data/compute_nonce.rs | 6 +-- src/bin/dolos/data/dump_logs.rs | 11 ++++-- src/bin/dolos/data/find_seq.rs | 3 +- src/bin/dolos/data/import_archive.rs | 2 +- src/bin/dolos/doctor/rollback.rs | 3 +- src/bin/dolos/doctor/wal_integrity.rs | 2 +- src/bin/dolos/eval.rs | 5 ++- src/bin/dolos/init.rs | 4 +- src/prelude.rs | 20 ++++++++++ src/relay/blockfetch.rs | 4 +- src/relay/chainsync.rs | 10 ++--- src/serve/grpc/convert.rs | 6 +-- src/serve/grpc/mod.rs | 2 +- src/serve/grpc/query.rs | 16 ++++---- src/serve/grpc/submit.rs | 52 ++++++++----------------- src/serve/grpc/sync.rs | 2 +- src/serve/grpc/watch.rs | 5 ++- src/serve/o7s_unix/chainsync.rs | 10 ++--- src/serve/o7s_unix/mod.rs | 6 +-- src/serve/o7s_unix/statequery.rs | 18 +++++---- src/serve/o7s_unix/utils/era_history.rs | 2 +- src/serve/o7s_unix/utils/utxo.rs | 7 +++- src/sync/pull.rs | 5 +-- src/sync/submit.rs | 8 ++-- 26 files changed, 133 insertions(+), 100 deletions(-) diff --git a/src/bin/dolos/bootstrap/mithril.rs b/src/bin/dolos/bootstrap/mithril.rs index b4200855e..5f8d226ff 100644 --- a/src/bin/dolos/bootstrap/mithril.rs +++ b/src/bin/dolos/bootstrap/mithril.rs @@ -164,8 +164,20 @@ fn define_starting_point( ) -> Result { use dolos_core::StateStore; + fn chain_to_pallas(c: dolos_core::ChainPoint) -> pallas::network::miniprotocols::Point { + match c { + dolos_core::ChainPoint::Origin => 
pallas::network::miniprotocols::Point::Origin, + dolos_core::ChainPoint::Specific(slot, hash) => { + pallas::network::miniprotocols::Point::Specific(slot, hash.as_slice().to_vec()) + } + dolos_core::ChainPoint::Slot(slot) => { + pallas::network::miniprotocols::Point::Specific(slot, vec![]) + } + } + } + if let Some(point) = &args.start_from { - Ok(point.clone().try_into().unwrap()) + Ok(chain_to_pallas(point.clone())) } else { let cursor = state .read_cursor() @@ -173,7 +185,7 @@ fn define_starting_point( .context("reading state cursor")?; let point = cursor - .map(|c| c.try_into().unwrap()) + .map(chain_to_pallas) .unwrap_or(pallas::network::miniprotocols::Point::Origin); Ok(point) diff --git a/src/bin/dolos/common.rs b/src/bin/dolos/common.rs index c193f72c7..37e250430 100644 --- a/src/bin/dolos/common.rs +++ b/src/bin/dolos/common.rs @@ -10,7 +10,7 @@ use tracing::{debug, info}; use tracing_subscriber::{filter::Targets, prelude::*}; use dolos::adapters::DomainAdapter; -use dolos::core::GenesisCardanoCardano; +use dolos_cardano::CardanoGenesis; use dolos::prelude::*; use dolos::storage; @@ -73,7 +73,7 @@ pub fn setup_domain(config: &RootConfig) -> miette::Result { let chain = dolos_cardano::CardanoLogic::initialize::( chain_config, &stores.state, - &genesis, + (*genesis).clone(), ) .into_diagnostic()?; @@ -204,8 +204,8 @@ pub fn setup_tracing(config: &LoggingConfig, telemetry: &TelemetryConfig) -> mie Ok(()) } -pub fn open_genesis_files(config: &GenesisConfig) -> miette::Result { - GenesisCardanoCardano::from_file_paths( +pub fn open_genesis_files(config: &GenesisConfig) -> miette::Result { + CardanoGenesis::from_file_paths( &config.byron_path, &config.shelley_path, &config.alonzo_path, diff --git a/src/bin/dolos/data/compute_nonce.rs b/src/bin/dolos/data/compute_nonce.rs index cbd5d3a2a..d7e399146 100644 --- a/src/bin/dolos/data/compute_nonce.rs +++ b/src/bin/dolos/data/compute_nonce.rs @@ -1,4 +1,4 @@ -use dolos_cardano::{load_era_summary, 
utils::nonce_stability_window, EraSummary, Nonces}; +use dolos_cardano::{load_era_summary, utils::nonce_stability_window, CardanoGenesis, EraSummary, Nonces}; use dolos_core::{ArchiveStore, Domain}; use miette::{bail, Context, IntoDiagnostic}; use pallas::{crypto::hash::Hash, ledger::traverse::MultiEraBlock}; @@ -27,7 +27,7 @@ pub fn get_nh(epoch: u64, domain: &D, summary: &EraSummary) -> miette Ok(block.header().previous_hash().unwrap()) } -pub fn compute_nonce(epoch: u64, domain: &D) -> miette::Result> { +pub fn compute_nonce>(epoch: u64, domain: &D) -> miette::Result> { let summary = load_era_summary::(domain.state()) .into_diagnostic() .context("loading era summary")?; @@ -48,7 +48,7 @@ pub fn compute_nonce(epoch: u64, domain: &D) -> miette::Result Result { + type ChainSpecificError = dolos_cardano::CardanoError; + + fn decode_entity( + ns: Namespace, + value: &EntityValue, + ) -> Result> { EpochState::decode_entity(ns, value).map(EpochPParams) } @@ -619,9 +624,9 @@ fn setup_tracing_for_format(config: &RootConfig, format: OutputFormat) -> miette crate::common::setup_tracing(&config.logging, &config.telemetry) } -fn decode_stake_credential(key: &EntityKey) -> Result { +fn decode_stake_credential(key: &EntityKey) -> Result> { let mut decoder = minicbor::Decoder::new(key.as_ref()); - decoder.decode().map_err(Into::into) + decoder.decode().map_err(|e| Box::new(e) as _) } fn log_slot_from_key(key: &LogKey) -> u64 { diff --git a/src/bin/dolos/data/find_seq.rs b/src/bin/dolos/data/find_seq.rs index dcd93132e..633156bc7 100644 --- a/src/bin/dolos/data/find_seq.rs +++ b/src/bin/dolos/data/find_seq.rs @@ -1,5 +1,6 @@ use dolos_core::config::RootConfig; use miette::{Context, IntoDiagnostic}; +use dolos_cardano::pallas_hash_to_core; use pallas::crypto::hash::Hash; use std::str::FromStr; @@ -25,7 +26,7 @@ pub fn run(config: &RootConfig, args: &Args) -> miette::Result<()> { .into_diagnostic() .context("error parsing hash")?; - let point = ChainPoint::Specific(args.slot, 
hash); + let point = ChainPoint::Specific(args.slot, pallas_hash_to_core(hash)); let seq = wal .find_intersect(&[point]) diff --git a/src/bin/dolos/data/import_archive.rs b/src/bin/dolos/data/import_archive.rs index 6f6c16a98..80bcf9b7b 100644 --- a/src/bin/dolos/data/import_archive.rs +++ b/src/bin/dolos/data/import_archive.rs @@ -146,7 +146,7 @@ pub fn run(config: &RootConfig, args: &Args, feedback: &Feedback) -> miette::Res break; } - let point = ChainPoint::Specific(block.slot, block.hash); + let point = ChainPoint::Specific(block.slot, dolos_cardano::pallas_hash_to_core(block.hash)); writer .apply(&point, &block.raw) diff --git a/src/bin/dolos/doctor/rollback.rs b/src/bin/dolos/doctor/rollback.rs index 102ead8b7..f0009b3d9 100644 --- a/src/bin/dolos/doctor/rollback.rs +++ b/src/bin/dolos/doctor/rollback.rs @@ -2,6 +2,7 @@ use std::str::FromStr; use dolos_core::sync::SyncExt as _; use miette::IntoDiagnostic as _; +use dolos_cardano::pallas_hash_to_core; use pallas::crypto::hash::Hash; use dolos_core::{config::RootConfig, ChainPoint}; @@ -24,7 +25,7 @@ pub async fn run(config: &RootConfig, args: &Args) -> miette::Result<()> { let domain = crate::common::setup_domain(config)?; let hash: Hash<32> = Hash::from_str(&args.hash).into_diagnostic()?; - let point = ChainPoint::Specific(args.slot, hash); + let point = ChainPoint::Specific(args.slot, pallas_hash_to_core(hash)); domain .rollback(&point) diff --git a/src/bin/dolos/doctor/wal_integrity.rs b/src/bin/dolos/doctor/wal_integrity.rs index 6a4b75416..d717cf989 100644 --- a/src/bin/dolos/doctor/wal_integrity.rs +++ b/src/bin/dolos/doctor/wal_integrity.rs @@ -74,7 +74,7 @@ pub fn run(config: &RootConfig, _args: &Args) -> miette::Result<()> { assert_eq!(previous, last); } - last_hash = Some(hash); + last_hash = Some(hash.map(dolos_cardano::core_hash_to_pallas)); feedback.global_pb.set_position(slot); } diff --git a/src/bin/dolos/eval.rs b/src/bin/dolos/eval.rs index 9bb123770..ea15e1866 100644 --- 
a/src/bin/dolos/eval.rs +++ b/src/bin/dolos/eval.rs @@ -11,6 +11,7 @@ use dolos::{ adapters::DomainAdapter, core::{Domain, EraCbor, StateStore as _, TxoRef}, }; +use dolos_cardano::{core_hash_to_pallas, pallas_hash_to_core}; #[derive(Debug, clap::Args)] pub struct Args { @@ -53,7 +54,7 @@ pub async fn run(config: &RootConfig, args: &Args) -> miette::Result<()> { let refs = tx .consumes() .iter() - .map(|utxo| TxoRef(*utxo.hash(), utxo.index() as u32)) + .map(|utxo| TxoRef(pallas_hash_to_core(*utxo.hash()), utxo.index() as u32)) .collect_vec(); let resolved = domain @@ -73,7 +74,7 @@ pub async fn run(config: &RootConfig, args: &Args) -> miette::Result<()> { .context("era out of range")?; let txin = pallas::ledger::primitives::byron::TxIn::Variant0( - pallas::codec::utils::CborWrap((ref_.0, ref_.1)), + pallas::codec::utils::CborWrap((core_hash_to_pallas(ref_.0), ref_.1)), ); let key = MultiEraInput::Byron( diff --git a/src/bin/dolos/init.rs b/src/bin/dolos/init.rs index 3d84f32fe..01eb2b86f 100644 --- a/src/bin/dolos/init.rs +++ b/src/bin/dolos/init.rs @@ -1,12 +1,12 @@ use clap::Parser; use dolos_cardano::{include, mutable_slots}; +use dolos_cardano::CardanoGenesis; use dolos_core::{ config::{ CardanoConfig, ChainConfig, GenesisConfig, GrpcConfig, MinibfConfig, MinikupoConfig, MithrilConfig, PeerConfig, RelayConfig, RootConfig, StorageConfig, StorageVersion, TrpConfig, UpstreamConfig, }, - GenesisCardanoCardano, }; use inquire::{Confirm, MultiSelect, Select, Text}; use miette::{miette, Context as _, IntoDiagnostic}; @@ -84,7 +84,7 @@ impl KnownNetwork { ] } - pub fn load_included_genesis(&self) -> GenesisCardanoCardano { + pub fn load_included_genesis(&self) -> CardanoGenesis { match self { KnownNetwork::CardanoMainnet => include::mainnet::load(), KnownNetwork::CardanoPreProd => include::preprod::load(), diff --git a/src/prelude.rs b/src/prelude.rs index a909c4363..67cf31d0c 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -1,9 +1,29 @@ pub use dolos_core::*; use 
miette::Diagnostic; +use pallas::network::miniprotocols::Point; use std::fmt::Display; use thiserror::Error; +pub fn pallas_point_to_chain(p: Point) -> ChainPoint { + match p { + Point::Origin => ChainPoint::Origin, + Point::Specific(slot, hash) => { + let arr: [u8; 32] = hash.as_slice().try_into().unwrap_or_default(); + ChainPoint::Specific(slot, dolos_core::hash::Hash::new(arr)) + } + } +} + +#[allow(clippy::result_unit_err)] +pub fn chain_point_to_pallas(p: ChainPoint) -> Result { + match p { + ChainPoint::Origin => Ok(Point::Origin), + ChainPoint::Specific(slot, hash) => Ok(Point::Specific(slot, hash.as_slice().to_vec())), + ChainPoint::Slot(_) => Err(()), + } +} + #[derive(Error, Debug, Diagnostic)] pub enum Error { #[error("io error: {0}")] diff --git a/src/relay/blockfetch.rs b/src/relay/blockfetch.rs index d10dbc67b..d795ae2c1 100644 --- a/src/relay/blockfetch.rs +++ b/src/relay/blockfetch.rs @@ -33,8 +33,8 @@ async fn process_request( debug!(?p1, ?p2, "processing equest"); - let p1 = ChainPoint::from(p1); - let p2 = ChainPoint::from(p2); + let p1 = pallas_point_to_chain(p1); + let p2 = pallas_point_to_chain(p2); let ok1 = wal.contains_point(&p1).map_err(Error::server)?; let ok2 = wal.contains_point(&p2).map_err(Error::server)?; diff --git a/src/relay/chainsync.rs b/src/relay/chainsync.rs index 7af859a05..304203a0b 100644 --- a/src/relay/chainsync.rs +++ b/src/relay/chainsync.rs @@ -31,7 +31,7 @@ impl Session { .map_err(Error::server)? 
.unwrap_or(ChainPoint::Origin); - let point = Point::try_from(point).map_err(|_| Error::custom("invalid point"))?; + let point = chain_point_to_pallas(point).map_err(|_| Error::custom("invalid point"))?; Ok(Tip(point, 0)) } @@ -43,7 +43,7 @@ impl Session { let tip = self.prepare_tip()?; - let point = Point::try_from(point).map_err(|_| Error::custom("invalid point"))?; + let point = chain_point_to_pallas(point).map_err(|_| Error::custom("invalid point"))?; self.connection .send_intersect_found(point, tip) @@ -75,7 +75,7 @@ impl Session { let tip = self.prepare_tip()?; - let point = Point::try_from(point).map_err(|_| Error::custom("invalid point"))?; + let point = chain_point_to_pallas(point).map_err(|_| Error::custom("invalid point"))?; // Ouroboros chain-sync always starts by sending the intersection point as an // initial rollback event. The `is_new_intersection`` flag allows us to track if @@ -102,7 +102,7 @@ impl Session { let tip = self.prepare_tip()?; - let point = Point::try_from(point).map_err(|_| Error::custom("invalid point"))?; + let point = chain_point_to_pallas(point).map_err(|_| Error::custom("invalid point"))?; self.connection .send_roll_backward(point, tip) @@ -147,7 +147,7 @@ impl Session { points.push(Point::Origin); } - let points = points.into_iter().map(From::from).collect_vec(); + let points = points.into_iter().map(pallas_point_to_chain).collect_vec(); let intersect = ChainCrawler::::start(&self.domain, &points).unwrap(); diff --git a/src/serve/grpc/convert.rs b/src/serve/grpc/convert.rs index 19b343b05..9d6d5b2b6 100644 --- a/src/serve/grpc/convert.rs +++ b/src/serve/grpc/convert.rs @@ -1,12 +1,12 @@ use bytes::Bytes; -use pallas::crypto::hash::Hash; +use dolos_core::hash::Hash as CoreHash; use tonic::Status; -pub fn bytes_to_hash32(data: &Bytes) -> Result, Status> { +pub fn bytes_to_hash32(data: &Bytes) -> Result, Status> { let array: [u8; 32] = data .as_ref() .try_into() .map_err(|_| Status::invalid_argument("invalid hash value, needs to be 
32-bytes long"))?; - Ok(Hash::<32>::new(array)) + Ok(CoreHash::<32>::new(array)) } diff --git a/src/serve/grpc/mod.rs b/src/serve/grpc/mod.rs index b28634d1a..680ea3301 100644 --- a/src/serve/grpc/mod.rs +++ b/src/serve/grpc/mod.rs @@ -21,7 +21,7 @@ pub struct Driver; impl dolos_core::Driver for Driver where - D: Domain + LedgerContext, + D: Domain + LedgerContext, C: CancelToken, { type Config = GrpcConfig; diff --git a/src/serve/grpc/query.rs b/src/serve/grpc/query.rs index a01f9705a..85e32daee 100644 --- a/src/serve/grpc/query.rs +++ b/src/serve/grpc/query.rs @@ -1,4 +1,5 @@ use dolos_cardano::indexes::CardanoIndexExt; +use dolos_cardano::CardanoError; use itertools::Itertools as _; use pallas::interop::utxorpc::{self as interop, spec::query::any_utxo_pattern::UtxoPattern}; use pallas::interop::utxorpc::{spec as u5c, LedgerContext}; @@ -14,14 +15,14 @@ use dolos_cardano::indexes::AsyncCardanoQueryExt; pub fn point_to_u5c(_ledger: &T, point: &ChainPoint) -> u5c::query::ChainPoint { u5c::query::ChainPoint { slot: point.slot(), - hash: point.hash().map(|h| h.to_vec()).unwrap_or_default().into(), + hash: point.hash().map(|h| h.as_slice().to_vec()).unwrap_or_default().into(), ..Default::default() } } pub struct QueryServiceImpl where - D: Domain + LedgerContext, + D: Domain + LedgerContext, { domain: D, mapper: interop::Mapper, @@ -29,7 +30,7 @@ where impl QueryServiceImpl where - D: Domain + LedgerContext, + D: Domain + LedgerContext, { pub fn new(domain: D) -> Self { let mapper = interop::Mapper::new(domain.clone()); @@ -203,7 +204,7 @@ fn from_u5c_txoref(txo: u5c::query::TxoRef) -> Result { Ok(TxoRef(hash, txo.index)) } -async fn into_u5c_utxo( +async fn into_u5c_utxo + LedgerContext>( txo: &TxoRef, body: &EraCbor, mapper: &interop::Mapper, @@ -213,7 +214,8 @@ async fn into_u5c_utxo( let query = dolos_core::AsyncQueryFacade::new(domain.clone()); - let parsed_output = MultiEraOutput::try_from(body)?; + let era = pallas::ledger::traverse::Era::try_from(body.0)?; + 
let parsed_output = MultiEraOutput::decode(era, &body.1)?; let mut parsed = mapper.map_tx_output(&parsed_output, None); // If the output has a datum hash, try to fetch the datum value from storage @@ -261,7 +263,7 @@ async fn into_u5c_utxo( Ok(u5c::query::AnyUtxoData { txo_ref: Some(u5c::query::TxoRef { - hash: txo.0.to_vec().into(), + hash: txo.0.as_slice().to_vec().into(), index: txo.1, }), native_bytes: body.1.clone().into(), @@ -273,7 +275,7 @@ async fn into_u5c_utxo( #[async_trait::async_trait] impl u5c::query::query_service_server::QueryService for QueryServiceImpl where - D: Domain + LedgerContext, + D: Domain + LedgerContext, { async fn read_params( &self, diff --git a/src/serve/grpc/submit.rs b/src/serve/grpc/submit.rs index 5d44290b0..15a7e481d 100644 --- a/src/serve/grpc/submit.rs +++ b/src/serve/grpc/submit.rs @@ -2,9 +2,7 @@ use any_chain_eval::Chain; use dolos_core::SubmitExt; use futures_core::Stream; use futures_util::{StreamExt as _, TryStreamExt as _}; -use pallas::crypto::hash::Hash; use pallas::interop::utxorpc as u5c; -use pallas::interop::utxorpc::spec::cardano::ExUnits; use pallas::interop::utxorpc::spec::submit::{WaitForTxResponse, *}; use pallas::interop::utxorpc::{self as interop, LedgerContext}; use std::collections::HashSet; @@ -46,7 +44,7 @@ fn tx_stage_to_u5c(stage: MempoolTxStage) -> i32 { fn event_to_watch_mempool_response(event: MempoolEvent) -> WatchMempoolResponse { WatchMempoolResponse { tx: TxInMempool { - r#ref: event.tx.hash.to_vec().into(), + r#ref: event.tx.hash.as_slice().to_vec().into(), native_bytes: event.tx.payload.cbor().to_vec().into(), stage: tx_stage_to_u5c(event.tx.stage.clone()), parsed_state: None, // TODO @@ -58,38 +56,19 @@ fn event_to_watch_mempool_response(event: MempoolEvent) -> WatchMempoolResponse fn event_to_wait_for_tx_response(event: MempoolEvent) -> WaitForTxResponse { WaitForTxResponse { stage: tx_stage_to_u5c(event.tx.stage.clone()), - r#ref: event.tx.hash.to_vec().into(), + r#ref: 
event.tx.hash.as_slice().to_vec().into(), } } -fn tx_eval_to_u5c(eval: Result) -> u5c::spec::cardano::TxEval { +fn tx_eval_to_u5c( + eval: Result>, +) -> u5c::spec::cardano::TxEval { match eval { - Ok(tx) => u5c::spec::cardano::TxEval { - ex_units: tx.report.iter().flatten().try_fold( - u5c::spec::cardano::ExUnits::default(), - |acc, eval| { - Some(ExUnits { - steps: acc.steps + eval.units.steps, - memory: acc.memory + eval.units.mem, - }) - }, - ), - redeemers: tx - .report - .iter() - .flatten() - .map(|x| u5c::spec::cardano::Redeemer { - purpose: x.tag as i32, - index: x.index, - ex_units: Some(u5c::spec::cardano::ExUnits { - steps: x.units.steps, - memory: x.units.mem, - }), - ..Default::default() - }) - .collect(), - fee: None, // TODO - traces: vec![], // TODO + Ok(_tx) => u5c::spec::cardano::TxEval { + ex_units: None, + redeemers: vec![], + fee: None, + traces: vec![], ..Default::default() }, Err(e) => u5c::spec::cardano::TxEval { @@ -136,7 +115,7 @@ where .map_err(|e| Status::invalid_argument(format!("could not process tx: {e}")))?; Ok(Response::new(SubmitTxResponse { - r#ref: hash.to_vec().into(), + r#ref: hash.as_slice().to_vec().into(), })) } @@ -144,11 +123,14 @@ where &self, request: Request, ) -> Result, Status> { - let subjects: HashSet<_> = request + let subjects: HashSet = request .into_inner() .r#ref .into_iter() - .map(|x| Hash::from(x.as_ref())) + .map(|x| { + let arr: [u8; 32] = x.as_ref().try_into().unwrap_or_default(); + dolos_core::hash::Hash::new(arr) + }) .collect(); let initial_stages: Vec<_> = subjects @@ -156,7 +138,7 @@ where .map(|x| { Result::<_, Status>::Ok(WaitForTxResponse { stage: tx_stage_to_u5c(self.domain.mempool().check_status(x).stage), - r#ref: x.to_vec().into(), + r#ref: x.as_slice().to_vec().into(), }) }) .collect(); diff --git a/src/serve/grpc/sync.rs b/src/serve/grpc/sync.rs index 608a24501..c3fd83328 100644 --- a/src/serve/grpc/sync.rs +++ b/src/serve/grpc/sync.rs @@ -53,7 +53,7 @@ fn raw_to_blockref( fn 
point_to_blockref(point: &ChainPoint, timestamp: u64) -> u5c::sync::BlockRef { BlockRef { - hash: point.hash().map(|h| h.to_vec()).unwrap_or_default().into(), + hash: point.hash().map(|h| h.as_slice().to_vec()).unwrap_or_default().into(), slot: point.slot(), timestamp, ..Default::default() diff --git a/src/serve/grpc/watch.rs b/src/serve/grpc/watch.rs index 7e90b7218..eb7ce3b6a 100644 --- a/src/serve/grpc/watch.rs +++ b/src/serve/grpc/watch.rs @@ -274,7 +274,10 @@ where let intersect = inner_req .intersect .iter() - .map(|x| ChainPoint::Specific(x.slot, x.hash.to_vec().as_slice().into())) + .map(|x| { + let arr: [u8; 32] = x.hash.as_ref().try_into().unwrap_or([0u8; 32]); + ChainPoint::Specific(x.slot, dolos_core::hash::Hash::new(arr)) + }) .collect::>(); let stream = diff --git a/src/serve/o7s_unix/chainsync.rs b/src/serve/o7s_unix/chainsync.rs index a8f9a570f..81c6efdb1 100644 --- a/src/serve/o7s_unix/chainsync.rs +++ b/src/serve/o7s_unix/chainsync.rs @@ -33,7 +33,7 @@ impl Session { .map(|(point, _)| point) .unwrap_or(ChainPoint::Origin); - let point = Point::try_from(point).map_err(|_| Error::custom("invalid point"))?; + let point = chain_point_to_pallas(point).map_err(|_| Error::custom("invalid point"))?; Ok(Tip(point, 0)) } @@ -45,7 +45,7 @@ impl Session { let tip = self.prepare_tip()?; - let point = Point::try_from(point).map_err(|_| Error::custom("invalid point"))?; + let point = chain_point_to_pallas(point).map_err(|_| Error::custom("invalid point"))?; self.connection .send_intersect_found(point, tip) @@ -77,7 +77,7 @@ impl Session { let tip = self.prepare_tip()?; - let point = Point::try_from(point).map_err(|_| Error::custom("invalid point"))?; + let point = chain_point_to_pallas(point).map_err(|_| Error::custom("invalid point"))?; // Ouroboros chain-sync always starts by sending the intersection point as an // initial rollback event. 
The `is_new_intersection`` flag allows us to track if @@ -104,7 +104,7 @@ impl Session { let tip = self.prepare_tip()?; - let point = Point::try_from(point).map_err(|_| Error::custom("invalid point"))?; + let point = chain_point_to_pallas(point).map_err(|_| Error::custom("invalid point"))?; self.connection .send_roll_backward(point, tip) @@ -149,7 +149,7 @@ impl Session { points.push(Point::Origin); } - let points = points.into_iter().map(From::from).collect_vec(); + let points = points.into_iter().map(pallas_point_to_chain).collect_vec(); let intersect = ChainCrawler::::start(&self.domain, &points).unwrap(); diff --git a/src/serve/o7s_unix/mod.rs b/src/serve/o7s_unix/mod.rs index 39232f4fe..a4633d15a 100644 --- a/src/serve/o7s_unix/mod.rs +++ b/src/serve/o7s_unix/mod.rs @@ -20,7 +20,7 @@ pub struct DriverConfig { //#[cfg(test)] //mod tests; -async fn handle_session( +async fn handle_session, C: CancelToken>( domain: D, connection: NodeServer, cancel: C, @@ -55,7 +55,7 @@ async fn handle_session( Ok(()) } -async fn accept_client_connections( +async fn accept_client_connections, C: CancelToken>( domain: D, config: &DriverConfig, tasks: &mut TaskTracker, @@ -89,7 +89,7 @@ async fn accept_client_connections( pub struct Driver; -impl dolos_core::Driver for Driver { +impl, C: CancelToken> dolos_core::Driver for Driver { type Config = DriverConfig; #[instrument(skip_all)] diff --git a/src/serve/o7s_unix/statequery.rs b/src/serve/o7s_unix/statequery.rs index aca1cbae9..a1e454df2 100644 --- a/src/serve/o7s_unix/statequery.rs +++ b/src/serve/o7s_unix/statequery.rs @@ -8,6 +8,8 @@ use pallas::codec::minicbor; use pallas::network::miniprotocols::{localstate, localstate::queries_v16 as q16, Point as OPoint}; use tracing::{debug, info, warn}; +use dolos_cardano::{pallas_hash_to_core, CardanoGenesis}; + use crate::prelude::*; use crate::serve::o7s_unix::utils; use utils::{ @@ -16,13 +18,13 @@ use utils::{ build_stake_snapshots_response, build_utxo_by_address_response, }; -pub 
struct Session { +pub struct Session> { domain: D, connection: localstate::Server, acquired_point: Option, } -impl Session { +impl> Session { fn tip_cursor(&self) -> Result { let point = self .domain @@ -43,7 +45,7 @@ impl Session { let block = MultiEraBlock::decode(&body).map_err(|e| Error::server(e.to_string()))?; - Ok(ChainPoint::Specific(slot, block.hash())) + Ok(ChainPoint::Specific(slot, pallas_hash_to_core(block.hash()))) } _ => Ok(point), } @@ -74,7 +76,7 @@ impl Session { debug!(?point, "handling acquire request"); let chain_point = match point { - Some(p) => ChainPoint::from(p), + Some(p) => pallas_point_to_chain(p), None => { // None means acquire the latest point self.tip_cursor()? @@ -215,7 +217,7 @@ impl Session { AnyCbor::from_encode(match point { ChainPoint::Origin => OPoint::Origin, - ChainPoint::Specific(s, h) => OPoint::Specific(s, h.to_vec()), + ChainPoint::Specific(s, h) => OPoint::Specific(s, h.as_slice().to_vec()), ChainPoint::Slot(_) => OPoint::Origin, }) } @@ -230,7 +232,7 @@ impl Session { let eras: Vec = chain_summary.iter_all().cloned().collect(); let genesis = self.domain.genesis(); - build_era_history_response(&eras, &genesis)? + build_era_history_response(&eras, genesis.as_ref())? 
} Ok(q16::Request::LedgerQuery(q16::LedgerQuery::HardForkQuery( q16::HardForkQuery::GetCurrentEra, @@ -263,7 +265,7 @@ impl Session { let p = match point { ChainPoint::Origin => OPoint::Origin, - ChainPoint::Specific(s, h) => OPoint::Specific(s, h.to_vec()), + ChainPoint::Specific(s, h) => OPoint::Specific(s, h.as_slice().to_vec()), ChainPoint::Slot(_) => OPoint::Origin, }; AnyCbor::from_encode((p,)) @@ -402,7 +404,7 @@ impl Session { } } -pub async fn handle_session( +pub async fn handle_session, C: CancelToken>( domain: D, connection: localstate::Server, cancel: C, diff --git a/src/serve/o7s_unix/utils/era_history.rs b/src/serve/o7s_unix/utils/era_history.rs index e441f7ea5..710c732f2 100644 --- a/src/serve/o7s_unix/utils/era_history.rs +++ b/src/serve/o7s_unix/utils/era_history.rs @@ -83,7 +83,7 @@ impl<'a, C> minicbor::Encode for EraHistoryResponse<'a> { pub fn build_era_history_response( eras: &[DolosEraSummary], - genesis: &GenesisCardanoCardano, + genesis: &dolos_cardano::CardanoGenesis, ) -> Result { if eras.is_empty() { return Err(Error::server("era summary is empty")); diff --git a/src/serve/o7s_unix/utils/utxo.rs b/src/serve/o7s_unix/utils/utxo.rs index f937fd3d5..d8b8656ba 100644 --- a/src/serve/o7s_unix/utils/utxo.rs +++ b/src/serve/o7s_unix/utils/utxo.rs @@ -1,4 +1,5 @@ use crate::prelude::*; +use dolos_cardano::core_hash_to_pallas; use dolos_core::{IndexStore, StateStore}; use pallas::codec::utils::{AnyCbor, AnyUInt, KeyValuePairs}; use pallas::ledger::addresses::Address; @@ -60,10 +61,12 @@ pub fn build_utxo_by_address_response( for utxo_ref in refs_vec { if let Some(era_cbor) = utxos.get(&utxo_ref) { - let output = MultiEraOutput::try_from(era_cbor.as_ref()) + let era = pallas::ledger::traverse::Era::try_from(era_cbor.0) + .map_err(|e| Error::server(format!("failed to decode utxo era: {}", e)))?; + let output = MultiEraOutput::decode(era, &era_cbor.1) .map_err(|e| Error::server(format!("failed to decode utxo: {}", e)))?; let q16_utxo = q16::UTxO { 
- transaction_id: utxo_ref.0, + transaction_id: core_hash_to_pallas(utxo_ref.0), index: AnyUInt::U32(utxo_ref.1), }; diff --git a/src/sync/pull.rs b/src/sync/pull.rs index 9a0ee914f..b56364c46 100644 --- a/src/sync/pull.rs +++ b/src/sync/pull.rs @@ -124,8 +124,7 @@ impl gasket::framework::Worker for Worker { .intersect_candidates(5) .or_panic()? .into_iter() - .map(TryFrom::try_from) - .filter_map(|x| x.ok()) + .filter_map(|p| chain_point_to_pallas(p).ok()) .collect_vec(); if candidates.is_empty() { @@ -309,7 +308,7 @@ impl Stage { }; self.downstream - .send(PullEvent::Rollback(point.into()).into()) + .send(PullEvent::Rollback(pallas_point_to_chain(point)).into()) .await .or_panic()?; diff --git a/src/sync/submit.rs b/src/sync/submit.rs index 10f9e94e4..daf6f367f 100644 --- a/src/sync/submit.rs +++ b/src/sync/submit.rs @@ -2,7 +2,6 @@ use std::collections::VecDeque; use gasket::framework::*; use itertools::Itertools as _; -use pallas::crypto::hash::Hash; use pallas::network::facades::PeerClient; use pallas::network::miniprotocols::txsubmission::{EraTxBody, EraTxId, Request, TxIdAndSize}; use std::time::Duration; @@ -21,7 +20,7 @@ fn to_n2n_reply(mempool_tx: &MempoolTx) -> TxIdAndSize { let era = to_n2n_era(*era); - let id = EraTxId(era, mempool_tx.hash.to_vec()); + let id = EraTxId(era, mempool_tx.hash.as_slice().to_vec()); TxIdAndSize(id, bytes.len() as u32) } @@ -203,7 +202,10 @@ impl gasket::framework::Worker for Worker { let found: Vec = ids .iter() - .filter_map(|x| stage.mempool.find_inflight(&Hash::from(x.1.as_slice()))) + .filter_map(|x| { + let arr: [u8; 32] = x.1.as_slice().try_into().ok()?; + stage.mempool.find_inflight(&dolos_core::hash::Hash::new(arr)) + }) .collect_vec(); let to_send = found.into_iter().map(to_n2n_body).collect_vec(); From 35eb65407f1364a762f5256313c2140ae52ff57b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 17 Mar 2026 15:35:20 -0300 Subject: [PATCH 43/85] fix: core & archive have generic chain error 
(even if infallible) When using a different backend, the backend might validate blocks --- crates/core/src/lib.rs | 8 +- crates/redb3/src/archive/mod.rs | 166 ++++++++++++++++++++++---------- 2 files changed, 123 insertions(+), 51 deletions(-) diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 0a1d46b08..52b7c3e61 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -542,7 +542,13 @@ pub trait ChainLogic: Sized + Send + Sync { /// The returned work unit should be executed using `executor::execute_work_unit()`. fn pop_work(&mut self, domain: &D) -> Option> where - D: Domain; + D: Domain< + Chain = Self, + Entity = Self::Entity, + EntityDelta = Self::Delta, + ChainSpecificError = Self::ChainSpecificError, + Genesis = Self::Genesis, + >; /// Compute undo data for a block during rollback. /// diff --git a/crates/redb3/src/archive/mod.rs b/crates/redb3/src/archive/mod.rs index dc9d612a5..9f70b24fc 100644 --- a/crates/redb3/src/archive/mod.rs +++ b/crates/redb3/src/archive/mod.rs @@ -2,6 +2,7 @@ use ::redb::{Database, ReadableDatabase}; use redb::ReadTransaction; use std::{ collections::HashMap, + marker::PhantomData, path::Path, sync::{Arc, Mutex}, }; @@ -49,6 +50,24 @@ impl RedbArchiveError { pub fn from_io(e: std::io::Error) -> Self { Self(ArchiveError::InternalError(e.to_string())) } + + /// Convert to ArchiveError with any ChainSpecificError type. + /// Since this error type never produces a ChainSpecificError (it's Infallible), + /// this is safe to do. 
+ pub fn into_archive_error( + self, + ) -> ArchiveError { + match self.0 { + ArchiveError::BrokenInvariant(e) => ArchiveError::BrokenInvariant(e), + ArchiveError::InternalError(e) => ArchiveError::InternalError(e), + ArchiveError::QueryNotSupported => ArchiveError::QueryNotSupported, + ArchiveError::InvalidStoreVersion => ArchiveError::InvalidStoreVersion, + ArchiveError::DecodingError(e) => ArchiveError::DecodingError(e), + ArchiveError::EntityDecodingError(e) => ArchiveError::EntityDecodingError(e), + ArchiveError::NamespaceNotFound(ns) => ArchiveError::NamespaceNotFound(ns), + ArchiveError::ChainSpecifc(infallible) => match infallible {}, + } + } } impl From for RedbArchiveError { @@ -113,15 +132,27 @@ impl From for RedbArchiveError { const DEFAULT_CACHE_SIZE_MB: usize = 500; -#[derive(Clone)] -pub struct ArchiveStore { +pub struct ArchiveStore { db: Arc, tables: HashMap, flatfiles: Arc, _tempdir: Option>, + _phantom: PhantomData, +} + +impl Clone for ArchiveStore { + fn clone(&self) -> Self { + Self { + db: self.db.clone(), + tables: self.tables.clone(), + flatfiles: self.flatfiles.clone(), + _tempdir: self._tempdir.clone(), + _phantom: PhantomData, + } + } } -impl ArchiveStore { +impl ArchiveStore { /// Gracefully shutdown the archive store. 
pub fn shutdown(&self) -> Result<(), RedbArchiveError> { Ok(()) @@ -162,6 +193,7 @@ impl ArchiveStore { tables: HashMap::from_iter(tables), flatfiles: Arc::new(flatfiles), _tempdir: None, + _phantom: PhantomData, }; store.initialize()?; @@ -182,6 +214,7 @@ impl ArchiveStore { tables: HashMap::from_iter(tables), flatfiles: Arc::new(flatfiles), _tempdir: Some(Arc::new(tempdir)), + _phantom: PhantomData, }; store.initialize()?; @@ -229,7 +262,7 @@ impl ArchiveStore { }) } - pub fn start_writer(&self) -> Result { + pub fn start_writer(&self) -> Result, RedbArchiveError> { let mut wx = self.db().begin_write()?; wx.set_durability(Durability::Immediate)?; wx.set_quick_repair(true); @@ -239,6 +272,7 @@ impl ArchiveStore { tables: self.tables.clone(), flatfiles: self.flatfiles.clone(), pending_blocks: Mutex::new(Vec::new()), + _phantom: PhantomData, }) } @@ -789,17 +823,18 @@ impl ArchiveStore { } } -pub struct ArchiveStoreWriter { +pub struct ArchiveStoreWriter { wx: WriteTransaction, tables: HashMap, flatfiles: Arc, pending_blocks: Mutex>, + _phantom: PhantomData, } -impl dolos_core::ArchiveWriter for ArchiveStoreWriter { - type ChainSpecificError = Infallible; +impl dolos_core::ArchiveWriter for ArchiveStoreWriter { + type ChainSpecificError = E; - fn apply(&self, point: &ChainPoint, block: &RawBlock) -> Result<(), ArchiveError> { + fn apply(&self, point: &ChainPoint, block: &RawBlock) -> Result<(), ArchiveError> { self.pending_blocks .lock() .unwrap() @@ -807,21 +842,26 @@ impl dolos_core::ArchiveWriter for ArchiveStoreWriter { Ok(()) } - fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError> { - tables::BlocksTable::undo(&self.wx, &self.flatfiles, point)?; - Ok(()) + fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError> { + tables::BlocksTable::undo(&self.wx, &self.flatfiles, point) + .map_err(RedbArchiveError::from) + .map_err(|e| e.into_archive_error()) } - fn commit(self) -> Result<(), ArchiveError> { + fn commit(self) -> Result<(), ArchiveError> { 
// 1. Batch-append all pending blocks to flat files (fsync inside). // 2. Insert all index entries into redb. // 3. Commit redb transaction. let pending = self.pending_blocks.into_inner().unwrap(); if !pending.is_empty() { - tables::BlocksTable::apply_batch(&self.wx, &self.flatfiles, &pending)?; + tables::BlocksTable::apply_batch(&self.wx, &self.flatfiles, &pending) + .map_err(RedbArchiveError::from) + .map_err(|e| e.into_archive_error())?; } - self.wx.commit().map_err(RedbArchiveError::from)?; + self.wx.commit() + .map_err(RedbArchiveError::from) + .map_err(|e| e.into_archive_error())?; Ok(()) } @@ -830,7 +870,7 @@ impl dolos_core::ArchiveWriter for ArchiveStoreWriter { ns: Namespace, key: &dolos_core::LogKey, value: &dolos_core::EntityValue, - ) -> Result<(), ArchiveError> { + ) -> Result<(), ArchiveError> { let table = self .tables .get(&ns) @@ -838,16 +878,20 @@ impl dolos_core::ArchiveWriter for ArchiveStoreWriter { table .write(&self.wx, key, value) - .map_err(RedbArchiveError::from)?; + .map_err(RedbArchiveError::from) + .map_err(|e| e.into_archive_error())?; Ok(()) } } -pub struct LogIter(pub(crate) ::redb::Range<'static, &'static [u8], &'static [u8]>); +pub struct LogIter( + pub(crate) ::redb::Range<'static, &'static [u8], &'static [u8]>, + PhantomData, +); -impl Iterator for LogIter { - type Item = Result<(LogKey, EntityValue), ArchiveError>; +impl Iterator for LogIter { + type Item = Result<(LogKey, EntityValue), ArchiveError>; fn next(&mut self) -> Option { let next = self.0.next()?; @@ -856,16 +900,19 @@ impl Iterator for LogIter { .map(|(k, v)| (k.value().to_vec(), v.value().to_vec())) .map(|(k, v)| (LogKey::from(k), v)) .map_err(RedbArchiveError::from) - .map_err(ArchiveError::from); + .map_err(|e: RedbArchiveError| e.into_archive_error()); Some(entry) } } -pub struct EntityValueIter(pub(crate) ::redb::MultimapValue<'static, &'static [u8]>); +pub struct EntityValueIter( + pub(crate) ::redb::MultimapValue<'static, &'static [u8]>, + PhantomData, +); 
-impl Iterator for EntityValueIter { - type Item = Result>; +impl Iterator for EntityValueIter { + type Item = Result>; fn next(&mut self) -> Option { let next = self.0.next()?; @@ -873,66 +920,76 @@ impl Iterator for EntityValueIter { let entry = next .map(|v| v.value().to_vec()) .map_err(RedbArchiveError::from) - .map_err(ArchiveError::from); + .map_err(|e: RedbArchiveError| e.into_archive_error()); Some(entry) } } -impl dolos_core::ArchiveStore for ArchiveStore { +impl dolos_core::ArchiveStore for ArchiveStore { type BlockIter<'a> = ArchiveRangeIter; - type Writer = ArchiveStoreWriter; - type LogIter = LogIter; - type EntityValueIter = EntityValueIter; - type ChainSpecificError = Infallible; + type Writer = ArchiveStoreWriter; + type LogIter = LogIter; + type EntityValueIter = EntityValueIter; + type ChainSpecificError = E; - fn start_writer(&self) -> Result> { - Ok(Self::start_writer(self)?) + fn start_writer(&self) -> Result> { + Self::start_writer(self) + .map_err(|e| e.into_archive_error()) } fn get_block_by_slot( &self, slot: &BlockSlot, - ) -> Result, ArchiveError> { - Ok(Self::get_block_by_slot(self, slot)?) + ) -> Result, ArchiveError> { + Self::get_block_by_slot(self, slot) + .map_err(|e| e.into_archive_error()) } fn get_range<'a>( &self, from: Option, to: Option, - ) -> Result, ArchiveError> { - Ok(Self::get_range(self, from, to)?) + ) -> Result, ArchiveError> { + Self::get_range(self, from, to) + .map_err(|e| e.into_archive_error()) } fn find_intersect( &self, intersect: &[ChainPoint], - ) -> Result, ArchiveError> { - Ok(Self::find_intersect(self, intersect)?) + ) -> Result, ArchiveError> { + Self::find_intersect(self, intersect) + .map_err(|e| e.into_archive_error()) } - fn get_tip(&self) -> Result, ArchiveError> { - Ok(Self::get_tip(self)?) 
+ fn get_tip(&self) -> Result, ArchiveError> { + Self::get_tip(self) + .map_err(|e| e.into_archive_error()) } fn prune_history( &self, max_slots: u64, max_prune: Option, - ) -> Result> { - Ok(Self::prune_history(self, max_slots, max_prune)?) + ) -> Result> { + Self::prune_history(self, max_slots, max_prune) + .map_err(|e| e.into_archive_error()) } - fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { - Ok(Self::truncate_front(self, after)?) + fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { + Self::truncate_front(self, after) + .map_err(|e| e.into_archive_error()) } fn read_logs( &self, ns: Namespace, keys: &[&dolos_core::LogKey], - ) -> Result>, ArchiveError> { - let mut rx = self.db().begin_read().map_err(RedbArchiveError::from)?; + ) -> Result>, ArchiveError> { + let mut rx = self.db().begin_read().map_err(|e| { + let redb_err: RedbArchiveError = e.into(); + redb_err.into_archive_error() + })?; let table = self .tables @@ -944,7 +1001,10 @@ impl dolos_core::ArchiveStore for ArchiveStore { for key in keys { let value = table .read_value(&mut rx, key.as_ref()) - .map_err(RedbArchiveError::from)?; + .map_err(|e| { + let redb_err: RedbArchiveError = e.into(); + redb_err.into_archive_error() + })?; out.push(value); } @@ -955,8 +1015,11 @@ impl dolos_core::ArchiveStore for ArchiveStore { &self, ns: Namespace, range: std::ops::Range, - ) -> Result> { - let mut rx = self.db().begin_read().map_err(RedbArchiveError::from)?; + ) -> Result> { + let mut rx = self.db().begin_read().map_err(|e| { + let redb_err: RedbArchiveError = e.into(); + redb_err.into_archive_error() + })?; let range = std::ops::Range { start: range.start.as_ref(), @@ -970,9 +1033,12 @@ impl dolos_core::ArchiveStore for ArchiveStore { let values = table .range(&mut rx, range) - .map_err(RedbArchiveError::from)?; + .map_err(|e| { + let redb_err: RedbArchiveError = e.into(); + redb_err.into_archive_error() + })?; - Ok(LogIter(values)) + Ok(LogIter(values, 
PhantomData)) } } From 982b4748e8f6fa00c766b7789fa12f8dda06c3ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Wed, 18 Mar 2026 13:33:47 -0300 Subject: [PATCH 44/85] fix: tests & format rest of crates --- crates/cardano/src/eras.rs | 8 +- crates/cardano/src/estart/commit.rs | 4 +- crates/cardano/src/estart/loading.rs | 23 +++- crates/cardano/src/estart/nonces.rs | 5 +- crates/cardano/src/ewrap/commit.rs | 4 +- crates/cardano/src/ewrap/loading.rs | 61 ++++++++-- crates/cardano/src/ewrap/rewards.rs | 5 +- crates/cardano/src/ewrap/wrapup.rs | 5 +- crates/cardano/src/genesis/mod.rs | 20 +++- crates/cardano/src/genesis/staking.rs | 7 +- crates/cardano/src/include/devnet/mod.rs | 1 - crates/cardano/src/include/mainnet/mod.rs | 1 - crates/cardano/src/indexes/delta.rs | 3 +- crates/cardano/src/indexes/query.rs | 96 ++++++++++----- crates/cardano/src/lib.rs | 29 ++++- crates/cardano/src/model.rs | 10 +- crates/cardano/src/rewards/mod.rs | 4 +- crates/cardano/src/roll/accounts.rs | 5 +- crates/cardano/src/roll/datums.rs | 10 +- crates/cardano/src/roll/epochs.rs | 18 +-- crates/cardano/src/roll/mod.rs | 4 +- crates/cardano/src/roll/pools.rs | 2 +- crates/cardano/src/roll/proposals.rs | 7 +- crates/cardano/src/roll/txs.rs | 2 +- crates/cardano/src/roll/work_unit.rs | 7 +- crates/cardano/src/rupd/loading.rs | 5 +- crates/cardano/src/validate.rs | 11 +- crates/core/src/async_query.rs | 7 +- crates/core/src/builtin/noop.rs | 112 ++++++++++++------ crates/core/src/lib.rs | 11 +- crates/core/src/mempool.rs | 5 +- crates/minibf/src/hacks.rs | 38 ++++-- crates/minibf/src/mapping.rs | 15 ++- crates/minibf/src/routes/accounts.rs | 36 ++++-- crates/minibf/src/routes/addresses.rs | 9 +- crates/minibf/src/routes/genesis.rs | 4 +- crates/minibf/src/routes/metadata.rs | 5 +- crates/minibf/src/routes/network.rs | 4 +- crates/minibf/src/routes/scripts.rs | 5 +- crates/minibf/src/routes/txs.rs | 14 ++- crates/minibf/src/routes/utxos.rs | 6 +- crates/minikupo/src/lib.rs 
| 26 +++- crates/minikupo/src/routes/datums.rs | 4 +- crates/minikupo/src/routes/matches.rs | 5 +- crates/minikupo/src/routes/scripts.rs | 4 +- crates/redb3/src/archive/mod.rs | 55 ++++----- crates/redb3/src/archive/tests.rs | 7 +- crates/redb3/src/mempool.rs | 4 +- crates/redb3/src/state/utxoset.rs | 34 ++++-- crates/testing/src/blocks.rs | 9 +- crates/testing/src/faults.rs | 86 +++++++++----- crates/testing/src/harness/cardano.rs | 39 ++++-- crates/testing/src/lib.rs | 6 +- crates/testing/src/synthetic.rs | 31 +++-- crates/testing/src/toy_domain.rs | 28 +++-- crates/trp/src/compiler.rs | 21 +++- crates/trp/src/lib.rs | 6 +- crates/trp/src/methods.rs | 3 +- src/adapters/mod.rs | 12 +- src/adapters/storage.rs | 137 +++++++++------------- src/bin/dolos/common.rs | 2 +- src/bin/dolos/data/compute_nonce.rs | 9 +- src/bin/dolos/data/export.rs | 2 +- src/bin/dolos/data/find_seq.rs | 2 +- src/bin/dolos/data/import_archive.rs | 3 +- src/bin/dolos/doctor/rollback.rs | 2 +- src/bin/dolos/init.rs | 12 +- src/serve/grpc/query.rs | 6 +- src/serve/grpc/sync.rs | 6 +- src/serve/o7s_unix/mod.rs | 9 +- src/serve/o7s_unix/statequery.rs | 5 +- src/sync/submit.rs | 4 +- 72 files changed, 796 insertions(+), 411 deletions(-) diff --git a/crates/cardano/src/eras.rs b/crates/cardano/src/eras.rs index 0b78ee651..4ec34b316 100644 --- a/crates/cardano/src/eras.rs +++ b/crates/cardano/src/eras.rs @@ -187,7 +187,9 @@ impl ChainSummary { } } -pub fn load_era_summary(state: &D::State) -> Result> { +pub fn load_era_summary( + state: &D::State, +) -> Result> { let eras = state.iter_entities_typed(EraSummary::NS, None)?; let mut chain = ChainSummary::default(); @@ -201,7 +203,9 @@ pub fn load_era_summary(state: &D::State) -> Result Result> { +pub fn load_chain_summary_from_state( + state: &impl StateStore, +) -> Result> { let eras = state.iter_entities_typed(EraSummary::NS, None)?; let mut chain = ChainSummary::default(); diff --git a/crates/cardano/src/estart/commit.rs 
b/crates/cardano/src/estart/commit.rs index 49a85a8c3..0fb0dac1a 100644 --- a/crates/cardano/src/estart/commit.rs +++ b/crates/cardano/src/estart/commit.rs @@ -109,7 +109,9 @@ impl super::WorkContext { } #[instrument(skip_all)] - pub fn commit>( + pub fn commit< + D: Domain, + >( &mut self, state: &D::State, archive: &D::Archive, diff --git a/crates/cardano/src/estart/loading.rs b/crates/cardano/src/estart/loading.rs index 5aa417747..9fbdad8e5 100644 --- a/crates/cardano/src/estart/loading.rs +++ b/crates/cardano/src/estart/loading.rs @@ -8,7 +8,12 @@ use crate::{ }; impl super::WorkContext { - pub fn compute_deltas>(&mut self, state: &D::State) -> Result<(), ChainError> { + pub fn compute_deltas< + D: Domain, + >( + &mut self, + state: &D::State, + ) -> Result<(), ChainError> { let mut visitor_nonces = super::nonces::BoundaryVisitor; let mut visitor_reset = super::reset::BoundaryVisitor::default(); @@ -57,14 +62,19 @@ impl super::WorkContext { /// Compute the value of unredeemed AVVM UTxOs at the Shelley→Allegra /// boundary. These UTxOs are removed from the UTxO set and their value /// returned to reserves, matching the Haskell ledger's `translateEra`. 
- fn compute_avvm_reclamation>( + fn compute_avvm_reclamation< + D: Domain, + >( state: &D::State, genesis: &crate::CardanoGenesis, ) -> Result> { let avvm_utxos = pallas::ledger::configs::byron::genesis_avvm_utxos(&genesis.byron); // Collect all Byron genesis AVVM UTxO refs (bootstrap redeemer addresses) - let refs: Vec = avvm_utxos.iter().map(|(tx, _, _)| TxoRef(crate::pallas_hash_to_core(*tx), 0)).collect(); + let refs: Vec = avvm_utxos + .iter() + .map(|(tx, _, _)| TxoRef(crate::pallas_hash_to_core(*tx), 0)) + .collect(); // Query the UTxO set to find which are still unspent let remaining = state.get_utxos(refs)?; @@ -88,7 +98,12 @@ impl super::WorkContext { Ok(total) } - pub fn load>(state: &D::State, genesis: Arc) -> Result> { + pub fn load< + D: Domain, + >( + state: &D::State, + genesis: Arc, + ) -> Result> { let ended_state = crate::load_epoch::(state)?; let chain_summary = load_era_summary::(state)?; let active_protocol = EraProtocol::from(chain_summary.edge().protocol); diff --git a/crates/cardano/src/estart/nonces.rs b/crates/cardano/src/estart/nonces.rs index 9271548b6..d8868f68c 100644 --- a/crates/cardano/src/estart/nonces.rs +++ b/crates/cardano/src/estart/nonces.rs @@ -65,7 +65,10 @@ fn next_nonce(ctx: &super::WorkContext) -> Option { pub struct BoundaryVisitor; impl super::BoundaryVisitor for BoundaryVisitor { - fn flush(&mut self, ctx: &mut super::WorkContext) -> Result<(), ChainError> { + fn flush( + &mut self, + ctx: &mut super::WorkContext, + ) -> Result<(), ChainError> { let next_slot = next_largest_stable_slot(ctx); let next_nonce = next_nonce(ctx); diff --git a/crates/cardano/src/ewrap/commit.rs b/crates/cardano/src/ewrap/commit.rs index 9e9eee20b..2b1893827 100644 --- a/crates/cardano/src/ewrap/commit.rs +++ b/crates/cardano/src/ewrap/commit.rs @@ -58,7 +58,9 @@ impl BoundaryWork { } #[instrument(skip_all)] - pub fn commit>( + pub fn commit< + D: Domain, + >( &mut self, state: &D::State, archive: &D::Archive, diff --git 
a/crates/cardano/src/ewrap/loading.rs b/crates/cardano/src/ewrap/loading.rs index b80ac5613..a4ee79b76 100644 --- a/crates/cardano/src/ewrap/loading.rs +++ b/crates/cardano/src/ewrap/loading.rs @@ -24,7 +24,9 @@ impl BoundaryWork { .is_some_and(|e| e == self.starting_epoch_no()) } - fn load_pool_reward_account>( + fn load_pool_reward_account< + D: Domain, + >( &self, state: &D::State, pool: &PoolState, @@ -40,8 +42,9 @@ impl BoundaryWork { .unwrap_or_else(|| pool.snapshot.unwrap_live()); let account = &snapshot.params.reward_account; - let account = - pallas_extras::parse_reward_account(account).ok_or(ChainError::ChainSpecific(crate::CardanoError::InvalidPoolParams))?; + let account = pallas_extras::parse_reward_account(account).ok_or( + ChainError::ChainSpecific(crate::CardanoError::InvalidPoolParams), + )?; let entity_key = minicbor::to_vec(account).unwrap(); @@ -50,7 +53,12 @@ impl BoundaryWork { Ok(account) } - fn load_pool_data>(&mut self, state: &D::State) -> Result<(), ChainError> { + fn load_pool_data< + D: Domain, + >( + &mut self, + state: &D::State, + ) -> Result<(), ChainError> { let pools = state.iter_entities_typed::(PoolState::NS, None)?; for record in pools { @@ -79,7 +87,10 @@ impl BoundaryWork { self.starting_epoch_no() == unregistered_epoch + 1 } - fn should_expire_drep(&self, drep: &DRepState) -> Result> { + fn should_expire_drep( + &self, + drep: &DRepState, + ) -> Result> { if drep.expired { return Ok(false); } @@ -111,7 +122,12 @@ impl BoundaryWork { None } - fn load_drep_data>(&mut self, state: &D::State) -> Result<(), ChainError> { + fn load_drep_data< + D: Domain, + >( + &mut self, + state: &D::State, + ) -> Result<(), ChainError> { let dreps = state.iter_entities_typed::(DRepState::NS, None)?; for record in dreps { @@ -130,7 +146,9 @@ impl BoundaryWork { Ok(()) } - fn load_proposal_reward_account>( + fn load_proposal_reward_account< + D: Domain, + >( &self, state: &D::State, proposal: &ProposalState, @@ -146,7 +164,12 @@ impl BoundaryWork 
{ Ok(account) } - fn load_proposal_data>(&mut self, state: &D::State) -> Result<(), ChainError> { + fn load_proposal_data< + D: Domain, + >( + &mut self, + state: &D::State, + ) -> Result<(), ChainError> { let proposals = state.iter_entities_typed::(ProposalState::NS, None)?; for record in proposals { @@ -172,7 +195,12 @@ impl BoundaryWork { /// Process pending MIRs: check registration status and apply to registered accounts. /// MIRs to unregistered accounts stay in their source pot (no transfer). - fn process_pending_mirs>(&mut self, state: &D::State) -> Result<(), ChainError> { + fn process_pending_mirs< + D: Domain, + >( + &mut self, + state: &D::State, + ) -> Result<(), ChainError> { let pending_iter = state.iter_entities_typed::(PendingMirState::NS, None)?; @@ -249,7 +277,12 @@ impl BoundaryWork { Ok(()) } - pub fn compute_deltas>(&mut self, state: &D::State) -> Result<(), ChainError> { + pub fn compute_deltas< + D: Domain, + >( + &mut self, + state: &D::State, + ) -> Result<(), ChainError> { // Process pending MIRs first (before regular rewards) self.process_pending_mirs::(state)?; @@ -353,7 +386,9 @@ impl BoundaryWork { } /// Load pending rewards from state store (persisted by RUPD). 
- fn load_pending_rewards>( + fn load_pending_rewards< + D: Domain, + >( state: &D::State, incentives: EpochIncentives, ) -> Result, ChainError> { @@ -396,7 +431,9 @@ impl BoundaryWork { Ok(RewardMap::from_pending(pending, incentives)) } - pub fn load>( + pub fn load< + D: Domain, + >( state: &D::State, genesis: Arc, ) -> Result> { diff --git a/crates/cardano/src/ewrap/rewards.rs b/crates/cardano/src/ewrap/rewards.rs index 7a63193ff..c7235dec5 100644 --- a/crates/cardano/src/ewrap/rewards.rs +++ b/crates/cardano/src/ewrap/rewards.rs @@ -193,7 +193,10 @@ impl super::BoundaryVisitor for BoundaryVisitor { Ok(()) } - fn flush(&mut self, ctx: &mut super::BoundaryWork) -> Result<(), ChainError> { + fn flush( + &mut self, + ctx: &mut super::BoundaryWork, + ) -> Result<(), ChainError> { let mark_protocol = ctx .ending_state() .pparams diff --git a/crates/cardano/src/ewrap/wrapup.rs b/crates/cardano/src/ewrap/wrapup.rs index e5104ca37..91b15b042 100644 --- a/crates/cardano/src/ewrap/wrapup.rs +++ b/crates/cardano/src/ewrap/wrapup.rs @@ -206,7 +206,10 @@ impl super::BoundaryVisitor for BoundaryVisitor { Ok(()) } - fn flush(&mut self, ctx: &mut super::BoundaryWork) -> Result<(), ChainError> { + fn flush( + &mut self, + ctx: &mut super::BoundaryWork, + ) -> Result<(), ChainError> { for delta in self.deltas.drain(..) 
{ ctx.add_delta(delta); } diff --git a/crates/cardano/src/genesis/mod.rs b/crates/cardano/src/genesis/mod.rs index 881c3d987..0c9ea3620 100644 --- a/crates/cardano/src/genesis/mod.rs +++ b/crates/cardano/src/genesis/mod.rs @@ -26,7 +26,10 @@ fn get_utxo_amount(genesis: &crate::CardanoGenesis) -> Lovelace { byron_utxo + shelley_utxo } -fn bootstrap_pots(pparams: &PParamsSet, genesis: &crate::CardanoGenesis) -> Result> { +fn bootstrap_pots( + pparams: &PParamsSet, + genesis: &crate::CardanoGenesis, +) -> Result> { let utxos = get_utxo_amount(genesis); let max_supply = genesis @@ -47,7 +50,9 @@ fn bootstrap_pots(pparams: &PParamsSet, genesis: &crate::CardanoGenesis) -> Resu }) } -pub fn bootstrap_epoch>( +pub fn bootstrap_epoch< + D: Domain, +>( state: &D::State, genesis: &crate::CardanoGenesis, ) -> Result> { @@ -88,7 +93,12 @@ pub fn bootstrap_epoch>(state: &D::State, epoch: &EpochState) -> Result<(), ChainError> { +pub fn bootstrap_eras< + D: Domain, +>( + state: &D::State, + epoch: &EpochState, +) -> Result<(), ChainError> { let pparams = epoch.pparams.unwrap_live(); let system_start = pparams.ensure_system_start()?; @@ -117,7 +127,9 @@ pub fn bootstrap_eras>( +pub fn bootstrap_utxos< + D: Domain, +>( state: &D::State, indexes: &D::Indexes, genesis: &crate::CardanoGenesis, diff --git a/crates/cardano/src/genesis/staking.rs b/crates/cardano/src/genesis/staking.rs index 6678be1a3..2ba8ef598 100644 --- a/crates/cardano/src/genesis/staking.rs +++ b/crates/cardano/src/genesis/staking.rs @@ -111,7 +111,12 @@ fn parse_delegation(account: &str, pool: &str, genesis: &crate::CardanoGenesis) } } -pub fn bootstrap>(state: &D::State, genesis: &crate::CardanoGenesis) -> Result<(), ChainError> { +pub fn bootstrap< + D: Domain, +>( + state: &D::State, + genesis: &crate::CardanoGenesis, +) -> Result<(), ChainError> { let writer = state.start_writer()?; let Some(staking) = &genesis.shelley.staking else { diff --git a/crates/cardano/src/include/devnet/mod.rs 
b/crates/cardano/src/include/devnet/mod.rs index c4a2288c0..ef7a2104f 100644 --- a/crates/cardano/src/include/devnet/mod.rs +++ b/crates/cardano/src/include/devnet/mod.rs @@ -1,4 +1,3 @@ - use pallas::crypto::hash::Hasher; use std::path::Path; diff --git a/crates/cardano/src/include/mainnet/mod.rs b/crates/cardano/src/include/mainnet/mod.rs index 72ef7ccb5..528d90978 100644 --- a/crates/cardano/src/include/mainnet/mod.rs +++ b/crates/cardano/src/include/mainnet/mod.rs @@ -1,4 +1,3 @@ - use pallas::crypto::hash::Hasher; use std::path::Path; diff --git a/crates/cardano/src/indexes/delta.rs b/crates/cardano/src/indexes/delta.rs index 79ad0a4c4..dc46ef302 100644 --- a/crates/cardano/src/indexes/delta.rs +++ b/crates/cardano/src/indexes/delta.rs @@ -308,8 +308,7 @@ pub fn index_delta_from_utxo_delta(cursor: ChainPoint, utxo_delta: &UtxoSetDelta #[cfg(test)] mod tests { use super::*; - use dolos_core::ChainPoint; - use pallas::crypto::hash::Hash; + use dolos_core::{hash::Hash, ChainPoint}; use pallas::ledger::addresses::{ Network, ShelleyAddress, ShelleyDelegationPart, ShelleyPaymentPart, }; diff --git a/crates/cardano/src/indexes/query.rs b/crates/cardano/src/indexes/query.rs index a2e608e61..23fdaa0c7 100644 --- a/crates/cardano/src/indexes/query.rs +++ b/crates/cardano/src/indexes/query.rs @@ -50,7 +50,9 @@ pub trait AsyncCardanoQueryExt impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + + Send + + 'static; fn blocks_by_payment_stream( &self, @@ -58,7 +60,9 @@ pub trait AsyncCardanoQueryExt impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + + Send + + 'static; fn blocks_by_stake_stream( &self, @@ -66,7 +70,9 @@ pub trait AsyncCardanoQueryExt impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + + Send + + 'static; fn blocks_by_asset_stream( &self, @@ -74,7 +80,9 @@ pub trait AsyncCardanoQueryExt impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + 
+ Send + + 'static; fn blocks_by_account_certs_stream( &self, @@ -82,7 +90,9 @@ pub trait AsyncCardanoQueryExt impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + + Send + + 'static; fn blocks_by_metadata_stream( &self, @@ -90,7 +100,9 @@ pub trait AsyncCardanoQueryExt impl Stream), DomainError>> + Send + 'static; + ) -> impl Stream), DomainError>> + + Send + + 'static; async fn blocks_by_address( &self, @@ -134,20 +146,30 @@ pub trait AsyncCardanoQueryExt Result)>, DomainError>; - async fn plutus_data(&self, datum_hash: &Hash<32>) -> Result, DomainError>; + async fn plutus_data( + &self, + datum_hash: &Hash<32>, + ) -> Result, DomainError>; - async fn get_datum(&self, datum_hash: &Hash<32>) -> Result>, DomainError>; + async fn get_datum( + &self, + datum_hash: &Hash<32>, + ) -> Result>, DomainError>; async fn script_by_hash( &self, script_hash: &Hash<28>, ) -> Result, DomainError>; - async fn tx_by_spent_txo(&self, spent_txo: &[u8]) -> Result, DomainError>; + async fn tx_by_spent_txo( + &self, + spent_txo: &[u8], + ) -> Result, DomainError>; } #[async_trait::async_trait] -impl> AsyncCardanoQueryExt for AsyncQueryFacade +impl> AsyncCardanoQueryExt + for AsyncQueryFacade where D: Clone + Send + Sync + 'static, { @@ -157,8 +179,9 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static - { + ) -> impl Stream), DomainError>> + + Send + + 'static { blocks_by_tag_stream( (*self).clone(), archive::ADDRESS, @@ -175,8 +198,9 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static - { + ) -> impl Stream), DomainError>> + + Send + + 'static { blocks_by_tag_stream( (*self).clone(), archive::PAYMENT, @@ -193,8 +217,9 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static - { + ) -> impl Stream), DomainError>> + + Send + + 'static { 
blocks_by_tag_stream( (*self).clone(), archive::STAKE, @@ -211,8 +236,9 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static - { + ) -> impl Stream), DomainError>> + + Send + + 'static { blocks_by_tag_stream( (*self).clone(), archive::ASSET, @@ -229,8 +255,9 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static - { + ) -> impl Stream), DomainError>> + + Send + + 'static { blocks_by_tag_stream( (*self).clone(), archive::ACCOUNT_CERTS, @@ -247,8 +274,9 @@ where start_slot: BlockSlot, end_slot: BlockSlot, order: SlotOrder, - ) -> impl Stream), DomainError>> + Send + 'static - { + ) -> impl Stream), DomainError>> + + Send + + 'static { blocks_by_tag_stream( (*self).clone(), archive::METADATA, @@ -320,7 +348,10 @@ where .await } - async fn plutus_data(&self, datum_hash: &Hash<32>) -> Result, DomainError> { + async fn plutus_data( + &self, + datum_hash: &Hash<32>, + ) -> Result, DomainError> { let end_slot = self .run_blocking(move |domain| { Ok(domain @@ -365,7 +396,10 @@ where .await } - async fn get_datum(&self, datum_hash: &Hash<32>) -> Result>, DomainError> { + async fn get_datum( + &self, + datum_hash: &Hash<32>, + ) -> Result>, DomainError> { let key = EntityKey::from(datum_hash.as_slice()); self.run_blocking(move |domain| { let datum_state: Option = @@ -479,7 +513,10 @@ where .await } - async fn tx_by_spent_txo(&self, spent_txo: &[u8]) -> Result, DomainError> { + async fn tx_by_spent_txo( + &self, + spent_txo: &[u8], + ) -> Result, DomainError> { let spent = spent_txo.to_vec(); let end_slot = self @@ -578,8 +615,9 @@ where continue; }; - let block = MultiEraBlock::decode(raw.as_slice()) - .map_err(|e| DomainError::ChainError(ChainError::ChainSpecific(crate::CardanoError::Traverse(e))))?; + let block = MultiEraBlock::decode(raw.as_slice()).map_err(|e| { + 
DomainError::ChainError(ChainError::ChainSpecific(crate::CardanoError::Traverse(e))) + })?; if let Some(result) = predicate(&block) { return Ok(Some(result)); @@ -605,7 +643,9 @@ fn blocks_by_tag_stream( mut start_slot: BlockSlot, mut end_slot: BlockSlot, order: SlotOrder, -) -> impl Stream), DomainError>> + Send + 'static +) -> impl Stream), DomainError>> + + Send + + 'static where D: Domain + Clone + Send + Sync + 'static, { diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 60ef7123a..bc026bcb8 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -120,7 +120,12 @@ pub enum CardanoWorkUnit { impl WorkUnit for CardanoWorkUnit where - D: Domain, + D: Domain< + Chain = CardanoLogic, + Entity = CardanoEntity, + EntityDelta = CardanoDelta, + ChainSpecificError = CardanoError, + >, { fn name(&self) -> &'static str { match self { @@ -328,8 +333,14 @@ impl dolos_core::ChainLogic for CardanoLogic { type Utxo = OwnedMultiEraOutput; type Delta = CardanoDelta; type Entity = CardanoEntity; - type WorkUnit> = - CardanoWorkUnit; + type WorkUnit< + D: Domain< + Chain = Self, + Entity = Self::Entity, + EntityDelta = Self::Delta, + ChainSpecificError = Self::ChainSpecificError, + >, + > = CardanoWorkUnit; type ChainSpecificError = CardanoError; type Genesis = CardanoGenesis; @@ -397,7 +408,13 @@ impl dolos_core::ChainLogic for CardanoLogic { fn pop_work(&mut self, domain: &D) -> Option where - D: Domain, + D: Domain< + Chain = Self, + Entity = CardanoEntity, + EntityDelta = CardanoDelta, + ChainSpecificError = Self::ChainSpecificError, + Genesis = Self::Genesis, + >, { // Refresh cache if needed (after previous genesis or estart execution) if self.needs_cache_refresh { @@ -574,14 +591,14 @@ pub fn load_epoch( } #[cfg(test)] -pub fn load_test_genesis(env: &str) -> GenesisCardanoCardano { +pub fn load_test_genesis(env: &str) -> CardanoGenesis { use std::path::PathBuf; let test_data = 
PathBuf::from(std::env::var("CARGO_MANIFEST_DIR").unwrap()) .join("test_data") .join(env); - GenesisCardanoCardano::from_file_paths( + CardanoGenesis::from_file_paths( test_data.join("genesis/byron.json"), test_data.join("genesis/shelley.json"), test_data.join("genesis/alonzo.json"), diff --git a/crates/cardano/src/model.rs b/crates/cardano/src/model.rs index 441d9704f..b113720ec 100644 --- a/crates/cardano/src/model.rs +++ b/crates/cardano/src/model.rs @@ -349,7 +349,10 @@ macro_rules! entity_boilerplate { impl dolos_core::Entity for $type { type ChainSpecificError = crate::CardanoError; - fn decode_entity(ns: Namespace, value: &EntityValue) -> Result> { + fn decode_entity( + ns: Namespace, + value: &EntityValue, + ) -> Result> { assert_eq!(ns, $type::NS); let value = pallas::codec::minicbor::decode(value) .map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Cbor(e)))?; @@ -1945,7 +1948,10 @@ variant_boilerplate!(PendingMirState); impl dolos_core::Entity for CardanoEntity { type ChainSpecificError = crate::CardanoError; - fn decode_entity(ns: Namespace, value: &EntityValue) -> Result> { + fn decode_entity( + ns: Namespace, + value: &EntityValue, + ) -> Result> { match ns { EraSummary::NS => EraSummary::decode_entity(ns, value).map(Into::into), AccountState::NS => AccountState::decode_entity(ns, value).map(Into::into), diff --git a/crates/cardano/src/rewards/mod.rs b/crates/cardano/src/rewards/mod.rs index bebe998ba..5967d5274 100644 --- a/crates/cardano/src/rewards/mod.rs +++ b/crates/cardano/src/rewards/mod.rs @@ -506,7 +506,9 @@ fn compute_delegator_chunk( .collect() } -pub fn define_rewards(ctx: &C) -> Result, ChainError> { +pub fn define_rewards( + ctx: &C, +) -> Result, ChainError> { let mut map = RewardMap::::new(ctx.incentives().clone()); // Sequential pool iteration with parallel delegator processing diff --git a/crates/cardano/src/roll/accounts.rs b/crates/cardano/src/roll/accounts.rs index 624afef12..8a8c75480 100644 --- 
a/crates/cardano/src/roll/accounts.rs +++ b/crates/cardano/src/roll/accounts.rs @@ -1,4 +1,4 @@ -use dolos_core::{BlockSlot, ChainError, NsKey, TxOrder}; +use dolos_core::{BlockSlot, ChainError, NsKey, TxOrder}; use super::WorkDeltas; use pallas::codec::minicbor; @@ -533,7 +533,8 @@ impl BlockVisitor for AccountVisitor { account: &[u8], amount: u64, ) -> Result<(), ChainError> { - let address = Address::from_bytes(account).map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Address(e)))?; + let address = Address::from_bytes(account) + .map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Address(e)))?; let Some((cred, _)) = pallas_extras::address_as_stake_cred(&address) else { return Ok(()); diff --git a/crates/cardano/src/roll/datums.rs b/crates/cardano/src/roll/datums.rs index e535cb406..f3c7d6a9d 100644 --- a/crates/cardano/src/roll/datums.rs +++ b/crates/cardano/src/roll/datums.rs @@ -51,7 +51,10 @@ impl dolos_core::EntityDelta for DatumRefIncrement { type Entity = DatumState; fn key(&self) -> NsKey { - NsKey::from((DATUM_NS, dolos_core::EntityKey::from(self.datum_hash.as_slice()))) + NsKey::from(( + DATUM_NS, + dolos_core::EntityKey::from(self.datum_hash.as_slice()), + )) } fn apply(&mut self, entity: &mut Option) { @@ -105,7 +108,10 @@ impl dolos_core::EntityDelta for DatumRefDecrement { type Entity = DatumState; fn key(&self) -> NsKey { - NsKey::from((DATUM_NS, dolos_core::EntityKey::from(self.datum_hash.as_slice()))) + NsKey::from(( + DATUM_NS, + dolos_core::EntityKey::from(self.datum_hash.as_slice()), + )) } fn apply(&mut self, entity: &mut Option) { diff --git a/crates/cardano/src/roll/epochs.rs b/crates/cardano/src/roll/epochs.rs index 854c85dc8..a28c866f2 100644 --- a/crates/cardano/src/roll/epochs.rs +++ b/crates/cardano/src/roll/epochs.rs @@ -1,6 +1,6 @@ use std::collections::{HashMap, HashSet}; -use dolos_core::{BrokenInvariant, ChainError, NsKey, TxOrder, TxoRef}; +use dolos_core::{BrokenInvariant, ChainError, NsKey, TxOrder, TxoRef}; 
use pallas::{ crypto::hash::Hash, ledger::{ @@ -138,11 +138,12 @@ fn compute_collateral_value( let mut total = 0; for input in tx.consumes() { - let utxo = utxos - .get(&crate::txo_ref_from_input(&input)) - .ok_or(ChainError::BrokenInvariant(BrokenInvariant::MissingUtxo( - crate::txo_ref_from_input(&input), - )))?; + let utxo = + utxos + .get(&crate::txo_ref_from_input(&input)) + .ok_or(ChainError::BrokenInvariant(BrokenInvariant::MissingUtxo( + crate::txo_ref_from_input(&input), + )))?; utxo.with_dependent(|_, utxo| { total += utxo.value().coin(); }); @@ -224,7 +225,10 @@ impl BlockVisitor for EpochStateVisitor { self.nonces_delta = Some(NoncesUpdate { slot: block.header().slot(), tail: block.header().previous_hash(), - nonce_vrf_output: block.header().nonce_vrf_output().map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Traverse(e)))?, + nonce_vrf_output: block + .header() + .nonce_vrf_output() + .map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Traverse(e)))?, previous: None, }); } diff --git a/crates/cardano/src/roll/mod.rs b/crates/cardano/src/roll/mod.rs index f3cd92ba8..5f7ca9d32 100644 --- a/crates/cardano/src/roll/mod.rs +++ b/crates/cardano/src/roll/mod.rs @@ -534,7 +534,9 @@ impl<'a> DeltaBuilder<'a> { } #[instrument(name = "roll", skip_all)] -pub fn compute_delta>( +pub fn compute_delta< + D: Domain, +>( genesis: Arc, cache: &Cache, state: &D::State, diff --git a/crates/cardano/src/roll/pools.rs b/crates/cardano/src/roll/pools.rs index ded7513d7..0209722a2 100644 --- a/crates/cardano/src/roll/pools.rs +++ b/crates/cardano/src/roll/pools.rs @@ -1,6 +1,6 @@ use std::ops::Deref; -use dolos_core::{BlockSlot, ChainError, NsKey, TxOrder}; +use dolos_core::{BlockSlot, ChainError, NsKey, TxOrder}; use pallas::crypto::hash::{Hash, Hasher}; use pallas::ledger::primitives::Epoch; use pallas::ledger::traverse::{MultiEraBlock, MultiEraCert, MultiEraTx}; diff --git a/crates/cardano/src/roll/proposals.rs b/crates/cardano/src/roll/proposals.rs index 
f2a5c67b4..d712b6ad6 100644 --- a/crates/cardano/src/roll/proposals.rs +++ b/crates/cardano/src/roll/proposals.rs @@ -1,6 +1,6 @@ use std::collections::BTreeMap; -use dolos_core::{BlockSlot, ChainError, NsKey}; +use dolos_core::{BlockSlot, ChainError, NsKey}; use pallas::{ codec::utils::Bytes, crypto::hash::Hash, @@ -391,8 +391,9 @@ impl BlockVisitor for ProposalVisitor { GovAction::NewConstitution(..) => ProposalAction::Other, }; - let reward_account = pallas_extras::parse_reward_account(&proposal.reward_account) - .ok_or(ChainError::ChainSpecific(crate::CardanoError::InvalidProposalParams))?; + let reward_account = pallas_extras::parse_reward_account(&proposal.reward_account).ok_or( + ChainError::ChainSpecific(crate::CardanoError::InvalidProposalParams), + )?; deltas.add_for_entity(NewProposal { slot: block.slot(), diff --git a/crates/cardano/src/roll/txs.rs b/crates/cardano/src/roll/txs.rs index 18c1a6fa6..b19435f9f 100644 --- a/crates/cardano/src/roll/txs.rs +++ b/crates/cardano/src/roll/txs.rs @@ -6,7 +6,7 @@ use std::collections::HashMap; -use dolos_core::{ChainError, TxOrder, TxoRef}; +use dolos_core::{ChainError, TxOrder, TxoRef}; use pallas::{ codec::utils::KeepRaw, ledger::{ diff --git a/crates/cardano/src/roll/work_unit.rs b/crates/cardano/src/roll/work_unit.rs index df89ae601..e7530b5dc 100644 --- a/crates/cardano/src/roll/work_unit.rs +++ b/crates/cardano/src/roll/work_unit.rs @@ -45,7 +45,12 @@ impl RollWorkUnit { impl WorkUnit for RollWorkUnit where - D: Domain, + D: Domain< + Chain = CardanoLogic, + Entity = CardanoEntity, + EntityDelta = CardanoDelta, + ChainSpecificError = CardanoError, + >, { fn name(&self) -> &'static str { "roll" diff --git a/crates/cardano/src/rupd/loading.rs b/crates/cardano/src/rupd/loading.rs index ac372e6a0..b62cce9a2 100644 --- a/crates/cardano/src/rupd/loading.rs +++ b/crates/cardano/src/rupd/loading.rs @@ -243,7 +243,10 @@ impl RupdWork { self.snapshot.performance_epoch_pool_blocks } - pub fn load>(state: &D::State, 
genesis: &crate::CardanoGenesis) -> Result> { + pub fn load>( + state: &D::State, + genesis: &crate::CardanoGenesis, + ) -> Result> { let epoch = crate::load_epoch::(state)?; let current_epoch = epoch.number; diff --git a/crates/cardano/src/validate.rs b/crates/cardano/src/validate.rs index 179125b41..c1eb677f2 100644 --- a/crates/cardano/src/validate.rs +++ b/crates/cardano/src/validate.rs @@ -63,8 +63,8 @@ pub fn validate_tx>( let eracbor = eracbor.as_ref(); - let output = crate::multi_era_output_from_era_cbor(eracbor) - .map_err(ChainError::ChainSpecific)?; + let output = + crate::multi_era_output_from_era_cbor(eracbor).map_err(ChainError::ChainSpecific)?; pallas_utxos.insert(input, output); } @@ -147,9 +147,10 @@ pub fn evaluate_tx>( }) .collect(); - let report = - pallas::ledger::validate::phase2::evaluate_tx(&tx, &pparams, &utxos, &slot_config) - .map_err(|e| ChainError::ChainSpecific(CardanoError::Phase2EvaluationError(e.to_string())))?; + let report = pallas::ledger::validate::phase2::evaluate_tx(&tx, &pparams, &utxos, &slot_config) + .map_err(|e| { + ChainError::ChainSpecific(CardanoError::Phase2EvaluationError(e.to_string())) + })?; Ok(report) } diff --git a/crates/core/src/async_query.rs b/crates/core/src/async_query.rs index a70745812..d3aa0cdcd 100644 --- a/crates/core/src/async_query.rs +++ b/crates/core/src/async_query.rs @@ -130,9 +130,10 @@ where return Ok(None); }; - D::Chain::find_tx_in_block(&raw, &tx_hash) - .map_err(|err| DomainError::ChainError(ChainError::ChainSpecific(err))) - .map(|maybe_ix| maybe_ix.map(|(era_cbor, ix)| (era_cbor.cbor().to_vec(), ix))) + let result = D::Chain::find_tx_in_block(&raw, &tx_hash) + .map_err(|err| DomainError::ChainError(ChainError::ChainSpecific(err)))?; + + Ok(result.map(|(_, ix)| (raw, ix))) } pub async fn tx_cbor( diff --git a/crates/core/src/builtin/noop.rs b/crates/core/src/builtin/noop.rs index 15b3c0c63..4c527eca8 100644 --- a/crates/core/src/builtin/noop.rs +++ b/crates/core/src/builtin/noop.rs @@ 
-4,7 +4,7 @@ //! for all reads. Useful when you want to disable certain storage backends //! (e.g., indexes or archive) while still keeping the system functional. -use std::ops::Range; +use std::{marker::PhantomData, ops::Range}; use crate::{ archive::{ArchiveError, ArchiveStore, ArchiveWriter, LogKey}, @@ -117,13 +117,24 @@ impl IndexStore for NoOpIndexStore { // ============================================================================ /// No-op archive writer that accepts all operations but does nothing. -#[derive(Debug, Default)] -pub struct NoOpArchiveWriter; +pub struct NoOpArchiveWriter(PhantomData); + +impl std::fmt::Debug for NoOpArchiveWriter { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("NoOpArchiveWriter").finish() + } +} + +impl Default for NoOpArchiveWriter { + fn default() -> Self { + Self(PhantomData) + } +} -impl ArchiveWriter for NoOpArchiveWriter { - type ChainSpecificError = std::convert::Infallible; +impl ArchiveWriter for NoOpArchiveWriter { + type ChainSpecificError = E; - fn apply(&self, _point: &ChainPoint, _block: &RawBlock) -> Result<(), ArchiveError> { + fn apply(&self, _point: &ChainPoint, _block: &RawBlock) -> Result<(), ArchiveError> { Ok(()) } @@ -132,29 +143,46 @@ impl ArchiveWriter for NoOpArchiveWriter { _ns: Namespace, _key: &LogKey, _value: &EntityValue, - ) -> Result<(), ArchiveError> { + ) -> Result<(), ArchiveError> { Ok(()) } - fn undo(&self, _point: &ChainPoint) -> Result<(), ArchiveError> { + fn undo(&self, _point: &ChainPoint) -> Result<(), ArchiveError> { Ok(()) } - fn commit(self) -> Result<(), ArchiveError> { + fn commit(self) -> Result<(), ArchiveError> { Ok(()) } } /// No-op archive store that returns empty results for all queries. 
-#[derive(Debug, Clone, Default)] -pub struct NoOpArchiveStore; +pub struct NoOpArchiveStore(PhantomData); + +impl std::fmt::Debug for NoOpArchiveStore { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("NoOpArchiveStore").finish() + } +} + +impl Clone for NoOpArchiveStore { + fn clone(&self) -> Self { + Self(PhantomData) + } +} + +impl Default for NoOpArchiveStore { + fn default() -> Self { + Self(PhantomData) + } +} -impl NoOpArchiveStore { +impl NoOpArchiveStore { pub fn new() -> Self { - Self + Self(PhantomData) } - pub fn shutdown(&self) -> Result<(), ArchiveError> { + pub fn shutdown(&self) -> Result<(), ArchiveError> { Ok(()) } } @@ -182,10 +210,16 @@ impl crate::archive::Skippable for EmptyBlockIter { } /// Empty iterator for log queries. -pub struct EmptyLogIter; +pub struct EmptyLogIter(PhantomData); -impl Iterator for EmptyLogIter { - type Item = Result<(LogKey, EntityValue), ArchiveError>; +impl Default for EmptyLogIter { + fn default() -> Self { + Self(PhantomData) + } +} + +impl Iterator for EmptyLogIter { + type Item = Result<(LogKey, EntityValue), ArchiveError>; fn next(&mut self) -> Option { None @@ -193,32 +227,38 @@ impl Iterator for EmptyLogIter { } /// Empty iterator for entity value queries. 
-pub struct EmptyEntityValueIter; +pub struct EmptyEntityValueIter(PhantomData); + +impl Default for EmptyEntityValueIter { + fn default() -> Self { + Self(PhantomData) + } +} -impl Iterator for EmptyEntityValueIter { - type Item = Result>; +impl Iterator for EmptyEntityValueIter { + type Item = Result>; fn next(&mut self) -> Option { None } } -impl ArchiveStore for NoOpArchiveStore { - type ChainSpecificError = std::convert::Infallible; +impl ArchiveStore for NoOpArchiveStore { + type ChainSpecificError = E; type BlockIter<'a> = EmptyBlockIter; - type Writer = NoOpArchiveWriter; - type LogIter = EmptyLogIter; - type EntityValueIter = EmptyEntityValueIter; + type Writer = NoOpArchiveWriter; + type LogIter = EmptyLogIter; + type EntityValueIter = EmptyEntityValueIter; - fn start_writer(&self) -> Result> { - Ok(NoOpArchiveWriter) + fn start_writer(&self) -> Result> { + Ok(NoOpArchiveWriter::default()) } fn read_logs( &self, _ns: Namespace, keys: &[&LogKey], - ) -> Result>, ArchiveError> { + ) -> Result>, ArchiveError> { Ok(vec![None; keys.len()]) } @@ -226,11 +266,11 @@ impl ArchiveStore for NoOpArchiveStore { &self, _ns: Namespace, _range: Range, - ) -> Result> { - Ok(EmptyLogIter) + ) -> Result> { + Ok(EmptyLogIter::default()) } - fn get_block_by_slot(&self, _slot: &BlockSlot) -> Result, ArchiveError> { + fn get_block_by_slot(&self, _slot: &BlockSlot) -> Result, ArchiveError> { Ok(None) } @@ -238,18 +278,18 @@ impl ArchiveStore for NoOpArchiveStore { &self, _from: Option, _to: Option, - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { Ok(EmptyBlockIter) } fn find_intersect( &self, _intersect: &[ChainPoint], - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { Ok(None) } - fn get_tip(&self) -> Result, ArchiveError> { + fn get_tip(&self) -> Result, ArchiveError> { Ok(None) } @@ -257,12 +297,12 @@ impl ArchiveStore for NoOpArchiveStore { &self, _max_slots: u64, _max_prune: Option, - ) -> Result> { + ) -> Result> { // Nothing to prune, always "done" 
Ok(true) } - fn truncate_front(&self, _after: &ChainPoint) -> Result<(), ArchiveError> { + fn truncate_front(&self, _after: &ChainPoint) -> Result<(), ArchiveError> { Ok(()) } } diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 52b7c3e61..1b2dcf16e 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -737,11 +737,12 @@ pub trait Driver: Send + Sync + 'static { mod tests { use super::*; - //pub fn slot_to_hash(slot: u64) -> BlockHash { - // let mut hasher = pallas::crypto::hash::Hasher::<256>::new(); - // hasher.input(&(slot as i32).to_le_bytes()); - // hasher.finalize() - //} + pub fn slot_to_hash(slot: u64) -> BlockHash { + let mut bytes = [0u8; 32]; + let slot_bytes = (slot as i32).to_le_bytes(); + bytes[..4].copy_from_slice(&slot_bytes); + BlockHash::new(bytes) + } #[test] fn chainpoint_partial_eq() { diff --git a/crates/core/src/mempool.rs b/crates/core/src/mempool.rs index 1d2964808..f2b620cf9 100644 --- a/crates/core/src/mempool.rs +++ b/crates/core/src/mempool.rs @@ -416,12 +416,13 @@ mod tests { use std::task::{Context, Poll}; use dolos_testing::streams::{noop_waker, ScriptedStream}; - use dolos_testing::tx_sequence_to_hash; type MockStream = ScriptedStream>; fn test_hash(n: u8) -> TxHash { - tx_sequence_to_hash(n as u64) + let mut bytes = [0u8; 32]; + bytes[0] = n; + TxHash::new(bytes) } fn test_event(hash: TxHash) -> MempoolEvent { diff --git a/crates/minibf/src/hacks.rs b/crates/minibf/src/hacks.rs index ae0de44ee..9d1b8ca6b 100644 --- a/crates/minibf/src/hacks.rs +++ b/crates/minibf/src/hacks.rs @@ -1,7 +1,10 @@ use axum::http::StatusCode; use blockfrost_openapi::models::tx_content_utxo::TxContentUtxo; use blockfrost_openapi::models::{block_content::BlockContent, tx_content::TxContent}; -use dolos_cardano::{indexes::AsyncCardanoQueryExt, pallas_hash_to_core, core_hash_to_pallas, CardanoError, CardanoGenesis}; +use dolos_cardano::{ + core_hash_to_pallas, indexes::AsyncCardanoQueryExt, pallas_hash_to_core, CardanoError, + 
CardanoGenesis, +}; use dolos_core::{ArchiveStore as _, Domain}; use pallas::crypto::hash::Hash; use pallas::ledger::{ @@ -57,7 +60,9 @@ struct GenesisTxModel<'a> { consumed_by: Option>, } -pub fn genesis_hash_for_domain>(domain: &Facade) -> Option<&'static str> { +pub fn genesis_hash_for_domain>( + domain: &Facade, +) -> Option<&'static str> { match domain.genesis().shelley.network_magic { Some(1) => Some(GENESIS_HASH_PREPROD), Some(2) => Some(GENESIS_HASH_PREVIEW), @@ -182,7 +187,11 @@ impl<'a> IntoModel for GenesisTxModel<'a> { fn into_model(self) -> Result { let output = self.output.as_output(); - let builder = UtxoOutputModelBuilder::from_output(pallas_hash_to_core(self.output.tx_hash), 0, output); + let builder = UtxoOutputModelBuilder::from_output( + pallas_hash_to_core(self.output.tx_hash), + 0, + output, + ); let builder = if let Some(consumed_by) = self.consumed_by { builder.with_consumed_by(pallas_hash_to_core(consumed_by)) } else { @@ -218,7 +227,11 @@ pub async fn genesis_tx_utxos_for_hash( hash: &[u8], ) -> Result where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + + Clone + + Send + + Sync + + 'static, { let Some(block_meta) = genesis_block_metadata_for_domain(domain) else { return Err(StatusCode::NOT_FOUND); @@ -272,7 +285,9 @@ pub fn genesis_block_for_domain>( } } -pub fn genesis_block_preview>(domain: &Facade) -> Result { +pub fn genesis_block_preview>( + domain: &Facade, +) -> Result { let confirmations = MultiEraBlock::decode( &domain .archive() @@ -315,7 +330,9 @@ pub fn genesis_block_preview>(domain: &Facad }) } -pub fn genesis_block_preprod>(domain: &Facade) -> Result { +pub fn genesis_block_preprod>( + domain: &Facade, +) -> Result { let confirmations = MultiEraBlock::decode( &domain .archive() @@ -358,7 +375,9 @@ pub fn genesis_block_preprod>(domain: &Facad }) } -pub fn genesis_block_mainnet>(domain: &Facade) -> Result { +pub fn genesis_block_mainnet>( + domain: &Facade, +) -> Result { let confirmations = MultiEraBlock::decode( 
&domain .archive() @@ -401,7 +420,10 @@ pub fn genesis_block_mainnet>(domain: &Facad }) } -pub fn maybe_set_genesis_previous_block>(domain: &Facade, block: &mut BlockContent) { +pub fn maybe_set_genesis_previous_block>( + domain: &Facade, + block: &mut BlockContent, +) { if block.height.is_some_and(|x| x > 1) { return; } diff --git a/crates/minibf/src/mapping.rs b/crates/minibf/src/mapping.rs index 1ef5e7cb8..0e2bc5acd 100644 --- a/crates/minibf/src/mapping.rs +++ b/crates/minibf/src/mapping.rs @@ -54,8 +54,8 @@ use blockfrost_openapi::models::{ }; use dolos_cardano::{ - pallas_extras, pallas_hash_to_core, AccountState, ChainSummary, DRepState, - PParamsSet, PoolHash, PoolState, + pallas_extras, pallas_hash_to_core, AccountState, ChainSummary, DRepState, PParamsSet, + PoolHash, PoolState, }; use dolos_core::{BlockSlot, Domain, EraCbor, TxHash, TxOrder, TxoIdx, TxoRef}; @@ -945,7 +945,9 @@ impl<'a> IntoModel for TxModelBuilder<'a> { .outputs() .into_iter() .enumerate() - .map(|(i, o)| UtxoOutputModelBuilder::from_output(pallas_hash_to_core(tx.hash()), i as u32, o)) + .map(|(i, o)| { + UtxoOutputModelBuilder::from_output(pallas_hash_to_core(tx.hash()), i as u32, o) + }) .map(|b| { let builder = if let Some(consumed_by) = self.consumed_deps.get(&b.txo_ref()) { b.with_consumed_by(*consumed_by) @@ -961,7 +963,12 @@ impl<'a> IntoModel for TxModelBuilder<'a> { .into_iter() .enumerate() .map(|(i, o)| { - UtxoOutputModelBuilder::from_collateral(pallas_hash_to_core(tx.hash()), outputs.len(), i as u32, o) + UtxoOutputModelBuilder::from_collateral( + pallas_hash_to_core(tx.hash()), + outputs.len(), + i as u32, + o, + ) }) .map(|b| { let builder = if let Some(consumed_by) = self.consumed_deps.get(&b.txo_ref()) { diff --git a/crates/minibf/src/routes/accounts.rs b/crates/minibf/src/routes/accounts.rs index f83bb42b2..19da4d4e9 100644 --- a/crates/minibf/src/routes/accounts.rs +++ b/crates/minibf/src/routes/accounts.rs @@ -484,7 +484,11 @@ where Epoch, Network, ) -> Result, 
StatusCode>, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + + Clone + + Send + + Sync + + 'static, { let account_key = parse_account_key_param(stake_address)?; @@ -545,7 +549,11 @@ pub async fn by_stake_delegations( ) -> Result>, Error> where Option: From, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + + Clone + + Send + + Sync + + 'static, { let pagination = Pagination::try_from(params)?; pagination.enforce_max_scan_limit(domain.config.max_scan_items())?; @@ -568,7 +576,11 @@ pub async fn by_stake_registrations( ) -> Result>, Error> where Option: From, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + + Clone + + Send + + Sync + + 'static, { let pagination = Pagination::try_from(params)?; pagination.enforce_max_scan_limit(domain.config.max_scan_items())?; @@ -614,8 +626,10 @@ impl TryFrom for AccountRewardContentInner { fn try_from(value: AccountRewardWrapper) -> Result { match value { AccountRewardWrapper::Leader((epoch, x)) => { - let key = EntityKey::from(x.pool_id); - let arr: [u8; 28] = key.as_ref().try_into() + let arr: [u8; 28] = x + .pool_id + .as_slice() + .try_into() .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; let operator = Hash::<28>::from(arr); let pool_id = mapping::bech32_pool(operator)?; @@ -628,8 +642,10 @@ impl TryFrom for AccountRewardContentInner { }) } AccountRewardWrapper::Member((epoch, x)) => { - let key = EntityKey::from(x.pool_id); - let arr: [u8; 28] = key.as_ref().try_into() + let arr: [u8; 28] = x + .pool_id + .as_slice() + .try_into() .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; let operator = Hash::<28>::from(arr); let pool_id = mapping::bech32_pool(operator)?; @@ -642,8 +658,10 @@ impl TryFrom for AccountRewardContentInner { }) } AccountRewardWrapper::PoolDepositRefund((epoch, x)) => { - let key = EntityKey::from(x.pool_id); - let arr: [u8; 28] = key.as_ref().try_into() + let arr: [u8; 28] = x + .pool_id + .as_slice() + .try_into() .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; let 
operator = Hash::<28>::from(arr); let pool_id = mapping::bech32_pool(operator)?; diff --git a/crates/minibf/src/routes/addresses.rs b/crates/minibf/src/routes/addresses.rs index f25be2914..f21fa3ed1 100644 --- a/crates/minibf/src/routes/addresses.rs +++ b/crates/minibf/src/routes/addresses.rs @@ -46,7 +46,14 @@ type VKeyOrAddress = Either, Vec>; /// Stream of blocks returned by address queries type BlockStream = std::pin::Pin< - Box), dolos_core::DomainError>> + Send>, + Box< + dyn Stream< + Item = Result< + (BlockSlot, Option), + dolos_core::DomainError, + >, + > + Send, + >, >; enum ParsedAddress { diff --git a/crates/minibf/src/routes/genesis.rs b/crates/minibf/src/routes/genesis.rs index c8ba30172..bb0f45cfb 100644 --- a/crates/minibf/src/routes/genesis.rs +++ b/crates/minibf/src/routes/genesis.rs @@ -59,9 +59,7 @@ pub async fn naked>( State(domain): State>, ) -> Result, StatusCode> { let genesis = domain.genesis(); - let model = GenesisModelBuilder { - genesis: &genesis, - }; + let model = GenesisModelBuilder { genesis: &genesis }; model.into_response() } diff --git a/crates/minibf/src/routes/metadata.rs b/crates/minibf/src/routes/metadata.rs index 1ee400914..b0103be96 100644 --- a/crates/minibf/src/routes/metadata.rs +++ b/crates/minibf/src/routes/metadata.rs @@ -7,7 +7,10 @@ use blockfrost_openapi::models::{ tx_metadata_label_cbor_inner::TxMetadataLabelCborInner, tx_metadata_label_json_inner::TxMetadataLabelJsonInner, }; -use dolos_cardano::{indexes::{AsyncCardanoQueryExt, SlotOrder}, CardanoError}; +use dolos_cardano::{ + indexes::{AsyncCardanoQueryExt, SlotOrder}, + CardanoError, +}; use dolos_core::Domain; use futures_util::StreamExt; use pallas::{ diff --git a/crates/minibf/src/routes/network.rs b/crates/minibf/src/routes/network.rs index 82029f909..9a2bb60dc 100644 --- a/crates/minibf/src/routes/network.rs +++ b/crates/minibf/src/routes/network.rs @@ -249,7 +249,9 @@ impl<'a> IntoModel for NetworkModelBuilder<'a> { } } -fn compute_network_sync>(domain: 
Facade) -> Result +fn compute_network_sync>( + domain: Facade, +) -> Result where Option: From, { diff --git a/crates/minibf/src/routes/scripts.rs b/crates/minibf/src/routes/scripts.rs index ec6e023f3..29b19e56c 100644 --- a/crates/minibf/src/routes/scripts.rs +++ b/crates/minibf/src/routes/scripts.rs @@ -9,7 +9,10 @@ use blockfrost_openapi::models::{ script_datum_cbor::ScriptDatumCbor, script_json::ScriptJson, }; -use dolos_cardano::{indexes::{AsyncCardanoQueryExt, ScriptLanguage}, CardanoError}; +use dolos_cardano::{ + indexes::{AsyncCardanoQueryExt, ScriptLanguage}, + CardanoError, +}; use dolos_core::Domain; use pallas::crypto::hash::Hash; use pallas::ledger::primitives::alonzo::NativeScript; diff --git a/crates/minibf/src/routes/txs.rs b/crates/minibf/src/routes/txs.rs index a806ccc0b..2a77d9ecd 100644 --- a/crates/minibf/src/routes/txs.rs +++ b/crates/minibf/src/routes/txs.rs @@ -79,7 +79,11 @@ pub async fn by_hash_utxos( State(domain): State>, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + + Clone + + Send + + Sync + + 'static, { let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; @@ -114,7 +118,9 @@ where builder = builder.with_consumed_deps(consumed_deps); let deps = builder.required_deps()?; - let deps = domain.get_tx_batch(deps.into_iter().map(core_hash_to_pallas)).await?; + let deps = domain + .get_tx_batch(deps.into_iter().map(core_hash_to_pallas)) + .await?; for (key, cbor) in deps.iter() { if let Some(cbor) = cbor { @@ -175,7 +181,9 @@ where .with_historical_pparams::(&domain)?; let deps = builder.required_deps()?; - let deps = domain.get_tx_batch(deps.into_iter().map(core_hash_to_pallas)).await?; + let deps = domain + .get_tx_batch(deps.into_iter().map(core_hash_to_pallas)) + .await?; for (key, cbor) in deps.iter() { if let Some(cbor) = cbor { diff --git a/crates/minibf/src/routes/utxos.rs b/crates/minibf/src/routes/utxos.rs index d6e44e82d..f80a4f930 100644 --- 
a/crates/minibf/src/routes/utxos.rs +++ b/crates/minibf/src/routes/utxos.rs @@ -43,7 +43,11 @@ where let block_deps: HashMap = join_all(tx_deps.iter().map(|tx| { let tx = *tx; async move { - match domain.query().block_by_tx_hash(tx.as_slice().to_vec()).await { + match domain + .query() + .block_by_tx_hash(tx.as_slice().to_vec()) + .await + { Ok(Some((cbor, txorder))) => { let Ok(block) = MultiEraBlock::decode(&cbor) else { return Some(Err(StatusCode::INTERNAL_SERVER_ERROR)); diff --git a/crates/minikupo/src/lib.rs b/crates/minikupo/src/lib.rs index f83440647..24d1ecc9b 100644 --- a/crates/minikupo/src/lib.rs +++ b/crates/minikupo/src/lib.rs @@ -5,7 +5,10 @@ use axum::{ routing::get, Json, Router, ServiceExt, }; -use dolos_cardano::{indexes::{AsyncCardanoQueryExt, ScriptLanguage as CardanoLanguage}, CardanoError, CardanoGenesis}; +use dolos_cardano::{ + indexes::{AsyncCardanoQueryExt, ScriptLanguage as CardanoLanguage}, + CardanoError, CardanoGenesis, +}; use dolos_core::{config::MinikupoConfig, AsyncQueryFacade, CancelToken, Domain, ServeError}; use pallas::{codec::minicbor, crypto::hash::Hash}; use std::ops::Deref; @@ -97,7 +100,11 @@ pub struct Driver; pub fn build_router(cfg: MinikupoConfig, domain: D) -> Router where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + + Clone + + Send + + Sync + + 'static, { build_router_with_facade(Facade { inner: domain, @@ -107,7 +114,11 @@ where pub(crate) fn build_router_with_facade(facade: Facade) -> Router where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + + Clone + + Send + + Sync + + 'static, { let permissive_cors = facade.config.permissive_cors(); let app = Router::new() @@ -151,7 +162,11 @@ where fn api_router() -> Router> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + + Clone + + Send + + Sync + + 'static, { Router::new() .route("/matches/{*pattern}", get(routes::matches::by_pattern::)) @@ -161,7 +176,8 @@ where .route("/health", get(routes::health::health::)) } -impl, C: 
CancelToken> dolos_core::Driver for Driver +impl, C: CancelToken> + dolos_core::Driver for Driver where D: Clone + Send + Sync + 'static, { diff --git a/crates/minikupo/src/routes/datums.rs b/crates/minikupo/src/routes/datums.rs index 88efbfe69..1d7999a0f 100644 --- a/crates/minikupo/src/routes/datums.rs +++ b/crates/minikupo/src/routes/datums.rs @@ -10,7 +10,9 @@ use pallas::crypto::hash::Hash; use crate::{bad_request, Facade}; -pub async fn by_hash + Clone + Send + Sync + 'static>( +pub async fn by_hash< + D: Domain + Clone + Send + Sync + 'static, +>( State(facade): State>, Path(datum_hash): Path, ) -> Response { diff --git a/crates/minikupo/src/routes/matches.rs b/crates/minikupo/src/routes/matches.rs index 269e25d18..5abaf6199 100644 --- a/crates/minikupo/src/routes/matches.rs +++ b/crates/minikupo/src/routes/matches.rs @@ -4,7 +4,10 @@ use axum::{ response::{IntoResponse, Response}, Json, }; -use dolos_cardano::{indexes::CardanoIndexExt, network_from_genesis, pallas_extras, pallas_hash_to_core, CardanoError, CardanoGenesis}; +use dolos_cardano::{ + indexes::CardanoIndexExt, network_from_genesis, pallas_extras, pallas_hash_to_core, + CardanoError, CardanoGenesis, +}; use dolos_core::{Domain, EraCbor, IndexStore as _, StateStore as _, TxoRef, UtxoSet}; use pallas::codec::minicbor; use pallas::ledger::{ diff --git a/crates/minikupo/src/routes/scripts.rs b/crates/minikupo/src/routes/scripts.rs index 9b617edb1..94767b366 100644 --- a/crates/minikupo/src/routes/scripts.rs +++ b/crates/minikupo/src/routes/scripts.rs @@ -10,7 +10,9 @@ use pallas::crypto::hash::Hash; use crate::{bad_request, Facade}; -pub async fn by_hash + Clone + Send + Sync + 'static>( +pub async fn by_hash< + D: Domain + Clone + Send + Sync + 'static, +>( State(facade): State>, Path(script_hash): Path, ) -> Response { diff --git a/crates/redb3/src/archive/mod.rs b/crates/redb3/src/archive/mod.rs index 9f70b24fc..0f298de80 100644 --- a/crates/redb3/src/archive/mod.rs +++ 
b/crates/redb3/src/archive/mod.rs @@ -831,7 +831,9 @@ pub struct ArchiveStoreWriter, } -impl dolos_core::ArchiveWriter for ArchiveStoreWriter { +impl dolos_core::ArchiveWriter + for ArchiveStoreWriter +{ type ChainSpecificError = E; fn apply(&self, point: &ChainPoint, block: &RawBlock) -> Result<(), ArchiveError> { @@ -844,7 +846,6 @@ impl dolos_core::ArchiveWriter for fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError> { tables::BlocksTable::undo(&self.wx, &self.flatfiles, point) - .map_err(RedbArchiveError::from) .map_err(|e| e.into_archive_error()) } @@ -855,11 +856,11 @@ impl dolos_core::ArchiveWriter for let pending = self.pending_blocks.into_inner().unwrap(); if !pending.is_empty() { tables::BlocksTable::apply_batch(&self.wx, &self.flatfiles, &pending) - .map_err(RedbArchiveError::from) .map_err(|e| e.into_archive_error())?; } - self.wx.commit() + self.wx + .commit() .map_err(RedbArchiveError::from) .map_err(|e| e.into_archive_error())?; Ok(()) @@ -934,37 +935,29 @@ impl dolos_core::ArchiveStore for type ChainSpecificError = E; fn start_writer(&self) -> Result> { - Self::start_writer(self) - .map_err(|e| e.into_archive_error()) + Self::start_writer(self).map_err(|e| e.into_archive_error()) } - fn get_block_by_slot( - &self, - slot: &BlockSlot, - ) -> Result, ArchiveError> { - Self::get_block_by_slot(self, slot) - .map_err(|e| e.into_archive_error()) + fn get_block_by_slot(&self, slot: &BlockSlot) -> Result, ArchiveError> { + Self::get_block_by_slot(self, slot).map_err(|e| e.into_archive_error()) } fn get_range<'a>( &self, from: Option, to: Option, ) -> Result, ArchiveError> { - Self::get_range(self, from, to) - .map_err(|e| e.into_archive_error()) + Self::get_range(self, from, to).map_err(|e| e.into_archive_error()) } fn find_intersect( &self, intersect: &[ChainPoint], ) -> Result, ArchiveError> { - Self::find_intersect(self, intersect) - .map_err(|e| e.into_archive_error()) + Self::find_intersect(self, intersect).map_err(|e| 
e.into_archive_error()) } fn get_tip(&self) -> Result, ArchiveError> { - Self::get_tip(self) - .map_err(|e| e.into_archive_error()) + Self::get_tip(self).map_err(|e| e.into_archive_error()) } fn prune_history( @@ -972,13 +965,11 @@ impl dolos_core::ArchiveStore for max_slots: u64, max_prune: Option, ) -> Result> { - Self::prune_history(self, max_slots, max_prune) - .map_err(|e| e.into_archive_error()) + Self::prune_history(self, max_slots, max_prune).map_err(|e| e.into_archive_error()) } fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { - Self::truncate_front(self, after) - .map_err(|e| e.into_archive_error()) + Self::truncate_front(self, after).map_err(|e| e.into_archive_error()) } fn read_logs( @@ -999,12 +990,10 @@ impl dolos_core::ArchiveStore for let mut out = vec![]; for key in keys { - let value = table - .read_value(&mut rx, key.as_ref()) - .map_err(|e| { - let redb_err: RedbArchiveError = e.into(); - redb_err.into_archive_error() - })?; + let value = table.read_value(&mut rx, key.as_ref()).map_err(|e| { + let redb_err: RedbArchiveError = e.into(); + redb_err.into_archive_error() + })?; out.push(value); } @@ -1031,12 +1020,10 @@ impl dolos_core::ArchiveStore for .get(&ns) .ok_or(ArchiveError::NamespaceNotFound(ns))?; - let values = table - .range(&mut rx, range) - .map_err(|e| { - let redb_err: RedbArchiveError = e.into(); - redb_err.into_archive_error() - })?; + let values = table.range(&mut rx, range).map_err(|e| { + let redb_err: RedbArchiveError = e.into(); + redb_err.into_archive_error() + })?; Ok(LogIter(values, PhantomData)) } diff --git a/crates/redb3/src/archive/tests.rs b/crates/redb3/src/archive/tests.rs index 9d6516607..176fb69fa 100644 --- a/crates/redb3/src/archive/tests.rs +++ b/crates/redb3/src/archive/tests.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use dolos_core::{ArchiveWriter, BlockSlot, ChainPoint, StateSchema}; +use dolos_core::{hash::Hash as CoreHash, ArchiveWriter, BlockSlot, ChainPoint, StateSchema}; use 
super::ArchiveStore; @@ -11,7 +11,7 @@ fn test_store() -> ArchiveStore { /// Create a fake ChainPoint with the given slot. fn point(slot: u64) -> ChainPoint { - ChainPoint::Specific(slot, pallas::crypto::hash::Hash::new([0u8; 32])) + ChainPoint::Specific(slot, CoreHash::new([0u8; 32])) } /// Create fake block data for a given slot. @@ -236,7 +236,8 @@ fn test_truncate_front() { #[test] fn test_in_memory_store() { - let store = ArchiveStore::in_memory(StateSchema::default()).unwrap(); + let store = + ArchiveStore::::in_memory(StateSchema::default()).unwrap(); let writer = store.start_writer().unwrap(); writer.apply(&point(42), &Arc::new(fake_block(42))).unwrap(); diff --git a/crates/redb3/src/mempool.rs b/crates/redb3/src/mempool.rs index bc380c1cf..53698ea71 100644 --- a/crates/redb3/src/mempool.rs +++ b/crates/redb3/src/mempool.rs @@ -1017,11 +1017,11 @@ mod tests { } fn test_point() -> ChainPoint { - ChainPoint::Specific(12345, pallas::crypto::hash::Hash::new([0xAB; 32])) + ChainPoint::Specific(12345, dolos_core::hash::Hash::new([0xAB; 32])) } fn test_point_2() -> ChainPoint { - ChainPoint::Specific(12346, pallas::crypto::hash::Hash::new([0xCD; 32])) + ChainPoint::Specific(12346, dolos_core::hash::Hash::new([0xCD; 32])) } #[test] diff --git a/crates/redb3/src/state/utxoset.rs b/crates/redb3/src/state/utxoset.rs index 30afb6231..f4941e0aa 100644 --- a/crates/redb3/src/state/utxoset.rs +++ b/crates/redb3/src/state/utxoset.rs @@ -138,7 +138,7 @@ mod tests { use dolos_testing::*; use pallas::ledger::{ addresses::{Address, ShelleyDelegationPart}, - traverse::MultiEraOutput, + traverse::{Era as PallasEra, MultiEraOutput}, }; use crate::state::StateStore; @@ -179,33 +179,41 @@ mod tests { // Handle forward operations: produced_utxo -> add to index, consumed_utxo -> remove from index for (txo_ref, era_cbor) in utxo_delta.produced_utxo.iter() { - if let Ok(output) = MultiEraOutput::try_from(era_cbor.as_ref()) { - let tags = extract_utxo_tags(&output); - 
produced.push((txo_ref.clone(), tags)); + if let Ok(pallas_era) = PallasEra::try_from(era_cbor.0) { + if let Ok(output) = MultiEraOutput::decode(pallas_era, &era_cbor.1) { + let tags = extract_utxo_tags(&output); + produced.push((txo_ref.clone(), tags)); + } } } for (txo_ref, era_cbor) in utxo_delta.consumed_utxo.iter() { - if let Ok(output) = MultiEraOutput::try_from(era_cbor.as_ref()) { - let tags = extract_utxo_tags(&output); - consumed.push((txo_ref.clone(), tags)); + if let Ok(pallas_era) = PallasEra::try_from(era_cbor.0) { + if let Ok(output) = MultiEraOutput::decode(pallas_era, &era_cbor.1) { + let tags = extract_utxo_tags(&output); + consumed.push((txo_ref.clone(), tags)); + } } } // Handle rollback operations: recovered_stxi -> restore to index (add), undone_utxo -> remove from index // recovered_stxi: UTxOs that were previously consumed, now being restored for (txo_ref, era_cbor) in utxo_delta.recovered_stxi.iter() { - if let Ok(output) = MultiEraOutput::try_from(era_cbor.as_ref()) { - let tags = extract_utxo_tags(&output); - produced.push((txo_ref.clone(), tags)); + if let Ok(pallas_era) = PallasEra::try_from(era_cbor.0) { + if let Ok(output) = MultiEraOutput::decode(pallas_era, &era_cbor.1) { + let tags = extract_utxo_tags(&output); + produced.push((txo_ref.clone(), tags)); + } } } // undone_utxo: UTxOs that were previously produced, now being removed for (txo_ref, era_cbor) in utxo_delta.undone_utxo.iter() { - if let Ok(output) = MultiEraOutput::try_from(era_cbor.as_ref()) { - let tags = extract_utxo_tags(&output); - consumed.push((txo_ref.clone(), tags)); + if let Ok(pallas_era) = PallasEra::try_from(era_cbor.0) { + if let Ok(output) = MultiEraOutput::decode(pallas_era, &era_cbor.1) { + let tags = extract_utxo_tags(&output); + consumed.push((txo_ref.clone(), tags)); + } } } diff --git a/crates/testing/src/blocks.rs b/crates/testing/src/blocks.rs index 8cbafcc0c..56f52185f 100644 --- a/crates/testing/src/blocks.rs +++ b/crates/testing/src/blocks.rs @@ 
-1,6 +1,7 @@ use std::sync::Arc; -use dolos_core::{BlockHash, BlockSlot, ChainPoint, RawBlock}; +use dolos_cardano::pallas_hash_to_core; +use dolos_core::{BlockSlot, ChainPoint, RawBlock}; use pallas::{ codec::utils::{Bytes, KeepRaw}, crypto::hash::Hash, @@ -15,7 +16,7 @@ use pallas::{ }; use std::collections::BTreeMap; -pub fn slot_to_hash(slot: u64) -> BlockHash { +pub fn slot_to_hash(slot: u64) -> pallas::crypto::hash::Hash<32> { let mut hasher = pallas::crypto::hash::Hasher::<256>::new(); hasher.input(&(slot as i32).to_le_bytes()); hasher.finalize() @@ -58,7 +59,7 @@ pub fn make_conway_block(slot: BlockSlot) -> (ChainPoint, RawBlock) { let wrapper = (Era::Conway as u16, block); let raw_bytes = pallas::codec::minicbor::to_vec(&wrapper).unwrap(); - let chain_point = ChainPoint::Specific(slot, hash); + let chain_point = ChainPoint::Specific(slot, dolos_cardano::pallas_hash_to_core(hash)); (chain_point, Arc::new(raw_bytes)) } @@ -123,7 +124,7 @@ pub fn make_conway_block_with_tx( let wrapper = (Era::Conway as u16, block); let raw_bytes = pallas::codec::minicbor::to_vec(&wrapper).unwrap(); - let chain_point = ChainPoint::Specific(slot, hash); + let chain_point = ChainPoint::Specific(slot, pallas_hash_to_core(hash)); (chain_point, Arc::new(raw_bytes)) } diff --git a/crates/testing/src/faults.rs b/crates/testing/src/faults.rs index bdd36d147..7c362b08e 100644 --- a/crates/testing/src/faults.rs +++ b/crates/testing/src/faults.rs @@ -22,7 +22,7 @@ pub enum TestFault { #[derive(Clone)] pub struct FaultyToyDomain { inner: ToyDomain, - genesis_override: Option>, + genesis_override: Option>, state: FaultyStateStore, archive: FaultyArchiveStore, indexes: FaultyIndexStore, @@ -140,12 +140,15 @@ impl StateStore for FaultyStateStore { #[derive(Clone)] pub struct FaultyArchiveStore { - inner: dolos_redb3::archive::ArchiveStore, + inner: dolos_redb3::archive::ArchiveStore, fault: TestFault, } impl FaultyArchiveStore { - pub fn new(inner: dolos_redb3::archive::ArchiveStore, fault: 
TestFault) -> Self { + pub fn new( + inner: dolos_redb3::archive::ArchiveStore, + fault: TestFault, + ) -> Self { Self { inner, fault } } @@ -153,29 +156,34 @@ impl FaultyArchiveStore { matches!(self.fault, TestFault::ArchiveStoreError) } - fn fault_err(&self) -> ArchiveError { + fn fault_err(&self) -> ArchiveError { ArchiveError::InternalError("fault injection: archive store".into()) } } impl ArchiveStore for FaultyArchiveStore { - type BlockIter<'a> = ::BlockIter<'a>; - type Writer = ::Writer; - type LogIter = ::LogIter; - type EntityValueIter = ::EntityValueIter; - - fn start_writer(&self) -> Result { + type BlockIter<'a> = + as ArchiveStore>::BlockIter<'a>; + type Writer = + as ArchiveStore>::Writer; + type LogIter = + as ArchiveStore>::LogIter; + type EntityValueIter = + as ArchiveStore>::EntityValueIter; + type ChainSpecificError = dolos_cardano::CardanoError; + + fn start_writer(&self) -> Result> { if self.should_fault() { return Err(self.fault_err()); } - self.inner.start_writer().map_err(ArchiveError::from) + ArchiveStore::start_writer(&self.inner) } fn read_logs( &self, ns: Namespace, keys: &[&LogKey], - ) -> Result>, ArchiveError> { + ) -> Result>, ArchiveError> { if self.should_fault() { return Err(self.fault_err()); } @@ -186,59 +194,68 @@ impl ArchiveStore for FaultyArchiveStore { &self, ns: Namespace, range: std::ops::Range, - ) -> Result { + ) -> Result> { if self.should_fault() { return Err(self.fault_err()); } self.inner.iter_logs(ns, range) } - fn get_block_by_slot(&self, slot: &BlockSlot) -> Result, ArchiveError> { + fn get_block_by_slot( + &self, + slot: &BlockSlot, + ) -> Result, ArchiveError> { if self.should_fault() { return Err(self.fault_err()); } - self.inner - .get_block_by_slot(slot) - .map_err(ArchiveError::from) + ArchiveStore::get_block_by_slot(&self.inner, slot) } fn get_range<'a>( &self, from: Option, to: Option, - ) -> Result, ArchiveError> { + ) -> Result, ArchiveError> { if self.should_fault() { return Err(self.fault_err()); } - 
self.inner.get_range(from, to).map_err(ArchiveError::from) + ArchiveStore::get_range(&self.inner, from, to) } - fn find_intersect(&self, intersect: &[ChainPoint]) -> Result, ArchiveError> { + fn find_intersect( + &self, + intersect: &[ChainPoint], + ) -> Result, ArchiveError> { if self.should_fault() { return Err(self.fault_err()); } - self.inner - .find_intersect(intersect) - .map_err(ArchiveError::from) + ArchiveStore::find_intersect(&self.inner, intersect) } - fn get_tip(&self) -> Result, ArchiveError> { + fn get_tip( + &self, + ) -> Result, ArchiveError> { if self.should_fault() { return Err(self.fault_err()); } - self.inner.get_tip().map_err(ArchiveError::from) + ArchiveStore::get_tip(&self.inner) } - fn prune_history(&self, max_slots: u64, max_prune: Option) -> Result { + fn prune_history( + &self, + max_slots: u64, + max_prune: Option, + ) -> Result> { if self.should_fault() { return Err(self.fault_err()); } - self.inner - .prune_history(max_slots, max_prune) - .map_err(ArchiveError::from) + ArchiveStore::prune_history(&self.inner, max_slots, max_prune) } - fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { + fn truncate_front( + &self, + after: &ChainPoint, + ) -> Result<(), ArchiveError> { if self.should_fault() { return Err(self.fault_err()); } @@ -461,6 +478,8 @@ impl Domain for FaultyToyDomain { type TipSubscription = TipSubscription; type Indexes = FaultyIndexStore; type Mempool = Mempool; + type ChainSpecificError = dolos_cardano::CardanoError; + type Genesis = dolos_cardano::CardanoGenesis; fn storage_config(&self) -> &dolos_core::config::StorageConfig { self.inner.storage_config() @@ -470,7 +489,7 @@ impl Domain for FaultyToyDomain { self.inner.sync_config() } - fn genesis(&self) -> Arc { + fn genesis(&self) -> Arc { self.genesis_override .as_ref() .cloned() @@ -505,7 +524,10 @@ impl Domain for FaultyToyDomain { self.inner.mempool() } - fn watch_tip(&self, from: Option) -> Result { + fn watch_tip( + &self, + from: Option, + ) 
-> Result> { self.inner.watch_tip(from) } diff --git a/crates/testing/src/harness/cardano.rs b/crates/testing/src/harness/cardano.rs index b5b7ceb75..874fe5ca1 100644 --- a/crates/testing/src/harness/cardano.rs +++ b/crates/testing/src/harness/cardano.rs @@ -1,11 +1,11 @@ use std::path::{Path, PathBuf}; use std::sync::{Arc, RwLock}; -use dolos_cardano::{CardanoDelta, CardanoLogic, CardanoWorkUnit}; +use dolos_cardano::{CardanoDelta, CardanoGenesis, CardanoLogic, CardanoWorkUnit}; use dolos_core::{ config::{CardanoConfig, FjallStateConfig, StorageConfig, SyncConfig}, - BootstrapExt, ChainLogic, Domain, DomainError, Genesis, MempoolError, MempoolEvent, - MempoolStore, MempoolTx, MempoolTxStage, StateStore as CoreStateStore, TipEvent, + BootstrapExt, ChainLogic, Domain, DomainError, MempoolError, MempoolEvent, MempoolStore, + MempoolTx, MempoolTxStage, StateStore as CoreStateStore, TipEvent, TipSubscription as CoreTipSubscription, *, }; @@ -19,7 +19,7 @@ pub struct Config { /// Path to Mithril immutable DB directory. pub immutable_dir: PathBuf, /// Pre-loaded genesis data. - pub genesis: Genesis, + pub genesis: CardanoGenesis, /// Cardano chain config (stop_epoch, custom_utxos, etc). pub chain: CardanoConfig, /// Fjall state store configuration. 
@@ -130,25 +130,27 @@ pub struct HarnessDomain { wal: dolos_redb3::wal::RedbWalStore, chain: Arc>, state: dolos_fjall::StateStore, - archive: dolos_core::builtin::NoOpArchiveStore, + archive: dolos_core::builtin::NoOpArchiveStore, indexes: dolos_core::builtin::NoOpIndexStore, mempool: Mempool, storage_config: StorageConfig, sync_config: SyncConfig, - genesis: Arc, + genesis: Arc, } impl Domain for HarnessDomain { type Entity = dolos_cardano::CardanoEntity; type EntityDelta = CardanoDelta; type Wal = dolos_redb3::wal::RedbWalStore; - type Archive = dolos_core::builtin::NoOpArchiveStore; + type Archive = dolos_core::builtin::NoOpArchiveStore; type State = dolos_fjall::StateStore; type Chain = CardanoLogic; type WorkUnit = CardanoWorkUnit; type TipSubscription = StubTipSubscription; type Indexes = dolos_core::builtin::NoOpIndexStore; type Mempool = Mempool; + type Genesis = CardanoGenesis; + type ChainSpecificError = dolos_cardano::CardanoError; fn storage_config(&self) -> &StorageConfig { &self.storage_config @@ -158,7 +160,7 @@ impl Domain for HarnessDomain { &self.sync_config } - fn genesis(&self) -> Arc { + fn genesis(&self) -> Arc { self.genesis.clone() } @@ -190,7 +192,10 @@ impl Domain for HarnessDomain { &self.mempool } - fn watch_tip(&self, _from: Option) -> Result { + fn watch_tip( + &self, + _from: Option, + ) -> Result> { Ok(StubTipSubscription) } @@ -215,14 +220,18 @@ impl LedgerHarness { // 3. Initialize chain logic let genesis = Arc::new(config.genesis); - let chain = CardanoLogic::initialize::(config.chain, &state, &genesis)?; + let chain = CardanoLogic::initialize::( + config.chain, + &state, + genesis.as_ref().clone(), + )?; // 4. 
Assemble domain let domain = HarnessDomain { wal: dolos_redb3::wal::RedbWalStore::memory()?, chain: Arc::new(RwLock::new(chain)), state, - archive: dolos_core::builtin::NoOpArchiveStore, + archive: dolos_core::builtin::NoOpArchiveStore::default(), indexes: dolos_core::builtin::NoOpIndexStore, mempool: Mempool {}, storage_config: StorageConfig::default(), @@ -255,7 +264,13 @@ impl LedgerHarness { .domain .state .read_cursor()? - .map(|c| c.try_into().unwrap()) + .and_then(|c| match c { + dolos_core::ChainPoint::Origin => Some(Point::Origin), + dolos_core::ChainPoint::Specific(slot, hash) => { + Some(Point::Specific(slot, hash.as_slice().to_vec())) + } + dolos_core::ChainPoint::Slot(_) => None, + }) .unwrap_or(Point::Origin); let mut iter = pallas::storage::hardano::immutable::read_blocks_from_point( diff --git a/crates/testing/src/lib.rs b/crates/testing/src/lib.rs index 20035ad28..668953805 100644 --- a/crates/testing/src/lib.rs +++ b/crates/testing/src/lib.rs @@ -217,7 +217,7 @@ pub fn genesis_tx_hash() -> Hash<32> { pub fn slot_to_hash(slot: u64) -> BlockHash { let mut hasher = pallas::crypto::hash::Hasher::<256>::new(); hasher.input(&(slot as i32).to_le_bytes()); - hasher.finalize() + dolos_cardano::pallas_hash_to_core(hasher.finalize()) } pub fn slot_to_chainpoint(slot: u64) -> ChainPoint { @@ -227,7 +227,7 @@ pub fn slot_to_chainpoint(slot: u64) -> ChainPoint { pub fn tx_sequence_to_hash(sequence: u64) -> TxHash { let mut hasher = pallas::crypto::hash::Hasher::<256>::new(); hasher.input(&sequence.to_le_bytes()); - hasher.finalize() + dolos_cardano::pallas_hash_to_core(hasher.finalize()) } pub fn fake_genesis_utxo( @@ -236,7 +236,7 @@ pub fn fake_genesis_utxo( amount: u64, ) -> (TxoRef, EraCbor) { let tx_hash = genesis_tx_hash(); - let txoref = TxoRef(tx_hash, ordinal as u32); + let txoref = TxoRef(dolos_cardano::pallas_hash_to_core(tx_hash), ordinal as u32); (txoref, utxo_with_value(address, Value::Coin(amount))) } diff --git a/crates/testing/src/synthetic.rs 
b/crates/testing/src/synthetic.rs index b5fb74beb..3d75dd729 100644 --- a/crates/testing/src/synthetic.rs +++ b/crates/testing/src/synthetic.rs @@ -273,7 +273,7 @@ pub fn build_synthetic_blocks( tx_bodies.push(sample_transaction_body( Bytes::from(output_address), cfg.lovelace, - seed_tx_hash, + dolos_cardano::core_hash_to_pallas(seed_tx_hash), policy_id, asset_name.clone(), cfg.asset_amount, @@ -363,18 +363,23 @@ pub fn build_synthetic_blocks( (raw_blocks, vectors, chain_config) } -pub fn seed_reward_logs( +pub fn seed_reward_logs( domain: &D, stake_address: &str, pool_id: &str, epochs: &[u64], -) -> Result<(), ChainError> { - let address = Address::from_bech32(stake_address)?; - let (stake_cred, _) = dolos_cardano::pallas_extras::address_as_stake_cred(&address) - .ok_or(ChainError::InvalidPoolParams)?; +) -> Result<(), ChainError> +where + D: Domain, +{ + let address = Address::from_bech32(stake_address) + .map_err(|_| ChainError::ChainSpecific(dolos_cardano::CardanoError::InvalidPoolParams))?; + let (stake_cred, _) = dolos_cardano::pallas_extras::address_as_stake_cred(&address).ok_or( + ChainError::ChainSpecific(dolos_cardano::CardanoError::InvalidPoolParams), + )?; let entity_key = credential_to_key(&stake_cred); - let pool_keyhash = - pool_keyhash_from_bech32(pool_id).map_err(|_| ChainError::InvalidPoolParams)?; + let pool_keyhash = pool_keyhash_from_bech32(pool_id) + .map_err(|_| ChainError::ChainSpecific(dolos_cardano::CardanoError::InvalidPoolParams))?; let summary = dolos_cardano::eras::load_era_summary::(domain.state())?; let writer = domain.archive().start_writer()?; @@ -395,7 +400,13 @@ pub fn seed_reward_logs( Ok(()) } -pub fn seed_epoch_logs(domain: &D, epochs: &[u64]) -> Result<(), ChainError> { +pub fn seed_epoch_logs( + domain: &D, + epochs: &[u64], +) -> Result<(), ChainError> +where + D: Domain, +{ let summary = dolos_cardano::eras::load_era_summary::(domain.state())?; let base = dolos_cardano::load_epoch::(domain.state())?; @@ -627,7 +638,7 @@ fn 
build_submit_tx_cbor( signing_key: &SecretKeyExtended, ) -> Vec { let input = TransactionInput { - transaction_id: input.0, + transaction_id: dolos_cardano::core_hash_to_pallas(input.0), index: input.1.into(), }; diff --git a/crates/testing/src/toy_domain.rs b/crates/testing/src/toy_domain.rs index c7e02ecee..747c39a00 100644 --- a/crates/testing/src/toy_domain.rs +++ b/crates/testing/src/toy_domain.rs @@ -124,12 +124,12 @@ pub struct ToyDomain { wal: dolos_redb3::wal::RedbWalStore, chain: Arc>, state: dolos_redb3::state::StateStore, - archive: dolos_redb3::archive::ArchiveStore, + archive: dolos_redb3::archive::ArchiveStore, indexes: dolos_redb3::indexes::IndexStore, mempool: Mempool, storage_config: StorageConfig, sync_config: SyncConfig, - genesis: Arc, + genesis: Arc, tip_broadcast: tokio::sync::broadcast::Sender, } @@ -141,7 +141,7 @@ impl ToyDomain { } pub fn new_with_genesis( - genesis: Arc, + genesis: Arc, initial_delta: Option, storage_config: Option, ) -> Self { @@ -154,7 +154,7 @@ impl ToyDomain { } pub fn new_with_genesis_and_config( - genesis: Arc, + genesis: Arc, config: CardanoConfig, initial_delta: Option, storage_config: Option, @@ -170,9 +170,12 @@ impl ToyDomain { let indexes = dolos_redb3::indexes::IndexStore::in_memory().unwrap(); - let chain = - dolos_cardano::CardanoLogic::initialize::(config.clone(), &state, &genesis) - .unwrap(); + let chain = dolos_cardano::CardanoLogic::initialize::( + config.clone(), + &state, + genesis.as_ref().clone(), + ) + .unwrap(); // Create the domain first (genesis work unit needs it for execution) let domain = Self { @@ -260,13 +263,15 @@ impl dolos_core::Domain for ToyDomain { type Entity = dolos_cardano::CardanoEntity; type EntityDelta = dolos_cardano::CardanoDelta; type Wal = dolos_redb3::wal::RedbWalStore; - type Archive = dolos_redb3::archive::ArchiveStore; + type Archive = dolos_redb3::archive::ArchiveStore; type State = dolos_redb3::state::StateStore; type Chain = dolos_cardano::CardanoLogic; type 
WorkUnit = dolos_cardano::CardanoWorkUnit; type TipSubscription = TipSubscription; type Indexes = dolos_redb3::indexes::IndexStore; type Mempool = Mempool; + type Genesis = dolos_cardano::CardanoGenesis; + type ChainSpecificError = dolos_cardano::CardanoError; fn storage_config(&self) -> &StorageConfig { &self.storage_config @@ -276,7 +281,7 @@ impl dolos_core::Domain for ToyDomain { &self.sync_config } - fn genesis(&self) -> Arc { + fn genesis(&self) -> Arc { self.genesis.clone() } @@ -308,7 +313,10 @@ impl dolos_core::Domain for ToyDomain { &self.mempool } - fn watch_tip(&self, from: Option) -> Result { + fn watch_tip( + &self, + from: Option, + ) -> Result> { let receiver = self.tip_broadcast.subscribe(); let replay = self diff --git a/crates/trp/src/compiler.rs b/crates/trp/src/compiler.rs index 38930195b..1d05a64a0 100644 --- a/crates/trp/src/compiler.rs +++ b/crates/trp/src/compiler.rs @@ -30,7 +30,11 @@ fn map_cost_models(original: CostModels) -> HashMap HashMap::from_iter(present) } -fn build_pparams>(domain: &D) -> Result { +fn build_pparams< + D: Domain, +>( + domain: &D, +) -> Result { let network = network_id_from_genesis(domain.genesis().as_ref()).unwrap(); let pparams = dolos_cardano::load_effective_pparams::(domain.state())?; @@ -48,7 +52,11 @@ fn build_pparams>(domain: &D) -> Result { +pub fn find_cursor< + D: Domain, +>( + domain: &D, +) -> Result { let cursor = domain .state() .read_cursor() @@ -59,12 +67,17 @@ pub fn find_cursor>( +pub fn load_compiler< + D: Domain, +>( domain: &D, config: &TrpConfig, ) -> Result { diff --git a/crates/trp/src/lib.rs b/crates/trp/src/lib.rs index 74db7281a..5c229dc01 100644 --- a/crates/trp/src/lib.rs +++ b/crates/trp/src/lib.rs @@ -26,7 +26,11 @@ pub struct Context { pub struct Driver; -impl + SubmitExt, C: CancelToken> dolos_core::Driver for Driver { +impl< + D: Domain + SubmitExt, + C: CancelToken, + > dolos_core::Driver for Driver +{ type Config = TrpConfig; async fn run(cfg: Self::Config, domain: D, cancel: 
C) -> Result<(), ServeError> { diff --git a/crates/trp/src/methods.rs b/crates/trp/src/methods.rs index 8fad1394b..9596af754 100644 --- a/crates/trp/src/methods.rs +++ b/crates/trp/src/methods.rs @@ -642,7 +642,8 @@ mod tests { // 4. Verify the tx is in the mempool as pending let tx_hash_bytes = hex::decode(&response.hash).unwrap(); - let tx_hash: pallas::crypto::hash::Hash<32> = tx_hash_bytes.as_slice().into(); + let tx_hash = + dolos_core::hash::Hash::<32>::new(tx_hash_bytes.as_slice().try_into().unwrap()); let status = MempoolStore::check_status(context.domain.mempool(), &tx_hash); assert_eq!(status.stage, MempoolTxStage::Pending); } diff --git a/src/adapters/mod.rs b/src/adapters/mod.rs index 200b1f4b0..7515e228b 100644 --- a/src/adapters/mod.rs +++ b/src/adapters/mod.rs @@ -2,7 +2,7 @@ pub mod storage; use std::sync::Arc; -use dolos_cardano::{CardanoGenesis, CardanoLogic, core_hash_to_pallas, pallas_hash_to_core}; +use dolos_cardano::{core_hash_to_pallas, pallas_hash_to_core, CardanoGenesis, CardanoLogic}; use dolos_core::{ config::{StorageConfig, SyncConfig}, *, @@ -84,7 +84,10 @@ impl DomainAdapter { for (txo_ref, era_cbor) in &log.inputs { if refs_set.contains(txo_ref) { let era = era_cbor.0.try_into().expect("era out of range"); - result.insert((core_hash_to_pallas(txo_ref.0), txo_ref.1), (era, era_cbor.1.clone())); + result.insert( + (core_hash_to_pallas(txo_ref.0), txo_ref.1), + (era, era_cbor.1.clone()), + ); } } @@ -155,7 +158,10 @@ impl Domain for DomainAdapter { &self.sync_config } - fn watch_tip(&self, from: Option) -> Result> { + fn watch_tip( + &self, + from: Option, + ) -> Result> { // TODO: do a more thorough analysis to understand if this approach is // susceptible to race conditions. Things to explore: // - a mutex to block the sending of events while gathering the replay. 
diff --git a/src/adapters/storage.rs b/src/adapters/storage.rs index 60a11ecfe..b76bbb40c 100644 --- a/src/adapters/storage.rs +++ b/src/adapters/storage.rs @@ -16,8 +16,8 @@ use dolos_core::{ ArchiveError, ArchiveStore as CoreArchiveStore, ArchiveWriter as CoreArchiveWriter, LogKey, }, builtin::{ - EmptyBlockIter, EmptyLogIter, EmptySlotIter, NoOpArchiveStore, NoOpArchiveWriter, - NoOpIndexStore, NoOpIndexWriter, + EmptyBlockIter, EmptyEntityValueIter, EmptyLogIter, EmptySlotIter, NoOpArchiveStore, + NoOpArchiveWriter, NoOpIndexStore, NoOpIndexWriter, }, config::{ ArchiveStoreConfig, FjallIndexConfig, FjallStateConfig, IndexStoreConfig, @@ -539,25 +539,11 @@ impl CoreStateStore for StateStoreBackend { // Archive Store Backend // ============================================================================ -fn convert_archive_err(e: ArchiveError) -> ArchiveError { - match e { - ArchiveError::BrokenInvariant(e) => ArchiveError::BrokenInvariant(e), - ArchiveError::InternalError(s) => ArchiveError::InternalError(s), - ArchiveError::QueryNotSupported => ArchiveError::QueryNotSupported, - ArchiveError::InvalidStoreVersion => ArchiveError::InvalidStoreVersion, - ArchiveError::DecodingError(e) => ArchiveError::DecodingError(e), - ArchiveError::EntityDecodingError(s) => ArchiveError::EntityDecodingError(s), - ArchiveError::NamespaceNotFound(ns) => ArchiveError::NamespaceNotFound(ns), - // TODO: what - ArchiveError::ChainSpecifc(inf) => match inf {}, - } -} - /// Enum wrapper for archive store backends. #[derive(Clone)] pub enum ArchiveStoreBackend { - Redb(dolos_redb3::archive::ArchiveStore), - NoOp(NoOpArchiveStore), + Redb(dolos_redb3::archive::ArchiveStore), + NoOp(NoOpArchiveStore), } impl ArchiveStoreBackend { @@ -575,7 +561,7 @@ impl ArchiveStoreBackend { /// Create a no-op archive store that discards all writes. pub fn noop() -> Self { - Self::NoOp(NoOpArchiveStore) + Self::NoOp(NoOpArchiveStore::default()) } /// Create an in-memory archive store. 
@@ -608,14 +594,14 @@ impl ArchiveStoreBackend { Self::Redb(s) => s .shutdown() .map_err(|e| ArchiveError::InternalError(e.to_string())), - Self::NoOp(s) => s.shutdown().map_err(convert_archive_err), + Self::NoOp(s) => s.shutdown(), } } } pub enum ArchiveWriterBackend { - Redb(Box<::Writer>), - NoOp(NoOpArchiveWriter), + Redb(Box< as CoreArchiveStore>::Writer>), + NoOp(NoOpArchiveWriter), } impl CoreArchiveWriter for ArchiveWriterBackend { @@ -627,8 +613,8 @@ impl CoreArchiveWriter for ArchiveWriterBackend { block: &RawBlock, ) -> Result<(), ArchiveError> { match self { - Self::Redb(w) => w.apply(point, block).map_err(convert_archive_err), - Self::NoOp(w) => w.apply(point, block).map_err(convert_archive_err), + Self::Redb(w) => w.apply(point, block), + Self::NoOp(w) => w.apply(point, block), } } @@ -639,28 +625,34 @@ impl CoreArchiveWriter for ArchiveWriterBackend { value: &EntityValue, ) -> Result<(), ArchiveError> { match self { - Self::Redb(w) => w.write_log(ns, key, value).map_err(convert_archive_err), - Self::NoOp(w) => w.write_log(ns, key, value).map_err(convert_archive_err), + Self::Redb(w) => w.write_log(ns, key, value), + Self::NoOp(w) => w.write_log(ns, key, value), } } fn undo(&self, point: &ChainPoint) -> Result<(), ArchiveError> { match self { - Self::Redb(w) => w.undo(point).map_err(convert_archive_err), - Self::NoOp(w) => w.undo(point).map_err(convert_archive_err), + Self::Redb(w) => w.undo(point), + Self::NoOp(w) => w.undo(point), } } fn commit(self) -> Result<(), ArchiveError> { match self { - Self::Redb(w) => (*w).commit().map_err(convert_archive_err), - Self::NoOp(w) => w.commit().map_err(convert_archive_err), + Self::Redb(w) => (*w).commit(), + Self::NoOp(w) => w.commit(), } } } pub enum ArchiveBlockIterBackend { - Redb(Box<::BlockIter<'static>>), + Redb( + Box< + as CoreArchiveStore>::BlockIter< + 'static, + >, + >, + ), NoOp(EmptyBlockIter), } @@ -700,31 +692,35 @@ impl dolos_core::archive::Skippable for ArchiveBlockIterBackend { } pub enum 
ArchiveLogIterBackend { - Redb(Box<::LogIter>), - NoOp(EmptyLogIter), + Redb(Box< as CoreArchiveStore>::LogIter>), + NoOp(EmptyLogIter), } impl Iterator for ArchiveLogIterBackend { type Item = Result<(LogKey, EntityValue), ArchiveError>; fn next(&mut self) -> Option { match self { - Self::Redb(iter) => iter.next().map(|r| r.map_err(convert_archive_err)), - Self::NoOp(iter) => iter.next().map(|r| r.map_err(convert_archive_err)), + Self::Redb(iter) => iter.next().map(|r| r), + Self::NoOp(iter) => iter.next().map(|r| r), } } } pub enum ArchiveEntityValueIterBackend { - Redb(Box<::EntityValueIter>), - NoOp(dolos_core::builtin::EmptyEntityValueIter), + Redb( + Box< + as CoreArchiveStore>::EntityValueIter, + >, + ), + NoOp(EmptyEntityValueIter), } impl Iterator for ArchiveEntityValueIterBackend { type Item = Result>; fn next(&mut self) -> Option { match self { - Self::Redb(iter) => iter.next().map(|r| r.map_err(convert_archive_err)), - Self::NoOp(iter) => iter.next().map(|r| r.map_err(convert_archive_err)), + Self::Redb(iter) => iter.next().map(|r| r), + Self::NoOp(iter) => iter.next().map(|r| r), } } } @@ -739,11 +735,8 @@ impl CoreArchiveStore for ArchiveStoreBackend { fn start_writer(&self) -> Result> { match self { Self::Redb(s) => CoreArchiveStore::start_writer(s) - .map(|writer| ArchiveWriterBackend::Redb(Box::new(writer))) - .map_err(convert_archive_err), - Self::NoOp(s) => CoreArchiveStore::start_writer(s) - .map(ArchiveWriterBackend::NoOp) - .map_err(convert_archive_err), + .map(|writer| ArchiveWriterBackend::Redb(Box::new(writer))), + Self::NoOp(s) => CoreArchiveStore::start_writer(s).map(ArchiveWriterBackend::NoOp), } } @@ -753,8 +746,8 @@ impl CoreArchiveStore for ArchiveStoreBackend { keys: &[&LogKey], ) -> Result>, ArchiveError> { match self { - Self::Redb(s) => CoreArchiveStore::read_logs(s, ns, keys).map_err(convert_archive_err), - Self::NoOp(s) => CoreArchiveStore::read_logs(s, ns, keys).map_err(convert_archive_err), + Self::Redb(s) => 
CoreArchiveStore::read_logs(s, ns, keys), + Self::NoOp(s) => CoreArchiveStore::read_logs(s, ns, keys), } } @@ -765,11 +758,10 @@ impl CoreArchiveStore for ArchiveStoreBackend { ) -> Result> { match self { Self::Redb(s) => CoreArchiveStore::iter_logs(s, ns, range) - .map(|iter| ArchiveLogIterBackend::Redb(Box::new(iter))) - .map_err(convert_archive_err), - Self::NoOp(s) => CoreArchiveStore::iter_logs(s, ns, range) - .map(ArchiveLogIterBackend::NoOp) - .map_err(convert_archive_err), + .map(|iter| ArchiveLogIterBackend::Redb(Box::new(iter))), + Self::NoOp(s) => { + CoreArchiveStore::iter_logs(s, ns, range).map(ArchiveLogIterBackend::NoOp) + } } } @@ -778,12 +770,8 @@ impl CoreArchiveStore for ArchiveStoreBackend { slot: &BlockSlot, ) -> Result, ArchiveError> { match self { - Self::Redb(s) => { - CoreArchiveStore::get_block_by_slot(s, slot).map_err(convert_archive_err) - } - Self::NoOp(s) => { - CoreArchiveStore::get_block_by_slot(s, slot).map_err(convert_archive_err) - } + Self::Redb(s) => CoreArchiveStore::get_block_by_slot(s, slot), + Self::NoOp(s) => CoreArchiveStore::get_block_by_slot(s, slot), } } @@ -794,11 +782,10 @@ impl CoreArchiveStore for ArchiveStoreBackend { ) -> Result, ArchiveError> { match self { Self::Redb(s) => CoreArchiveStore::get_range(s, from, to) - .map(|iter| ArchiveBlockIterBackend::Redb(Box::new(iter))) - .map_err(convert_archive_err), - Self::NoOp(s) => CoreArchiveStore::get_range(s, from, to) - .map(ArchiveBlockIterBackend::NoOp) - .map_err(convert_archive_err), + .map(|iter| ArchiveBlockIterBackend::Redb(Box::new(iter))), + Self::NoOp(s) => { + CoreArchiveStore::get_range(s, from, to).map(ArchiveBlockIterBackend::NoOp) + } } } @@ -807,19 +794,15 @@ impl CoreArchiveStore for ArchiveStoreBackend { intersect: &[ChainPoint], ) -> Result, ArchiveError> { match self { - Self::Redb(s) => { - CoreArchiveStore::find_intersect(s, intersect).map_err(convert_archive_err) - } - Self::NoOp(s) => { - CoreArchiveStore::find_intersect(s, 
intersect).map_err(convert_archive_err) - } + Self::Redb(s) => CoreArchiveStore::find_intersect(s, intersect), + Self::NoOp(s) => CoreArchiveStore::find_intersect(s, intersect), } } fn get_tip(&self) -> Result, ArchiveError> { match self { - Self::Redb(s) => CoreArchiveStore::get_tip(s).map_err(convert_archive_err), - Self::NoOp(s) => CoreArchiveStore::get_tip(s).map_err(convert_archive_err), + Self::Redb(s) => CoreArchiveStore::get_tip(s), + Self::NoOp(s) => CoreArchiveStore::get_tip(s), } } @@ -829,21 +812,15 @@ impl CoreArchiveStore for ArchiveStoreBackend { max_prune: Option, ) -> Result> { match self { - Self::Redb(s) => CoreArchiveStore::prune_history(s, max_slots, max_prune) - .map_err(convert_archive_err), - Self::NoOp(s) => CoreArchiveStore::prune_history(s, max_slots, max_prune) - .map_err(convert_archive_err), + Self::Redb(s) => CoreArchiveStore::prune_history(s, max_slots, max_prune), + Self::NoOp(s) => CoreArchiveStore::prune_history(s, max_slots, max_prune), } } fn truncate_front(&self, after: &ChainPoint) -> Result<(), ArchiveError> { match self { - Self::Redb(s) => { - CoreArchiveStore::truncate_front(s, after).map_err(convert_archive_err) - } - Self::NoOp(s) => { - CoreArchiveStore::truncate_front(s, after).map_err(convert_archive_err) - } + Self::Redb(s) => CoreArchiveStore::truncate_front(s, after), + Self::NoOp(s) => CoreArchiveStore::truncate_front(s, after), } } } diff --git a/src/bin/dolos/common.rs b/src/bin/dolos/common.rs index 37e250430..618b57fb3 100644 --- a/src/bin/dolos/common.rs +++ b/src/bin/dolos/common.rs @@ -10,9 +10,9 @@ use tracing::{debug, info}; use tracing_subscriber::{filter::Targets, prelude::*}; use dolos::adapters::DomainAdapter; -use dolos_cardano::CardanoGenesis; use dolos::prelude::*; use dolos::storage; +use dolos_cardano::CardanoGenesis; pub type Stores = storage::Stores; diff --git a/src/bin/dolos/data/compute_nonce.rs b/src/bin/dolos/data/compute_nonce.rs index d7e399146..dd59a4895 100644 --- 
a/src/bin/dolos/data/compute_nonce.rs +++ b/src/bin/dolos/data/compute_nonce.rs @@ -1,4 +1,6 @@ -use dolos_cardano::{load_era_summary, utils::nonce_stability_window, CardanoGenesis, EraSummary, Nonces}; +use dolos_cardano::{ + load_era_summary, utils::nonce_stability_window, CardanoGenesis, EraSummary, Nonces, +}; use dolos_core::{ArchiveStore, Domain}; use miette::{bail, Context, IntoDiagnostic}; use pallas::{crypto::hash::Hash, ledger::traverse::MultiEraBlock}; @@ -27,7 +29,10 @@ pub fn get_nh(epoch: u64, domain: &D, summary: &EraSummary) -> miette Ok(block.header().previous_hash().unwrap()) } -pub fn compute_nonce>(epoch: u64, domain: &D) -> miette::Result> { +pub fn compute_nonce>( + epoch: u64, + domain: &D, +) -> miette::Result> { let summary = load_era_summary::(domain.state()) .into_diagnostic() .context("loading era summary")?; diff --git a/src/bin/dolos/data/export.rs b/src/bin/dolos/data/export.rs index ce7d58b6b..b0d3e230b 100644 --- a/src/bin/dolos/data/export.rs +++ b/src/bin/dolos/data/export.rs @@ -90,7 +90,7 @@ fn append_dir_filtered( } fn prepare_archive( - archive: &mut dolos_redb3::archive::ArchiveStore, + archive: &mut dolos_redb3::archive::ArchiveStore, pb: &crate::feedback::ProgressBar, ) -> miette::Result<()> { let db = archive.db_mut(); diff --git a/src/bin/dolos/data/find_seq.rs b/src/bin/dolos/data/find_seq.rs index 633156bc7..d31e92c44 100644 --- a/src/bin/dolos/data/find_seq.rs +++ b/src/bin/dolos/data/find_seq.rs @@ -1,6 +1,6 @@ +use dolos_cardano::pallas_hash_to_core; use dolos_core::config::RootConfig; use miette::{Context, IntoDiagnostic}; -use dolos_cardano::pallas_hash_to_core; use pallas::crypto::hash::Hash; use std::str::FromStr; diff --git a/src/bin/dolos/data/import_archive.rs b/src/bin/dolos/data/import_archive.rs index 80bcf9b7b..33755b7ed 100644 --- a/src/bin/dolos/data/import_archive.rs +++ b/src/bin/dolos/data/import_archive.rs @@ -146,7 +146,8 @@ pub fn run(config: &RootConfig, args: &Args, feedback: &Feedback) -> 
miette::Res break; } - let point = ChainPoint::Specific(block.slot, dolos_cardano::pallas_hash_to_core(block.hash)); + let point = + ChainPoint::Specific(block.slot, dolos_cardano::pallas_hash_to_core(block.hash)); writer .apply(&point, &block.raw) diff --git a/src/bin/dolos/doctor/rollback.rs b/src/bin/dolos/doctor/rollback.rs index f0009b3d9..96abd502b 100644 --- a/src/bin/dolos/doctor/rollback.rs +++ b/src/bin/dolos/doctor/rollback.rs @@ -1,8 +1,8 @@ use std::str::FromStr; +use dolos_cardano::pallas_hash_to_core; use dolos_core::sync::SyncExt as _; use miette::IntoDiagnostic as _; -use dolos_cardano::pallas_hash_to_core; use pallas::crypto::hash::Hash; use dolos_core::{config::RootConfig, ChainPoint}; diff --git a/src/bin/dolos/init.rs b/src/bin/dolos/init.rs index 01eb2b86f..40e303292 100644 --- a/src/bin/dolos/init.rs +++ b/src/bin/dolos/init.rs @@ -1,12 +1,10 @@ use clap::Parser; -use dolos_cardano::{include, mutable_slots}; use dolos_cardano::CardanoGenesis; -use dolos_core::{ - config::{ - CardanoConfig, ChainConfig, GenesisConfig, GrpcConfig, MinibfConfig, MinikupoConfig, - MithrilConfig, PeerConfig, RelayConfig, RootConfig, StorageConfig, StorageVersion, - TrpConfig, UpstreamConfig, - }, +use dolos_cardano::{include, mutable_slots}; +use dolos_core::config::{ + CardanoConfig, ChainConfig, GenesisConfig, GrpcConfig, MinibfConfig, MinikupoConfig, + MithrilConfig, PeerConfig, RelayConfig, RootConfig, StorageConfig, StorageVersion, TrpConfig, + UpstreamConfig, }; use inquire::{Confirm, MultiSelect, Select, Text}; use miette::{miette, Context as _, IntoDiagnostic}; diff --git a/src/serve/grpc/query.rs b/src/serve/grpc/query.rs index 85e32daee..2743b0cf4 100644 --- a/src/serve/grpc/query.rs +++ b/src/serve/grpc/query.rs @@ -15,7 +15,11 @@ use dolos_cardano::indexes::AsyncCardanoQueryExt; pub fn point_to_u5c(_ledger: &T, point: &ChainPoint) -> u5c::query::ChainPoint { u5c::query::ChainPoint { slot: point.slot(), - hash: point.hash().map(|h| 
h.as_slice().to_vec()).unwrap_or_default().into(), + hash: point + .hash() + .map(|h| h.as_slice().to_vec()) + .unwrap_or_default() + .into(), ..Default::default() } } diff --git a/src/serve/grpc/sync.rs b/src/serve/grpc/sync.rs index c3fd83328..9354cbe63 100644 --- a/src/serve/grpc/sync.rs +++ b/src/serve/grpc/sync.rs @@ -53,7 +53,11 @@ fn raw_to_blockref( fn point_to_blockref(point: &ChainPoint, timestamp: u64) -> u5c::sync::BlockRef { BlockRef { - hash: point.hash().map(|h| h.as_slice().to_vec()).unwrap_or_default().into(), + hash: point + .hash() + .map(|h| h.as_slice().to_vec()) + .unwrap_or_default() + .into(), slot: point.slot(), timestamp, ..Default::default() diff --git a/src/serve/o7s_unix/mod.rs b/src/serve/o7s_unix/mod.rs index a4633d15a..21bdeef4c 100644 --- a/src/serve/o7s_unix/mod.rs +++ b/src/serve/o7s_unix/mod.rs @@ -55,7 +55,10 @@ async fn handle_session, C: C Ok(()) } -async fn accept_client_connections, C: CancelToken>( +async fn accept_client_connections< + D: Domain, + C: CancelToken, +>( domain: D, config: &DriverConfig, tasks: &mut TaskTracker, @@ -89,7 +92,9 @@ async fn accept_client_connections, C: CancelToken> dolos_core::Driver for Driver { +impl, C: CancelToken> dolos_core::Driver + for Driver +{ type Config = DriverConfig; #[instrument(skip_all)] diff --git a/src/serve/o7s_unix/statequery.rs b/src/serve/o7s_unix/statequery.rs index a1e454df2..f4af423a3 100644 --- a/src/serve/o7s_unix/statequery.rs +++ b/src/serve/o7s_unix/statequery.rs @@ -45,7 +45,10 @@ impl> Session { let block = MultiEraBlock::decode(&body).map_err(|e| Error::server(e.to_string()))?; - Ok(ChainPoint::Specific(slot, pallas_hash_to_core(block.hash()))) + Ok(ChainPoint::Specific( + slot, + pallas_hash_to_core(block.hash()), + )) } _ => Ok(point), } diff --git a/src/sync/submit.rs b/src/sync/submit.rs index daf6f367f..77e71fc58 100644 --- a/src/sync/submit.rs +++ b/src/sync/submit.rs @@ -204,7 +204,9 @@ impl gasket::framework::Worker for Worker { .iter() .filter_map(|x| 
{ let arr: [u8; 32] = x.1.as_slice().try_into().ok()?; - stage.mempool.find_inflight(&dolos_core::hash::Hash::new(arr)) + stage + .mempool + .find_inflight(&dolos_core::hash::Hash::new(arr)) }) .collect_vec(); From 7281b9f177b87764960184271a3d069c99a8b000 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Fri, 20 Mar 2026 11:49:46 -0300 Subject: [PATCH 45/85] chore: remove useless chain_id from genesis Should I remove the trait? Maybe but it's consistent this way --- crates/cardano/src/lib.rs | 6 +----- crates/core/src/lib.rs | 4 +--- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index bc026bcb8..2eff054dd 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -292,11 +292,7 @@ pub struct CardanoGenesis { pub force_protocol: Option, } -impl dolos_core::Genesis for CardanoGenesis { - fn chain_id(&self) -> u32 { - self.shelley.network_magic.unwrap_or_default() - } -} +impl dolos_core::Genesis for CardanoGenesis {} impl CardanoGenesis { pub fn from_file_paths( diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 1b2dcf16e..725200e09 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -484,9 +484,7 @@ pub enum ChainError { // Phase2ValidationRejected(Phase2Log), } -pub trait Genesis: Clone + Send + Sync + 'static { - fn chain_id(&self) -> u32; -} +pub trait Genesis: Clone + Send + Sync + 'static {} // Note: The WorkUnit trait is now defined in work_unit.rs // Chain-specific work unit implementations live in their respective crates From 48b6fe8849b901984e432021d20588b947d7c423 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Fri, 20 Mar 2026 13:57:04 -0300 Subject: [PATCH 46/85] feat: dolos core without minicbore --- Cargo.lock | 1 - crates/cardano/src/roll/mod.rs | 9 +++-- crates/core/Cargo.toml | 1 - crates/core/src/archive.rs | 9 +---- crates/core/src/lib.rs | 26 +------------ crates/core/src/mempool.rs | 2 
+- crates/core/src/state.rs | 5 +-- crates/redb3/src/mempool.rs | 70 ++++++++++++++++++++++++++++++++-- src/adapters/storage.rs | 8 ++-- 9 files changed, 83 insertions(+), 48 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index af23ef027..aa6e65489 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1373,7 +1373,6 @@ dependencies = [ "futures-util", "hex", "itertools 0.14.0", - "minicbor 0.26.4", "opentelemetry", "proptest", "rayon", diff --git a/crates/cardano/src/roll/mod.rs b/crates/cardano/src/roll/mod.rs index 5f7ca9d32..5a71d6ce5 100644 --- a/crates/cardano/src/roll/mod.rs +++ b/crates/cardano/src/roll/mod.rs @@ -328,9 +328,12 @@ impl<'a> DeltaBuilder<'a> { for input in tx.consumes() { let txoref = crate::txo_ref_from_input(&input); - let resolved = self.utxos.get(&txoref).ok_or_else(|| { - StateError::InvariantViolation(InvariantViolation::InputNotFound(txoref)) - })?; + let resolved = self + .utxos + .get(&txoref) + .ok_or(StateError::InvariantViolation( + InvariantViolation::InputNotFound(txoref), + ))?; resolved.with_dependent(|_, resolved| { self.account_state diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index db6ce5e88..f600ed539 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -22,7 +22,6 @@ tokio-stream.workspace = true hex.workspace = true regex.workspace = true serde_with = "3.16.0" -minicbor = { version = "0.26", features = ["derive"] } [dev-dependencies] proptest = "1.7.0" diff --git a/crates/core/src/archive.rs b/crates/core/src/archive.rs index 80763e1ac..2752ee958 100644 --- a/crates/core/src/archive.rs +++ b/crates/core/src/archive.rs @@ -155,13 +155,8 @@ pub enum ArchiveError { #[error("invalid store version")] InvalidStoreVersion, - #[error("decoding error")] - DecodingError(#[from] minicbor::decode::Error), - - //#[error("address decoding error")] - //AddressDecoding(#[from] pallas::ledger::addresses::Error), - //#[error("decoding error")] - //DecodingError(#[from] pallas::codec::minicbor::decode::Error), + 
#[error("decoding error: {0}")] + DecodingError(String), //#[error("block decoding error")] //BlockDecodingError(#[from] pallas::ledger::traverse::Error), diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 725200e09..eb0b1e2be 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -16,7 +16,6 @@ // traverse::{MultiEraInput, MultiEraOutput, MultiEraTx, MultiEraUpdate}, // }, //}; -use minicbor::{Decode, Encode}; use serde::{Deserialize, Serialize}; use std::{ collections::{HashMap, HashSet}, @@ -102,29 +101,8 @@ pub use point::*; pub use state::*; pub use wal::*; -// TODO: ask santiago. Doubtful -mod cbor_bytes { - use minicbor::{Decoder, Encoder}; - - pub fn encode( - v: &[u8], - e: &mut Encoder, - _: &mut C, - ) -> Result<(), minicbor::encode::Error> { - e.bytes(v)?; - Ok(()) - } - - pub fn decode<'b, C>( - d: &mut Decoder<'b>, - _: &mut C, - ) -> Result, minicbor::decode::Error> { - d.bytes().map(|b| b.to_vec()) - } -} - -#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize, Encode, Decode)] -pub struct EraCbor(#[n(0)] pub Era, #[cbor(n(1), with = "cbor_bytes")] pub Cbor); +#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)] +pub struct EraCbor(pub Era, pub Cbor); impl EraCbor { pub fn era(&self) -> Era { diff --git a/crates/core/src/mempool.rs b/crates/core/src/mempool.rs index f2b620cf9..e50516ddf 100644 --- a/crates/core/src/mempool.rs +++ b/crates/core/src/mempool.rs @@ -116,7 +116,7 @@ pub enum MempoolError { // #[error("traverse error: {0}")] // TraverseError(#[from] pallas::ledger::traverse::Error), #[error("decode error: {0}")] - DecodeError(#[from] minicbor::decode::Error), + DecodeError(String), #[error(transparent)] StateError(#[from] StateError), diff --git a/crates/core/src/state.rs b/crates/core/src/state.rs index 605845863..5492607a0 100644 --- a/crates/core/src/state.rs +++ b/crates/core/src/state.rs @@ -193,9 +193,8 @@ pub enum StateError { #[error("invalid namespace: {0}")] 
InvalidNamespace(Namespace), - #[error(transparent)] - DecodingError(#[from] minicbor::decode::Error), - //DecodingError(#[from] pallas::codec::minicbor::decode::Error), + #[error("decoding error: {0}")] + DecodingError(String), // #[error(transparent)] // TraverseError(#[from] pallas::ledger::traverse::Error), #[error(transparent)] diff --git a/crates/redb3/src/mempool.rs b/crates/redb3/src/mempool.rs index 53698ea71..0dd1a1aa7 100644 --- a/crates/redb3/src/mempool.rs +++ b/crates/redb3/src/mempool.rs @@ -2,6 +2,63 @@ use std::collections::HashSet; use std::sync::Arc; use pallas::codec::minicbor::{self, Decode, Encode}; + +// Provides minicbor Encode/Decode for EraCbor (which no longer derives them) +// using the same on-disk CBOR format: array(2) [ uint(era), bytes(cbor) ] +mod era_cbor_codec { + use dolos_core::EraCbor; + use pallas::codec::minicbor; + + pub fn encode( + v: &EraCbor, + e: &mut minicbor::Encoder, + _: &mut C, + ) -> Result<(), minicbor::encode::Error> { + e.array(2)?.u16(v.0)?.bytes(&v.1)?; + Ok(()) + } + + pub fn decode<'b, C>( + d: &mut minicbor::Decoder<'b>, + _: &mut C, + ) -> Result { + d.array()?; + let era = d.u16()?; + let cbor = d.bytes()?.to_vec(); + Ok(EraCbor(era, cbor)) + } +} + +mod opt_era_cbor_codec { + use dolos_core::EraCbor; + use pallas::codec::minicbor; + + pub fn encode( + v: &Option, + e: &mut minicbor::Encoder, + ctx: &mut C, + ) -> Result<(), minicbor::encode::Error> { + match v { + None => { + e.null()?; + Ok(()) + } + Some(inner) => super::era_cbor_codec::encode(inner, e, ctx), + } + } + + pub fn decode<'b, C>( + d: &mut minicbor::Decoder<'b>, + ctx: &mut C, + ) -> Result, minicbor::decode::Error> { + if d.datatype()? 
== minicbor::data::Type::Null { + d.null()?; + Ok(None) + } else { + Ok(Some(super::era_cbor_codec::decode(d, ctx)?)) + } + } +} use redb::{ReadableDatabase, ReadableTable, ReadableTableMetadata, TableDefinition}; use thiserror::Error; use tokio::sync::broadcast; @@ -206,14 +263,19 @@ impl redb::Value for DbEraCbor { where Self: 'a, { - Self(minicbor::decode(data).unwrap()) + let mut d = minicbor::Decoder::new(data); + let era_cbor = era_cbor_codec::decode(&mut d, &mut ()).unwrap(); + Self(era_cbor) } fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> Self::AsBytes<'a> where Self: 'b, { - minicbor::to_vec(&value.0).unwrap() + let mut buf = Vec::new(); + let mut e = minicbor::Encoder::new(&mut buf); + era_cbor_codec::encode(&value.0, &mut e, &mut ()).unwrap(); + buf } fn type_name() -> redb::TypeName { @@ -239,7 +301,7 @@ struct InflightRecord { stage: InflightStage, #[n(1)] confirmations: u32, - #[n(2)] + #[cbor(n(2), with = "era_cbor_codec")] payload: EraCbor, #[cbor(n(3), with = "minicbor::bytes")] confirmed_at: Option>, @@ -289,7 +351,7 @@ struct FinalizedEntry { confirmations: u32, #[cbor(n(2), with = "minicbor::bytes")] confirmed_at: Option>, - #[n(3)] + #[cbor(n(3), with = "opt_era_cbor_codec")] payload: Option, #[n(4)] dropped: Option, diff --git a/src/adapters/storage.rs b/src/adapters/storage.rs index b76bbb40c..86d20187f 100644 --- a/src/adapters/storage.rs +++ b/src/adapters/storage.rs @@ -700,8 +700,8 @@ impl Iterator for ArchiveLogIterBackend { type Item = Result<(LogKey, EntityValue), ArchiveError>; fn next(&mut self) -> Option { match self { - Self::Redb(iter) => iter.next().map(|r| r), - Self::NoOp(iter) => iter.next().map(|r| r), + Self::Redb(iter) => iter.next(), + Self::NoOp(iter) => iter.next(), } } } @@ -719,8 +719,8 @@ impl Iterator for ArchiveEntityValueIterBackend { type Item = Result>; fn next(&mut self) -> Option { match self { - Self::Redb(iter) => iter.next().map(|r| r), - Self::NoOp(iter) => iter.next().map(|r| r), + 
Self::Redb(iter) => iter.next(), + Self::NoOp(iter) => iter.next(), } } } From da8603e16d9fd35cb803d971360137840a525bc4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Wed, 1 Apr 2026 12:23:08 -0300 Subject: [PATCH 47/85] fix: some critical missing code --- src/prelude.rs | 20 ++++++++++++------- src/relay/blockfetch.rs | 4 ++-- src/relay/chainsync.rs | 6 ++++-- src/serve/grpc/submit.rs | 33 ++++++++++++++++++++++---------- src/serve/o7s_unix/chainsync.rs | 6 ++++-- src/serve/o7s_unix/statequery.rs | 2 +- src/sync/pull.rs | 3 ++- 7 files changed, 49 insertions(+), 25 deletions(-) diff --git a/src/prelude.rs b/src/prelude.rs index 67cf31d0c..45f671b0d 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -5,22 +5,28 @@ use pallas::network::miniprotocols::Point; use std::fmt::Display; use thiserror::Error; -pub fn pallas_point_to_chain(p: Point) -> ChainPoint { +pub fn pallas_point_to_chain(p: Point) -> Result { match p { - Point::Origin => ChainPoint::Origin, + Point::Origin => Ok(ChainPoint::Origin), Point::Specific(slot, hash) => { - let arr: [u8; 32] = hash.as_slice().try_into().unwrap_or_default(); - ChainPoint::Specific(slot, dolos_core::hash::Hash::new(arr)) + let len = hash.len(); + let arr: [u8; 32] = hash.as_slice().try_into().map_err(|_| { + Error::parse(format!( + "invalid block hash length: expected 32 bytes, got {len}" + )) + })?; + Ok(ChainPoint::Specific(slot, dolos_core::hash::Hash::new(arr))) } } } -#[allow(clippy::result_unit_err)] -pub fn chain_point_to_pallas(p: ChainPoint) -> Result { +pub fn chain_point_to_pallas(p: ChainPoint) -> Result { match p { ChainPoint::Origin => Ok(Point::Origin), ChainPoint::Specific(slot, hash) => Ok(Point::Specific(slot, hash.as_slice().to_vec())), - ChainPoint::Slot(_) => Err(()), + ChainPoint::Slot(slot) => Err(Error::parse(format!( + "ChainPoint::Slot({slot}) cannot be converted to a pallas Point: no hash available" + ))), } } diff --git a/src/relay/blockfetch.rs b/src/relay/blockfetch.rs 
index d795ae2c1..6634e5367 100644 --- a/src/relay/blockfetch.rs +++ b/src/relay/blockfetch.rs @@ -33,8 +33,8 @@ async fn process_request( debug!(?p1, ?p2, "processing equest"); - let p1 = pallas_point_to_chain(p1); - let p2 = pallas_point_to_chain(p2); + let p1 = pallas_point_to_chain(p1).map_err(Error::parse)?; + let p2 = pallas_point_to_chain(p2).map_err(Error::parse)?; let ok1 = wal.contains_point(&p1).map_err(Error::server)?; let ok2 = wal.contains_point(&p2).map_err(Error::server)?; diff --git a/src/relay/chainsync.rs b/src/relay/chainsync.rs index 304203a0b..04a4ad162 100644 --- a/src/relay/chainsync.rs +++ b/src/relay/chainsync.rs @@ -1,5 +1,4 @@ use dolos_core::crawl::ChainCrawler; -use itertools::*; use pallas::network::miniprotocols::{ chainsync::{ClientRequest, N2NServer, Tip}, Point, @@ -147,7 +146,10 @@ impl Session { points.push(Point::Origin); } - let points = points.into_iter().map(pallas_point_to_chain).collect_vec(); + let points: Vec<_> = points + .into_iter() + .filter_map(|p| pallas_point_to_chain(p).ok()) + .collect(); let intersect = ChainCrawler::::start(&self.domain, &points).unwrap(); diff --git a/src/serve/grpc/submit.rs b/src/serve/grpc/submit.rs index 15a7e481d..14a698aca 100644 --- a/src/serve/grpc/submit.rs +++ b/src/serve/grpc/submit.rs @@ -64,13 +64,20 @@ fn tx_eval_to_u5c( eval: Result>, ) -> u5c::spec::cardano::TxEval { match eval { - Ok(_tx) => u5c::spec::cardano::TxEval { - ex_units: None, - redeemers: vec![], - fee: None, - traces: vec![], - ..Default::default() - }, + Ok(tx) => { + let traces = tx + .report + .and_then(|b| String::from_utf8(b).ok()) + .map(|msg| vec![u5c::spec::cardano::EvalTrace { msg }]) + .unwrap_or_default(); + u5c::spec::cardano::TxEval { + ex_units: None, + redeemers: vec![], + fee: None, + traces, + ..Default::default() + } + } Err(e) => u5c::spec::cardano::TxEval { errors: vec![u5c::spec::cardano::EvalError { msg: format!("{e:#?}"), @@ -128,10 +135,16 @@ where .r#ref .into_iter() .map(|x| { - let arr: 
[u8; 32] = x.as_ref().try_into().unwrap_or_default(); - dolos_core::hash::Hash::new(arr) + let bytes: &[u8] = x.as_ref(); + let arr: [u8; 32] = bytes.try_into().map_err(|_| { + Status::invalid_argument(format!( + "invalid tx hash length: expected 32 bytes, got {}", + bytes.len() + )) + })?; + Ok(dolos_core::hash::Hash::new(arr)) }) - .collect(); + .collect::, Status>>()?; let initial_stages: Vec<_> = subjects .iter() diff --git a/src/serve/o7s_unix/chainsync.rs b/src/serve/o7s_unix/chainsync.rs index 81c6efdb1..097bceae0 100644 --- a/src/serve/o7s_unix/chainsync.rs +++ b/src/serve/o7s_unix/chainsync.rs @@ -1,5 +1,4 @@ use dolos_core::crawl::ChainCrawler; -use itertools::*; use pallas::network::miniprotocols::{ chainsync::{BlockContent, ClientRequest, N2CServer, Tip}, Point, @@ -149,7 +148,10 @@ impl Session { points.push(Point::Origin); } - let points = points.into_iter().map(pallas_point_to_chain).collect_vec(); + let points: Vec<_> = points + .into_iter() + .filter_map(|p| pallas_point_to_chain(p).ok()) + .collect(); let intersect = ChainCrawler::::start(&self.domain, &points).unwrap(); diff --git a/src/serve/o7s_unix/statequery.rs b/src/serve/o7s_unix/statequery.rs index f4af423a3..91244edf5 100644 --- a/src/serve/o7s_unix/statequery.rs +++ b/src/serve/o7s_unix/statequery.rs @@ -79,7 +79,7 @@ impl> Session { debug!(?point, "handling acquire request"); let chain_point = match point { - Some(p) => pallas_point_to_chain(p), + Some(p) => pallas_point_to_chain(p)?, None => { // None means acquire the latest point self.tip_cursor()? 
diff --git a/src/sync/pull.rs b/src/sync/pull.rs index b56364c46..aaf1b992c 100644 --- a/src/sync/pull.rs +++ b/src/sync/pull.rs @@ -307,8 +307,9 @@ impl Stage { Point::Specific(slot, _) => debug!(slot, "rollback"), }; + let chain_point = pallas_point_to_chain(point).or_panic()?; self.downstream - .send(PullEvent::Rollback(pallas_point_to_chain(point)).into()) + .send(PullEvent::Rollback(chain_point).into()) .await .or_panic()?; From 471ce0df3651cfd9ff48132cc5374b5961d8b532 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Thu, 2 Apr 2026 14:19:56 -0300 Subject: [PATCH 48/85] feat: add missing type constraints --- crates/cardano/src/estart/work_unit.rs | 9 +++++---- crates/cardano/src/ewrap/work_unit.rs | 9 +++++---- crates/cardano/src/genesis/mod.rs | 14 ++++++++++++-- crates/cardano/src/genesis/work_unit.rs | 2 +- crates/cardano/src/lib.rs | 2 ++ crates/core/src/lib.rs | 2 +- 6 files changed, 26 insertions(+), 12 deletions(-) diff --git a/crates/cardano/src/estart/work_unit.rs b/crates/cardano/src/estart/work_unit.rs index c8596568b..2546aea0e 100644 --- a/crates/cardano/src/estart/work_unit.rs +++ b/crates/cardano/src/estart/work_unit.rs @@ -46,7 +46,7 @@ impl EstartWorkUnit { impl WorkUnit for EstartWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { "estart" @@ -75,9 +75,10 @@ where fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { debug!(slot = self.slot, "committing estart state changes"); - let context = self.context.as_mut().ok_or_else(|| { - DomainError::Internal("estart context not loaded".into()) - })?; + let context = self + .context + .as_mut() + .ok_or_else(|| DomainError::Internal("estart context not loaded".into()))?; context.commit::(domain.state(), domain.archive(), self.slot)?; diff --git a/crates/cardano/src/ewrap/work_unit.rs b/crates/cardano/src/ewrap/work_unit.rs index 62fd22109..b63da6f1b 100644 --- a/crates/cardano/src/ewrap/work_unit.rs +++ 
b/crates/cardano/src/ewrap/work_unit.rs @@ -46,7 +46,7 @@ impl EwrapWorkUnit { impl WorkUnit for EwrapWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { "ewrap" @@ -76,9 +76,10 @@ where fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> { debug!(slot = self.slot, "committing ewrap state changes"); - let boundary = self.boundary.as_mut().ok_or_else(|| { - DomainError::Internal("ewrap boundary not loaded".into()) - })?; + let boundary = self + .boundary + .as_mut() + .ok_or_else(|| DomainError::Internal("ewrap boundary not loaded".into()))?; boundary.commit::(domain.state(), domain.archive())?; diff --git a/crates/cardano/src/genesis/mod.rs b/crates/cardano/src/genesis/mod.rs index 0c9ea3620..519936ba9 100644 --- a/crates/cardano/src/genesis/mod.rs +++ b/crates/cardano/src/genesis/mod.rs @@ -51,7 +51,11 @@ fn bootstrap_pots( } pub fn bootstrap_epoch< - D: Domain, + D: Domain< + Chain = crate::CardanoLogic, + ChainSpecificError = crate::CardanoError, + Genesis = crate::CardanoGenesis, + >, >( state: &D::State, genesis: &crate::CardanoGenesis, @@ -158,7 +162,13 @@ pub fn bootstrap_utxos< Ok(()) } -pub fn execute>( +pub fn execute< + D: Domain< + Chain = crate::CardanoLogic, + ChainSpecificError = crate::CardanoError, + Genesis = crate::CardanoGenesis, + >, +>( state: &D::State, indexes: &D::Indexes, genesis: &crate::CardanoGenesis, diff --git a/crates/cardano/src/genesis/work_unit.rs b/crates/cardano/src/genesis/work_unit.rs index 68fb38489..218df1646 100644 --- a/crates/cardano/src/genesis/work_unit.rs +++ b/crates/cardano/src/genesis/work_unit.rs @@ -24,7 +24,7 @@ impl GenesisWorkUnit { impl WorkUnit for GenesisWorkUnit where - D: Domain, + D: Domain, { fn name(&self) -> &'static str { "genesis" diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 8fc0b5be3..4fc582139 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -125,6 +125,7 @@ where Entity = CardanoEntity, EntityDelta = 
CardanoDelta, ChainSpecificError = CardanoError, + Genesis = CardanoGenesis, >, { fn name(&self) -> &'static str { @@ -335,6 +336,7 @@ impl dolos_core::ChainLogic for CardanoLogic { Entity = Self::Entity, EntityDelta = Self::Delta, ChainSpecificError = Self::ChainSpecificError, + Genesis = Self::Genesis, >, > = CardanoWorkUnit; type ChainSpecificError = CardanoError; diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index f27dd4a6a..904f7cbf5 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -498,7 +498,7 @@ pub trait ChainLogic: Sized + Send + Sync { type ChainSpecificError: std::error::Error + Send + Sync; /// The concrete work unit type produced by this chain logic. - type WorkUnit>: WorkUnit; + type WorkUnit>: WorkUnit; /// Initialize the chain logic with configuration and state. fn initialize( From 1e4e22f7481fa4756d8d6fccd6ce1eca3f5b0284 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Thu, 2 Apr 2026 14:45:59 -0300 Subject: [PATCH 49/85] fix: rejec malfored intersect hashes --- src/serve/grpc/watch.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/serve/grpc/watch.rs b/src/serve/grpc/watch.rs index eb7ce3b6a..dbb3df665 100644 --- a/src/serve/grpc/watch.rs +++ b/src/serve/grpc/watch.rs @@ -275,10 +275,14 @@ where .intersect .iter() .map(|x| { - let arr: [u8; 32] = x.hash.as_ref().try_into().unwrap_or([0u8; 32]); - ChainPoint::Specific(x.slot, dolos_core::hash::Hash::new(arr)) + let arr: [u8; 32] = x + .hash + .as_ref() + .try_into() + .map_err(|_| Status::invalid_argument("malformed intersect hash"))?; + Ok(ChainPoint::Specific(x.slot, dolos_core::hash::Hash::new(arr))) }) - .collect::>(); + .collect::, Status>>()?; let stream = ChainStream::start::(self.domain.clone(), intersect, self.cancel.clone()); From be1c545eb8380d6e29aff3f7edd714c4637d73ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Thu, 2 Apr 2026 14:50:29 -0300 Subject: [PATCH 50/85] 
fix: archive typo --- crates/core/src/archive.rs | 2 +- crates/redb3/src/archive/mod.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/core/src/archive.rs b/crates/core/src/archive.rs index 2752ee958..1244eee64 100644 --- a/crates/core/src/archive.rs +++ b/crates/core/src/archive.rs @@ -167,7 +167,7 @@ pub enum ArchiveError { NamespaceNotFound(Namespace), #[error("chain-specific error: {0}")] - ChainSpecifc(E), + ChainSpecific(E), } pub trait ArchiveWriter: Send + Sync + 'static { diff --git a/crates/redb3/src/archive/mod.rs b/crates/redb3/src/archive/mod.rs index 0f298de80..7ad3e8df9 100644 --- a/crates/redb3/src/archive/mod.rs +++ b/crates/redb3/src/archive/mod.rs @@ -65,7 +65,7 @@ impl RedbArchiveError { ArchiveError::DecodingError(e) => ArchiveError::DecodingError(e), ArchiveError::EntityDecodingError(e) => ArchiveError::EntityDecodingError(e), ArchiveError::NamespaceNotFound(ns) => ArchiveError::NamespaceNotFound(ns), - ArchiveError::ChainSpecifc(infallible) => match infallible {}, + ArchiveError::ChainSpecific(infallible) => match infallible {}, } } } From b402dbc080c2657c7b02a1699b5b1ecdcf8a1a80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Thu, 2 Apr 2026 14:53:28 -0300 Subject: [PATCH 51/85] fix: reuse of chain point to pallas --- src/bin/dolos/bootstrap/mithril.rs | 20 ++++++-------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/src/bin/dolos/bootstrap/mithril.rs b/src/bin/dolos/bootstrap/mithril.rs index 5f8d226ff..d91e4f2fe 100644 --- a/src/bin/dolos/bootstrap/mithril.rs +++ b/src/bin/dolos/bootstrap/mithril.rs @@ -164,20 +164,10 @@ fn define_starting_point( ) -> Result { use dolos_core::StateStore; - fn chain_to_pallas(c: dolos_core::ChainPoint) -> pallas::network::miniprotocols::Point { - match c { - dolos_core::ChainPoint::Origin => pallas::network::miniprotocols::Point::Origin, - dolos_core::ChainPoint::Specific(slot, hash) => { - 
pallas::network::miniprotocols::Point::Specific(slot, hash.as_slice().to_vec()) - } - dolos_core::ChainPoint::Slot(slot) => { - pallas::network::miniprotocols::Point::Specific(slot, vec![]) - } - } - } - if let Some(point) = &args.start_from { - Ok(chain_to_pallas(point.clone())) + chain_point_to_pallas(point.clone()) + .into_diagnostic() + .context("converting start-from point") } else { let cursor = state .read_cursor() @@ -185,7 +175,9 @@ fn define_starting_point( .context("reading state cursor")?; let point = cursor - .map(chain_to_pallas) + .map(|c| chain_point_to_pallas(c).into_diagnostic()) + .transpose() + .context("converting cursor to pallas point")? .unwrap_or(pallas::network::miniprotocols::Point::Origin); Ok(point) From d1feb196e4da4a0becd71cdea245407c2b3d0ce6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Sat, 4 Apr 2026 00:51:23 -0300 Subject: [PATCH 52/85] feat: introduce CardanoDomain to cleanup code a bit --- crates/cardano/src/estart/commit.rs | 12 +++++----- crates/cardano/src/estart/loading.rs | 16 +++++-------- crates/cardano/src/estart/work_unit.rs | 11 +++++---- crates/cardano/src/ewrap/commit.rs | 12 +++++----- crates/cardano/src/ewrap/loading.rs | 22 +++++++++--------- crates/cardano/src/ewrap/work_unit.rs | 11 +++++---- crates/cardano/src/genesis/mod.rs | 30 +++++++------------------ crates/cardano/src/genesis/staking.rs | 8 +++---- crates/cardano/src/genesis/work_unit.rs | 11 +++++---- crates/cardano/src/lib.rs | 19 ++++++++++++++++ crates/cardano/src/roll/batch.rs | 13 ++++++----- crates/cardano/src/roll/mod.rs | 8 +++---- crates/cardano/src/roll/work_unit.rs | 1 + 13 files changed, 89 insertions(+), 85 deletions(-) diff --git a/crates/cardano/src/estart/commit.rs b/crates/cardano/src/estart/commit.rs index 0fb0dac1a..6184ae8e6 100644 --- a/crates/cardano/src/estart/commit.rs +++ b/crates/cardano/src/estart/commit.rs @@ -5,9 +5,11 @@ //! in memory. 
use dolos_core::{ - ArchiveStore, ArchiveWriter, BlockSlot, BrokenInvariant, ChainError, ChainPoint, Domain, - Entity, EntityDelta as _, EntityKey, LogKey, NsKey, StateStore, StateWriter, TemporalKey, + ArchiveStore, ArchiveWriter, BlockSlot, BrokenInvariant, ChainError, ChainPoint, Entity, + EntityDelta as _, EntityKey, LogKey, NsKey, StateStore, StateWriter, TemporalKey, }; + +use crate::CardanoDomain; use tracing::{debug, instrument, trace, warn}; use crate::{ @@ -77,7 +79,7 @@ impl super::WorkContext { writer: &::Writer, ) -> Result<(), ChainError> where - D: Domain, + D: CardanoDomain, E: Entity + FixedNamespace + Into, { let records = state.iter_entities_typed::(E::NS, None)?; @@ -109,9 +111,7 @@ impl super::WorkContext { } #[instrument(skip_all)] - pub fn commit< - D: Domain, - >( + pub fn commit( &mut self, state: &D::State, archive: &D::Archive, diff --git a/crates/cardano/src/estart/loading.rs b/crates/cardano/src/estart/loading.rs index 9fbdad8e5..0bedab025 100644 --- a/crates/cardano/src/estart/loading.rs +++ b/crates/cardano/src/estart/loading.rs @@ -1,6 +1,8 @@ use std::sync::Arc; -use dolos_core::{ChainError, Domain, StateStore, TxoRef}; +use dolos_core::{ChainError, StateStore, TxoRef}; + +use crate::CardanoDomain; use crate::{ estart::BoundaryVisitor, load_era_summary, roll::WorkDeltas, AccountState, DRepState, @@ -8,9 +10,7 @@ use crate::{ }; impl super::WorkContext { - pub fn compute_deltas< - D: Domain, - >( + pub fn compute_deltas( &mut self, state: &D::State, ) -> Result<(), ChainError> { @@ -62,9 +62,7 @@ impl super::WorkContext { /// Compute the value of unredeemed AVVM UTxOs at the Shelley→Allegra /// boundary. These UTxOs are removed from the UTxO set and their value /// returned to reserves, matching the Haskell ledger's `translateEra`. 
- fn compute_avvm_reclamation< - D: Domain, - >( + fn compute_avvm_reclamation( state: &D::State, genesis: &crate::CardanoGenesis, ) -> Result> { @@ -98,9 +96,7 @@ impl super::WorkContext { Ok(total) } - pub fn load< - D: Domain, - >( + pub fn load( state: &D::State, genesis: Arc, ) -> Result> { diff --git a/crates/cardano/src/estart/work_unit.rs b/crates/cardano/src/estart/work_unit.rs index 2546aea0e..148803b49 100644 --- a/crates/cardano/src/estart/work_unit.rs +++ b/crates/cardano/src/estart/work_unit.rs @@ -9,10 +9,12 @@ use std::sync::Arc; -use dolos_core::{config::CardanoConfig, BlockSlot, Domain, DomainError, WorkUnit}; +use dolos_core::{config::CardanoConfig, BlockSlot, DomainError, WorkUnit}; + +use crate::CardanoDomain; use tracing::{debug, info}; -use crate::{CardanoError, CardanoGenesis, CardanoLogic}; +use crate::CardanoGenesis; use super::WorkContext; @@ -44,10 +46,7 @@ impl EstartWorkUnit { } } -impl WorkUnit for EstartWorkUnit -where - D: Domain, -{ +impl WorkUnit for EstartWorkUnit { fn name(&self) -> &'static str { "estart" } diff --git a/crates/cardano/src/ewrap/commit.rs b/crates/cardano/src/ewrap/commit.rs index 2b1893827..10e223aee 100644 --- a/crates/cardano/src/ewrap/commit.rs +++ b/crates/cardano/src/ewrap/commit.rs @@ -5,9 +5,11 @@ //! in memory. 
use dolos_core::{ - ArchiveStore, ArchiveWriter, ChainError, ChainPoint, Domain, Entity, EntityDelta as _, LogKey, - NsKey, StateStore, StateWriter, TemporalKey, + ArchiveStore, ArchiveWriter, ChainError, ChainPoint, Entity, EntityDelta as _, LogKey, NsKey, + StateStore, StateWriter, TemporalKey, }; + +use crate::CardanoDomain; use tracing::{debug, instrument, trace, warn}; use crate::{ @@ -26,7 +28,7 @@ impl BoundaryWork { writer: &::Writer, ) -> Result<(), ChainError> where - D: Domain, + D: CardanoDomain, E: Entity + FixedNamespace + Into, { let records = state.iter_entities_typed::(E::NS, None)?; @@ -58,9 +60,7 @@ impl BoundaryWork { } #[instrument(skip_all)] - pub fn commit< - D: Domain, - >( + pub fn commit( &mut self, state: &D::State, archive: &D::Archive, diff --git a/crates/cardano/src/ewrap/loading.rs b/crates/cardano/src/ewrap/loading.rs index a4ee79b76..2399a7c87 100644 --- a/crates/cardano/src/ewrap/loading.rs +++ b/crates/cardano/src/ewrap/loading.rs @@ -1,6 +1,8 @@ use std::{collections::HashMap, sync::Arc}; -use dolos_core::{BlockSlot, ChainError, Domain, StateStore, TxOrder}; +use dolos_core::{BlockSlot, ChainError, StateStore, TxOrder}; + +use crate::CardanoDomain; use pallas::{codec::minicbor, ledger::primitives::StakeCredential}; use crate::{ @@ -25,7 +27,7 @@ impl BoundaryWork { } fn load_pool_reward_account< - D: Domain, + D: CardanoDomain, >( &self, state: &D::State, @@ -54,7 +56,7 @@ impl BoundaryWork { } fn load_pool_data< - D: Domain, + D: CardanoDomain, >( &mut self, state: &D::State, @@ -123,7 +125,7 @@ impl BoundaryWork { } fn load_drep_data< - D: Domain, + D: CardanoDomain, >( &mut self, state: &D::State, @@ -147,7 +149,7 @@ impl BoundaryWork { } fn load_proposal_reward_account< - D: Domain, + D: CardanoDomain, >( &self, state: &D::State, @@ -165,7 +167,7 @@ impl BoundaryWork { } fn load_proposal_data< - D: Domain, + D: CardanoDomain, >( &mut self, state: &D::State, @@ -196,7 +198,7 @@ impl BoundaryWork { /// Process pending MIRs: 
check registration status and apply to registered accounts. /// MIRs to unregistered accounts stay in their source pot (no transfer). fn process_pending_mirs< - D: Domain, + D: CardanoDomain, >( &mut self, state: &D::State, @@ -278,7 +280,7 @@ impl BoundaryWork { } pub fn compute_deltas< - D: Domain, + D: CardanoDomain, >( &mut self, state: &D::State, @@ -387,7 +389,7 @@ impl BoundaryWork { /// Load pending rewards from state store (persisted by RUPD). fn load_pending_rewards< - D: Domain, + D: CardanoDomain, >( state: &D::State, incentives: EpochIncentives, @@ -432,7 +434,7 @@ impl BoundaryWork { } pub fn load< - D: Domain, + D: CardanoDomain, >( state: &D::State, genesis: Arc, diff --git a/crates/cardano/src/ewrap/work_unit.rs b/crates/cardano/src/ewrap/work_unit.rs index b63da6f1b..8abfa7db6 100644 --- a/crates/cardano/src/ewrap/work_unit.rs +++ b/crates/cardano/src/ewrap/work_unit.rs @@ -8,10 +8,12 @@ use std::sync::Arc; -use dolos_core::{config::CardanoConfig, BlockSlot, Domain, DomainError, WorkUnit}; +use dolos_core::{config::CardanoConfig, BlockSlot, DomainError, WorkUnit}; + +use crate::CardanoDomain; use tracing::{debug, info}; -use crate::{CardanoError, CardanoGenesis, CardanoLogic}; +use crate::CardanoGenesis; use super::BoundaryWork; @@ -44,10 +46,7 @@ impl EwrapWorkUnit { } } -impl WorkUnit for EwrapWorkUnit -where - D: Domain, -{ +impl WorkUnit for EwrapWorkUnit { fn name(&self) -> &'static str { "ewrap" } diff --git a/crates/cardano/src/genesis/mod.rs b/crates/cardano/src/genesis/mod.rs index 519936ba9..4d4dd7075 100644 --- a/crates/cardano/src/genesis/mod.rs +++ b/crates/cardano/src/genesis/mod.rs @@ -1,8 +1,10 @@ use dolos_core::{ - config::CardanoConfig, ChainError, ChainPoint, Domain, EntityKey, IndexStore as _, - IndexWriter as _, StateStore as _, StateWriter as _, + config::CardanoConfig, ChainError, ChainPoint, EntityKey, IndexStore as _, IndexWriter as _, + StateStore as _, StateWriter as _, }; +use crate::CardanoDomain; + use crate::{ 
indexes::index_delta_from_utxo_delta, pots::Pots, utils::nonce_stability_window, EpochState, EpochValue, EraBoundary, EraSummary, Lovelace, Nonces, PParamsSet, RollingStats, @@ -50,13 +52,7 @@ fn bootstrap_pots( }) } -pub fn bootstrap_epoch< - D: Domain< - Chain = crate::CardanoLogic, - ChainSpecificError = crate::CardanoError, - Genesis = crate::CardanoGenesis, - >, ->( +pub fn bootstrap_epoch( state: &D::State, genesis: &crate::CardanoGenesis, ) -> Result> { @@ -97,9 +93,7 @@ pub fn bootstrap_epoch< Ok(epoch) } -pub fn bootstrap_eras< - D: Domain, ->( +pub fn bootstrap_eras( state: &D::State, epoch: &EpochState, ) -> Result<(), ChainError> { @@ -131,9 +125,7 @@ pub fn bootstrap_eras< Ok(()) } -pub fn bootstrap_utxos< - D: Domain, ->( +pub fn bootstrap_utxos( state: &D::State, indexes: &D::Indexes, genesis: &crate::CardanoGenesis, @@ -162,13 +154,7 @@ pub fn bootstrap_utxos< Ok(()) } -pub fn execute< - D: Domain< - Chain = crate::CardanoLogic, - ChainSpecificError = crate::CardanoError, - Genesis = crate::CardanoGenesis, - >, ->( +pub fn execute( state: &D::State, indexes: &D::Indexes, genesis: &crate::CardanoGenesis, diff --git a/crates/cardano/src/genesis/staking.rs b/crates/cardano/src/genesis/staking.rs index 2ba8ef598..e5f775368 100644 --- a/crates/cardano/src/genesis/staking.rs +++ b/crates/cardano/src/genesis/staking.rs @@ -1,4 +1,6 @@ -use dolos_core::{ChainError, Domain, EntityKey, StateStore as _, StateWriter as _}; +use dolos_core::{ChainError, EntityKey, StateStore as _, StateWriter as _}; + +use crate::CardanoDomain; use pallas::codec::minicbor; use pallas::ledger::addresses::{Address, Network, StakeAddress, StakePayload}; use pallas::ledger::primitives::StakeCredential; @@ -111,9 +113,7 @@ fn parse_delegation(account: &str, pool: &str, genesis: &crate::CardanoGenesis) } } -pub fn bootstrap< - D: Domain, ->( +pub fn bootstrap( state: &D::State, genesis: &crate::CardanoGenesis, ) -> Result<(), ChainError> { diff --git 
a/crates/cardano/src/genesis/work_unit.rs b/crates/cardano/src/genesis/work_unit.rs index 218df1646..3e9236d5a 100644 --- a/crates/cardano/src/genesis/work_unit.rs +++ b/crates/cardano/src/genesis/work_unit.rs @@ -4,10 +4,12 @@ use std::sync::Arc; -use dolos_core::{config::CardanoConfig, ChainPoint, Domain, DomainError, WalStore as _, WorkUnit}; +use dolos_core::{config::CardanoConfig, ChainPoint, DomainError, WalStore as _, WorkUnit}; + +use crate::CardanoDomain; use tracing::{debug, info}; -use crate::{CardanoError, CardanoGenesis, CardanoLogic}; +use crate::CardanoGenesis; /// Work unit for bootstrapping the chain from genesis. pub struct GenesisWorkUnit { @@ -22,10 +24,7 @@ impl GenesisWorkUnit { } } -impl WorkUnit for GenesisWorkUnit -where - D: Domain, -{ +impl WorkUnit for GenesisWorkUnit { fn name(&self) -> &'static str { "genesis" } diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 4fc582139..9355c96ec 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -54,6 +54,25 @@ pub use eras::*; pub use model::*; pub use utils::{mutable_slots, network_from_genesis}; +/// Trait alias for [`dolos_core::Domain`] implementations backed by Cardano chain logic. +/// +/// Equivalent to `Domain`, +/// but avoids repeating all three associated-type constraints at every call site. +/// Use this in place of the verbose bound anywhere inside the `cardano` crate. 
+pub trait CardanoDomain: dolos_core::Domain< + Chain = CardanoLogic, + ChainSpecificError = CardanoError, + Genesis = CardanoGenesis, +> {} + +impl CardanoDomain for T where + T: dolos_core::Domain< + Chain = CardanoLogic, + ChainSpecificError = CardanoError, + Genesis = CardanoGenesis, + > +{} + pub type Block<'a> = MultiEraBlock<'a>; pub type UtxoBody<'a> = MultiEraOutput<'a>; diff --git a/crates/cardano/src/roll/batch.rs b/crates/cardano/src/roll/batch.rs index 920381de1..8920b9fe8 100644 --- a/crates/cardano/src/roll/batch.rs +++ b/crates/cardano/src/roll/batch.rs @@ -15,7 +15,10 @@ use dolos_core::{ UtxoSetDelta, WalStore as _, }; use crate::indexes::CardanoIndexDeltaBuilder; -use crate::{CardanoDelta, CardanoEntity, CardanoLogic, OwnedMultiEraBlock, OwnedMultiEraOutput}; +use crate::{ + CardanoDelta, CardanoDomain, CardanoEntity, CardanoLogic, OwnedMultiEraBlock, + OwnedMultiEraOutput, +}; /// Container for entity deltas computed during block processing. #[derive(Debug, Default)] @@ -145,7 +148,7 @@ impl WorkBatch { pub fn load_utxos(&mut self, domain: &D) -> Result<(), DomainError> where - D: Domain, + D: CardanoDomain, { // TODO: paralelize in chunks @@ -268,7 +271,7 @@ impl WorkBatch { pub fn commit_state(&mut self, domain: &D) -> Result<(), DomainError> where - D: Domain, + D: CardanoDomain, { let writer = domain.state().start_writer()?; @@ -294,7 +297,7 @@ impl WorkBatch { pub fn commit_archive(&mut self, domain: &D) -> Result<(), DomainError> where - D: Domain, + D: CardanoDomain, { let writer = domain.archive().start_writer()?; @@ -367,7 +370,7 @@ impl WorkBatch { pub fn commit_indexes(&mut self, domain: &D) -> Result<(), DomainError> where - D: Domain, + D: CardanoDomain, { let delta = self.build_index_delta(); diff --git a/crates/cardano/src/roll/mod.rs b/crates/cardano/src/roll/mod.rs index 5a71d6ce5..94cda56fb 100644 --- a/crates/cardano/src/roll/mod.rs +++ b/crates/cardano/src/roll/mod.rs @@ -1,6 +1,8 @@ use std::{collections::HashMap, 
sync::Arc}; -use dolos_core::{ChainError, Domain, InvariantViolation, StateError, TxOrder, TxoRef}; +use dolos_core::{ChainError, InvariantViolation, StateError, TxOrder, TxoRef}; + +use crate::CardanoDomain; use pallas::{ codec::utils::KeepRaw, ledger::{ @@ -537,9 +539,7 @@ impl<'a> DeltaBuilder<'a> { } #[instrument(name = "roll", skip_all)] -pub fn compute_delta< - D: Domain, ->( +pub fn compute_delta( genesis: Arc, cache: &Cache, state: &D::State, diff --git a/crates/cardano/src/roll/work_unit.rs b/crates/cardano/src/roll/work_unit.rs index e7530b5dc..6971e919c 100644 --- a/crates/cardano/src/roll/work_unit.rs +++ b/crates/cardano/src/roll/work_unit.rs @@ -50,6 +50,7 @@ where Entity = CardanoEntity, EntityDelta = CardanoDelta, ChainSpecificError = CardanoError, + Genesis = CardanoGenesis, >, { fn name(&self) -> &'static str { From 36412301800d8e5368f3a6dbe85f8c194f87a2fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Sat, 4 Apr 2026 00:58:39 -0300 Subject: [PATCH 53/85] fix: use stack allocated mem for processing inputs --- crates/cardano/src/indexes/delta.rs | 5 +++-- crates/cardano/src/indexes/query.rs | 7 ++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/cardano/src/indexes/delta.rs b/crates/cardano/src/indexes/delta.rs index ac12e1d6d..3e02d665d 100644 --- a/crates/cardano/src/indexes/delta.rs +++ b/crates/cardano/src/indexes/delta.rs @@ -202,8 +202,9 @@ impl CardanoIndexDeltaBuilder { /// Add a spent TxO reference to the current block. 
pub fn add_spent_input(&mut self, input: &MultiEraInput) { let txo_ref = crate::txo_ref_from_input(input); - let mut bytes = txo_ref.0.as_slice().to_vec(); - bytes.extend_from_slice(txo_ref.1.to_be_bytes().as_slice()); + let mut bytes = [0u8; 36]; + bytes[0..32].copy_from_slice(txo_ref.0.as_slice()); + bytes[32..36].copy_from_slice(&txo_ref.1.to_be_bytes()); self.current_block() .tags .push(Tag::new(archive::SPENT_TXO, bytes)); diff --git a/crates/cardano/src/indexes/query.rs b/crates/cardano/src/indexes/query.rs index 23fdaa0c7..8eab1ee1b 100644 --- a/crates/cardano/src/indexes/query.rs +++ b/crates/cardano/src/indexes/query.rs @@ -539,9 +539,10 @@ where for tx in block.txs().iter() { for input in tx.inputs() { let txo_ref = crate::txo_ref_from_input(&input); - let mut bytes = txo_ref.0.as_slice().to_vec(); - bytes.extend_from_slice(txo_ref.1.to_be_bytes().as_slice()); - if bytes.as_slice() == spent.as_slice() { + let mut bytes = [0u8; 36]; + bytes[0..32].copy_from_slice(txo_ref.0.as_slice()); + bytes[32..36].copy_from_slice(&txo_ref.1.to_be_bytes()); + if bytes == spent.as_slice() { return Some(crate::pallas_hash_to_core(tx.hash())); } } From c91d32a169167d4261708ecdc463705e6c8e4b6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 10:57:54 -0300 Subject: [PATCH 54/85] fix: use CardanoDomain on roll workunit --- crates/cardano/src/roll/work_unit.rs | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/crates/cardano/src/roll/work_unit.rs b/crates/cardano/src/roll/work_unit.rs index 6971e919c..d738f89e5 100644 --- a/crates/cardano/src/roll/work_unit.rs +++ b/crates/cardano/src/roll/work_unit.rs @@ -5,11 +5,11 @@ use std::sync::Arc; -use dolos_core::{Domain, DomainError, MempoolUpdate, RawBlock, TipEvent, WorkUnit}; +use dolos_core::{DomainError, MempoolUpdate, RawBlock, TipEvent, WorkUnit}; use tracing::{debug, info}; use crate::roll::batch::WorkBatch; -use crate::{roll, Cache, CardanoDelta, 
CardanoEntity, CardanoError, CardanoGenesis, CardanoLogic}; +use crate::{roll, Cache, CardanoDelta, CardanoDomain, CardanoEntity, CardanoGenesis}; /// Work unit for processing a batch of blocks ("rolling" the chain forward). pub struct RollWorkUnit { @@ -45,13 +45,7 @@ impl RollWorkUnit { impl WorkUnit for RollWorkUnit where - D: Domain< - Chain = CardanoLogic, - Entity = CardanoEntity, - EntityDelta = CardanoDelta, - ChainSpecificError = CardanoError, - Genesis = CardanoGenesis, - >, + D: CardanoDomain, { fn name(&self) -> &'static str { "roll" From 82b82880aa715083abdd84d77736f30643cbc0af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 10:59:41 -0300 Subject: [PATCH 55/85] fix: remove TODO --- crates/cardano/src/lib.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 9355c96ec..c1ad14c9b 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -59,11 +59,14 @@ pub use utils::{mutable_slots, network_from_genesis}; /// Equivalent to `Domain`, /// but avoids repeating all three associated-type constraints at every call site. /// Use this in place of the verbose bound anywhere inside the `cardano` crate. 
-pub trait CardanoDomain: dolos_core::Domain< +pub trait CardanoDomain: + dolos_core::Domain< Chain = CardanoLogic, ChainSpecificError = CardanoError, Genesis = CardanoGenesis, -> {} +> +{ +} impl CardanoDomain for T where T: dolos_core::Domain< @@ -71,7 +74,8 @@ impl CardanoDomain for T where ChainSpecificError = CardanoError, Genesis = CardanoGenesis, > -{} +{ +} pub type Block<'a> = MultiEraBlock<'a>; @@ -518,7 +522,6 @@ impl dolos_core::ChainLogic for CardanoLogic { let tx_hashes = blockv .txs() .iter() - // TODO: fix this pallas_hash_to_core most likely .map(|tx| pallas_hash_to_core(tx.hash())) .collect(); From fa846fc53c581dda57d5d08ea40d9befcc795fc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 11:06:23 -0300 Subject: [PATCH 56/85] fix: handle errors gracefully --- crates/cardano/src/estart/loading.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/cardano/src/estart/loading.rs b/crates/cardano/src/estart/loading.rs index 0bedab025..48232015f 100644 --- a/crates/cardano/src/estart/loading.rs +++ b/crates/cardano/src/estart/loading.rs @@ -86,14 +86,14 @@ impl super::WorkContext { .unwrap_or(0) }) .sum(); - - tracing::debug!( - remaining_count = remaining.len(), - total_avvm = total, - "AVVM reclamation at Shelley→Allegra boundary" - ); - - Ok(total) + let total = remaining.values().try_fold(0u64, |acc, utxo| { + let coin = crate::multi_era_output_from_era_cbor(utxo.as_ref()) + .map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Traverse(e)))? 
+ .value() + .coin(); + + Ok(acc + coin) + })?; } pub fn load( From 6e2cb739d8445a4f3da1bb644ddf87347c29cbc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 11:24:11 -0300 Subject: [PATCH 57/85] fix: simplified txo_ref bytes representation --- crates/cardano/src/indexes/delta.rs | 5 +---- crates/cardano/src/indexes/query.rs | 5 +---- crates/core/src/lib.rs | 10 ++++++++++ crates/redb3/src/archive/mod.rs | 9 ++++----- 4 files changed, 16 insertions(+), 13 deletions(-) diff --git a/crates/cardano/src/indexes/delta.rs b/crates/cardano/src/indexes/delta.rs index 3e02d665d..7ded7ec3e 100644 --- a/crates/cardano/src/indexes/delta.rs +++ b/crates/cardano/src/indexes/delta.rs @@ -201,10 +201,7 @@ impl CardanoIndexDeltaBuilder { /// Add a spent TxO reference to the current block. pub fn add_spent_input(&mut self, input: &MultiEraInput) { - let txo_ref = crate::txo_ref_from_input(input); - let mut bytes = [0u8; 36]; - bytes[0..32].copy_from_slice(txo_ref.0.as_slice()); - bytes[32..36].copy_from_slice(&txo_ref.1.to_be_bytes()); + let bytes = crate::txo_ref_from_input(input).to_index_bytes(); self.current_block() .tags .push(Tag::new(archive::SPENT_TXO, bytes)); diff --git a/crates/cardano/src/indexes/query.rs b/crates/cardano/src/indexes/query.rs index 8eab1ee1b..ca67a7a63 100644 --- a/crates/cardano/src/indexes/query.rs +++ b/crates/cardano/src/indexes/query.rs @@ -538,10 +538,7 @@ where |block| { for tx in block.txs().iter() { for input in tx.inputs() { - let txo_ref = crate::txo_ref_from_input(&input); - let mut bytes = [0u8; 36]; - bytes[0..32].copy_from_slice(txo_ref.0.as_slice()); - bytes[32..36].copy_from_slice(&txo_ref.1.to_be_bytes()); + let bytes = crate::txo_ref_from_input(&input).to_index_bytes(); if bytes == spent.as_slice() { return Some(crate::pallas_hash_to_core(tx.hash())); } diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 904f7cbf5..49c134598 100644 --- a/crates/core/src/lib.rs +++ 
b/crates/core/src/lib.rs @@ -206,6 +206,16 @@ impl From for (TxHash, TxoIdx) { } } +impl TxoRef { + /// Serialize to the 36-byte index key format: `[tx_hash (32 bytes) || output_index (4 bytes, big-endian)]`. + pub fn to_index_bytes(&self) -> [u8; 36] { + let mut bytes = [0u8; 36]; + bytes[0..32].copy_from_slice(self.0.as_slice()); + bytes[32..36].copy_from_slice(&self.1.to_be_bytes()); + bytes + } +} + impl Display for TxoRef { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}#{}", self.0, self.1) diff --git a/crates/redb3/src/archive/mod.rs b/crates/redb3/src/archive/mod.rs index 7ad3e8df9..bc5d4815a 100644 --- a/crates/redb3/src/archive/mod.rs +++ b/crates/redb3/src/archive/mod.rs @@ -12,7 +12,7 @@ use std::convert::Infallible; use dolos_core::{ config::RedbArchiveConfig, ArchiveError, BlockBody, BlockSlot, ChainPoint, EntityValue, - EraCbor, LogKey, Namespace, RawBlock, StateSchema, TxHash, TxOrder, + EraCbor, LogKey, Namespace, RawBlock, StateSchema, TxHash, TxOrder, TxoIdx, TxoRef, }; use ::redb::Durability; @@ -662,10 +662,9 @@ impl ArchiveStore { for tx in block.txs().iter() { for input in tx.inputs() { - // TODO: dudoso, ask Santiago - let mut bytes = input.hash().to_vec(); - bytes.extend_from_slice(u32::to_be_bytes(input.index() as u32).as_slice()); - if bytes.as_slice() == spent_txo { + let hash_bytes: [u8; 32] = **input.hash(); + let key = TxoRef(TxHash::from(hash_bytes), input.index() as TxoIdx).to_index_bytes(); + if key.as_slice() == spent_txo { let hash_bytes: [u8; 32] = *tx.hash(); return Ok(Some(dolos_core::hash::Hash::from(hash_bytes))); } From 43ab83733e620891ca4c833a1bc5852982f53b5e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 11:30:39 -0300 Subject: [PATCH 58/85] fix: type issue on total output on era start --- crates/cardano/src/estart/loading.rs | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git 
a/crates/cardano/src/estart/loading.rs b/crates/cardano/src/estart/loading.rs index 48232015f..f25487faa 100644 --- a/crates/cardano/src/estart/loading.rs +++ b/crates/cardano/src/estart/loading.rs @@ -78,22 +78,16 @@ impl super::WorkContext { let remaining = state.get_utxos(refs)?; // Sum the remaining values - let total: u64 = remaining - .values() - .map(|utxo| { - crate::multi_era_output_from_era_cbor(utxo.as_ref()) - .map(|o| o.value().coin()) - .unwrap_or(0) - }) - .sum(); - let total = remaining.values().try_fold(0u64, |acc, utxo| { - let coin = crate::multi_era_output_from_era_cbor(utxo.as_ref()) - .map_err(|e| ChainError::ChainSpecific(crate::CardanoError::Traverse(e)))? + let mut total = 0u64; + + for utxo in remaining.values() { + total += crate::multi_era_output_from_era_cbor(utxo.as_ref()) + .map_err(ChainError::ChainSpecific)? .value() .coin(); + } - Ok(acc + coin) - })?; + Ok(total) } pub fn load( From 34746e8f785b36c1da241785898c1b384097a1d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 11:33:35 -0300 Subject: [PATCH 59/85] fix: add static to ArchiveStore error --- crates/core/src/archive.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/core/src/archive.rs b/crates/core/src/archive.rs index 1244eee64..14beb0778 100644 --- a/crates/core/src/archive.rs +++ b/crates/core/src/archive.rs @@ -213,7 +213,7 @@ pub trait Skippable { } pub trait ArchiveStore: Clone + Send + Sync + 'static { - type ChainSpecificError: std::error::Error + Send + Sync; + type ChainSpecificError: std::error::Error + Send + Sync + 'static; type BlockIter<'a>: Iterator + DoubleEndedIterator + Skippable From 469cc65d945d6703e6cf12d318a0f7704b7addd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 11:46:21 -0300 Subject: [PATCH 60/85] fix: handle decoding failures on tx_produces and tx_consumes --- crates/cardano/src/lib.rs | 22 +++++++++++----------- crates/core/src/lib.rs | 4 
++-- crates/core/src/mempool.rs | 29 +++++++++++++++++++++++++---- 3 files changed, 38 insertions(+), 17 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index c1ad14c9b..0b365dd32 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -604,24 +604,24 @@ impl dolos_core::ChainLogic for CardanoLogic { validate::validate_tx(cbor, utxos, tip, genesis) } - fn tx_produced_utxos(era_body: &EraCbor) -> Vec<(dolos_core::TxoRef, EraCbor)> { - let Ok(tx) = multi_era_tx_from_era_cbor(era_body) else { - return vec![]; - }; - tx.produces() + fn tx_produced_utxos( + era_body: &EraCbor, + ) -> Result, CardanoError> { + let tx = multi_era_tx_from_era_cbor(era_body)?; + Ok(tx + .produces() .iter() .map(|(idx, output)| { let txoref = txo_ref_from_pallas(tx.hash(), *idx as u32); let body = era_cbor_from_output(output); (txoref, body) }) - .collect() + .collect()) } - fn tx_consumed_ref(era_body: &EraCbor) -> Vec { - let Ok(tx) = multi_era_tx_from_era_cbor(era_body) else { - return vec![]; - }; - tx.consumes().iter().map(txo_ref_from_input).collect() + + fn tx_consumed_ref(era_body: &EraCbor) -> Result, CardanoError> { + let tx = multi_era_tx_from_era_cbor(era_body)?; + Ok(tx.consumes().iter().map(txo_ref_from_input).collect()) } fn find_tx_in_block( block: &[u8], diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 49c134598..b38cb59b1 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -588,8 +588,8 @@ pub trait ChainLogic: Sized + Send + Sync { tip.saturating_sub(Self::mutable_slots(domain)) } - fn tx_produced_utxos(era_body: &EraCbor) -> Vec<(TxoRef, EraCbor)>; - fn tx_consumed_ref(era_body: &EraCbor) -> Vec; + fn tx_produced_utxos(era_body: &EraCbor) -> Result, Self::ChainSpecificError>; + fn tx_consumed_ref(era_body: &EraCbor) -> Result, Self::ChainSpecificError>; fn find_tx_in_block( block: &[u8], diff --git a/crates/core/src/mempool.rs b/crates/core/src/mempool.rs index e50516ddf..325d32280 100644 
--- a/crates/core/src/mempool.rs +++ b/crates/core/src/mempool.rs @@ -5,7 +5,7 @@ use crate::TagDimension; use futures_core::Stream; use std::pin::Pin; -use tracing::debug; +use tracing::{debug, warn}; pub type Report = Vec; @@ -302,7 +302,14 @@ where for mtx in all_txs.into_iter() { debug!(mtx = %mtx.hash, "scanning mempool tx"); - for (txoref, utxo) in D::Chain::tx_produced_utxos(&mtx.payload) { + let utxos = match D::Chain::tx_produced_utxos(&mtx.payload) { + Ok(utxos) => utxos, + Err(e) => { + warn!(tx = %mtx.hash, error = %e, "failed to decode mempool tx outputs"); + continue; + } + }; + for (txoref, utxo) in utxos { if predicate(&utxo) { debug!(txoref = %txoref, "mempool utxo matches predicate"); refs.insert(txoref); @@ -321,7 +328,14 @@ fn exclude_inflight_stxis(refs: &mut HashSet, mempool: &D::Me for mtx in all_txs { debug!(tx = %mtx.hash, "checking inflight tx"); - for txoref in D::Chain::tx_consumed_ref(&mtx.payload) { + let consumed = match D::Chain::tx_consumed_ref(&mtx.payload) { + Ok(consumed) => consumed, + Err(e) => { + warn!(tx = %mtx.hash, error = %e, "failed to decode mempool tx inputs"); + continue; + } + }; + for txoref in consumed { if refs.remove(&txoref) { debug!(txoref = %txoref, "excluded stxi"); } @@ -337,7 +351,14 @@ fn select_mempool_utxos(refs: &mut HashSet, mempool: &D::Memp for mtx in all_txs { debug!(tx = %mtx.hash, "checking mempool tx"); - for (txoref, era_cbor) in D::Chain::tx_produced_utxos(&mtx.payload) { + let utxos = match D::Chain::tx_produced_utxos(&mtx.payload) { + Ok(utxos) => utxos, + Err(e) => { + warn!(tx = %mtx.hash, error = %e, "failed to decode mempool tx outputs"); + continue; + } + }; + for (txoref, era_cbor) in utxos { debug!(txoref = %txoref, "checking mempool utxo"); if refs.contains(&txoref) { debug!(txoref = %txoref, "selected utxo available in mempool tx"); From 9de4a35da2bd4b755c202c200bd4258eb98a7dec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 11:46:34 -0300 
Subject: [PATCH 61/85] fix: handle network id missing on trp from genesis --- crates/trp/src/compiler.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/trp/src/compiler.rs b/crates/trp/src/compiler.rs index 1d05a64a0..30f940ed4 100644 --- a/crates/trp/src/compiler.rs +++ b/crates/trp/src/compiler.rs @@ -35,7 +35,8 @@ fn build_pparams< >( domain: &D, ) -> Result { - let network = network_id_from_genesis(domain.genesis().as_ref()).unwrap(); + let network = network_id_from_genesis(domain.genesis().as_ref()) + .ok_or_else(|| Error::InternalError("unrecognized network ID in genesis".to_string()))?; let pparams = dolos_cardano::load_effective_pparams::(domain.state())?; From 7874e4ba945f93efa35d9dfbf505ce4242d3c233 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 11:48:22 -0300 Subject: [PATCH 62/85] fix: use deduplicated refs for wal shortcircuite --- src/adapters/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/adapters/mod.rs b/src/adapters/mod.rs index 7515e228b..2d6ce0075 100644 --- a/src/adapters/mod.rs +++ b/src/adapters/mod.rs @@ -91,7 +91,7 @@ impl DomainAdapter { } } - if result.len() == refs.len() { + if result.len() == refs_set.len() { break; } } From a52a1391afd4837e3e7b84fd5c1c457258435c03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 12:28:27 -0300 Subject: [PATCH 63/85] fix: add evaluation report as associated type --- crates/core/src/archive.rs | 2 +- crates/core/src/lib.rs | 8 ++++++++ crates/core/src/mempool.rs | 14 ++------------ crates/core/src/submit.rs | 15 +++++++++++++++ 4 files changed, 26 insertions(+), 13 deletions(-) diff --git a/crates/core/src/archive.rs b/crates/core/src/archive.rs index 14beb0778..6e5a998bf 100644 --- a/crates/core/src/archive.rs +++ b/crates/core/src/archive.rs @@ -171,7 +171,7 @@ pub enum ArchiveError { } pub trait ArchiveWriter: Send + Sync + 'static { - type ChainSpecificError: 
std::error::Error + Send + Sync; + type ChainSpecificError: std::error::Error + Send + Sync + 'static; fn apply( &self, point: &ChainPoint, diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index b38cb59b1..4be904cfe 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -604,6 +604,14 @@ pub trait ChainLogic: Sized + Send + Sync { tip: Option, genesis: &Self::Genesis, ) -> Result>; + + /// Evaluate a transaction's scripts and return execution unit reports. + type EvalReport: Send + Sync; + + fn eval_tx>( + cbor: &[u8], + utxos: &MempoolAwareUtxoStore, + ) -> Result>; } #[derive(Debug, Error)] diff --git a/crates/core/src/mempool.rs b/crates/core/src/mempool.rs index 325d32280..bb40df231 100644 --- a/crates/core/src/mempool.rs +++ b/crates/core/src/mempool.rs @@ -1,14 +1,10 @@ use super::*; use crate::TagDimension; -//pub use pallas::ledger::validate::phase2::EvalReport; - use futures_core::Stream; use std::pin::Pin; use tracing::{debug, warn}; -pub type Report = Vec; - #[derive(Debug)] pub struct MempoolTx { pub hash: TxHash, @@ -17,10 +13,6 @@ pub struct MempoolTx { pub confirmations: u32, pub non_confirmations: u32, pub confirmed_at: Option, - - // this might be empty if the tx is cloned - // TODO: notify santiago there is an extra serialize/deserialize on mempool ops - pub report: Option, } impl PartialEq for MempoolTx { @@ -40,13 +32,12 @@ impl Clone for MempoolTx { confirmations: self.confirmations, non_confirmations: self.non_confirmations, confirmed_at: self.confirmed_at.clone(), - report: None, } } } impl MempoolTx { - pub fn new(hash: TxHash, payload: EraCbor, report: Report) -> Self { + pub fn new(hash: TxHash, payload: EraCbor) -> Self { Self { hash, payload, @@ -54,7 +45,6 @@ impl MempoolTx { confirmations: 0, non_confirmations: 0, confirmed_at: None, - report: Some(report), } } @@ -448,7 +438,7 @@ mod tests { fn test_event(hash: TxHash) -> MempoolEvent { MempoolEvent { - tx: MempoolTx::new(hash, EraCbor(7, vec![0x80]), vec![]), 
+ tx: MempoolTx::new(hash, EraCbor(7, vec![0x80])), } } diff --git a/crates/core/src/submit.rs b/crates/core/src/submit.rs index 0f685ce3d..5c61dbf72 100644 --- a/crates/core/src/submit.rs +++ b/crates/core/src/submit.rs @@ -60,6 +60,21 @@ pub trait SubmitExt: Domain { /// # Returns /// /// The transaction hash if successfully submitted. + /// Evaluate a transaction's scripts against the current ledger state. + /// + /// Returns execution unit reports without submitting to the mempool. + #[instrument(skip_all)] + fn eval_tx( + &self, + cbor: &[u8], + ) -> Result<::EvalReport, DomainError> + { + let utxos = + MempoolAwareUtxoStore::<'_, Self>::new(self.state(), self.indexes(), self.mempool()); + + Ok(Self::Chain::eval_tx(cbor, &utxos)?) + } + #[instrument(skip_all)] fn receive_tx( &self, From 8bdc38f4839d836276789c3de5eb6a64c9bb6a7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 12:28:41 -0300 Subject: [PATCH 64/85] fix: remove report from mempool --- crates/redb3/src/mempool.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/crates/redb3/src/mempool.rs b/crates/redb3/src/mempool.rs index 0dd1a1aa7..a6a50436f 100644 --- a/crates/redb3/src/mempool.rs +++ b/crates/redb3/src/mempool.rs @@ -408,7 +408,6 @@ impl FinalizedEntry { confirmed_at: self .confirmed_at .map(|b| ChainPoint::from_bytes(b[..].try_into().unwrap())), - report: None, } } } @@ -511,7 +510,6 @@ impl InflightRecord { .confirmed_at .as_ref() .map(|b| ChainPoint::from_bytes(b[..].try_into().unwrap())), - report: None, } } } @@ -548,7 +546,6 @@ impl PendingTable { confirmations: 0, non_confirmations: 0, confirmed_at: None, - report: None, }); } Ok(result) From 351a2ee0b06e15ae27ae6d0eed2de1b62b5c27a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 12:28:53 -0300 Subject: [PATCH 65/85] fix: remove report from mempool --- crates/testing/src/mempool.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/crates/testing/src/mempool.rs b/crates/testing/src/mempool.rs index df9792adf..753865f8c 100644 --- a/crates/testing/src/mempool.rs +++ b/crates/testing/src/mempool.rs @@ -5,7 +5,7 @@ use crate::streams::ScriptedStream; /// Build a minimal `MempoolTx` for testing. pub fn make_test_mempool_tx(hash: TxHash) -> MempoolTx { - MempoolTx::new(hash, EraCbor(7, vec![0x80]), vec![]) + MempoolTx::new(hash, EraCbor(7, vec![0x80])) } /// Build a minimal `MempoolEvent` at the `Pending` stage for testing. From 4dbcfd09c8cc74915a6ab460540bcf1ab018446e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 12:41:22 -0300 Subject: [PATCH 66/85] feat(dolos-cardano): implement eval report with pallas --- crates/cardano/src/lib.rs | 9 +++++++++ crates/cardano/src/validate.rs | 3 +-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 0b365dd32..c7decc7aa 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -604,6 +604,15 @@ impl dolos_core::ChainLogic for CardanoLogic { validate::validate_tx(cbor, utxos, tip, genesis) } + type EvalReport = pallas::ledger::validate::phase2::EvalReport; + + fn eval_tx>( + cbor: &[u8], + utxos: &MempoolAwareUtxoStore, + ) -> Result> { + validate::evaluate_tx(cbor, utxos) + } + fn tx_produced_utxos( era_body: &EraCbor, ) -> Result, CardanoError> { diff --git a/crates/cardano/src/validate.rs b/crates/cardano/src/validate.rs index c1eb677f2..5c2a813af 100644 --- a/crates/cardano/src/validate.rs +++ b/crates/cardano/src/validate.rs @@ -100,8 +100,7 @@ pub fn validate_tx>( let payload = EraCbor(era, cbor.into()); let tx_hash = crate::pallas_hash_to_core(hash); - let encoded_report = format!("{report:?}").into_bytes(); - let tx = MempoolTx::new(tx_hash, payload, encoded_report); + let tx = MempoolTx::new(tx_hash, payload); Ok(tx) } From 6a074f86c3f9d486d67140980184d7c073d42942 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 12:41:40 -0300 Subject: [PATCH 67/85] fix: utxorpc uses the new refactor --- src/serve/grpc/mod.rs | 3 ++- src/serve/grpc/submit.rs | 49 ++++++++++++++++++++++++---------------- 2 files changed, 32 insertions(+), 20 deletions(-) diff --git a/src/serve/grpc/mod.rs b/src/serve/grpc/mod.rs index 680ea3301..9d5d58bfd 100644 --- a/src/serve/grpc/mod.rs +++ b/src/serve/grpc/mod.rs @@ -1,4 +1,4 @@ -use dolos_core::config::GrpcConfig; +use dolos_core::{config::GrpcConfig, ChainLogic}; use pallas::interop::utxorpc::{spec as u5c, LedgerContext}; use tonic::transport::{Certificate, Server, ServerTlsConfig}; use tower_http::cors::CorsLayer; @@ -22,6 +22,7 @@ pub struct Driver; impl dolos_core::Driver for Driver where D: Domain + LedgerContext, + D::Chain: ChainLogic, C: CancelToken, { type Config = GrpcConfig; diff --git a/src/serve/grpc/submit.rs b/src/serve/grpc/submit.rs index 14a698aca..39b740e88 100644 --- a/src/serve/grpc/submit.rs +++ b/src/serve/grpc/submit.rs @@ -1,5 +1,5 @@ use any_chain_eval::Chain; -use dolos_core::SubmitExt; +use dolos_core::{ChainLogic, SubmitExt}; use futures_core::Stream; use futures_util::{StreamExt as _, TryStreamExt as _}; use pallas::interop::utxorpc as u5c; @@ -61,23 +61,35 @@ fn event_to_wait_for_tx_response(event: MempoolEvent) -> WaitForTxResponse { } fn tx_eval_to_u5c( - eval: Result>, + eval: Result>, ) -> u5c::spec::cardano::TxEval { match eval { - Ok(tx) => { - let traces = tx - .report - .and_then(|b| String::from_utf8(b).ok()) - .map(|msg| vec![u5c::spec::cardano::EvalTrace { msg }]) - .unwrap_or_default(); - u5c::spec::cardano::TxEval { - ex_units: None, - redeemers: vec![], - fee: None, - traces, - ..Default::default() - } - } + Ok(report) => u5c::spec::cardano::TxEval { + ex_units: report.iter().try_fold( + u5c::spec::cardano::ExUnits::default(), + |acc, eval| { + Some(u5c::spec::cardano::ExUnits { + steps: acc.steps + eval.units.steps, + memory: acc.memory + 
eval.units.mem, + }) + }, + ), + redeemers: report + .iter() + .map(|x| u5c::spec::cardano::Redeemer { + purpose: x.tag as i32, + index: x.index, + ex_units: Some(u5c::spec::cardano::ExUnits { + steps: x.units.steps, + memory: x.units.mem, + }), + ..Default::default() + }) + .collect(), + fee: None, + traces: vec![], + ..Default::default() + }, Err(e) => u5c::spec::cardano::TxEval { errors: vec![u5c::spec::cardano::EvalError { msg: format!("{e:#?}"), @@ -91,6 +103,7 @@ fn tx_eval_to_u5c( impl submit_service_server::SubmitService for SubmitServiceImpl where D: Domain + LedgerContext, + D::Chain: ChainLogic, { type WaitForTxStream = Pin> + Send + 'static>>; @@ -202,9 +215,7 @@ where _ => return Err(Status::invalid_argument("missing or unsupported tx type")), }; - let chain = self.domain.read_chain(); - - let result = self.domain.validate_tx(&chain, &tx_raw); + let result = self.domain.eval_tx(&tx_raw); let result = tx_eval_to_u5c(result); let report = AnyChainEval { From 6db064b9a16f775bb06709a69c8ebdec6966453c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Mon, 6 Apr 2026 14:51:48 -0300 Subject: [PATCH 68/85] refactor: eracbor is now tagged payload --- Cargo.lock | 1 + crates/cardano/src/indexes/delta.rs | 6 +-- crates/cardano/src/lib.rs | 33 +++++++------- crates/cardano/src/owned.rs | 8 ++-- crates/cardano/src/utxoset.rs | 4 +- crates/cardano/src/validate.rs | 10 +++-- crates/core/src/async_query.rs | 4 +- crates/core/src/lib.rs | 64 ++++++++++++++------------- crates/core/src/mempool.rs | 10 ++--- crates/fjall/src/state/utxos.rs | 10 ++--- crates/minibf/src/lib.rs | 6 +-- crates/minibf/src/mapping.rs | 4 +- crates/minibf/src/routes/addresses.rs | 4 +- crates/minibf/src/routes/assets.rs | 12 ++--- crates/minikupo/src/routes/matches.rs | 6 +-- crates/redb3/Cargo.toml | 1 + crates/redb3/src/archive/mod.rs | 6 +-- crates/redb3/src/mempool.rs | 64 +++++++++++++-------------- crates/redb3/src/state/utxoset.rs | 8 ++-- crates/testing/src/lib.rs 
| 30 ++++++------- crates/testing/src/mempool.rs | 4 +- crates/testing/src/synthetic.rs | 4 +- crates/trp/src/mapping.rs | 6 +-- crates/trp/src/utxos.rs | 2 +- src/bin/dolos/eval.rs | 4 +- src/serve/grpc/query.rs | 2 +- src/serve/grpc/submit.rs | 2 +- src/sync/emulator.rs | 2 +- src/sync/submit.rs | 4 +- 29 files changed, 164 insertions(+), 157 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d5881d6d8..79c86084c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1464,6 +1464,7 @@ dependencies = [ "futures-util", "hex", "itertools 0.14.0", + "minicbor 0.26.4", "pallas", "redb", "redb-extras", diff --git a/crates/cardano/src/indexes/delta.rs b/crates/cardano/src/indexes/delta.rs index 7ded7ec3e..38bc3c447 100644 --- a/crates/cardano/src/indexes/delta.rs +++ b/crates/cardano/src/indexes/delta.rs @@ -4,7 +4,7 @@ //! structures from Cardano block data. use dolos_core::{ - ArchiveIndexDelta, BlockSlot, ChainPoint, EraCbor, IndexDelta, Tag, TxoRef, UtxoIndexDelta, + ArchiveIndexDelta, BlockSlot, ChainPoint, TaggedPayload, IndexDelta, Tag, TxoRef, UtxoIndexDelta, UtxoSetDelta, }; use pallas::{ @@ -380,8 +380,8 @@ impl CardanoIndexDeltaBuilder { } } - /// Extract UTxO filter tags from raw EraCbor. - fn extract_tags_from_era_cbor(era_cbor: &EraCbor) -> Option> { + /// Extract UTxO filter tags from raw TaggedPayload. 
+ fn extract_tags_from_era_cbor(era_cbor: &TaggedPayload) -> Option> { let output = crate::multi_era_output_from_era_cbor(era_cbor).ok()?; Some(Self::extract_utxo_tags(&output)) } diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index c7decc7aa..242d01335 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -10,7 +10,8 @@ pub use pallas; use dolos_core::{ config::CardanoConfig, BlockSlot, ChainError, ChainPoint, Domain, DomainError, EntityKey, - EraCbor, MempoolAwareUtxoStore, MempoolTx, MempoolUpdate, RawBlock, StateStore, TipEvent, + MempoolAwareUtxoStore, MempoolTx, MempoolUpdate, RawBlock, StateStore, TaggedPayload, + TipEvent, TxoRef, WorkUnit, }; @@ -93,17 +94,17 @@ pub fn pallas_hash_to_core( // Can the era integer be removed? Not sure. Santi said something about it. pub(crate) fn multi_era_tx_from_era_cbor( - era_body: &EraCbor, + era_body: &TaggedPayload, ) -> Result, CardanoError> { - Ok(MultiEraTx::decode(era_body.cbor())?) + Ok(MultiEraTx::decode(era_body.bytes())?) } pub(crate) fn txo_ref_from_pallas(hash: pallas::crypto::hash::Hash<32>, idx: u32) -> TxoRef { TxoRef(pallas_hash_to_core(hash), idx) } -pub(crate) fn era_cbor_from_output(output: &MultiEraOutput<'_>) -> EraCbor { - EraCbor(output.era().into(), output.encode()) +pub(crate) fn era_cbor_from_output(output: &MultiEraOutput<'_>) -> TaggedPayload { + TaggedPayload(output.era().into(), output.encode()) } pub(crate) fn txo_ref_from_input(input: &MultiEraInput<'_>) -> TxoRef { @@ -116,9 +117,9 @@ pub fn core_hash_to_pallas( (*h.as_ref()).into() } -fn multi_era_output_from_era_cbor(era_body: &EraCbor) -> Result, CardanoError> { - let era = pallas::ledger::traverse::Era::try_from(era_body.era())?; - Ok(MultiEraOutput::decode(era, era_body.cbor())?) +fn multi_era_output_from_era_cbor(era_body: &TaggedPayload) -> Result, CardanoError> { + let era = pallas::ledger::traverse::Era::try_from(era_body.tag())?; + Ok(MultiEraOutput::decode(era, era_body.bytes())?) 
} /// Cardano-specific work unit variants. @@ -494,7 +495,7 @@ impl dolos_core::ChainLogic for CardanoLogic { fn compute_undo( block: &dolos_core::Cbor, - inputs: &std::collections::HashMap>, + inputs: &std::collections::HashMap>, point: ChainPoint, ) -> Result> { let block_arc = Arc::new(block.clone()); @@ -534,7 +535,7 @@ impl dolos_core::ChainLogic for CardanoLogic { fn compute_catchup( block: &dolos_core::Cbor, - inputs: &std::collections::HashMap>, + inputs: &std::collections::HashMap>, point: ChainPoint, ) -> Result> { let block_arc = Arc::new(block.clone()); @@ -581,7 +582,7 @@ impl dolos_core::ChainLogic for CardanoLogic { fn decode_utxo( &self, - utxo: Arc, + utxo: Arc, ) -> Result> { let out = OwnedMultiEraOutput::decode(utxo) .map_err(CardanoError::from) @@ -614,8 +615,8 @@ impl dolos_core::ChainLogic for CardanoLogic { } fn tx_produced_utxos( - era_body: &EraCbor, - ) -> Result, CardanoError> { + era_body: &TaggedPayload, + ) -> Result, CardanoError> { let tx = multi_era_tx_from_era_cbor(era_body)?; Ok(tx .produces() @@ -628,21 +629,21 @@ impl dolos_core::ChainLogic for CardanoLogic { .collect()) } - fn tx_consumed_ref(era_body: &EraCbor) -> Result, CardanoError> { + fn tx_consumed_ref(era_body: &TaggedPayload) -> Result, CardanoError> { let tx = multi_era_tx_from_era_cbor(era_body)?; Ok(tx.consumes().iter().map(txo_ref_from_input).collect()) } fn find_tx_in_block( block: &[u8], tx_hash: &[u8], - ) -> Result, Self::ChainSpecificError> { + ) -> Result, Self::ChainSpecificError> { let block = MultiEraBlock::decode(block)?; let result = block .txs() .iter() .enumerate() .find(|(_, tx)| tx.hash().as_slice() == tx_hash) - .map(|(idx, tx)| (EraCbor(block.era().into(), tx.encode()), idx)); + .map(|(idx, tx)| (TaggedPayload(block.era().into(), tx.encode()), idx)); Ok(result) } } diff --git a/crates/cardano/src/owned.rs b/crates/cardano/src/owned.rs index 668e4b6b0..8f96a88ea 100644 --- a/crates/cardano/src/owned.rs +++ b/crates/cardano/src/owned.rs @@ -1,4 +1,4 
@@ -use dolos_core::{BlockBody, BlockHash, BlockSlot, EraCbor, RawBlock, RawUtxoMap, TxoRef}; +use dolos_core::{BlockBody, BlockHash, BlockSlot, RawBlock, RawUtxoMap, TaggedPayload, TxoRef}; use pallas::ledger::traverse::{MultiEraBlock, MultiEraOutput}; use self_cell::self_cell; use std::sync::Arc; @@ -42,7 +42,7 @@ impl dolos_core::Block for OwnedMultiEraBlock { self_cell!( pub struct OwnedMultiEraOutput { - owner: Arc, + owner: Arc, #[not_covariant] dependent: MultiEraOutput, @@ -50,9 +50,9 @@ self_cell!( ); impl OwnedMultiEraOutput { - pub fn decode(buf: Arc) -> Result { + pub fn decode(buf: Arc) -> Result { Self::try_new(buf, |x| { - let EraCbor(era, cbor) = x.as_ref(); + let TaggedPayload(era, cbor) = x.as_ref(); let era = pallas::ledger::traverse::Era::try_from(*era)?; diff --git a/crates/cardano/src/utxoset.rs b/crates/cardano/src/utxoset.rs index 408918ffe..4039aefee 100644 --- a/crates/cardano/src/utxoset.rs +++ b/crates/cardano/src/utxoset.rs @@ -173,7 +173,7 @@ pub fn build_custom_utxos_delta( .era .unwrap_or(pallas::ledger::traverse::Era::Conway.into()); - let eracbor = EraCbor(era, utxo.cbor.clone()); + let eracbor = TaggedPayload(era, utxo.cbor.clone()); delta .produced_utxo @@ -212,7 +212,7 @@ mod tests { .map(|key| { ( key, - OwnedMultiEraOutput::decode(Arc::new(EraCbor( + OwnedMultiEraOutput::decode(Arc::new(TaggedPayload( block.era().into(), valid_utxo.clone(), ))) diff --git a/crates/cardano/src/validate.rs b/crates/cardano/src/validate.rs index 5c2a813af..5d57e9004 100644 --- a/crates/cardano/src/validate.rs +++ b/crates/cardano/src/validate.rs @@ -1,6 +1,8 @@ use std::borrow::Cow; -use dolos_core::{ChainError, ChainPoint, Domain, EraCbor, MempoolAwareUtxoStore, MempoolTx}; +use dolos_core::{ + ChainError, ChainPoint, Domain, MempoolAwareUtxoStore, MempoolTx, TaggedPayload, +}; use crate::{CardanoError, CardanoGenesis}; use pallas::ledger::{ @@ -97,7 +99,7 @@ pub fn validate_tx>( ); let era = u16::from(tx.era()); - let payload = EraCbor(era, 
cbor.into()); + let payload = TaggedPayload(era, cbor.into()); let tx_hash = crate::pallas_hash_to_core(hash); let tx = MempoolTx::new(tx_hash, payload); @@ -137,11 +139,11 @@ pub fn evaluate_tx>( .get_utxos(input_refs)? .into_iter() .map(|(TxoRef(a, b), eracbor)| { - let era = eracbor.era().try_into().expect("era out of range"); + let era = eracbor.tag().try_into().expect("era out of range"); ( pallas::ledger::validate::utils::TxoRef::from((crate::core_hash_to_pallas(a), b)), - pallas::ledger::validate::utils::EraCbor::from((era, eracbor.cbor().into())), + pallas::ledger::validate::utils::EraCbor::from((era, eracbor.bytes().into())), ) }) .collect(); diff --git a/crates/core/src/async_query.rs b/crates/core/src/async_query.rs index d3aa0cdcd..690eaeb1c 100644 --- a/crates/core/src/async_query.rs +++ b/crates/core/src/async_query.rs @@ -4,7 +4,7 @@ use tokio::sync::Semaphore; use crate::{ archive::ArchiveStore, indexes::IndexStore, ArchiveError, BlockBody, BlockSlot, ChainError, - ChainLogic, ChainPoint, Domain, DomainError, EraCbor, IndexError, TagDimension, TxOrder, + ChainLogic, ChainPoint, Domain, DomainError, IndexError, TagDimension, TaggedPayload, TxOrder, }; #[derive(Debug, Clone)] @@ -139,7 +139,7 @@ where pub async fn tx_cbor( &self, tx_hash: Vec, - ) -> Result, DomainError> { + ) -> Result, DomainError> { let tx_hash_lookup = tx_hash.clone(); let Some(raw) = self .run_blocking(move |domain| { diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 4be904cfe..982102a41 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -69,7 +69,7 @@ pub type Cbor = Vec; pub type BlockBody = Cbor; pub type RawBlock = Arc; pub type RawBlockBatch = Vec; -pub type RawUtxoMap = HashMap>; +pub type RawUtxoMap = HashMap>; pub type BlockHash = crate::hash::Hash<32>; pub type BlockHeader = Cbor; pub type TxHash = crate::hash::Hash<32>; @@ -113,65 +113,69 @@ pub use point::*; pub use state::*; pub use wal::*; +/// A chain-agnostic tagged payload: a 
`u16` discriminant (whose meaning is +/// chain-specific, e.g. era for Cardano) paired with raw bytes. #[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)] -pub struct EraCbor(pub Era, pub Cbor); +pub struct TaggedPayload(pub u16, pub Vec); -impl EraCbor { - pub fn era(&self) -> Era { +impl TaggedPayload { + pub fn tag(&self) -> u16 { self.0 } - pub fn cbor(&self) -> &[u8] { + pub fn bytes(&self) -> &[u8] { &self.1 } + } -impl AsRef<[u8]> for EraCbor { +impl AsRef<[u8]> for TaggedPayload { fn as_ref(&self) -> &[u8] { &self.1 } } -impl From<(Era, Cbor)> for EraCbor { - fn from(value: (Era, Cbor)) -> Self { +impl From<(u16, Vec)> for TaggedPayload { + fn from(value: (u16, Vec)) -> Self { Self(value.0, value.1) } } -impl From for (Era, Cbor) { - fn from(value: EraCbor) -> Self { +impl From for (u16, Vec) { + fn from(value: TaggedPayload) -> Self { (value.0, value.1) } } -//impl From> for EraCbor { + +//impl From> for TaggedPayload { // fn from(value: MultiEraOutput<'_>) -> Self { -// EraCbor(value.era().into(), value.encode()) +// TaggedPayload(value.era().into(), value.encode()) // } //} // -//impl<'a> TryFrom<&'a EraCbor> for MultiEraOutput<'a> { +//impl<'a> TryFrom<&'a TaggedPayload> for MultiEraOutput<'a> { // type Error = pallas::codec::minicbor::decode::Error; // -// fn try_from(value: &'a EraCbor) -> Result { +// fn try_from(value: &'a TaggedPayload) -> Result { // let era = value.0.try_into().expect("era out of range"); // MultiEraOutput::decode(era, &value.1) // } //} // -//impl<'a> TryFrom<&'a EraCbor> for MultiEraTx<'a> { +//impl<'a> TryFrom<&'a TaggedPayload> for MultiEraTx<'a> { // type Error = pallas::codec::minicbor::decode::Error; // -// fn try_from(value: &'a EraCbor) -> Result { +// fn try_from(value: &'a TaggedPayload) -> Result { // let era = value.0.try_into().expect("era out of range"); // MultiEraTx::decode_for_era(era, &value.1) // } //} // -//impl TryFrom for MultiEraUpdate<'_> { +//impl TryFrom for MultiEraUpdate<'_> { // type Error 
= pallas::codec::minicbor::decode::Error; // -// fn try_from(value: EraCbor) -> Result { +// fn try_from(value: TaggedPayload) -> Result { // let era = value.0.try_into().expect("era out of range"); // MultiEraUpdate::decode_for_era(era, &value.1) // } @@ -267,16 +271,16 @@ pub enum BrokenInvariant { EpochBoundaryIncomplete, } -pub type UtxoMap = HashMap>; +pub type UtxoMap = HashMap>; pub type UtxoSet = HashSet; #[derive(Default, Debug, Clone)] pub struct UtxoSetDelta { - pub produced_utxo: HashMap>, - pub consumed_utxo: HashMap>, - pub recovered_stxi: HashMap>, - pub undone_utxo: HashMap>, + pub produced_utxo: HashMap>, + pub consumed_utxo: HashMap>, + pub recovered_stxi: HashMap>, + pub undone_utxo: HashMap>, } #[derive(Debug, Clone)] @@ -292,7 +296,7 @@ where { pub block: Cbor, pub delta: Vec, - pub inputs: HashMap>, + pub inputs: HashMap>, } impl LogValue @@ -555,7 +559,7 @@ pub trait ChainLogic: Sized + Send + Sync { /// to reverse the block's effects. fn compute_undo( block: &Cbor, - inputs: &HashMap>, + inputs: &HashMap>, point: ChainPoint, ) -> Result>; @@ -567,14 +571,14 @@ pub trait ChainLogic: Sized + Send + Sync { /// stores that are behind the state store. 
fn compute_catchup( block: &Cbor, - inputs: &HashMap>, + inputs: &HashMap>, point: ChainPoint, ) -> Result>; // TODO: remove from the interface - this is Cardano-specific fn decode_utxo( &self, - utxo: Arc, + utxo: Arc, ) -> Result>; // TODO: remove from the interface - this is Cardano-specific @@ -588,13 +592,13 @@ pub trait ChainLogic: Sized + Send + Sync { tip.saturating_sub(Self::mutable_slots(domain)) } - fn tx_produced_utxos(era_body: &EraCbor) -> Result, Self::ChainSpecificError>; - fn tx_consumed_ref(era_body: &EraCbor) -> Result, Self::ChainSpecificError>; + fn tx_produced_utxos(era_body: &TaggedPayload) -> Result, Self::ChainSpecificError>; + fn tx_consumed_ref(era_body: &TaggedPayload) -> Result, Self::ChainSpecificError>; fn find_tx_in_block( block: &[u8], tx_hash: &[u8], - ) -> Result, Self::ChainSpecificError>; + ) -> Result, Self::ChainSpecificError>; // Validate a transaction against the current ledger state. fn validate_tx>( diff --git a/crates/core/src/mempool.rs b/crates/core/src/mempool.rs index bb40df231..36b1d1ee4 100644 --- a/crates/core/src/mempool.rs +++ b/crates/core/src/mempool.rs @@ -8,7 +8,7 @@ use tracing::{debug, warn}; #[derive(Debug)] pub struct MempoolTx { pub hash: TxHash, - pub payload: EraCbor, + pub payload: TaggedPayload, pub stage: MempoolTxStage, pub confirmations: u32, pub non_confirmations: u32, @@ -37,7 +37,7 @@ impl Clone for MempoolTx { } impl MempoolTx { - pub fn new(hash: TxHash, payload: EraCbor) -> Self { + pub fn new(hash: TxHash, payload: TaggedPayload) -> Self { Self { hash, payload, @@ -283,7 +283,7 @@ pub struct MempoolAwareUtxoStore<'a, D: Domain> { fn scan_mempool_utxos(predicate: F, mempool: &D::Mempool) -> HashSet where - F: Fn(&EraCbor) -> bool, + F: Fn(&TaggedPayload) -> bool, { let mut refs = HashSet::new(); @@ -393,7 +393,7 @@ impl<'a, D: Domain> MempoolAwareUtxoStore<'a, D> { predicate: F, ) -> Result where - F: Fn(&EraCbor) -> bool, + F: Fn(&TaggedPayload) -> bool, { let from_mempool = 
scan_mempool_utxos::(predicate, self.mempool); @@ -438,7 +438,7 @@ mod tests { fn test_event(hash: TxHash) -> MempoolEvent { MempoolEvent { - tx: MempoolTx::new(hash, EraCbor(7, vec![0x80])), + tx: MempoolTx::new(hash, TaggedPayload(7, vec![0x80])), } } diff --git a/crates/fjall/src/state/utxos.rs b/crates/fjall/src/state/utxos.rs index e6bf6c2b8..fe9846a8f 100644 --- a/crates/fjall/src/state/utxos.rs +++ b/crates/fjall/src/state/utxos.rs @@ -6,7 +6,7 @@ use std::collections::HashMap; use std::sync::Arc; -use dolos_core::{EraCbor, TxoRef, UtxoMap, UtxoSetDelta}; +use dolos_core::{TaggedPayload, TxoRef, UtxoMap, UtxoSetDelta}; use fjall::{Keyspace, OwnedWriteBatch, Readable}; use crate::keys::{ @@ -70,7 +70,7 @@ pub fn get_utxos( if let Some(value) = readable.get(keyspace, key).map_err(Error::Fjall)? { if let Some((era, cbor)) = decode_utxo_value(&value) { - result.insert(txo_ref.clone(), Arc::new(EraCbor(era, cbor))); + result.insert(txo_ref.clone(), Arc::new(TaggedPayload(era, cbor))); } } } @@ -85,7 +85,7 @@ pub fn get_utxos( /// MVCC (Multi-Version Concurrency Control). 
pub struct UtxosIterator { /// Collected UTxOs from scan - utxos: Vec<(TxoRef, Arc)>, + utxos: Vec<(TxoRef, Arc)>, /// Current position pos: usize, } @@ -106,7 +106,7 @@ impl UtxosIterator { if key_bytes.len() == TXO_REF_SIZE { let txo_ref = decode_txo_ref(&key_bytes); if let Some((era, cbor)) = decode_utxo_value(&value_bytes) { - utxos.push((txo_ref, Arc::new(EraCbor(era, cbor)))); + utxos.push((txo_ref, Arc::new(TaggedPayload(era, cbor)))); } } } @@ -116,7 +116,7 @@ impl UtxosIterator { } impl Iterator for UtxosIterator { - type Item = Result<(TxoRef, Arc), Error>; + type Item = Result<(TxoRef, Arc), Error>; fn next(&mut self) -> Option { if self.pos < self.utxos.len() { diff --git a/crates/minibf/src/lib.rs b/crates/minibf/src/lib.rs index 712d28ffc..fe01ff352 100644 --- a/crates/minibf/src/lib.rs +++ b/crates/minibf/src/lib.rs @@ -21,7 +21,7 @@ use tracing::Level; use dolos_core::{ config::MinibfConfig, ArchiveStore as _, AsyncQueryFacade, BlockSlot, CancelToken, Domain, - Entity, EntityKey, EraCbor, LogKey, ServeError, StateError, StateStore as _, SubmitExt, + Entity, EntityKey, TaggedPayload, LogKey, ServeError, StateError, StateStore as _, SubmitExt, TemporalKey, TxOrder, }; @@ -56,7 +56,7 @@ impl Deref for Facade { } } -pub type TxMap = HashMap, Option>; +pub type TxMap = HashMap, Option>; pub type BlockWithTx = (Vec, TxOrder); pub type BlockWithTxMap = HashMap, BlockWithTx>; @@ -151,7 +151,7 @@ impl Facade { Ok(log.pparams.live().cloned().unwrap_or_default()) } - pub async fn get_tx(&self, hash: Hash<32>) -> Result, StatusCode> + pub async fn get_tx(&self, hash: Hash<32>) -> Result, StatusCode> where D: Clone + Send + Sync + 'static, { diff --git a/crates/minibf/src/mapping.rs b/crates/minibf/src/mapping.rs index 0e2bc5acd..4a7cb03f2 100644 --- a/crates/minibf/src/mapping.rs +++ b/crates/minibf/src/mapping.rs @@ -57,7 +57,7 @@ use dolos_cardano::{ pallas_extras, pallas_hash_to_core, AccountState, ChainSummary, DRepState, PParamsSet, PoolHash, PoolState, }; 
-use dolos_core::{BlockSlot, Domain, EraCbor, TxHash, TxOrder, TxoIdx, TxoRef}; +use dolos_core::{BlockSlot, Domain, TaggedPayload, TxHash, TxOrder, TxoIdx, TxoRef}; use crate::Facade; @@ -870,7 +870,7 @@ impl<'a> TxModelBuilder<'a> { Ok(deps) } - pub fn load_dep(&mut self, key: TxHash, cbor: &'a EraCbor) -> Result<(), StatusCode> { + pub fn load_dep(&mut self, key: TxHash, cbor: &'a TaggedPayload) -> Result<(), StatusCode> { let era = try_into_or_500!(cbor.0); let tx = MultiEraTx::decode_for_era(era, &cbor.1) diff --git a/crates/minibf/src/routes/addresses.rs b/crates/minibf/src/routes/addresses.rs index f21fa3ed1..e1ed3504c 100644 --- a/crates/minibf/src/routes/addresses.rs +++ b/crates/minibf/src/routes/addresses.rs @@ -22,7 +22,7 @@ use dolos_cardano::{ indexes::{AsyncCardanoQueryExt, CardanoIndexExt, SlotOrder}, pallas_extras, CardanoError, ChainSummary, }; -use dolos_core::{BlockBody, BlockSlot, Domain, EraCbor, StateStore as _, TxoRef}; +use dolos_core::{BlockBody, BlockSlot, Domain, TaggedPayload, StateStore as _, TxoRef}; use pallas::ledger::traverse::Era; use crate::{ @@ -408,7 +408,7 @@ where } for input in tx.consumes() { - if let Some(EraCbor(era, cbor)) = domain + if let Some(TaggedPayload(era, cbor)) = domain .query() .tx_cbor(input.hash().as_slice().to_vec()) .await diff --git a/crates/minibf/src/routes/assets.rs b/crates/minibf/src/routes/assets.rs index 62c8629cd..a14c9c6f9 100644 --- a/crates/minibf/src/routes/assets.rs +++ b/crates/minibf/src/routes/assets.rs @@ -18,7 +18,7 @@ use dolos_cardano::{ model::AssetState, CardanoError, ChainSummary, }; -use dolos_core::{BlockSlot, Domain, EraCbor, IndexStore as _, StateStore as _}; +use dolos_core::{BlockSlot, Domain, TaggedPayload, IndexStore as _, StateStore as _}; use futures_util::StreamExt; use itertools::Itertools; use pallas::{ @@ -310,7 +310,7 @@ struct AssetModelBuilder { subject: Vec, unit: String, asset_state: dolos_cardano::model::AssetState, - initial_tx: Option, + initial_tx: Option, 
registry_url: Option, } @@ -337,7 +337,7 @@ impl AssetModelBuilder { let ref_state = domain.read_cardano_entity::(entity_key.as_slice())?; if let Some(metadata_tx) = ref_state.and_then(|state| state.metadata_tx) { - if let Some(EraCbor(era, cbor)) = domain + if let Some(TaggedPayload(era, cbor)) = domain .query() .tx_cbor(metadata_tx.as_slice().to_vec()) .await @@ -367,8 +367,8 @@ impl AssetModelBuilder { } } - if let Some(EraCbor(era, cbor)) = &cip25_tx { - let tx = decode_era_tx(*era, cbor)?; + if let Some(payload) = &cip25_tx { + let tx = decode_era_tx(payload.tag(), payload.bytes())?; if let Some((_, standard, ref_asset_bytes)) = &cip68_reference { if let Some(metadata) = @@ -620,7 +620,7 @@ where } for input in tx.consumes() { - if let Some(EraCbor(era, cbor)) = domain + if let Some(TaggedPayload(era, cbor)) = domain .query() .tx_cbor(input.hash().as_slice().to_vec()) .await diff --git a/crates/minikupo/src/routes/matches.rs b/crates/minikupo/src/routes/matches.rs index 5abaf6199..2b0f4e509 100644 --- a/crates/minikupo/src/routes/matches.rs +++ b/crates/minikupo/src/routes/matches.rs @@ -8,7 +8,7 @@ use dolos_cardano::{ indexes::CardanoIndexExt, network_from_genesis, pallas_extras, pallas_hash_to_core, CardanoError, CardanoGenesis, }; -use dolos_core::{Domain, EraCbor, IndexStore as _, StateStore as _, TxoRef, UtxoSet}; +use dolos_core::{Domain, TaggedPayload, IndexStore as _, StateStore as _, TxoRef, UtxoSet}; use pallas::codec::minicbor; use pallas::ledger::{ addresses::{Address, StakeAddress}, @@ -359,7 +359,7 @@ async fn refs_for_output_ref_pattern( refs } patterns::OutputIndexPattern::Any => { - let Some(EraCbor(era, cbor)) = facade + let Some(TaggedPayload(era, cbor)) = facade .query() .tx_cbor(tx_id.to_vec()) .await @@ -439,7 +439,7 @@ async fn build_matches ArchiveStore { Ok(None) } - pub fn get_tx(&self, tx_hash: &[u8]) -> Result, RedbArchiveError> { + pub fn get_tx(&self, tx_hash: &[u8]) -> Result, RedbArchiveError> { let rx = 
self.db().begin_read()?; let Some(slot) = indexes::Indexes::get_by_tx_hash(&rx, tx_hash)? else { return Ok(None); @@ -688,7 +688,7 @@ impl ArchiveStore { let block = MultiEraBlock::decode(raw.as_slice()) .map_err(|e| RedbArchiveError(ArchiveError::InternalError(e.to_string())))?; if let Some(tx) = block.txs().iter().find(|x| x.hash().to_vec() == tx_hash) { - return Ok(Some(EraCbor(block.era().into(), tx.encode()))); + return Ok(Some(TaggedPayload(block.era().into(), tx.encode()))); } Ok(None) diff --git a/crates/redb3/src/mempool.rs b/crates/redb3/src/mempool.rs index a6a50436f..5ff0f570e 100644 --- a/crates/redb3/src/mempool.rs +++ b/crates/redb3/src/mempool.rs @@ -1,16 +1,15 @@ use std::collections::HashSet; use std::sync::Arc; -use pallas::codec::minicbor::{self, Decode, Encode}; +use minicbor::{Decode, Encode}; -// Provides minicbor Encode/Decode for EraCbor (which no longer derives them) -// using the same on-disk CBOR format: array(2) [ uint(era), bytes(cbor) ] -mod era_cbor_codec { - use dolos_core::EraCbor; - use pallas::codec::minicbor; +// Provides minicbor Encode/Decode for TaggedPayload (which no longer derives them) +// using the on-disk CBOR format: array(2) [ uint(tag), bytes(payload) ] +mod tagged_payload_codec { + use dolos_core::TaggedPayload; pub fn encode( - v: &EraCbor, + v: &TaggedPayload, e: &mut minicbor::Encoder, _: &mut C, ) -> Result<(), minicbor::encode::Error> { @@ -21,20 +20,19 @@ mod era_cbor_codec { pub fn decode<'b, C>( d: &mut minicbor::Decoder<'b>, _: &mut C, - ) -> Result { + ) -> Result { d.array()?; - let era = d.u16()?; - let cbor = d.bytes()?.to_vec(); - Ok(EraCbor(era, cbor)) + let tag = d.u16()?; + let payload = d.bytes()?.to_vec(); + Ok(TaggedPayload(tag, payload)) } } -mod opt_era_cbor_codec { - use dolos_core::EraCbor; - use pallas::codec::minicbor; +mod opt_tagged_payload_codec { + use dolos_core::TaggedPayload; pub fn encode( - v: &Option, + v: &Option, e: &mut minicbor::Encoder, ctx: &mut C, ) -> Result<(), 
minicbor::encode::Error> { @@ -43,19 +41,19 @@ mod opt_era_cbor_codec { e.null()?; Ok(()) } - Some(inner) => super::era_cbor_codec::encode(inner, e, ctx), + Some(inner) => super::tagged_payload_codec::encode(inner, e, ctx), } } pub fn decode<'b, C>( d: &mut minicbor::Decoder<'b>, ctx: &mut C, - ) -> Result, minicbor::decode::Error> { + ) -> Result, minicbor::decode::Error> { if d.datatype()? == minicbor::data::Type::Null { d.null()?; Ok(None) } else { - Ok(Some(super::era_cbor_codec::decode(d, ctx)?)) + Ok(Some(super::tagged_payload_codec::decode(d, ctx)?)) } } } @@ -66,8 +64,8 @@ use tokio_stream::wrappers::BroadcastStream; use tracing::{debug, warn}; use dolos_core::{ - config::RedbMempoolConfig, ChainPoint, EraCbor, MempoolError, MempoolEvent, MempoolPage, - MempoolStore, MempoolTx, MempoolTxStage, TxHash, TxStatus, + config::RedbMempoolConfig, ChainPoint, MempoolError, MempoolEvent, MempoolPage, + MempoolStore, MempoolTx, MempoolTxStage, TaggedPayload, TxHash, TxStatus, }; // ── Error newtype (mirrors wal/mod.rs pattern) ────────────────────────── @@ -241,9 +239,9 @@ impl redb::Key for DbTxHash { } } -/// Newtype wrapping `EraCbor` for the pending table value (foreign type). +/// Newtype wrapping `TaggedPayload` for the pending table value (foreign type). 
#[derive(Debug)] -struct DbEraCbor(EraCbor); +struct DbEraCbor(TaggedPayload); impl redb::Value for DbEraCbor { type SelfType<'a> @@ -264,8 +262,8 @@ impl redb::Value for DbEraCbor { Self: 'a, { let mut d = minicbor::Decoder::new(data); - let era_cbor = era_cbor_codec::decode(&mut d, &mut ()).unwrap(); - Self(era_cbor) + let payload = tagged_payload_codec::decode(&mut d, &mut ()).unwrap(); + Self(payload) } fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> Self::AsBytes<'a> @@ -274,7 +272,7 @@ impl redb::Value for DbEraCbor { { let mut buf = Vec::new(); let mut e = minicbor::Encoder::new(&mut buf); - era_cbor_codec::encode(&value.0, &mut e, &mut ()).unwrap(); + tagged_payload_codec::encode(&value.0, &mut e, &mut ()).unwrap(); buf } @@ -301,8 +299,8 @@ struct InflightRecord { stage: InflightStage, #[n(1)] confirmations: u32, - #[cbor(n(2), with = "era_cbor_codec")] - payload: EraCbor, + #[cbor(n(2), with = "tagged_payload_codec")] + payload: TaggedPayload, #[cbor(n(3), with = "minicbor::bytes")] confirmed_at: Option>, #[n(4)] @@ -351,8 +349,8 @@ struct FinalizedEntry { confirmations: u32, #[cbor(n(2), with = "minicbor::bytes")] confirmed_at: Option>, - #[cbor(n(3), with = "opt_era_cbor_codec")] - payload: Option, + #[cbor(n(3), with = "opt_tagged_payload_codec")] + payload: Option, #[n(4)] dropped: Option, } @@ -401,7 +399,7 @@ impl FinalizedEntry { }; MempoolTx { hash: TxHash::from(hash_bytes), - payload: self.payload.unwrap_or(EraCbor(0, vec![])), + payload: self.payload.unwrap_or(TaggedPayload(0, vec![])), stage, confirmations: self.confirmations, non_confirmations: 0, @@ -413,7 +411,7 @@ impl FinalizedEntry { } impl InflightRecord { - fn new(payload: EraCbor) -> Self { + fn new(payload: TaggedPayload) -> Self { Self { stage: InflightStage::Propagated, confirmations: 0, @@ -579,7 +577,7 @@ impl PendingTable { fn insert( wx: &redb::WriteTransaction, hash: &TxHash, - payload: &EraCbor, + payload: &TaggedPayload, ) -> Result<(), RedbMempoolError> { let mut 
table = wx.open_table(Self::DEF)?; let seq = match table.last()? { @@ -594,7 +592,7 @@ impl PendingTable { fn drain_by_hashes( wx: &redb::WriteTransaction, hashes: &HashSet, - ) -> Result, RedbMempoolError> { + ) -> Result, RedbMempoolError> { let mut table = wx.open_table(Self::DEF)?; let extracted = table.extract_if(|key, _value| hashes.contains(&key.hash()))?; extracted diff --git a/crates/redb3/src/state/utxoset.rs b/crates/redb3/src/state/utxoset.rs index f4941e0aa..2e11c0a67 100644 --- a/crates/redb3/src/state/utxoset.rs +++ b/crates/redb3/src/state/utxoset.rs @@ -1,4 +1,4 @@ -use dolos_core::{EraCbor, TxoRef, UtxoMap, UtxoSetDelta}; +use dolos_core::{TaggedPayload, TxoRef, UtxoMap, UtxoSetDelta}; use redb::{ Range, ReadTransaction, ReadableDatabase, ReadableTable as _, ReadableTableMetadata as _, TableDefinition, TableStats, WriteTransaction, @@ -15,7 +15,7 @@ type UtxosValue = (u16, &'static [u8]); pub struct UtxosIterator(Range<'static, UtxosKey, UtxosValue>); impl Iterator for UtxosIterator { - type Item = Result<(TxoRef, EraCbor), ::redb::StorageError>; + type Item = Result<(TxoRef, TaggedPayload), ::redb::StorageError>; fn next(&mut self) -> Option { let x = self.0.next()?; @@ -26,7 +26,7 @@ impl Iterator for UtxosIterator { let (era, cbor) = v.value(); let cbor = cbor.to_owned(); - let v = EraCbor(era, cbor); + let v = TaggedPayload(era, cbor); (k, v) }); @@ -61,7 +61,7 @@ impl UtxosTable { if let Some(body) = table.get(&(key.0.as_array(), key.1))? 
{ let (era, cbor) = body.value(); let cbor = cbor.to_owned(); - let value = Arc::new(EraCbor(era, cbor)); + let value = Arc::new(TaggedPayload(era, cbor)); out.insert(key, value); } diff --git a/crates/testing/src/lib.rs b/crates/testing/src/lib.rs index 668953805..529b503ee 100644 --- a/crates/testing/src/lib.rs +++ b/crates/testing/src/lib.rs @@ -31,14 +31,14 @@ pub mod mempool; pub mod streams; pub trait UtxoGenerator { - fn generate(&self, address: &TestAddress) -> EraCbor; + fn generate(&self, address: &TestAddress) -> TaggedPayload; } impl UtxoGenerator for F where - F: Fn(&TestAddress) -> EraCbor, + F: Fn(&TestAddress) -> TaggedPayload, { - fn generate(&self, address: &TestAddress) -> EraCbor { + fn generate(&self, address: &TestAddress) -> TaggedPayload { self(address) } } @@ -234,14 +234,14 @@ pub fn fake_genesis_utxo( address: impl Into, ordinal: usize, amount: u64, -) -> (TxoRef, EraCbor) { +) -> (TxoRef, TaggedPayload) { let tx_hash = genesis_tx_hash(); let txoref = TxoRef(dolos_cardano::pallas_hash_to_core(tx_hash), ordinal as u32); (txoref, utxo_with_value(address, Value::Coin(amount))) } -pub fn replace_utxo_address(utxo: Arc, new_address: TestAddress) -> Arc { - let EraCbor(_, cbor) = utxo.as_ref(); +pub fn replace_utxo_address(utxo: Arc, new_address: TestAddress) -> Arc { + let TaggedPayload(_, cbor) = utxo.as_ref(); let output = MultiEraOutput::decode(Era::Conway, cbor).unwrap(); @@ -251,7 +251,7 @@ pub fn replace_utxo_address(utxo: Arc, new_address: TestAddress) -> Arc output.address = new_address.to_bytes().into(); - Arc::new(EraCbor( + Arc::new(TaggedPayload( Era::Conway.into(), minicbor::to_vec(&output).unwrap(), )) @@ -273,8 +273,8 @@ pub fn replace_utxo_map_txhash(utxos: UtxoMap, tx_sequence: u64) -> UtxoMap { .collect() } -pub fn get_utxo_address_and_value(utxo: &EraCbor) -> (Vec, u64) { - let EraCbor(_, cbor) = utxo; +pub fn get_utxo_address_and_value(utxo: &TaggedPayload) -> (Vec, u64) { + let TaggedPayload(_, cbor) = utxo; let output = 
MultiEraOutput::decode(Era::Conway, cbor).unwrap(); @@ -291,7 +291,7 @@ pub fn get_utxo_address_and_value(utxo: &EraCbor) -> (Vec, u64) { ) } -pub fn assert_utxo_address_and_value(utxo: &EraCbor, address: impl Into>, value: u64) { +pub fn assert_utxo_address_and_value(utxo: &TaggedPayload, address: impl Into>, value: u64) { let (output_address, output_value) = get_utxo_address_and_value(utxo); assert_eq!(output_address, address.into()); @@ -307,7 +307,7 @@ where } } -pub fn print_utxo(txoref: &TxoRef, utxo: &EraCbor) { +pub fn print_utxo(txoref: &TxoRef, utxo: &TaggedPayload) { let (output_address, output_value) = get_utxo_address_and_value(utxo); let bech32 = Address::from_bytes(&output_address).unwrap().to_string(); @@ -385,7 +385,7 @@ where } } -pub fn utxo_with_value(address: impl Into, value: Value) -> EraCbor { +pub fn utxo_with_value(address: impl Into, value: Value) -> TaggedPayload { let output = pallas::ledger::primitives::conway::TransactionOutput::PostAlonzo( PostAlonzoTransactionOutput { address: address.into().to_bytes().into(), @@ -396,13 +396,13 @@ pub fn utxo_with_value(address: impl Into, value: Value) -> EraCbor .into(), ); - EraCbor( + TaggedPayload( pallas::ledger::traverse::Era::Conway.into(), pallas::codec::minicbor::to_vec(&output).unwrap(), ) } -pub fn utxo_with_random_amount(address: impl Into, amount: Range) -> EraCbor { +pub fn utxo_with_random_amount(address: impl Into, amount: Range) -> TaggedPayload { let amount = rand::rng().random_range(amount); utxo_with_value(address, Value::Coin(amount)) @@ -414,7 +414,7 @@ pub fn utxo_with_random_asset( address: impl Into, asset: impl Into, asset_amount: Range, -) -> EraCbor { +) -> TaggedPayload { let rnd_amount = rand::rng().random_range(asset_amount); let asset: TestAsset = asset.into(); diff --git a/crates/testing/src/mempool.rs b/crates/testing/src/mempool.rs index 753865f8c..6c6b28a48 100644 --- a/crates/testing/src/mempool.rs +++ b/crates/testing/src/mempool.rs @@ -1,11 +1,11 @@ use 
dolos_core::mempool::{MempoolEvent, MempoolTx, MempoolTxStage}; -use dolos_core::{ChainPoint, EraCbor, MempoolError, MempoolStore, TxHash, TxStatus}; +use dolos_core::{ChainPoint, MempoolError, MempoolStore, TaggedPayload, TxHash, TxStatus}; use crate::streams::ScriptedStream; /// Build a minimal `MempoolTx` for testing. pub fn make_test_mempool_tx(hash: TxHash) -> MempoolTx { - MempoolTx::new(hash, EraCbor(7, vec![0x80])) + MempoolTx::new(hash, TaggedPayload(7, vec![0x80])) } /// Build a minimal `MempoolEvent` at the `Pending` stage for testing. diff --git a/crates/testing/src/synthetic.rs b/crates/testing/src/synthetic.rs index 3d75dd729..c75c4938c 100644 --- a/crates/testing/src/synthetic.rs +++ b/crates/testing/src/synthetic.rs @@ -208,7 +208,7 @@ pub fn build_synthetic_blocks( let submit_tx_hash = tx_sequence_to_hash(block_count as u64 * txs_per_block as u64 + 1); let submit_ref = TxoRef(submit_tx_hash, 0); let submit_utxo = utxo_with_value(submit_address.clone(), Value::Coin(submit_amount)); - let crate::EraCbor(_, submit_cbor) = submit_utxo; + let crate::TaggedPayload(_, submit_cbor) = submit_utxo; chain_config.custom_utxos.push(CustomUtxo { ref_: submit_ref.clone(), era: Some(pallas::ledger::traverse::Era::Conway.into()), @@ -237,7 +237,7 @@ pub fn build_synthetic_blocks( let seed_tx_hash = tx_sequence_to_hash(1 + (offset * txs_per_block + tx_offset) as u64); let seed_ref = TxoRef(seed_tx_hash, 0); let seed_utxo = utxo_with_value(cfg.seed_address.clone(), Value::Coin(cfg.seed_amount)); - let crate::EraCbor(_, seed_cbor) = seed_utxo; + let crate::TaggedPayload(_, seed_cbor) = seed_utxo; chain_config.custom_utxos.push(CustomUtxo { ref_: seed_ref, diff --git a/crates/trp/src/mapping.rs b/crates/trp/src/mapping.rs index b5e8e0d49..68f99044c 100644 --- a/crates/trp/src/mapping.rs +++ b/crates/trp/src/mapping.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use tx3_resolver::{Expression, StructExpr}; use dolos_cardano::pallas_hash_to_core; -use dolos_core::{EraCbor, 
TxoRef}; +use dolos_core::{TaggedPayload, TxoRef}; use pallas::{ codec::utils::KeyValuePairs, ledger::{ @@ -110,11 +110,11 @@ pub fn into_tx3_utxoref(txoref: TxoRef) -> tx3_resolver::UtxoRef { pub fn into_tx3_utxo( txoref: TxoRef, - utxo: Arc, + utxo: Arc, ) -> Result> { let r#ref = into_tx3_utxoref(txoref); - let EraCbor(era, cbor) = utxo.as_ref(); + let TaggedPayload(era, cbor) = utxo.as_ref(); let era = Era::try_from(*era).map_err(|e| tx3_resolver::Error::StoreError(e.to_string()))?; diff --git a/crates/trp/src/utxos.rs b/crates/trp/src/utxos.rs index 9b884876e..111c69c4c 100644 --- a/crates/trp/src/utxos.rs +++ b/crates/trp/src/utxos.rs @@ -14,7 +14,7 @@ fn search_state_utxos>( store: &MempoolAwareUtxoStore, ) -> Result, IndexError> { // Dummy filter that always returns true (we want all UTxOs matching the index) - let no_filter = |_: &dolos_core::EraCbor| true; + let no_filter = |_: &dolos_core::TaggedPayload| true; let refs = match pattern { UtxoPattern::ByAddress(address) => { diff --git a/src/bin/dolos/eval.rs b/src/bin/dolos/eval.rs index ea15e1866..7aec6287a 100644 --- a/src/bin/dolos/eval.rs +++ b/src/bin/dolos/eval.rs @@ -9,7 +9,7 @@ use std::{borrow::Cow, path::PathBuf}; use dolos::{ adapters::DomainAdapter, - core::{Domain, EraCbor, StateStore as _, TxoRef}, + core::{Domain, StateStore as _, TxoRef}, }; use dolos_cardano::{core_hash_to_pallas, pallas_hash_to_core}; @@ -66,7 +66,7 @@ pub async fn run(config: &RootConfig, args: &Args) -> miette::Result<()> { let mut utxos2 = UTxOs::new(); for (ref_, body) in resolved.iter() { - let EraCbor(era, cbor) = body.as_ref(); + let dolos_core::TaggedPayload(era, cbor) = body.as_ref(); let era = (*era) .try_into() diff --git a/src/serve/grpc/query.rs b/src/serve/grpc/query.rs index 2743b0cf4..ef67e8b01 100644 --- a/src/serve/grpc/query.rs +++ b/src/serve/grpc/query.rs @@ -210,7 +210,7 @@ fn from_u5c_txoref(txo: u5c::query::TxoRef) -> Result { async fn into_u5c_utxo + LedgerContext>( txo: &TxoRef, - body: 
&EraCbor, + body: &TaggedPayload, mapper: &interop::Mapper, domain: &S, ) -> Result> { diff --git a/src/serve/grpc/submit.rs b/src/serve/grpc/submit.rs index 39b740e88..18aca3b80 100644 --- a/src/serve/grpc/submit.rs +++ b/src/serve/grpc/submit.rs @@ -45,7 +45,7 @@ fn event_to_watch_mempool_response(event: MempoolEvent) -> WatchMempoolResponse WatchMempoolResponse { tx: TxInMempool { r#ref: event.tx.hash.as_slice().to_vec().into(), - native_bytes: event.tx.payload.cbor().to_vec().into(), + native_bytes: event.tx.payload.bytes().to_vec().into(), stage: tx_stage_to_u5c(event.tx.stage.clone()), parsed_state: None, // TODO } diff --git a/src/sync/emulator.rs b/src/sync/emulator.rs index 9170e508d..6f862e149 100644 --- a/src/sync/emulator.rs +++ b/src/sync/emulator.rs @@ -54,7 +54,7 @@ impl Worker { for (i, tx) in txs.iter().enumerate() { debug!(tx = hex::encode(tx.hash), "adding tx to emulated block"); - let EraCbor(era, cbor) = &tx.payload; + let dolos_core::TaggedPayload(era, cbor) = &tx.payload; let era = pallas::ledger::traverse::Era::try_from(*era).or_panic()?; diff --git a/src/sync/submit.rs b/src/sync/submit.rs index 77e71fc58..c441029fe 100644 --- a/src/sync/submit.rs +++ b/src/sync/submit.rs @@ -16,7 +16,7 @@ fn to_n2n_era(era: u16) -> u16 { } fn to_n2n_reply(mempool_tx: &MempoolTx) -> TxIdAndSize { - let EraCbor(era, bytes) = &mempool_tx.payload; + let TaggedPayload(era, bytes) = &mempool_tx.payload; let era = to_n2n_era(*era); @@ -26,7 +26,7 @@ fn to_n2n_reply(mempool_tx: &MempoolTx) -> TxIdAndSize { } fn to_n2n_body(mempool_tx: MempoolTx) -> EraTxBody { - let EraCbor(era, bytes) = mempool_tx.payload; + let TaggedPayload(era, bytes) = mempool_tx.payload; let era = to_n2n_era(era); From baac09b4304e6f280532f0afb6d88224f6b19f31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 11:11:15 -0300 Subject: [PATCH 69/85] fix: dont return before committing written pools --- crates/cardano/src/genesis/staking.rs | 16 
+++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/crates/cardano/src/genesis/staking.rs b/crates/cardano/src/genesis/staking.rs index e5f775368..b602cf341 100644 --- a/crates/cardano/src/genesis/staking.rs +++ b/crates/cardano/src/genesis/staking.rs @@ -132,15 +132,13 @@ pub fn bootstrap( writer.write_entity_typed(&EntityKey::from(state.operator.as_slice()), &state)?; } - let Some(delegations) = &staking.stake else { - return Ok(()); - }; - - for (account, pool) in delegations { - let state = parse_delegation(account, pool, genesis); - let key = minicbor::to_vec(&state.credential).unwrap(); - let key = EntityKey::from(key); - writer.write_entity_typed(&key, &state)?; + if let Some(delegations) = &staking.stake { + for (account, pool) in delegations { + let state = parse_delegation(account, pool, genesis); + let key = minicbor::to_vec(&state.credential).unwrap(); + let key = EntityKey::from(key); + writer.write_entity_typed(&key, &state)?; + } } writer.commit()?; From 4ef537e596a094a7edac0950368127e77c387682 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 11:13:31 -0300 Subject: [PATCH 70/85] fix: use rupd time registration snapshot --- crates/cardano/src/rupd/loading.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/cardano/src/rupd/loading.rs b/crates/cardano/src/rupd/loading.rs index b62cce9a2..a0fc10b3b 100644 --- a/crates/cardano/src/rupd/loading.rs +++ b/crates/cardano/src/rupd/loading.rs @@ -126,7 +126,7 @@ impl StakeSnapshot { state: &D::State, stake_epoch: u64, protocol: EraProtocol, - _rupd_slot: u64, + rupd_slot: u64, ) -> Result> { let mut snapshot = Self::default(); @@ -153,9 +153,7 @@ impl StakeSnapshot { for record in accounts { let (_, account) = record?; - let is_reg = account.is_registered(); - - if is_reg { + if account.is_registered_at(rupd_slot) { snapshot .registered_accounts .insert(account.credential.clone()); From 
cce804d98e514abe28e4ac3d2cc95a763baec453 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 11:28:57 -0300 Subject: [PATCH 71/85] fix: handle genesis missing fields --- crates/cardano/src/estart/nonces.rs | 8 +++--- crates/cardano/src/genesis/mod.rs | 2 +- crates/cardano/src/lib.rs | 6 +++-- crates/cardano/src/rupd/loading.rs | 2 +- crates/cardano/src/utils.rs | 42 +++++++++++++++++++++-------- crates/core/src/lib.rs | 17 ++++++------ crates/minibf/src/routes/network.rs | 8 ++++-- src/bin/dolos/data/compute_nonce.rs | 6 +++-- src/bin/dolos/init.rs | 5 +++- 9 files changed, 65 insertions(+), 31 deletions(-) diff --git a/crates/cardano/src/estart/nonces.rs b/crates/cardano/src/estart/nonces.rs index d8868f68c..e64c554d5 100644 --- a/crates/cardano/src/estart/nonces.rs +++ b/crates/cardano/src/estart/nonces.rs @@ -34,11 +34,13 @@ impl EntityDelta for NonceTransition { } } -fn next_largest_stable_slot(ctx: &super::WorkContext) -> BlockSlot { - let stability_window = nonce_stability_window(ctx.active_protocol.into(), &ctx.genesis); +fn next_largest_stable_slot( + ctx: &super::WorkContext, +) -> Result> { + let stability_window = nonce_stability_window(ctx.active_protocol.into(), &ctx.genesis)?; let epoch_finish_slot = ctx.chain_summary.epoch_start(ctx.starting_epoch_no() + 1); - sub!(epoch_finish_slot, stability_window) + Ok(sub!(epoch_finish_slot, stability_window)) } fn initial_nonces(ctx: &super::WorkContext) -> Option { diff --git a/crates/cardano/src/genesis/mod.rs b/crates/cardano/src/genesis/mod.rs index 4d4dd7075..4ab82efe9 100644 --- a/crates/cardano/src/genesis/mod.rs +++ b/crates/cardano/src/genesis/mod.rs @@ -77,7 +77,7 @@ pub fn bootstrap_epoch( pparams, initial_pots: pots, largest_stable_slot: genesis.shelley.epoch_length.unwrap() as u64 - - nonce_stability_window(protocol as u16, genesis), + - nonce_stability_window(protocol as u16, genesis)?, nonces, previous_nonce_tail: None, number: 0, diff --git 
a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 242d01335..863e6e2c1 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -387,7 +387,7 @@ impl dolos_core::ChainLogic for CardanoLogic { // into the epoch, capturing addrsRew (registered accounts) for the pre-Babbage // prefilter. Using 4k/f instead of 3k/f ensures the state at RUPD time includes // all deregistrations up to the correct threshold. - let stability_window = utils::randomness_stability_window(&genesis); + let stability_window = utils::randomness_stability_window(&genesis)?; Ok(Self { config, @@ -591,7 +591,9 @@ impl dolos_core::ChainLogic for CardanoLogic { Ok(out) } - fn mutable_slots(domain: &impl Domain) -> BlockSlot { + fn mutable_slots( + domain: &impl Domain, + ) -> Result> { utils::mutable_slots(&domain.genesis()) } diff --git a/crates/cardano/src/rupd/loading.rs b/crates/cardano/src/rupd/loading.rs index a0fc10b3b..b6a4258fa 100644 --- a/crates/cardano/src/rupd/loading.rs +++ b/crates/cardano/src/rupd/loading.rs @@ -308,7 +308,7 @@ impl RupdWork { // Pre-Babbage pre-filtering uses this to exclude unregistered accounts from // reward computation. let rupd_slot = work.chain.epoch_start(current_epoch) - + crate::utils::randomness_stability_window(genesis); + + crate::utils::randomness_stability_window(genesis)?; work.snapshot = StakeSnapshot::load::(state, snapshot_epoch, protocol, rupd_slot)?; diff --git a/crates/cardano/src/utils.rs b/crates/cardano/src/utils.rs index 6adc0ba18..381849a68 100644 --- a/crates/cardano/src/utils.rs +++ b/crates/cardano/src/utils.rs @@ -17,10 +17,16 @@ pub fn network_from_genesis(genesis: &CardanoGenesis) -> Network { /// Reads the relevant genesis config values and uses the security window /// guarantee formula from consensus to calculate the latest slot that can be /// considered immutable. 
-pub fn mutable_slots(genesis: &CardanoGenesis) -> u64 { +pub fn mutable_slots( + genesis: &CardanoGenesis, +) -> Result> { let k = genesis.byron.protocol_consts.k as f64; - let f = genesis.shelley.active_slots_coeff.unwrap() as f64; - ((3.0 * k) / f).ceil() as u64 + let f = genesis + .shelley + .active_slots_coeff + .ok_or_else(|| ChainError::GenesisFieldMissing("active_slots_coeff".to_string()))? + as f64; + Ok(((3.0 * k) / f).ceil() as u64) } /// Computes the amount of mutable slots in chain. @@ -29,7 +35,9 @@ pub fn mutable_slots(genesis: &CardanoGenesis) -> u64 { /// guarantee formula from consensus to calculate the latest slot that can be /// considered immutable. Same as `mutable_slots`, added for the code to be similar in naming /// convention to other implementations. -pub fn stability_window(genesis: &CardanoGenesis) -> u64 { +pub fn stability_window( + genesis: &CardanoGenesis, +) -> Result> { mutable_slots(genesis) } @@ -37,17 +45,26 @@ pub fn stability_window(genesis: &CardanoGenesis) -> u64 { /// /// Similar to `mutable_slots` but with 4 instead of 3 as the constant. See the following issue for /// refference: https://github.com/IntersectMBO/cardano-ledger/issues/1914 -pub fn randomness_stability_window(genesis: &CardanoGenesis) -> u64 { +pub fn randomness_stability_window( + genesis: &CardanoGenesis, +) -> Result> { let k = genesis.byron.protocol_consts.k as f64; - let f = genesis.shelley.active_slots_coeff.unwrap() as f64; - ((4.0 * k) / f).ceil() as u64 + let f = genesis + .shelley + .active_slots_coeff + .ok_or_else(|| ChainError::GenesisFieldMissing("active_slots_coeff".to_string()))? + as f64; + Ok(((4.0 * k) / f).ceil() as u64) } /// Get the window of slots used to calculate eta_h for epoch nonce calculation. /// /// This is supposed be `randomness_stability_window` but due to a bug in the code it is dependant /// on the protocol. See https://github.com/IntersectMBO/cardano-ledger/issues/1914. 
-pub fn nonce_stability_window(protocol: u16, genesis: &CardanoGenesis) -> u64 { +pub fn nonce_stability_window( + protocol: u16, + genesis: &CardanoGenesis, +) -> Result> { if protocol >= 9 { randomness_stability_window(genesis) } else { @@ -61,8 +78,11 @@ pub fn nonce_stability_window(protocol: u16, genesis: &CardanoGenesis) -> u64 { /// uses the security window guarantee formula from consensus to calculate the /// latest slot that can be considered immutable. This is used mainly to define /// which slots can be finalized in the ledger store (aka: compaction). -pub fn lastest_immutable_slot(tip: BlockSlot, genesis: &CardanoGenesis) -> BlockSlot { - tip.saturating_sub(mutable_slots(genesis)) +pub fn lastest_immutable_slot( + tip: BlockSlot, + genesis: &CardanoGenesis, +) -> Result> { + Ok(tip.saturating_sub(mutable_slots(genesis)?)) } pub fn float_to_rational(x: f32) -> pallas::ledger::primitives::conway::RationalNumber { @@ -151,7 +171,7 @@ mod tests { let tip: BlockSlot = 1_000_000; - let result = lastest_immutable_slot(tip, &genesis); + let result = lastest_immutable_slot(tip, &genesis).unwrap(); // slot delta in hours let delta_in_hours = tip.saturating_sub(result) / (60 * 60); diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 982102a41..343a2e1a3 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -582,14 +582,16 @@ pub trait ChainLogic: Sized + Send + Sync { ) -> Result>; // TODO: remove from the interface - this is Cardano-specific - fn mutable_slots(domain: &impl Domain) -> BlockSlot; + fn mutable_slots( + domain: &impl Domain, + ) -> Result>; // TODO: remove from the interface - this is Cardano-specific fn last_immutable_slot( domain: &impl Domain, tip: BlockSlot, - ) -> BlockSlot { - tip.saturating_sub(Self::mutable_slots(domain)) + ) -> Result> { + Ok(tip.saturating_sub(Self::mutable_slots(domain)?)) } fn tx_produced_utxos(era_body: &TaggedPayload) -> Result, Self::ChainSpecificError>; @@ -714,11 +716,10 @@ pub trait 
Domain: Send + Sync + Clone + 'static { const MAX_PRUNE_SLOTS_PER_HOUSEKEEPING: u64 = 10_000; fn housekeeping(&self) -> Result> { - let max_ledger_slots = self - .storage_config() - .state - .max_history() - .unwrap_or(Self::Chain::mutable_slots(self)); + let max_ledger_slots = match self.storage_config().state.max_history() { + Some(x) => x, + None => Self::Chain::mutable_slots(self)?, + }; info!(max_ledger_slots, "pruning ledger for excess history"); diff --git a/crates/minibf/src/routes/network.rs b/crates/minibf/src/routes/network.rs index 9a2bb60dc..aa8aadcad 100644 --- a/crates/minibf/src/routes/network.rs +++ b/crates/minibf/src/routes/network.rs @@ -125,7 +125,9 @@ impl<'a> IntoModel> for ChainModelBuilder<'a> { parameters: Box::new(NetworkErasInnerParameters { epoch_length: self.genesis.shelley.epoch_length.unwrap() as i32, slot_length: self.genesis.shelley.slot_length.unwrap() as i32, - safe_zone: mutable_slots(self.genesis) as i32, + safe_zone: mutable_slots(self.genesis) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? + as i32, }), }; out.push(other.clone()); @@ -170,7 +172,9 @@ impl<'a> IntoModel> for ChainModelBuilder<'a> { parameters: Box::new(NetworkErasInnerParameters { epoch_length: era.epoch_length as i32, slot_length: era.slot_length as i32, - safe_zone: dolos_cardano::mutable_slots(self.genesis) as i32, + safe_zone: dolos_cardano::mutable_slots(self.genesis) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? 
+ as i32, }), }; diff --git a/src/bin/dolos/data/compute_nonce.rs b/src/bin/dolos/data/compute_nonce.rs index dd59a4895..7be8fe139 100644 --- a/src/bin/dolos/data/compute_nonce.rs +++ b/src/bin/dolos/data/compute_nonce.rs @@ -52,8 +52,10 @@ pub fn compute_nonce>( } let (protocol, era) = summary.protocol_and_era_for_epoch(epoch); - let largest_stable_slot = - era.epoch_start(epoch) - nonce_stability_window(*protocol, domain.genesis().as_ref()); + let largest_stable_slot = era.epoch_start(epoch) + - nonce_stability_window(*protocol, domain.genesis().as_ref()) + .into_diagnostic() + .context("missing active_slots_coeff in genesis")?; let mut nonces = Nonces::bootstrap(domain.genesis().shelley_hash); diff --git a/src/bin/dolos/init.rs b/src/bin/dolos/init.rs index 40e303292..0008812b5 100644 --- a/src/bin/dolos/init.rs +++ b/src/bin/dolos/init.rs @@ -392,7 +392,10 @@ impl ConfigEditor { // Add max rollback window for network from Genesis. if self.0.sync.max_rollback.is_none() { let genesis = network.load_included_genesis(); - self.0.sync.max_rollback = Some(mutable_slots(&genesis)); + self.0.sync.max_rollback = Some( + mutable_slots(&genesis) + .expect("built-in Cardano genesis must include active_slots_coeff"), + ); } } From e197c22f5908a885b8333437d41fccc7b26b7457 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 11:29:03 -0300 Subject: [PATCH 72/85] fix: handle genesis missing fields --- crates/cardano/src/estart/nonces.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/cardano/src/estart/nonces.rs b/crates/cardano/src/estart/nonces.rs index e64c554d5..9cb76be25 100644 --- a/crates/cardano/src/estart/nonces.rs +++ b/crates/cardano/src/estart/nonces.rs @@ -71,7 +71,7 @@ impl super::BoundaryVisitor for BoundaryVisitor { &mut self, ctx: &mut super::WorkContext, ) -> Result<(), ChainError> { - let next_slot = next_largest_stable_slot(ctx); + let next_slot = next_largest_stable_slot(ctx)?; let next_nonce = 
next_nonce(ctx); ctx.deltas.add_for_entity(NonceTransition { From 5bdaabbd1fd4576f8fb7f06027777e717892c493 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 11:36:16 -0300 Subject: [PATCH 73/85] fix: return typed validation error on missing chain context --- crates/cardano/src/lib.rs | 3 +++ crates/cardano/src/validate.rs | 24 ++++++++++++++++-------- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 863e6e2c1..fa10af1dd 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -305,6 +305,9 @@ pub enum CardanoError { #[error("invalid governance proposal params")] InvalidProposalParams, + + #[error("chain tip is not available")] + MissingChainPoint, } #[derive(Clone)] diff --git a/crates/cardano/src/validate.rs b/crates/cardano/src/validate.rs index 5d57e9004..25c52c15c 100644 --- a/crates/cardano/src/validate.rs +++ b/crates/cardano/src/validate.rs @@ -27,18 +27,26 @@ pub fn validate_tx>( let network_id = match genesis.shelley.network_id.as_ref() { Some(network) => match network.as_str() { - "Mainnet" => Some(NetworkId::Mainnet.into()), - "Testnet" => Some(NetworkId::Testnet.into()), - _ => None, + "Mainnet" => Ok(NetworkId::Mainnet.into()), + "Testnet" => Ok(NetworkId::Testnet.into()), + _ => Err(ChainError::GenesisFieldMissing("network_id".to_string())), }, - None => None, - } - .unwrap(); + None => Err(ChainError::GenesisFieldMissing("network_id".to_string())), + }?; + + let prot_magic = genesis + .shelley + .network_magic + .ok_or_else(|| ChainError::GenesisFieldMissing("network_magic".to_string()))?; + + let block_slot = tip + .ok_or(ChainError::ChainSpecific(CardanoError::MissingChainPoint))? 
+ .slot(); let env = pallas::ledger::validate::utils::Environment { prot_params: pparams, - prot_magic: genesis.shelley.network_magic.unwrap(), - block_slot: tip.clone().unwrap().slot(), + prot_magic, + block_slot, network_id, acnt: Some(pallas::ledger::validate::utils::AccountState::default()), }; From 9c5a4ed0945dcac208b70b8ad2c72f6610b829a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 11:37:16 -0300 Subject: [PATCH 74/85] fix: remove commented out error --- crates/core/src/archive.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/core/src/archive.rs b/crates/core/src/archive.rs index 6e5a998bf..ad22fd77b 100644 --- a/crates/core/src/archive.rs +++ b/crates/core/src/archive.rs @@ -158,8 +158,6 @@ pub enum ArchiveError { #[error("decoding error: {0}")] DecodingError(String), - //#[error("block decoding error")] - //BlockDecodingError(#[from] pallas::ledger::traverse::Error), #[error("entity decoding error")] EntityDecodingError(String), From 76fd39af00a9d75b6d85471bc214f9c751ac3c43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 11:55:03 -0300 Subject: [PATCH 75/85] fix: cleanup cardano errors from core --- crates/cardano/src/eras.rs | 8 +- crates/cardano/src/lib.rs | 22 +++- crates/cardano/src/model.rs | 6 +- crates/core/src/lib.rs | 114 +------------------- crates/minibf/src/lib.rs | 5 +- crates/minibf/src/mapping.rs | 6 +- crates/minibf/src/routes/epochs/mod.rs | 6 +- crates/minibf/src/routes/governance.rs | 4 +- crates/minibf/src/routes/network.rs | 7 +- crates/minibf/src/routes/pools.rs | 4 +- crates/minibf/src/routes/txs.rs | 8 +- src/serve/o7s_unix/mod.rs | 6 +- src/serve/o7s_unix/statequery.rs | 6 +- src/serve/o7s_unix/utils/account.rs | 4 +- src/serve/o7s_unix/utils/protocol_params.rs | 4 +- src/serve/o7s_unix/utils/stake_snapshots.rs | 2 +- 16 files changed, 65 insertions(+), 147 deletions(-) diff --git a/crates/cardano/src/eras.rs b/crates/cardano/src/eras.rs 
index 4ec34b316..df44f9967 100644 --- a/crates/cardano/src/eras.rs +++ b/crates/cardano/src/eras.rs @@ -241,9 +241,9 @@ pub fn log_epoch_range_to_key_range( (start_slot, end_slot, range) } -pub fn load_active_era( +pub fn load_active_era>( state: &D::State, -) -> Result<(EraProtocol, EraSummary), ChainError> { +) -> Result<(EraProtocol, EraSummary), ChainError> { let eras = state.iter_entities_typed::(EraSummary::NS, None)?; match eras.last() { @@ -252,8 +252,8 @@ pub fn load_active_era( let protocol = EraProtocol::from(key); Ok((protocol, summary)) } - Err(_) => Err(ChainError::EraNotFound), + Err(_) => Err(ChainError::ChainSpecific(crate::CardanoError::EraNotFound)), }, - None => Err(ChainError::EraNotFound), + None => Err(ChainError::ChainSpecific(crate::CardanoError::EraNotFound)), } } diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index fa10af1dd..51308d591 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -308,6 +308,18 @@ pub enum CardanoError { #[error("chain tip is not available")] MissingChainPoint, + + #[error("protocol params not found: {0}")] + PParamsNotFound(String), + + #[error("no active epoch")] + NoActiveEpoch, + + #[error("era not found")] + EraNotFound, + + #[error("epoch value version not found for epoch {0}")] + EpochValueVersionNotFound(u64), } #[derive(Clone)] @@ -653,21 +665,21 @@ impl dolos_core::ChainLogic for CardanoLogic { } } -pub fn load_effective_pparams( +pub fn load_effective_pparams>( state: &D::State, -) -> Result> { +) -> Result> { let epoch = load_epoch::(state)?; let active = epoch.pparams.unwrap_live(); Ok(active.clone()) } -pub fn load_epoch( +pub fn load_epoch>( state: &D::State, -) -> Result> { +) -> Result> { let epoch = state .read_entity_typed::(EpochState::NS, &EntityKey::from(CURRENT_EPOCH_KEY))? 
- .ok_or(ChainError::NoActiveEpoch)?; + .ok_or(ChainError::ChainSpecific(CardanoError::NoActiveEpoch))?; Ok(epoch) } diff --git a/crates/cardano/src/model.rs b/crates/cardano/src/model.rs index b113720ec..6673f135d 100644 --- a/crates/cardano/src/model.rs +++ b/crates/cardano/src/model.rs @@ -307,7 +307,9 @@ where pub fn try_snapshot_at(&self, epoch: Epoch) -> Result<&T, ChainError> { match self.snapshot_at(epoch) { Some(value) => Ok(value), - None => Err(ChainError::EpochValueVersionNotFound(epoch)), + None => Err(ChainError::ChainSpecific( + crate::CardanoError::EpochValueVersionNotFound(epoch), + )), } } } @@ -1234,7 +1236,7 @@ macro_rules! ensure_pparam { ($kind:ident, $ty:ty) => { paste::paste! { pub fn [](&self) -> Result<$ty, ChainError> { - self.$kind().ok_or(ChainError::PParamsNotFound(stringify!($kind).to_string())) + self.$kind().ok_or(ChainError::ChainSpecific(crate::CardanoError::PParamsNotFound(stringify!($kind).to_string()))) } } }; diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 343a2e1a3..591f2a58e 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -8,14 +8,6 @@ //! be processed together). A batch is usually split into chunks for parallel //! processing. 
-//use pallas::{ -// codec::minicbor::{self, Decode, Encode}, -// crypto::hash::{Hash, Hasher}, -// ledger::{ -// primitives::Epoch, -// traverse::{MultiEraInput, MultiEraOutput, MultiEraTx, MultiEraUpdate}, -// }, -//}; use serde::{Deserialize, Serialize}; use std::{ collections::{HashMap, HashSet}, @@ -126,7 +118,6 @@ impl TaggedPayload { pub fn bytes(&self) -> &[u8] { &self.1 } - } impl AsRef<[u8]> for TaggedPayload { @@ -147,54 +138,6 @@ impl From for (u16, Vec) { } } - -//impl From> for TaggedPayload { -// fn from(value: MultiEraOutput<'_>) -> Self { -// TaggedPayload(value.era().into(), value.encode()) -// } -//} -// -//impl<'a> TryFrom<&'a TaggedPayload> for MultiEraOutput<'a> { -// type Error = pallas::codec::minicbor::decode::Error; -// -// fn try_from(value: &'a TaggedPayload) -> Result { -// let era = value.0.try_into().expect("era out of range"); -// MultiEraOutput::decode(era, &value.1) -// } -//} -// -//impl<'a> TryFrom<&'a TaggedPayload> for MultiEraTx<'a> { -// type Error = pallas::codec::minicbor::decode::Error; -// -// fn try_from(value: &'a TaggedPayload) -> Result { -// let era = value.0.try_into().expect("era out of range"); -// MultiEraTx::decode_for_era(era, &value.1) -// } -//} -// -//impl TryFrom for MultiEraUpdate<'_> { -// type Error = pallas::codec::minicbor::decode::Error; -// -// fn try_from(value: TaggedPayload) -> Result { -// let era = value.0.try_into().expect("era out of range"); -// MultiEraUpdate::decode_for_era(era, &value.1) -// } -//} -// -//impl From<&MultiEraInput<'_>> for TxoRef { -// fn from(value: &MultiEraInput<'_>) -> Self { -// TxoRef(*value.hash(), value.index() as u32) -// } -//} -// -//impl From for Vec { -// fn from(value: TxoRef) -> Self { -// let mut bytes = value.0.to_vec(); -// bytes.extend_from_slice(value.1.to_be_bytes().as_slice()); -// bytes -// } -//} - #[derive(Debug, Eq, PartialEq, Hash, Clone, Serialize, Deserialize)] pub struct TxoRef(pub TxHash, pub TxoIdx); @@ -410,82 +353,29 @@ pub type Phase2Log = 
Vec; #[derive(Debug, Error)] pub enum ChainError { - // keep #[error("can't receive block until previous work is completed")] CantReceiveBlock(RawBlock), - // keep #[error(transparent)] BrokenInvariant(#[from] BrokenInvariant), - // ChainSpecific - //#[error("decoding error")] - //DecodingError(#[from] pallas::ledger::traverse::Error), - - //// ChainSpecifci - //#[error("cbor error")] - //CborDecodingError(#[from] pallas::codec::minicbor::decode::Error), #[error("invalid namespace: {0}")] InvalidNamespace(Namespace), - // Chain specific - //#[error("address decoding error")] - //AddressDecoding(#[from] pallas::ledger::addresses::Error), - - // chain specific - // TODO: check StateError #[error(transparent)] StateError(#[from] StateError), - // keep #[error(transparent)] IndexError(#[from] IndexError), - // keep #[error(transparent)] ArchiveError(#[from] ArchiveError), - // keep ? #[error("genesis field missing: {0}")] GenesisFieldMissing(String), - // keep ? - #[error("protocol params not found: {0}")] - PParamsNotFound(String), - - // keep - #[error("no active epoch")] - NoActiveEpoch, - - // keep -> maybe rename? - #[error("era not found")] - EraNotFound, - - // keep -> maybe rename? - #[error("epoch value version not found for epoch {0}")] - EpochValueVersionNotFound(Epoch), - - // keep? idk. too cardano - //#[error("missing rewards")] - //MissingRewards, - - // keep? too cardano - //#[error("invalid pool params")] - //InvalidPoolParams, - - // keep? 
too cardano - //#[error("invalid proposal params")] - //InvalidProposalParams, #[error(transparent)] ChainSpecific(E), - // #[error("phase-1 script rejected the transaction: {0}")] - // Phase1ValidationRejected(#[from] pallas::ledger::validate::utils::ValidationError), - - // #[error("couldn't evaluate phase-2 script: {0}")] - // Phase2EvaluationError(String), - - // #[error("phase-2 script rejected the transaction")] - // Phase2ValidationRejected(Phase2Log), } pub trait Genesis: Clone + Send + Sync + 'static {} @@ -594,7 +484,9 @@ pub trait ChainLogic: Sized + Send + Sync { Ok(tip.saturating_sub(Self::mutable_slots(domain)?)) } - fn tx_produced_utxos(era_body: &TaggedPayload) -> Result, Self::ChainSpecificError>; + fn tx_produced_utxos( + era_body: &TaggedPayload, + ) -> Result, Self::ChainSpecificError>; fn tx_consumed_ref(era_body: &TaggedPayload) -> Result, Self::ChainSpecificError>; fn find_tx_in_block( diff --git a/crates/minibf/src/lib.rs b/crates/minibf/src/lib.rs index fe01ff352..4247770ac 100644 --- a/crates/minibf/src/lib.rs +++ b/crates/minibf/src/lib.rs @@ -110,7 +110,10 @@ impl Facade { Ok(summary) } - pub fn get_current_effective_pparams(&self) -> Result { + pub fn get_current_effective_pparams(&self) -> Result + where + D: Domain, + { let pparams = dolos_cardano::load_effective_pparams::(self.state()) .map_err(log_and_500("failed to load effective pparams"))?; diff --git a/crates/minibf/src/mapping.rs b/crates/minibf/src/mapping.rs index 4a7cb03f2..81c56e7e5 100644 --- a/crates/minibf/src/mapping.rs +++ b/crates/minibf/src/mapping.rs @@ -54,8 +54,8 @@ use blockfrost_openapi::models::{ }; use dolos_cardano::{ - pallas_extras, pallas_hash_to_core, AccountState, ChainSummary, DRepState, PParamsSet, - PoolHash, PoolState, + pallas_extras, pallas_hash_to_core, AccountState, CardanoError, ChainSummary, DRepState, + PParamsSet, PoolHash, PoolState, }; use dolos_core::{BlockSlot, Domain, TaggedPayload, TxHash, TxOrder, TxoIdx, TxoRef}; @@ -615,7 +615,7 @@ 
impl<'a> TxModelBuilder<'a> { } } - pub fn with_historical_pparams( + pub fn with_historical_pparams>( self, facade: &Facade, ) -> Result { diff --git a/crates/minibf/src/routes/epochs/mod.rs b/crates/minibf/src/routes/epochs/mod.rs index cc6f58efc..a9e50403a 100644 --- a/crates/minibf/src/routes/epochs/mod.rs +++ b/crates/minibf/src/routes/epochs/mod.rs @@ -6,7 +6,7 @@ use axum::{ use blockfrost_openapi::models::epoch_param_content::EpochParamContent; use pallas::ledger::{primitives::Epoch, traverse::MultiEraBlock}; -use dolos_cardano::CardanoGenesis; +use dolos_cardano::{CardanoError, CardanoGenesis}; use dolos_core::{archive::Skippable as _, ArchiveStore, Domain}; use crate::{ @@ -19,7 +19,7 @@ use crate::{ pub mod cost_models; pub mod mapping; -pub async fn latest_parameters>( +pub async fn latest_parameters>( State(domain): State>, ) -> Result, Error> { let tip = domain.get_tip_slot()?; @@ -42,7 +42,7 @@ pub async fn latest_parameters>( Ok(model.into_response()?) } -pub async fn by_number_parameters>( +pub async fn by_number_parameters>( State(domain): State>, Path(epoch): Path, ) -> Result, Error> { diff --git a/crates/minibf/src/routes/governance.rs b/crates/minibf/src/routes/governance.rs index 9c61e2ed1..1e67f2f0f 100644 --- a/crates/minibf/src/routes/governance.rs +++ b/crates/minibf/src/routes/governance.rs @@ -3,7 +3,7 @@ use axum::{ http::StatusCode, Json, }; -use dolos_cardano::{model::DRepState, pallas_extras, ChainSummary, PParamsSet}; +use dolos_cardano::{model::DRepState, pallas_extras, CardanoError, ChainSummary, PParamsSet}; use dolos_core::{ArchiveStore as _, BlockSlot, Domain}; use pallas::ledger::primitives::Epoch; @@ -177,7 +177,7 @@ impl<'a> IntoModel for DrepModelBuilder< } } -pub async fn drep_by_id( +pub async fn drep_by_id>( Path(drep): Path, State(domain): State>, ) -> Result, StatusCode> diff --git a/crates/minibf/src/routes/network.rs b/crates/minibf/src/routes/network.rs index aa8aadcad..2315d7f9b 100644 --- 
a/crates/minibf/src/routes/network.rs +++ b/crates/minibf/src/routes/network.rs @@ -7,7 +7,8 @@ use blockfrost_openapi::models::{ network_supply::NetworkSupply, }; use dolos_cardano::{ - model::EpochState, mutable_slots, AccountState, CardanoGenesis, EraProtocol, EraSummary, + model::EpochState, mutable_slots, AccountState, CardanoError, CardanoGenesis, EraProtocol, + EraSummary, FixedNamespace, }; use dolos_core::{BlockSlot, Domain, StateStore}; @@ -253,7 +254,7 @@ impl<'a> IntoModel for NetworkModelBuilder<'a> { } } -fn compute_network_sync>( +fn compute_network_sync>( domain: Facade, ) -> Result where @@ -292,7 +293,7 @@ where pub async fn naked(State(domain): State>) -> Result, StatusCode> where Option: From, - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { const TTL: std::time::Duration = std::time::Duration::from_secs(30); diff --git a/crates/minibf/src/routes/pools.rs b/crates/minibf/src/routes/pools.rs index 86f69cb16..0d0c2f5cc 100644 --- a/crates/minibf/src/routes/pools.rs +++ b/crates/minibf/src/routes/pools.rs @@ -11,7 +11,7 @@ use blockfrost_openapi::models::{ }; use dolos_cardano::{ model::{AccountState, PoolState}, - CardanoGenesis, FixedNamespace, PoolDelegation, StakeLog, + CardanoError, CardanoGenesis, FixedNamespace, PoolDelegation, StakeLog, }; use dolos_core::{ArchiveStore, BlockSlot, Domain, EntityKey, TemporalKey}; use futures::future::join_all; @@ -39,7 +39,7 @@ fn decode_pool_id(pool_id: &str) -> Result, Error> { Err(Error::Code(StatusCode::BAD_REQUEST)) } -pub async fn all_extended( +pub async fn all_extended>( Query(params): Query, State(domain): State>, ) -> Result>, Error> diff --git a/crates/minibf/src/routes/txs.rs b/crates/minibf/src/routes/txs.rs index 2a77d9ecd..377c663a9 100644 --- a/crates/minibf/src/routes/txs.rs +++ b/crates/minibf/src/routes/txs.rs @@ -32,7 +32,11 @@ pub async fn by_hash( State(domain): State>, ) -> Result, StatusCode> where - D: Domain + Clone + Send + Sync + 
'static, + D: Domain + + Clone + + Send + + Sync + + 'static, Option: From, Option: From, Option: From, @@ -168,7 +172,7 @@ pub async fn by_hash_redeemers( State(domain): State>, ) -> Result>, StatusCode> where - D: Domain + Clone + Send + Sync + 'static, + D: Domain + Clone + Send + Sync + 'static, { let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; diff --git a/src/serve/o7s_unix/mod.rs b/src/serve/o7s_unix/mod.rs index 21bdeef4c..7e0abc33c 100644 --- a/src/serve/o7s_unix/mod.rs +++ b/src/serve/o7s_unix/mod.rs @@ -20,7 +20,7 @@ pub struct DriverConfig { //#[cfg(test)] //mod tests; -async fn handle_session, C: CancelToken>( +async fn handle_session, C: CancelToken>( domain: D, connection: NodeServer, cancel: C, @@ -56,7 +56,7 @@ async fn handle_session, C: C } async fn accept_client_connections< - D: Domain, + D: Domain, C: CancelToken, >( domain: D, @@ -92,7 +92,7 @@ async fn accept_client_connections< pub struct Driver; -impl, C: CancelToken> dolos_core::Driver +impl, C: CancelToken> dolos_core::Driver for Driver { type Config = DriverConfig; diff --git a/src/serve/o7s_unix/statequery.rs b/src/serve/o7s_unix/statequery.rs index 91244edf5..ca56afe5d 100644 --- a/src/serve/o7s_unix/statequery.rs +++ b/src/serve/o7s_unix/statequery.rs @@ -18,13 +18,13 @@ use utils::{ build_stake_snapshots_response, build_utxo_by_address_response, }; -pub struct Session> { +pub struct Session> { domain: D, connection: localstate::Server, acquired_point: Option, } -impl> Session { +impl> Session { fn tip_cursor(&self) -> Result { let point = self .domain @@ -407,7 +407,7 @@ impl> Session { } } -pub async fn handle_session, C: CancelToken>( +pub async fn handle_session, C: CancelToken>( domain: D, connection: localstate::Server, cancel: C, diff --git a/src/serve/o7s_unix/utils/account.rs b/src/serve/o7s_unix/utils/account.rs index b14490501..8f73bd67b 100644 --- a/src/serve/o7s_unix/utils/account.rs +++ b/src/serve/o7s_unix/utils/account.rs @@ -5,7 +5,9 @@ use 
pallas::network::miniprotocols::localstate::queries_v16 as q16; use tracing::debug; /// Build response for GetAccountState query (treasury and reserves) -pub fn build_account_state_response(domain: &D) -> Result { +pub fn build_account_state_response>( + domain: &D, +) -> Result { let epoch_state = load_epoch::(domain.state()) .map_err(|e| Error::server(format!("failed to load epoch state: {}", e)))?; diff --git a/src/serve/o7s_unix/utils/protocol_params.rs b/src/serve/o7s_unix/utils/protocol_params.rs index 7097b52e1..67a8b8f42 100644 --- a/src/serve/o7s_unix/utils/protocol_params.rs +++ b/src/serve/o7s_unix/utils/protocol_params.rs @@ -3,7 +3,9 @@ use dolos_cardano::load_effective_pparams; use pallas::codec::utils::{AnyUInt, KeyValuePairs}; use pallas::network::miniprotocols::localstate::queries_v16 as q16; -pub fn build_protocol_params(domain: &D) -> Result { +pub fn build_protocol_params>( + domain: &D, +) -> Result { let pparams = load_effective_pparams::(domain.state()) .map_err(|e| Error::server(format!("failed to load protocol params: {}", e)))?; diff --git a/src/serve/o7s_unix/utils/stake_snapshots.rs b/src/serve/o7s_unix/utils/stake_snapshots.rs index 5ca5726ef..0483a6420 100644 --- a/src/serve/o7s_unix/utils/stake_snapshots.rs +++ b/src/serve/o7s_unix/utils/stake_snapshots.rs @@ -9,7 +9,7 @@ use pallas::network::miniprotocols::localtxsubmission::SMaybe; use std::collections::{BTreeSet, HashMap}; use tracing::debug; -pub fn build_stake_snapshots_response( +pub fn build_stake_snapshots_response>( domain: &D, pools_filter: &SMaybe, ) -> Result { From a29aee8e7e189b13f9c2e42298451e1db50b21b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 12:06:53 -0300 Subject: [PATCH 76/85] fix: add static to chainspecificerror on chainlogic and workunit --- crates/core/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 591f2a58e..650bb5fc3 100644 --- 
a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -399,7 +399,7 @@ pub trait ChainLogic: Sized + Send + Sync { type Utxo: Sized + Send + Sync; type Delta: EntityDelta; type Genesis: Genesis; - type ChainSpecificError: std::error::Error + Send + Sync; + type ChainSpecificError: std::error::Error + Send + Sync + 'static; /// The concrete work unit type produced by this chain logic. type WorkUnit>: WorkUnit; @@ -565,7 +565,7 @@ pub trait Domain: Send + Sync + Clone + 'static { type Entity: Entity; type EntityDelta: EntityDelta + std::fmt::Debug; type Genesis: Genesis; - type ChainSpecificError: std::error::Error + Send + Sync; + type ChainSpecificError: std::error::Error + Send + Sync + 'static; type Chain: ChainLogic< Delta = Self::EntityDelta, From 23d15c868683f714cc144881a561375e16e0e2a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 12:39:15 -0300 Subject: [PATCH 77/85] fix: core uses TxHash for methods that take TxHash as input --- crates/cardano/src/lib.rs | 4 +- crates/core/src/async_query.rs | 13 +++---- crates/core/src/lib.rs | 6 +-- crates/minibf/src/lib.rs | 12 +++--- crates/minibf/src/routes/addresses.rs | 4 +- crates/minibf/src/routes/assets.rs | 10 ++--- crates/minibf/src/routes/txs.rs | 54 +++++++++++++-------------- crates/minibf/src/routes/utxos.rs | 2 +- crates/minikupo/src/routes/matches.rs | 4 +- src/serve/grpc/query.rs | 9 ++++- 10 files changed, 59 insertions(+), 59 deletions(-) diff --git a/crates/cardano/src/lib.rs b/crates/cardano/src/lib.rs index 51308d591..8f6c3c6af 100644 --- a/crates/cardano/src/lib.rs +++ b/crates/cardano/src/lib.rs @@ -652,14 +652,14 @@ impl dolos_core::ChainLogic for CardanoLogic { } fn find_tx_in_block( block: &[u8], - tx_hash: &[u8], + tx_hash: &dolos_core::TxHash, ) -> Result, Self::ChainSpecificError> { let block = MultiEraBlock::decode(block)?; let result = block .txs() .iter() .enumerate() - .find(|(_, tx)| tx.hash().as_slice() == tx_hash) + .find(|(_, tx)| 
tx.hash().as_slice() == tx_hash.as_slice()) .map(|(idx, tx)| (TaggedPayload(block.era().into(), tx.encode()), idx)); Ok(result) } diff --git a/crates/core/src/async_query.rs b/crates/core/src/async_query.rs index 690eaeb1c..b73f18b16 100644 --- a/crates/core/src/async_query.rs +++ b/crates/core/src/async_query.rs @@ -4,7 +4,8 @@ use tokio::sync::Semaphore; use crate::{ archive::ArchiveStore, indexes::IndexStore, ArchiveError, BlockBody, BlockSlot, ChainError, - ChainLogic, ChainPoint, Domain, DomainError, IndexError, TagDimension, TaggedPayload, TxOrder, + ChainLogic, ChainPoint, Domain, DomainError, IndexError, TagDimension, TaggedPayload, TxHash, + TxOrder, }; #[derive(Debug, Clone)] @@ -113,12 +114,11 @@ where pub async fn block_by_tx_hash( &self, - tx_hash: Vec, + tx_hash: TxHash, ) -> Result, DomainError> { - let tx_hash_lookup = tx_hash.clone(); let Some(raw) = self .run_blocking(move |domain| { - let slot = domain.indexes().slot_by_tx_hash(&tx_hash_lookup)?; + let slot = domain.indexes().slot_by_tx_hash(tx_hash.as_ref())?; let Some(slot) = slot else { return Ok(None); }; @@ -138,12 +138,11 @@ where pub async fn tx_cbor( &self, - tx_hash: Vec, + tx_hash: TxHash, ) -> Result, DomainError> { - let tx_hash_lookup = tx_hash.clone(); let Some(raw) = self .run_blocking(move |domain| { - let slot = domain.indexes().slot_by_tx_hash(&tx_hash_lookup)?; + let slot = domain.indexes().slot_by_tx_hash(tx_hash.as_ref())?; let Some(slot) = slot else { return Ok(None); }; diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 650bb5fc3..19cbee68c 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -189,10 +189,6 @@ impl FromStr for TxoRef { } } -// TODO: remove legacy -// #[derive(Debug, Eq, PartialEq, Hash)] -// pub struct ChainPoint(pub BlockSlot, pub BlockHash); - #[derive(Debug, Error)] pub enum BrokenInvariant { #[error("missing utxo {0:?}")] @@ -491,7 +487,7 @@ pub trait ChainLogic: Sized + Send + Sync { fn find_tx_in_block( block: &[u8], - 
tx_hash: &[u8], + tx_hash: &TxHash, ) -> Result, Self::ChainSpecificError>; // Validate a transaction against the current ledger state. diff --git a/crates/minibf/src/lib.rs b/crates/minibf/src/lib.rs index 4247770ac..43bcf49ab 100644 --- a/crates/minibf/src/lib.rs +++ b/crates/minibf/src/lib.rs @@ -6,7 +6,7 @@ use axum::{ }; use dolos_cardano::{ model::{AccountState, AssetState, DRepState, EpochState, FixedNamespace, PoolState}, - CardanoError, CardanoGenesis, ChainSummary, PParamsSet, + pallas_hash_to_core, CardanoError, CardanoGenesis, ChainSummary, PParamsSet, }; use pallas::{ crypto::hash::Hash, @@ -22,7 +22,7 @@ use tracing::Level; use dolos_core::{ config::MinibfConfig, ArchiveStore as _, AsyncQueryFacade, BlockSlot, CancelToken, Domain, Entity, EntityKey, TaggedPayload, LogKey, ServeError, StateError, StateStore as _, SubmitExt, - TemporalKey, TxOrder, + TemporalKey, TxHash, TxOrder, }; mod cache; @@ -70,13 +70,13 @@ impl Facade { pub async fn get_block_by_tx_hash( &self, - tx_hash: &[u8], + tx_hash: TxHash, ) -> Result<(Vec, TxOrder), StatusCode> where D: Clone + Send + Sync + 'static, { self.query() - .block_by_tx_hash(tx_hash.to_vec()) + .block_by_tx_hash(tx_hash) .await .map_err(log_and_500("failed to query block by tx hash"))? 
.ok_or(StatusCode::NOT_FOUND) @@ -160,7 +160,7 @@ impl Facade { { let tx = self .query() - .tx_cbor(hash.as_slice().to_vec()) + .tx_cbor(pallas_hash_to_core(hash)) .await .map_err(log_and_500("failed to fetch tx cbor"))?; @@ -194,7 +194,7 @@ impl Facade { for hash in hashes.into_iter() { let block = self .query() - .block_by_tx_hash(hash.as_slice().to_vec()) + .block_by_tx_hash(pallas_hash_to_core(hash)) .await .map_err(log_and_500("failed to fetch block_with_tx batch"))?; if let Some(block) = block { diff --git a/crates/minibf/src/routes/addresses.rs b/crates/minibf/src/routes/addresses.rs index e1ed3504c..79d0fcc4a 100644 --- a/crates/minibf/src/routes/addresses.rs +++ b/crates/minibf/src/routes/addresses.rs @@ -20,7 +20,7 @@ use pallas::ledger::{ use dolos_cardano::{ indexes::{AsyncCardanoQueryExt, CardanoIndexExt, SlotOrder}, - pallas_extras, CardanoError, ChainSummary, + pallas_extras, pallas_hash_to_core, CardanoError, ChainSummary, }; use dolos_core::{BlockBody, BlockSlot, Domain, TaggedPayload, StateStore as _, TxoRef}; use pallas::ledger::traverse::Era; @@ -410,7 +410,7 @@ where for input in tx.consumes() { if let Some(TaggedPayload(era, cbor)) = domain .query() - .tx_cbor(input.hash().as_slice().to_vec()) + .tx_cbor(pallas_hash_to_core(*input.hash())) .await .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? 
{ diff --git a/crates/minibf/src/routes/assets.rs b/crates/minibf/src/routes/assets.rs index a14c9c6f9..ac331dd21 100644 --- a/crates/minibf/src/routes/assets.rs +++ b/crates/minibf/src/routes/assets.rs @@ -16,7 +16,7 @@ use dolos_cardano::{ cip68::{cip_68_reference_asset, encode_to_hex, parse_cip68_metadata_map, Cip68TokenStandard}, indexes::{AsyncCardanoQueryExt, CardanoIndexExt, SlotOrder}, model::AssetState, - CardanoError, ChainSummary, + pallas_hash_to_core, CardanoError, ChainSummary, }; use dolos_core::{BlockSlot, Domain, TaggedPayload, IndexStore as _, StateStore as _}; use futures_util::StreamExt; @@ -339,7 +339,7 @@ impl AssetModelBuilder { if let Some(metadata_tx) = ref_state.and_then(|state| state.metadata_tx) { if let Some(TaggedPayload(era, cbor)) = domain .query() - .tx_cbor(metadata_tx.as_slice().to_vec()) + .tx_cbor(pallas_hash_to_core(metadata_tx)) .await .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? { @@ -358,7 +358,7 @@ impl AssetModelBuilder { if Some(metadata_tx) != self.asset_state.initial_tx { let metadata_cbor = domain .query() - .tx_cbor(metadata_tx.as_slice().to_vec()) + .tx_cbor(pallas_hash_to_core(metadata_tx)) .await .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; if metadata_cbor.is_some() { @@ -488,7 +488,7 @@ where let initial_tx = if let Some(initial_tx) = asset_state.initial_tx { domain .query() - .tx_cbor(initial_tx.as_slice().to_vec()) + .tx_cbor(pallas_hash_to_core(initial_tx)) .await .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? } else { @@ -622,7 +622,7 @@ where for input in tx.consumes() { if let Some(TaggedPayload(era, cbor)) = domain .query() - .tx_cbor(input.hash().as_slice().to_vec()) + .tx_cbor(pallas_hash_to_core(*input.hash())) .await .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? 
{ diff --git a/crates/minibf/src/routes/txs.rs b/crates/minibf/src/routes/txs.rs index 377c663a9..ad8566f4f 100644 --- a/crates/minibf/src/routes/txs.rs +++ b/crates/minibf/src/routes/txs.rs @@ -19,7 +19,7 @@ use dolos_cardano::{ core_hash_to_pallas, indexes::AsyncCardanoQueryExt, pallas_hash_to_core, AccountState, CardanoError, CardanoGenesis, DRepState, PoolState, }; -use dolos_core::Domain; +use dolos_core::{Domain, TxHash}; use crate::{ hacks, log_and_500, @@ -41,12 +41,12 @@ where Option: From, Option: From, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = match domain.get_block_by_tx_hash(&hash).await { + let (raw, order) = match domain.get_block_by_tx_hash(hash).await { Ok(block) => block, Err(StatusCode::NOT_FOUND) => { - return Ok(Json(hacks::genesis_tx_content_for_hash(&domain, &hash)?)); + return Ok(Json(hacks::genesis_tx_content_for_hash(&domain, hash.as_ref())?)); } Err(err) => return Err(err), }; @@ -69,9 +69,9 @@ pub async fn by_hash_cbor( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let tx = TxModelBuilder::new(&raw, order)?; @@ -89,13 +89,13 @@ where + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = match domain.get_block_by_tx_hash(&hash).await { + let (raw, order) = match domain.get_block_by_tx_hash(hash).await { Ok(block) => block, Err(StatusCode::NOT_FOUND) => { return Ok(Json( - hacks::genesis_tx_utxos_for_hash(&domain, &hash).await?, + hacks::genesis_tx_utxos_for_hash(&domain, hash.as_ref()).await?, )); } Err(err) => return 
Err(err), @@ -142,9 +142,9 @@ pub async fn by_hash_metadata( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let tx = TxModelBuilder::new(&raw, order)?; @@ -158,9 +158,9 @@ pub async fn by_hash_metadata_cbor( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let builder = TxModelBuilder::new(&raw, order)?; @@ -174,9 +174,9 @@ pub async fn by_hash_redeemers( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let chain = domain.get_chain_summary()?; @@ -205,9 +205,9 @@ pub async fn by_hash_withdrawals( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let tx = TxModelBuilder::new(&raw, order)?; @@ -221,9 +221,9 @@ pub async fn by_hash_delegations( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + 
let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let network = domain.get_network_id()?; let chain = domain.get_chain_summary()?; @@ -242,9 +242,9 @@ pub async fn by_hash_mirs( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let network = domain.get_network_id()?; @@ -260,9 +260,9 @@ pub async fn by_hash_pool_retires( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let tx = TxModelBuilder::new(&raw, order)?; @@ -277,11 +277,11 @@ where D: Domain + Clone + Send + Sync + 'static, Option: From, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; let network = domain.get_network_id()?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let chain = domain.get_chain_summary()?; @@ -301,11 +301,11 @@ pub async fn by_hash_stakes( where D: Domain + Clone + Send + Sync + 'static, { - let hash = hex::decode(tx_hash).map_err(|_| StatusCode::BAD_REQUEST)?; + let hash = tx_hash.parse::().map_err(|_| StatusCode::BAD_REQUEST)?; let network = domain.get_network_id()?; - let (raw, order) = domain.get_block_by_tx_hash(&hash).await?; + let (raw, order) = domain.get_block_by_tx_hash(hash).await?; let tx = TxModelBuilder::new(&raw, order)?.with_network(network); diff --git a/crates/minibf/src/routes/utxos.rs b/crates/minibf/src/routes/utxos.rs index f80a4f930..855e3ed97 
100644 --- a/crates/minibf/src/routes/utxos.rs +++ b/crates/minibf/src/routes/utxos.rs @@ -45,7 +45,7 @@ where async move { match domain .query() - .block_by_tx_hash(tx.as_slice().to_vec()) + .block_by_tx_hash(tx) .await { Ok(Some((cbor, txorder))) => { diff --git a/crates/minikupo/src/routes/matches.rs b/crates/minikupo/src/routes/matches.rs index 2b0f4e509..0dd31f97d 100644 --- a/crates/minikupo/src/routes/matches.rs +++ b/crates/minikupo/src/routes/matches.rs @@ -361,7 +361,7 @@ async fn refs_for_output_ref_pattern( patterns::OutputIndexPattern::Any => { let Some(TaggedPayload(era, cbor)) = facade .query() - .tx_cbor(tx_id.to_vec()) + .tx_cbor(pallas_hash_to_core(tx_hash)) .await .map_err(|_| MatchError::Internal)? else { @@ -454,7 +454,7 @@ async fn build_matches { let Some((raw_block, tx_index)) = facade .query() - .block_by_tx_hash(tx_hash.as_slice().to_vec()) + .block_by_tx_hash(tx_hash) .await .map_err(|_| MatchError::Internal)? else { diff --git a/src/serve/grpc/query.rs b/src/serve/grpc/query.rs index ef67e8b01..625e063ed 100644 --- a/src/serve/grpc/query.rs +++ b/src/serve/grpc/query.rs @@ -429,11 +429,16 @@ where info!("received new grpc query - read_tx"); - let tx_hash = message.hash; + let tx_hash_bytes = message.hash; + let tx_hash_arr: [u8; 32] = tx_hash_bytes + .as_ref() + .try_into() + .map_err(|_| Status::invalid_argument("invalid tx hash length"))?; + let tx_hash = dolos_core::TxHash::from(tx_hash_arr); let query = dolos_core::AsyncQueryFacade::new(self.domain.clone()); let (block_bytes, tx_index) = query - .block_by_tx_hash(tx_hash.to_vec()) + .block_by_tx_hash(tx_hash) .await .map_err(|e| Status::internal(e.to_string()))? 
.ok_or_else(|| Status::not_found("tx hash not found"))?; From 4d932bc104c8fc33a692bb4dc2f1fb82f6a87643 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 12:41:16 -0300 Subject: [PATCH 78/85] fix: dont truncate slot in test helper --- crates/core/src/lib.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 19cbee68c..13031c5a1 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -653,8 +653,7 @@ mod tests { pub fn slot_to_hash(slot: u64) -> BlockHash { let mut bytes = [0u8; 32]; - let slot_bytes = (slot as i32).to_le_bytes(); - bytes[..4].copy_from_slice(&slot_bytes); + bytes[..8].copy_from_slice(&slot.to_le_bytes()); BlockHash::new(bytes) } From a2af8cb2ebea2e55d541a2be25a7ed0079ae2c35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 12:42:04 -0300 Subject: [PATCH 79/85] fix: remove commented out code --- crates/core/src/point.rs | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/crates/core/src/point.rs b/crates/core/src/point.rs index dd5ec69fd..519e78d5b 100644 --- a/crates/core/src/point.rs +++ b/crates/core/src/point.rs @@ -92,27 +92,6 @@ impl PartialOrd for ChainPoint { } } -//impl From for ChainPoint { -// fn from(value: PallasPoint) -> Self { -// match value { -// PallasPoint::Origin => ChainPoint::Origin, -// PallasPoint::Specific(s, h) => ChainPoint::Specific(s, h.as_slice().into()), -// } -// } -//} - -//impl TryFrom for PallasPoint { -// type Error = (); -// -// fn try_from(value: ChainPoint) -> Result { -// match value { -// ChainPoint::Origin => Ok(PallasPoint::Origin), -// ChainPoint::Specific(s, h) => Ok(PallasPoint::Specific(s, h.to_vec())), -// ChainPoint::Slot(_) => Err(()), -// } -// } -//} - impl From<&T> for ChainPoint where T: Block, From 5e23a279c496bbdf240837ca664cd78db826df8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 
2026 12:43:45 -0300 Subject: [PATCH 80/85] fix: simply error mapping --- crates/minibf/src/routes/utxos.rs | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/crates/minibf/src/routes/utxos.rs b/crates/minibf/src/routes/utxos.rs index 855e3ed97..669379880 100644 --- a/crates/minibf/src/routes/utxos.rs +++ b/crates/minibf/src/routes/utxos.rs @@ -36,18 +36,13 @@ where MultiEraOutput::decode(era, &v.1).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; Ok::<_, StatusCode>((k, output)) }) - .try_collect() - .map_err(|e: StatusCode| e)?; + .try_collect()?; let tx_deps: Vec<_> = utxos.keys().map(|txoref| txoref.0).unique().collect(); let block_deps: HashMap = join_all(tx_deps.iter().map(|tx| { let tx = *tx; async move { - match domain - .query() - .block_by_tx_hash(tx) - .await - { + match domain.query().block_by_tx_hash(tx).await { Ok(Some((cbor, txorder))) => { let Ok(block) = MultiEraBlock::decode(&cbor) else { return Some(Err(StatusCode::INTERNAL_SERVER_ERROR)); From 5b309186e829bd1053bcec1f4c5dde77fe62b85e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 12:45:16 -0300 Subject: [PATCH 81/85] fix: simplify import --- crates/testing/src/blocks.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/testing/src/blocks.rs b/crates/testing/src/blocks.rs index 56f52185f..57e806276 100644 --- a/crates/testing/src/blocks.rs +++ b/crates/testing/src/blocks.rs @@ -59,7 +59,7 @@ pub fn make_conway_block(slot: BlockSlot) -> (ChainPoint, RawBlock) { let wrapper = (Era::Conway as u16, block); let raw_bytes = pallas::codec::minicbor::to_vec(&wrapper).unwrap(); - let chain_point = ChainPoint::Specific(slot, dolos_cardano::pallas_hash_to_core(hash)); + let chain_point = ChainPoint::Specific(slot, pallas_hash_to_core(hash)); (chain_point, Arc::new(raw_bytes)) } From 2898a8549cea4f49e56ad2484e169ac4f699b593 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 12:47:50 
-0300 Subject: [PATCH 82/85] fix: mempool checks if tagged payload is array of two on decoding --- crates/redb3/src/mempool.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/redb3/src/mempool.rs b/crates/redb3/src/mempool.rs index 5ff0f570e..9463b84ef 100644 --- a/crates/redb3/src/mempool.rs +++ b/crates/redb3/src/mempool.rs @@ -21,7 +21,10 @@ mod tagged_payload_codec { d: &mut minicbor::Decoder<'b>, _: &mut C, ) -> Result { - d.array()?; + let len = d.array()?; + if len != Some(2) { + return Err(minicbor::decode::Error::message("expected 2-element array for TaggedPayload")); + } let tag = d.u16()?; let payload = d.bytes()?.to_vec(); Ok(TaggedPayload(tag, payload)) From 7f51f19da9c6e12ece19a49b4c2b21f821dd9a30 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 12:57:55 -0300 Subject: [PATCH 83/85] fix: fail gracefully --- src/adapters/mod.rs | 36 ++++++++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/src/adapters/mod.rs b/src/adapters/mod.rs index 2d6ce0075..e4c020ef4 100644 --- a/src/adapters/mod.rs +++ b/src/adapters/mod.rs @@ -67,9 +67,10 @@ impl DomainAdapter { pub fn get_historical_utxos( &self, refs: &[pallas::interop::utxorpc::TxoRef], - ) -> Option { + ) -> Result, DomainError> + { if refs.is_empty() { - return Some(Default::default()); + return Ok(Some(Default::default())); } let mut result = std::collections::HashMap::new(); @@ -79,11 +80,16 @@ impl DomainAdapter { .map(|(h, i)| TxoRef(pallas_hash_to_core(h), i)) .collect(); - let iter = self.wal().iter_logs(None, None).ok()?; + let iter = self.wal().iter_logs(None, None).map_err(DomainError::WalError)?; for (_, log) in iter.rev() { for (txo_ref, era_cbor) in &log.inputs { if refs_set.contains(txo_ref) { - let era = era_cbor.0.try_into().expect("era out of range"); + let era = era_cbor.0.try_into().map_err(|_| { + DomainError::Internal(format!( + "era out of range: txo_ref={txo_ref:?}, era={}", 
+ era_cbor.0 + )) + })?; result.insert( (core_hash_to_pallas(txo_ref.0), txo_ref.1), (era, era_cbor.1.clone()), @@ -97,9 +103,9 @@ impl DomainAdapter { } if result.is_empty() { - None + Ok(None) } else { - Some(result) + Ok(Some(result)) } } } @@ -196,12 +202,22 @@ impl pallas::interop::utxorpc::LedgerContext for DomainAdapter { .map(|(h, i)| TxoRef(pallas_hash_to_core(*h), *i)) .collect(); - let some = dolos_core::StateStore::get_utxos(self.state(), refs) + let some: pallas::interop::utxorpc::UtxoMap = dolos_core::StateStore::get_utxos(self.state(), refs) .ok()? .into_iter() - .map(|(k, v)| { - let era = v.0.try_into().expect("era out of range"); - ((core_hash_to_pallas(k.0), k.1), (era, v.1.clone())) + .filter_map(|(k, v)| { + let era = v + .0 + .try_into() + .map_err(|_| { + tracing::error!( + txo_ref = ?(k.0, k.1), + era = v.0, + "era out of range during UTxO lookup, skipping entry" + ) + }) + .ok()?; + Some(((core_hash_to_pallas(k.0), k.1), (era, v.1.clone()))) }) .collect(); From cd3e1ad4f38e1c29de5df5f7af40c1d34b168a0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 12:59:15 -0300 Subject: [PATCH 84/85] fix: remove pointless boxed error --- src/bin/dolos/data/dump_logs.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/bin/dolos/data/dump_logs.rs b/src/bin/dolos/data/dump_logs.rs index 8033f17e8..3341614d5 100644 --- a/src/bin/dolos/data/dump_logs.rs +++ b/src/bin/dolos/data/dump_logs.rs @@ -624,9 +624,9 @@ fn setup_tracing_for_format(config: &RootConfig, format: OutputFormat) -> miette crate::common::setup_tracing(&config.logging, &config.telemetry) } -fn decode_stake_credential(key: &EntityKey) -> Result> { +fn decode_stake_credential(key: &EntityKey) -> Result { let mut decoder = minicbor::Decoder::new(key.as_ref()); - decoder.decode().map_err(|e| Box::new(e) as _) + decoder.decode() } fn log_slot_from_key(key: &LogKey) -> u64 { From 47cfdc3b6da9a2e8c458abd30f1607dbba9cf5ca Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Nicolas=20Ludue=C3=B1a?= Date: Tue, 7 Apr 2026 13:04:08 -0300 Subject: [PATCH 85/85] fix: handle point conversion errors --- src/relay/chainsync.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/relay/chainsync.rs b/src/relay/chainsync.rs index 04a4ad162..f50f45dfe 100644 --- a/src/relay/chainsync.rs +++ b/src/relay/chainsync.rs @@ -148,8 +148,8 @@ impl Session { let points: Vec<_> = points .into_iter() - .filter_map(|p| pallas_point_to_chain(p).ok()) - .collect(); + .map(pallas_point_to_chain) + .collect::>()?; let intersect = ChainCrawler::::start(&self.domain, &points).unwrap();