10 changes: 6 additions & 4 deletions consensus/benches/check_scripts.rs
@@ -78,6 +78,7 @@ fn mock_tx_with_payload(inputs_count: usize, non_uniq_signatures: usize, payload
}

fn benchmark_check_scripts(c: &mut Criterion) {
let flags = Default::default();
for inputs_count in [100, 50, 25, 10, 5, 2] {
for non_uniq_signatures in [0, inputs_count / 2] {
let (tx, utxos) = mock_tx_with_payload(inputs_count, non_uniq_signatures, 0);
@@ -89,7 +90,7 @@ fn benchmark_check_scripts(c: &mut Criterion) {
let cache = Cache::new(inputs_count as u64);
b.iter(|| {
cache.clear();
check_scripts_sequential(black_box(&cache), black_box(&tx.as_verifiable())).unwrap();
check_scripts_sequential(black_box(&cache), black_box(&tx.as_verifiable()), flags).unwrap();
})
});

@@ -98,7 +99,7 @@ fn benchmark_check_scripts(c: &mut Criterion) {
let cache = Cache::new(inputs_count as u64);
b.iter(|| {
cache.clear();
check_scripts_par_iter(black_box(&cache), black_box(&tx.as_verifiable())).unwrap();
check_scripts_par_iter(black_box(&cache), black_box(&tx.as_verifiable()), flags).unwrap();
})
});

@@ -110,7 +111,8 @@ fn benchmark_check_scripts(c: &mut Criterion) {
let cache = Cache::new(inputs_count as u64);
b.iter(|| {
cache.clear();
check_scripts_par_iter_pool(black_box(&cache), black_box(&tx.as_verifiable()), black_box(&pool)).unwrap();
check_scripts_par_iter_pool(black_box(&cache), black_box(&tx.as_verifiable()), black_box(&pool), flags)
.unwrap();
})
});
}
@@ -146,7 +148,7 @@ fn benchmark_check_scripts_with_payload(c: &mut Criterion) {
let cache = Cache::new(inputs_count as u64);
b.iter(|| {
cache.clear();
check_scripts_par_iter(black_box(&cache), black_box(&tx.as_verifiable())).unwrap();
check_scripts_par_iter(black_box(&cache), black_box(&tx.as_verifiable()), Default::default()).unwrap();
})
});
}
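The benchmark changes above thread a new trailing flags argument through check_scripts_sequential, check_scripts_par_iter, and check_scripts_par_iter_pool, building it once with `let flags = Default::default();` above the loops. Below is a minimal sketch of that pattern, assuming a Copy, defaultable flags type; ScriptFlagsSketch and check_scripts_sketch are illustrative names, not the crate's real API.

```rust
// Minimal sketch: a Copy + Default flags value hoisted once and passed by value,
// mirroring `let flags = Default::default();` in the benchmark above.
// ScriptFlagsSketch / check_scripts_sketch are hypothetical names.
#[derive(Clone, Copy, Default)]
struct ScriptFlagsSketch {
    covenants_enabled: bool,
}

fn check_scripts_sketch(inputs_count: usize, flags: ScriptFlagsSketch) -> Result<(), String> {
    // A covenant-aware validation path would only run when the flag is set.
    if flags.covenants_enabled {
        // covenant checks would go here
    }
    if inputs_count == 0 {
        return Err("transaction has no inputs".to_string());
    }
    Ok(())
}

fn main() {
    let flags = ScriptFlagsSketch::default(); // built once, outside the loops
    for inputs_count in [100usize, 50, 25, 10, 5, 2] {
        check_scripts_sketch(inputs_count, flags).unwrap();
    }
}
```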
6 changes: 3 additions & 3 deletions consensus/client/src/serializable/numeric.rs
@@ -271,7 +271,7 @@ impl SerializableTransaction {
outputs: outputs.into_iter().map(Into::into).collect(),
version: transaction.version,
lock_time: transaction.lock_time,
subnetwork_id: transaction.subnetwork_id.clone(),
subnetwork_id: transaction.subnetwork_id,
gas: transaction.gas,
mass: transaction.mass(),
payload: transaction.payload.clone(),
@@ -290,7 +290,7 @@ impl SerializableTransaction {
outputs,
version: inner.version,
lock_time: inner.lock_time,
subnetwork_id: inner.subnetwork_id.clone(),
subnetwork_id: inner.subnetwork_id,
gas: inner.gas,
payload: inner.payload.clone(),
mass: inner.mass,
@@ -319,7 +319,7 @@ impl SerializableTransaction {
inputs,
outputs,
lock_time: transaction.lock_time,
subnetwork_id: transaction.subnetwork_id.clone(),
subnetwork_id: transaction.subnetwork_id,
gas: transaction.gas,
mass: transaction.mass(),
payload: transaction.payload.clone(),
6 changes: 3 additions & 3 deletions consensus/client/src/serializable/string.rs
@@ -263,7 +263,7 @@ impl SerializableTransaction {
version: transaction.version,
outputs: outputs.into_iter().map(Into::into).collect(),
lock_time: transaction.lock_time.to_string(),
subnetwork_id: transaction.subnetwork_id.clone(),
subnetwork_id: transaction.subnetwork_id,
gas: transaction.gas.to_string(),
mass: transaction.mass().to_string(),
payload: transaction.payload.clone(),
@@ -281,7 +281,7 @@ impl SerializableTransaction {
outputs,
version: inner.version,
lock_time: inner.lock_time.to_string(),
subnetwork_id: inner.subnetwork_id.clone(),
subnetwork_id: inner.subnetwork_id,
gas: inner.gas.to_string(),
mass: inner.mass.to_string(),
payload: inner.payload.clone(),
@@ -310,7 +310,7 @@ impl SerializableTransaction {
inputs,
outputs,
lock_time: transaction.lock_time.to_string(),
subnetwork_id: transaction.subnetwork_id.clone(),
subnetwork_id: transaction.subnetwork_id,
gas: transaction.gas.to_string(),
mass: transaction.mass().to_string(),
payload: transaction.payload.clone(),
16 changes: 4 additions & 12 deletions consensus/client/src/transaction.rs
@@ -351,16 +351,8 @@ impl From<&Transaction> for cctx::Transaction {
inner.inputs.clone().into_iter().map(|input| input.as_ref().into()).collect::<Vec<cctx::TransactionInput>>();
let outputs: Vec<cctx::TransactionOutput> =
inner.outputs.clone().into_iter().map(|output| output.as_ref().into()).collect::<Vec<cctx::TransactionOutput>>();
cctx::Transaction::new(
inner.version,
inputs,
outputs,
inner.lock_time,
inner.subnetwork_id.clone(),
inner.gas,
inner.payload.clone(),
)
.with_mass(inner.mass)
cctx::Transaction::new(inner.version, inputs, outputs, inner.lock_time, inner.subnetwork_id, inner.gas, inner.payload.clone())
.with_mass(inner.mass)
}
}

@@ -392,7 +384,7 @@ impl Transaction {
gas: tx.gas,
payload: tx.payload.clone(),
mass: tx.mass(),
subnetwork_id: tx.subnetwork_id.clone(),
subnetwork_id: tx.subnetwork_id,
})
}

@@ -415,7 +407,7 @@ impl Transaction {
inputs,
outputs,
inner.lock_time,
inner.subnetwork_id.clone(),
inner.subnetwork_id,
inner.gas,
inner.payload.clone(),
)
10 changes: 10 additions & 0 deletions consensus/core/src/config/params.rs
@@ -229,6 +229,8 @@ pub struct OverrideParams {

/// Crescendo activation DAA score
pub crescendo_activation: Option<ForkActivation>,

pub covenants_activation: Option<ForkActivation>,
}

impl From<Params> for OverrideParams {
@@ -257,6 +259,7 @@ impl From<Params> for OverrideParams {
pruning_proof_m: Some(p.pruning_proof_m),
blockrate: Some(p.blockrate),
crescendo_activation: Some(p.crescendo_activation),
covenants_activation: Some(p.covenants_activation),
}
}
}
@@ -321,6 +324,8 @@ pub struct Params {

/// Crescendo activation DAA score
pub crescendo_activation: ForkActivation,

pub covenants_activation: ForkActivation,
}

impl Params {
@@ -489,6 +494,7 @@ impl Params {
.unwrap_or(self.pre_crescendo_target_time_per_block),

crescendo_activation: overrides.crescendo_activation.unwrap_or(self.crescendo_activation),
covenants_activation: overrides.covenants_activation.unwrap_or(self.covenants_activation),
}
}
}
@@ -599,6 +605,7 @@ pub const MAINNET_PARAMS: Params = Params {

// Roughly 2025-05-05 1500 UTC
crescendo_activation: ForkActivation::new(110_165_000),
covenants_activation: ForkActivation::never(),
};

pub const TESTNET_PARAMS: Params = Params {
@@ -654,6 +661,7 @@ pub const TESTNET_PARAMS: Params = Params {

// 18:30 UTC, March 6, 2025
crescendo_activation: ForkActivation::new(88_657_000),
covenants_activation: ForkActivation::never(),
};

pub const SIMNET_PARAMS: Params = Params {
@@ -694,6 +702,7 @@ pub const SIMNET_PARAMS: Params = Params {
pre_crescendo_target_time_per_block: TenBps::target_time_per_block(),

crescendo_activation: ForkActivation::always(),
covenants_activation: ForkActivation::never(),
};

pub const DEVNET_PARAMS: Params = Params {
@@ -732,4 +741,5 @@ pub const DEVNET_PARAMS: Params = Params {
pre_crescendo_target_time_per_block: 1000,

crescendo_activation: ForkActivation::always(),
covenants_activation: ForkActivation::never(),
};
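The new covenants_activation field follows the same shape as crescendo_activation: a DAA-score threshold that is `ForkActivation::never()` on every network for now and can be overridden through OverrideParams. A minimal sketch of the activation-score idea follows, assuming an is_active-style check; this is not the crate's real ForkActivation type, and the names are illustrative.

```rust
// Minimal sketch of a DAA-score activation gate; not the crate's real ForkActivation API.
#[derive(Clone, Copy, Debug)]
struct ActivationSketch(u64);

impl ActivationSketch {
    const fn new(daa_score: u64) -> Self { Self(daa_score) }
    const fn never() -> Self { Self(u64::MAX) } // fork never activates
    const fn always() -> Self { Self(0) }       // active from genesis
    fn is_active(self, current_daa_score: u64) -> bool {
        current_daa_score >= self.0
    }
}

fn main() {
    let covenants = ActivationSketch::never();
    assert!(!covenants.is_active(110_165_000)); // inactive even past the Crescendo score
    let crescendo = ActivationSketch::new(110_165_000);
    assert!(crescendo.is_active(110_165_000));
    let simnet = ActivationSketch::always();
    assert!(simnet.is_active(0));
}
```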
2 changes: 1 addition & 1 deletion consensus/core/src/hashing/sighash.rs
@@ -257,7 +257,7 @@ pub fn calc_schnorr_signature_hash(
.write_u8(input.0.sig_op_count)
.update(outputs_hash(tx, hash_type, reused_values, input_index))
.write_u64(tx.lock_time)
.update(&tx.subnetwork_id)
.update(tx.subnetwork_id)
.write_u64(tx.gas)
.update(payload_hash(tx, reused_values))
.write_u8(hash_type.to_u8());
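The only change in sighash.rs is passing tx.subnetwork_id by value instead of by reference, which is possible once SubnetworkId is Copy and the hasher's update method only requires AsRef<[u8]>. A small sketch under those assumptions; SinkSketch and IdSketch are hypothetical names, not the real hasher types.

```rust
// Minimal sketch: an update method bounded on AsRef<[u8]> accepts a Copy newtype
// either by value or by reference. SinkSketch and IdSketch are hypothetical names.
struct SinkSketch(Vec<u8>);

impl SinkSketch {
    fn update<A: AsRef<[u8]>>(&mut self, data: A) -> &mut Self {
        self.0.extend_from_slice(data.as_ref());
        self
    }
}

#[derive(Clone, Copy)]
struct IdSketch([u8; 20]);

impl AsRef<[u8]> for IdSketch {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

fn main() {
    let id = IdSketch([0u8; 20]);
    let mut sink = SinkSketch(Vec::new());
    sink.update(id).update(&id); // owned value and reference both satisfy AsRef<[u8]>
    assert_eq!(sink.0.len(), 40);
}
```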
58 changes: 40 additions & 18 deletions consensus/core/src/hashing/tx.rs
@@ -1,6 +1,6 @@
use super::HasherExtensions;
use crate::tx::{Transaction, TransactionId, TransactionInput, TransactionOutpoint, TransactionOutput};
use kaspa_hashes::{Hash, Hasher};
use kaspa_hashes::{Hash, HasherBase};

bitflags::bitflags! {
/// A bitmask defining which transaction fields we want to encode and which to ignore.
@@ -28,21 +28,19 @@ pub fn hash_pre_crescendo(tx: &Transaction) -> Hash {

/// Not intended for direct use by clients. Instead use `tx.id()`
pub(crate) fn id(tx: &Transaction) -> TransactionId {
// Encode the transaction, replace signature script with an empty array, skip
// sigop counts and mass commitment and hash the result.

let encoding_flags = if tx.is_coinbase() {
TxEncodingFlags::FULL
} else {
TxEncodingFlags::EXCLUDE_SIGNATURE_SCRIPT | TxEncodingFlags::EXCLUDE_MASS_COMMIT
};
let mut hasher = kaspa_hashes::TransactionID::new();
write_transaction(&mut hasher, tx, encoding_flags);
write_transaction_for_transaction_id(&mut hasher, tx);
hasher.finalize()
}

fn write_transaction_for_transaction_id<T: HasherBase>(hasher: &mut T, tx: &Transaction) {
// Encode the transaction, replace signature script with an empty array, skip
// sigop counts and mass commitment and hash the result.
write_transaction(hasher, tx, TxEncodingFlags::EXCLUDE_SIGNATURE_SCRIPT | TxEncodingFlags::EXCLUDE_MASS_COMMIT)
}

/// Write the transaction into the provided hasher according to the encoding flags
fn write_transaction<T: Hasher>(hasher: &mut T, tx: &Transaction, encoding_flags: TxEncodingFlags) {
fn write_transaction<T: HasherBase>(hasher: &mut T, tx: &Transaction, encoding_flags: TxEncodingFlags) {
hasher.update(tx.version.to_le_bytes()).write_len(tx.inputs.len());
for input in tx.inputs.iter() {
// Write the tx input
@@ -55,7 +53,7 @@ fn write_transaction<T: Hasher>(hasher: &mut T, tx: &Transaction, encoding_flags
write_output(hasher, output);
}

hasher.update(tx.lock_time.to_le_bytes()).update(&tx.subnetwork_id).update(tx.gas.to_le_bytes()).write_var_bytes(&tx.payload);
hasher.update(tx.lock_time.to_le_bytes()).update(tx.subnetwork_id).update(tx.gas.to_le_bytes()).write_var_bytes(&tx.payload);

/*
Design principles (mostly related to the new mass commitment field; see KIP-0009):
@@ -83,7 +81,7 @@ fn write_transaction<T: Hasher>(hasher: &mut T, tx: &Transaction, encoding_flags
}

#[inline(always)]
fn write_input<T: Hasher>(hasher: &mut T, input: &TransactionInput, encoding_flags: TxEncodingFlags) {
fn write_input<T: HasherBase>(hasher: &mut T, input: &TransactionInput, encoding_flags: TxEncodingFlags) {
write_outpoint(hasher, &input.previous_outpoint);
if !encoding_flags.contains(TxEncodingFlags::EXCLUDE_SIGNATURE_SCRIPT) {
hasher.write_var_bytes(input.signature_script.as_slice()).update([input.sig_op_count]);
@@ -94,18 +92,35 @@ fn write_input<T: Hasher>(hasher: &mut T, input: &TransactionInput, encoding_fla
}

#[inline(always)]
fn write_outpoint<T: Hasher>(hasher: &mut T, outpoint: &TransactionOutpoint) {
fn write_outpoint<T: HasherBase>(hasher: &mut T, outpoint: &TransactionOutpoint) {
hasher.update(outpoint.transaction_id).update(outpoint.index.to_le_bytes());
}

#[inline(always)]
fn write_output<T: Hasher>(hasher: &mut T, output: &TransactionOutput) {
fn write_output<T: HasherBase>(hasher: &mut T, output: &TransactionOutput) {
hasher
.update(output.value.to_le_bytes())
.update(output.script_public_key.version().to_le_bytes())
.write_var_bytes(output.script_public_key.script());
}

struct PreimageHasher {
buff: Vec<u8>,
}

impl HasherBase for PreimageHasher {
fn update<A: AsRef<[u8]>>(&mut self, data: A) -> &mut Self {
self.buff.extend_from_slice(data.as_ref());
self
}
}

pub fn transaction_id_preimage(tx: &Transaction) -> Vec<u8> {
let mut hasher = PreimageHasher { buff: vec![] };
write_transaction_for_transaction_id(&mut hasher, tx);
hasher.buff
}

#[cfg(test)]
mod tests {
use super::*;
@@ -173,9 +188,10 @@ mod tests {

// Test #6
tests.push(Test {
tx: Transaction::new(2, inputs.clone(), outputs.clone(), 54, subnets::SUBNETWORK_ID_COINBASE, 3, Vec::new()),
expected_id: "3fad809b11bd5a4af027aa4ac3fbde97e40624fd40965ba3ee1ee1b57521ad10",
expected_hash: "b4eb5f0cab5060bf336af5dcfdeb2198cc088b693b35c87309bd3dda04f1cfb9",
// Valid coinbase transactions have no inputs.
tx: Transaction::new(2, vec![], outputs.clone(), 54, subnets::SUBNETWORK_ID_COINBASE, 3, Vec::new()),
expected_id: "f16306e20f6a28576e526092979b2bf3fc53b933fa6482c71b7a06c489495910",
expected_hash: "968b9effa67001baa5a3016449211bf59a8db3721314bd8a64723eac2cff4552",
});

// Test #7
@@ -195,6 +211,12 @@ for (i, test) in tests.iter().enumerate() {
for (i, test) in tests.iter().enumerate() {
assert_eq!(test.tx.id(), Hash::from_str(test.expected_id).unwrap(), "transaction id failed for test {}", i + 1);
assert_eq!(hash(&test.tx), Hash::from_str(test.expected_hash).unwrap(), "transaction hash failed for test {}", i + 1);

let preimage = transaction_id_preimage(&test.tx);
let mut hasher = kaspa_hashes::TransactionID::new();
hasher.update(&preimage);
let preimage_hash = hasher.finalize();
assert_eq!(preimage_hash, test.tx.id(), "transaction id preimage failed for test {}", i + 1);
}

// Avoid compiler warnings on the last clone
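The tx.rs changes relax the hasher bound from Hasher to HasherBase so the same write routine can feed either a real hasher or the new PreimageHasher, a byte-buffer sink exposed through transaction_id_preimage; the updated test asserts that hashing the buffered bytes with kaspa_hashes::TransactionID reproduces tx.id(). A self-contained sketch of the buffer-as-hasher idea follows; ByteSink, PreimageBuffer, and write_header are illustrative names, not the crate's API.

```rust
// Self-contained sketch of the preimage idea: the same write routine feeds either a
// real hasher or a plain byte buffer, so hashing the buffered bytes reproduces the id.
// ByteSink, PreimageBuffer and write_header are illustrative names, not the crate's API.
trait ByteSink {
    fn update<A: AsRef<[u8]>>(&mut self, data: A) -> &mut Self;
}

/// Buffers every byte written to it so the exact preimage can be inspected or re-hashed.
struct PreimageBuffer {
    bytes: Vec<u8>,
}

impl ByteSink for PreimageBuffer {
    fn update<A: AsRef<[u8]>>(&mut self, data: A) -> &mut Self {
        self.bytes.extend_from_slice(data.as_ref());
        self
    }
}

// Generic over the sink, like write_transaction being generic over HasherBase.
fn write_header<S: ByteSink>(sink: &mut S, version: u16, lock_time: u64) {
    sink.update(version.to_le_bytes()).update(lock_time.to_le_bytes());
}

fn main() {
    let mut buf = PreimageBuffer { bytes: Vec::new() };
    write_header(&mut buf, 2, 54);
    assert_eq!(buf.bytes.len(), 10); // 2 bytes of version + 8 bytes of lock_time
}
```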
8 changes: 7 additions & 1 deletion consensus/core/src/subnets.rs
@@ -10,7 +10,7 @@ use thiserror::Error;
pub const SUBNETWORK_ID_SIZE: usize = 20;

/// The domain representation of a Subnetwork ID
#[derive(Clone, Default, Eq, PartialEq, Ord, PartialOrd, Hash, BorshSerialize, BorshDeserialize)]
#[derive(Clone, Default, Eq, PartialEq, Ord, PartialOrd, Hash, BorshSerialize, BorshDeserialize, Copy)]
pub struct SubnetworkId([u8; SUBNETWORK_ID_SIZE]);

impl Debug for SubnetworkId {
Expand Down Expand Up @@ -40,6 +40,12 @@ impl From<[u8; SUBNETWORK_ID_SIZE]> for SubnetworkId {
}
}

impl From<SubnetworkId> for Vec<u8> {
fn from(id: SubnetworkId) -> Self {
id.0.into()
}
}

impl SubnetworkId {
pub const fn from_byte(b: u8) -> SubnetworkId {
let mut bytes = [0u8; SUBNETWORK_ID_SIZE];
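SubnetworkId now derives Copy and gains a From<SubnetworkId> for Vec<u8> impl, which is why the serializable/*, transaction.rs, and hashing call sites earlier in the diff can drop their .clone() calls. A minimal sketch of the same pattern on a stand-alone newtype; IdSketch is a hypothetical name.

```rust
// Minimal sketch of the Copy + From<...> for Vec<u8> pattern on a 20-byte newtype,
// which is what lets the call sites above drop `.clone()`. IdSketch is a hypothetical name.
const ID_SIZE: usize = 20;

#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
struct IdSketch([u8; ID_SIZE]);

impl From<IdSketch> for Vec<u8> {
    fn from(id: IdSketch) -> Self {
        id.0.into() // [u8; N] converts into Vec<u8> directly
    }
}

fn main() {
    let id = IdSketch::default();
    let copied = id; // bitwise copy; `id` remains usable afterwards
    let bytes: Vec<u8> = id.into();
    assert_eq!(bytes.len(), ID_SIZE);
    assert_eq!(copied, IdSketch::default());
}
```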