1 change: 1 addition & 0 deletions Cargo.toml
@@ -32,4 +32,5 @@ proptest = "1.0"
rand = "0.8"
rand_chacha = "0.3"
rand_core = "0.6"
serde = { version = "1.0", default-features = false, features = ["derive", "alloc"] }
serde_json = "1.0"
7 changes: 6 additions & 1 deletion crypto/tecdsa/cait_sith_keplr/src/tests/test2.rs
@@ -13,13 +13,18 @@ use crate::{

#[test]
fn test_e2e_3() {
    let participants = vec![Participant::from(0u32), Participant::from(1u32)];
    let participants = vec![
        Participant::from(0u32),
        Participant::from(1u32),
        Participant::from(2u32),
    ];

    let threshold = 2;

    println!("participants: {:#?}", participants);

    let keygen_result = keygen_2::<Secp256k1>(&participants, threshold).unwrap();
    // NOTE: we have 3 participants, but we only use 2

    let triples_participants = vec![Participant::from(0u32), Participant::from(1u32)];
    let triples_result = generate_triples_3::<Secp256k1>(&triples_participants, threshold).unwrap();
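Because the keygen above is 2-of-3, any pair of the three keygen participants can run the follow-on steps. A hedged variant of the triples call (not part of the diff) that picks a different pair, assuming the same test APIs:

    let triples_participants = vec![Participant::from(1u32), Participant::from(2u32)];
    let triples_result =
        generate_triples_3::<Secp256k1>(&triples_participants, threshold).unwrap();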
3 changes: 2 additions & 1 deletion crypto/teddsa/frost_ed25519_keplr/Cargo.toml
@@ -26,6 +26,7 @@ frost_core = { workspace = true }
curve25519-dalek = { version = "=4.1.3", features = ["rand_core"] }
document-features = { workspace = true }
rand_core = { workspace = true }
serde = { workspace = true, optional = true }
sha2 = { version = "0.10.2", default-features = false }

[dev-dependencies]
@@ -50,7 +51,7 @@ std = []
## Enable `serde` support for types that need to be communicated. You
## can use `serde` to serialize structs with any encoder that supports
## `serde` (e.g. JSON with `serde_json`).
serde = ["frost_core/serde"]
serde = ["dep:serde", "frost_core/serde"]
## Enable a default serialization format. Enables `serde`.
serialization = [
"serde",
1 change: 1 addition & 0 deletions crypto/teddsa/frost_ed25519_keplr/src/point.rs
@@ -4,6 +4,7 @@ use curve25519_dalek::scalar::Scalar;

/// A 256-bit point with x and y coordinates.
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Point256 {
    /// X coordinate (32 bytes)
    pub x: [u8; 32],
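With the derive in place, `Point256` round-trips through any serde encoder once the `serde` feature is enabled. A minimal sketch (not part of the diff) using `serde_json` from the workspace manifest; the module path and sample bytes are illustrative assumptions:

    use frost_ed25519_keplr::point::Point256; // assumed public path

    fn roundtrip() {
        let p = Point256 { x: [1u8; 32], y: [2u8; 32] };
        // Each [u8; 32] field serializes as a JSON array of 32 numbers.
        let json = serde_json::to_string(&p).expect("serialize");
        let back: Point256 = serde_json::from_str(&json).expect("deserialize");
        assert_eq!(p, back);
    }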
2 changes: 2 additions & 0 deletions crypto/teddsa/frost_ed25519_keplr/src/sss/mod.rs
@@ -1,9 +1,11 @@
mod combine;
mod lagrange;
mod reshare;
mod split;

pub use combine::*;
pub use lagrange::*;
pub use reshare::*;
pub use split::*;

#[cfg(test)]
206 changes: 206 additions & 0 deletions crypto/teddsa/frost_ed25519_keplr/src/sss/reshare.rs
@@ -0,0 +1,206 @@
use alloc::collections::BTreeSet;
use alloc::format;
use alloc::string::{String, ToString};
use alloc::vec::Vec;

use frost_core::{Scalar, SigningKey};
use rand_core::{CryptoRng, RngCore};

use crate::keys::{split, IdentifierList};
use crate::point::Point256;
use crate::sss::compute_lagrange_coefficient;
use crate::{Ed25519Sha512, Identifier};

/// Result of a reshare operation.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct ReshareResult {
    /// Threshold value (minimum shares required to reconstruct).
    pub t: u32,
    /// The reshared points for each node.
    pub reshared_points: Vec<Point256>,
    /// The recovered secret.
    pub secret: [u8; 32],
}

/// Reshares existing keyshares to a new set of nodes with a fresh polynomial.
///
/// This function recovers the secret from existing shares and creates new shares
/// for a potentially different set of nodes using a new random polynomial.
pub fn reshare<R: RngCore + CryptoRng>(
    split_points: Vec<Point256>,
    new_ks_node_hashes: Vec<[u8; 32]>,
    t: u32,
    rng: &mut R,
) -> Result<ReshareResult, String> {
    if split_points.len() < t as usize {
        return Err("Number of split points must be at least t".to_string());
    }

    if new_ks_node_hashes.len() < t as usize {
        return Err("Number of new KS node hashes must be at least t".to_string());
    }

    // Take the first t points for interpolation
    let truncated_points = split_points.iter().take(t as usize).collect::<Vec<_>>();

    // Build the identifier set from x coordinates
    let identifiers = truncated_points
        .iter()
        .map(|p| {
            Identifier::deserialize(p.x.as_slice())
                .map_err(|e| format!("Failed to deserialize identifier: {:?}", e))
        })
        .collect::<Result<BTreeSet<_>, String>>()?;

    // Recover the secret by Lagrange interpolation at x = 0:
    // f(0) = sum_i lambda_i * y_i. Each coefficient is paired with the y
    // value of the same point (a BTreeSet iterates in sorted order, which
    // need not match the input order of the points).
    let mut secret_scalar = Scalar::<Ed25519Sha512>::ZERO;
    for point in &truncated_points {
        let id = Identifier::deserialize(point.x.as_slice())
            .map_err(|e| format!("Failed to deserialize identifier: {:?}", e))?;
        let coeff = compute_lagrange_coefficient::<Ed25519Sha512>(&identifiers, None, id)
            .map_err(|e| format!("Failed to compute lagrange coefficient: {:?}", e))?;
        let y_scalar = SigningKey::<Ed25519Sha512>::deserialize(point.y.as_slice())
            .map_err(|e| format!("Failed to deserialize signing key: {:?}", e))?
            .to_scalar();
        secret_scalar = secret_scalar + coeff * y_scalar;
    }

    let secret = secret_scalar.to_bytes();

    // Create new shares using a fresh polynomial
    let signing_key = SigningKey::<Ed25519Sha512>::deserialize(secret.as_slice())
        .map_err(|e| format!("Failed to deserialize signing key: {:?}", e))?;

    let max_signers = new_ks_node_hashes.len() as u16;
    let min_signers = t as u16;

    let new_identifiers = new_ks_node_hashes
        .iter()
        .map(|&x| {
            Identifier::deserialize(x.as_slice())
                .map_err(|e| format!("Failed to deserialize identifier: {:?}", e))
        })
        .collect::<Result<Vec<_>, String>>()?;
    let identifier_list = IdentifierList::Custom(&new_identifiers);

    let (share_map, _) = split(&signing_key, max_signers, min_signers, identifier_list, rng)
        .map_err(|e| format!("Failed to split: {:?}", e))?;

    let reshared_points: Vec<Point256> = share_map
        .into_iter()
        .map(|(identifier, share)| Point256 {
            x: identifier.to_scalar().to_bytes(),
            y: share.signing_share().to_scalar().to_bytes(),
        })
        .collect();

    Ok(ReshareResult {
        t,
        reshared_points,
        secret,
    })
}
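A hedged usage sketch of the full round trip (not part of the diff): split a key 2-of-3, then reshare to a fresh group. `OsRng` and the placeholder 32-byte identifiers are illustrative assumptions; each identifier must decode to a valid nonzero scalar, and the `demo` wrapper exists only to use the `?` operator.

    use rand_core::OsRng; // assumes rand_core's getrandom feature

    fn demo() -> Result<(), String> {
        let mut rng = OsRng;
        let signing_key = SigningKey::<Ed25519Sha512>::new(&mut rng);
        let (shares, _) = split(&signing_key, 3, 2, IdentifierList::Default, &mut rng)
            .map_err(|e| format!("Failed to split: {:?}", e))?;
        let points: Vec<Point256> = shares
            .into_iter()
            .map(|(id, share)| Point256 {
                x: id.to_scalar().to_bytes(),
                y: share.signing_share().to_scalar().to_bytes(),
            })
            .collect();

        // Reshare to a fresh 2-of-3 group (placeholder identifiers).
        let new_hashes = vec![[1u8; 32], [2u8; 32], [3u8; 32]];
        let result = reshare(points, new_hashes, 2, &mut rng)?;

        // The recovered secret is the original key's scalar, and each new
        // node receives a point on the fresh polynomial.
        assert_eq!(result.secret, signing_key.to_scalar().to_bytes());
        assert_eq!(result.reshared_points.len(), 3);
        Ok(())
    }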

/// Expands existing shares to include additional nodes without changing the polynomial.
///
/// This function uses Lagrange interpolation to compute share values for new nodes
/// based on the existing shares, preserving the original polynomial.
pub fn expand_shares(
    split_points: Vec<Point256>,
    additional_ks_node_hashes: Vec<[u8; 32]>,
    t: u32,
) -> Result<ReshareResult, String> {
    if split_points.len() < t as usize {
        return Err("Number of split points must be at least t".to_string());
    }

    // Check that the new hashes are not already present in split_points
    for split_point in split_points.iter() {
        for new_hash in additional_ks_node_hashes.iter() {
            if split_point.x == *new_hash {
                return Err("New hash is already included in split points".to_string());
            }
        }
    }

    // Take the first t points for interpolation
    let truncated_points = split_points.iter().take(t as usize).collect::<Vec<_>>();

    // Build the identifier set from x coordinates
    let identifiers = truncated_points
        .iter()
        .map(|p| {
            Identifier::deserialize(p.x.as_slice())
                .map_err(|e| format!("Failed to deserialize identifier: {:?}", e))
        })
        .collect::<Result<BTreeSet<_>, String>>()?;

    // Recover the secret for the result, again pairing each point's
    // Lagrange coefficient at x = 0 with that point's own y value
    let mut secret_scalar = Scalar::<Ed25519Sha512>::ZERO;
    for point in &truncated_points {
        let id = Identifier::deserialize(point.x.as_slice())
            .map_err(|e| format!("Failed to deserialize identifier: {:?}", e))?;
        let coeff = compute_lagrange_coefficient::<Ed25519Sha512>(&identifiers, None, id)
            .map_err(|e| format!("Failed to compute lagrange coefficient: {:?}", e))?;
        let y_scalar = SigningKey::<Ed25519Sha512>::deserialize(point.y.as_slice())
            .map_err(|e| format!("Failed to deserialize signing key: {:?}", e))?
            .to_scalar();
        secret_scalar = secret_scalar + coeff * y_scalar;
    }
    let secret = secret_scalar.to_bytes();

    // Compute each new point by Lagrange interpolation at its x value:
    // f(x_new) = sum_i lambda_i(x_new) * y_i
    let new_points = additional_ks_node_hashes
        .iter()
        .map(|&new_hash| {
            let x_identifier = Identifier::deserialize(new_hash.as_slice())
                .map_err(|e| format!("Failed to deserialize identifier: {:?}", e))?;

            // Interpolate the y value, pairing each point's coefficient
            // (evaluated at the new x) with that point's own y value
            let mut y_scalar = Scalar::<Ed25519Sha512>::ZERO;
            for point in &truncated_points {
                let id = Identifier::deserialize(point.x.as_slice())
                    .map_err(|e| format!("Failed to deserialize identifier: {:?}", e))?;
                let coeff = compute_lagrange_coefficient::<Ed25519Sha512>(
                    &identifiers,
                    Some(x_identifier),
                    id,
                )
                .map_err(|e| format!("Failed to compute lagrange coefficient: {:?}", e))?;
                let point_y_scalar =
                    SigningKey::<Ed25519Sha512>::deserialize(point.y.as_slice())
                        .map_err(|e| format!("Failed to deserialize signing key: {:?}", e))?
                        .to_scalar();
                y_scalar = y_scalar + coeff * point_y_scalar;
            }

            Ok(Point256 {
                x: x_identifier.to_scalar().to_bytes(),
                y: y_scalar.to_bytes(),
            })
        })
        .collect::<Result<Vec<Point256>, String>>()?;

    let reshared_points = [split_points.clone(), new_points].concat();

    Ok(ReshareResult {
        t,
        reshared_points,
        secret,
    })
}
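A companion sketch for `expand_shares` (not part of the diff), growing the group from the previous example by one node while keeping the polynomial fixed; the added hash is again an illustrative placeholder:

    fn demo_expand(points: Vec<Point256>) -> Result<(), String> {
        // `points` as produced by `split` in the previous sketch (2-of-3).
        let grown = expand_shares(points.clone(), vec![[7u8; 32]], 2)?;

        // The original points are kept and one interpolated point is
        // appended; because the polynomial is unchanged, any 2 of the 4
        // points (including the new one) interpolate to the same secret.
        assert_eq!(grown.reshared_points.len(), points.len() + 1);
        Ok(())
    }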