Skip to content

Commit b3dbae5

Browse files
authored
crypto: add ed25519 key "reshare" impl (Shamir SS)
* crypto: add `reshare` and `expand_shares` functions
* crypto: enhance serialization support for SSS
* crypto: fix type errors in test
* tecdsa: fix out-of-bounds error in test
1 parent dd48529 commit b3dbae5

File tree

20 files changed

+460
-44
lines changed

20 files changed

+460
-44
lines changed

Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,4 +32,5 @@ proptest = "1.0"
3232
rand = "0.8"
3333
rand_chacha = "0.3"
3434
rand_core = "0.6"
35+
serde = { version = "1.0", default-features = false, features = ["derive", "alloc"] }
3536
serde_json = "1.0"

crypto/tecdsa/cait_sith_keplr/src/tests/test2.rs

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,13 +13,18 @@ use crate::{
1313

1414
#[test]
1515
fn test_e2e_3() {
16-
let participants = vec![Participant::from(0u32), Participant::from(1u32)];
16+
let participants = vec![
17+
Participant::from(0u32),
18+
Participant::from(1u32),
19+
Participant::from(2u32),
20+
];
1721

1822
let threshold = 2;
1923

2024
println!("participants: {:#?}", participants);
2125

2226
let keygen_result = keygen_2::<Secp256k1>(&participants, threshold).unwrap();
27+
// NOTE: we have 3 participants, but we only use 2
2328

2429
let triples_participants = vec![Participant::from(0u32), Participant::from(1u32)];
2530
let triples_result = generate_triples_3::<Secp256k1>(&triples_participants, threshold).unwrap();

crypto/teddsa/frost_ed25519_keplr/Cargo.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ frost_core = { workspace = true }
2626
curve25519-dalek = { version = "=4.1.3", features = ["rand_core"] }
2727
document-features = { workspace = true }
2828
rand_core = { workspace = true }
29+
serde = { workspace = true, optional = true }
2930
sha2 = { version = "0.10.2", default-features = false }
3031

3132
[dev-dependencies]
@@ -50,7 +51,7 @@ std = []
5051
## Enable `serde` support for types that need to be communicated. You
5152
## can use `serde` to serialize structs with any encoder that supports
5253
## `serde` (e.g. JSON with `serde_json`).
53-
serde = ["frost_core/serde"]
54+
serde = ["dep:serde", "frost_core/serde"]
5455
## Enable a default serialization format. Enables `serde`.
5556
serialization = [
5657
"serde",

crypto/teddsa/frost_ed25519_keplr/src/point.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ use curve25519_dalek::scalar::Scalar;
44

55
/// A 256-bit point with x and y coordinates.
66
#[derive(Clone, Debug, PartialEq, Eq)]
7+
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
78
pub struct Point256 {
89
/// X coordinate (32 bytes)
910
pub x: [u8; 32],

crypto/teddsa/frost_ed25519_keplr/src/sss/mod.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
11
mod combine;
22
mod lagrange;
3+
mod reshare;
34
mod split;
45

56
pub use combine::*;
67
pub use lagrange::*;
8+
pub use reshare::*;
79
pub use split::*;
810

911
#[cfg(test)]
Lines changed: 206 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,206 @@
1+
use alloc::collections::BTreeSet;
2+
use alloc::format;
3+
use alloc::string::{String, ToString};
4+
use alloc::vec::Vec;
5+
6+
use frost_core::{Scalar, SigningKey};
7+
use rand_core::{CryptoRng, RngCore};
8+
9+
use crate::keys::{split, IdentifierList};
10+
use crate::point::Point256;
11+
use crate::sss::compute_lagrange_coefficient;
12+
use crate::{Ed25519Sha512, Identifier};
13+
14+
/// Result of a reshare operation.
15+
#[derive(Debug, Clone, PartialEq, Eq)]
16+
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
17+
pub struct ReshareResult {
18+
/// Threshold value (minimum shares required to reconstruct).
19+
pub t: u32,
20+
/// The reshared points for each node.
21+
pub reshared_points: Vec<Point256>,
22+
/// The recovered secret.
23+
pub secret: [u8; 32],
24+
}
25+
26+
/// Reshares existing keyshares to a new set of nodes with a fresh polynomial.
27+
///
28+
/// This function recovers the secret from existing shares and creates new shares
29+
/// for a potentially different set of nodes using a new random polynomial.
30+
pub fn reshare<R: RngCore + CryptoRng>(
31+
split_points: Vec<Point256>,
32+
new_ks_node_hashes: Vec<[u8; 32]>,
33+
t: u32,
34+
rng: &mut R,
35+
) -> Result<ReshareResult, String> {
36+
if split_points.len() < t as usize {
37+
return Err("Split points must be greater than t".to_string());
38+
}
39+
40+
if new_ks_node_hashes.len() < t as usize {
41+
return Err("New KS node hashes must be greater than t".to_string());
42+
}
43+
44+
// Take first t points for interpolation
45+
let truncated_points = split_points.iter().take(t as usize).collect::<Vec<_>>();
46+
47+
// Build identifier set from x coordinates
48+
let identifiers = truncated_points
49+
.iter()
50+
.map(|p| {
51+
Identifier::deserialize(p.x.as_slice())
52+
.map_err(|e| format!("Failed to deserialize identifier: {:?}", e))
53+
})
54+
.collect::<Result<BTreeSet<_>, String>>()?;
55+
56+
// Compute Lagrange coefficients and interpolate to recover secret
57+
let coeffs = identifiers
58+
.iter()
59+
.map(|id| {
60+
compute_lagrange_coefficient::<Ed25519Sha512>(&identifiers, None, *id)
61+
.map_err(|e| format!("Failed to compute lagrange coefficient: {:?}", e))
62+
})
63+
.collect::<Result<Vec<_>, String>>()?;
64+
65+
let mut secret_scalar = Scalar::<Ed25519Sha512>::ZERO;
66+
for (i, coeff) in coeffs.iter().enumerate() {
67+
let y_scalar = SigningKey::<Ed25519Sha512>::deserialize(truncated_points[i].y.as_slice())
68+
.map_err(|e| format!("Failed to deserialize signing key: {:?}", e))?
69+
.to_scalar();
70+
secret_scalar = secret_scalar + *coeff * y_scalar;
71+
}
72+
73+
let secret = secret_scalar.to_bytes();
74+
75+
// Create new shares using fresh polynomial
76+
let signing_key = SigningKey::<Ed25519Sha512>::deserialize(secret.as_slice())
77+
.map_err(|e| format!("Failed to deserialize signing key: {:?}", e))?;
78+
79+
let max_signers = new_ks_node_hashes.len() as u16;
80+
let min_signers = t as u16;
81+
82+
let new_identifiers = new_ks_node_hashes
83+
.iter()
84+
.map(|&x| {
85+
Identifier::deserialize(x.as_slice())
86+
.map_err(|e| format!("Failed to deserialize identifier: {:?}", e))
87+
})
88+
.collect::<Result<Vec<_>, String>>()?;
89+
let identifier_list = IdentifierList::Custom(&new_identifiers);
90+
91+
let share_map_tup = split(&signing_key, max_signers, min_signers, identifier_list, rng)
92+
.map_err(|e| format!("Failed to split: {:?}", e))?;
93+
let share_vec = share_map_tup.0.into_iter().collect::<Vec<_>>();
94+
95+
let reshared_points: Vec<Point256> = share_vec
96+
.into_iter()
97+
.map(|(identifier, share)| Point256 {
98+
x: identifier.to_scalar().to_bytes(),
99+
y: share.signing_share().to_scalar().to_bytes(),
100+
})
101+
.collect();
102+
103+
Ok(ReshareResult {
104+
t,
105+
reshared_points,
106+
secret,
107+
})
108+
}
109+
110+
/// Expands existing shares to include additional nodes without changing the polynomial.
111+
///
112+
/// This function uses Lagrange interpolation to compute share values for new nodes
113+
/// based on the existing shares, preserving the original polynomial.
114+
pub fn expand_shares(
115+
split_points: Vec<Point256>,
116+
additional_ks_node_hashes: Vec<[u8; 32]>,
117+
t: u32,
118+
) -> Result<ReshareResult, String> {
119+
if split_points.len() < t as usize {
120+
return Err("Split points must be greater than t".to_string());
121+
}
122+
123+
// Check that new hashes are not already in split_points
124+
for split_point in split_points.iter() {
125+
for new_hash in additional_ks_node_hashes.iter() {
126+
if split_point.x == *new_hash {
127+
return Err("New hash is already included in split points".to_string());
128+
}
129+
}
130+
}
131+
132+
// Take first t points for interpolation
133+
let truncated_points = split_points.iter().take(t as usize).collect::<Vec<_>>();
134+
135+
// Build identifier set from x coordinates
136+
let identifiers = truncated_points
137+
.iter()
138+
.map(|p| {
139+
Identifier::deserialize(p.x.as_slice())
140+
.map_err(|e| format!("Failed to deserialize identifier: {:?}", e))
141+
})
142+
.collect::<Result<BTreeSet<_>, String>>()?;
143+
144+
// Recover secret for result
145+
let coeffs_at_zero = identifiers
146+
.iter()
147+
.map(|id| {
148+
compute_lagrange_coefficient::<Ed25519Sha512>(&identifiers, None, *id)
149+
.map_err(|e| format!("Failed to compute lagrange coefficient: {:?}", e))
150+
})
151+
.collect::<Result<Vec<_>, String>>()?;
152+
153+
let mut secret_scalar = Scalar::<Ed25519Sha512>::ZERO;
154+
for (i, coeff) in coeffs_at_zero.iter().enumerate() {
155+
let y_scalar = SigningKey::<Ed25519Sha512>::deserialize(truncated_points[i].y.as_slice())
156+
.map_err(|e| format!("Failed to deserialize signing key: {:?}", e))?
157+
.to_scalar();
158+
secret_scalar = secret_scalar + *coeff * y_scalar;
159+
}
160+
let secret = secret_scalar.to_bytes();
161+
162+
// Compute new points using Lagrange interpolation at new x values
163+
let new_points = additional_ks_node_hashes
164+
.iter()
165+
.map(|&new_hash| {
166+
let x_identifier = Identifier::deserialize(new_hash.as_slice())
167+
.map_err(|e| format!("Failed to deserialize identifier: {:?}", e))?;
168+
169+
// Compute Lagrange coefficients at the new x value
170+
let coeffs_at_x = identifiers
171+
.iter()
172+
.map(|id| {
173+
compute_lagrange_coefficient::<Ed25519Sha512>(
174+
&identifiers,
175+
Some(x_identifier),
176+
*id,
177+
)
178+
.map_err(|e| format!("Failed to compute lagrange coefficient: {:?}", e))
179+
})
180+
.collect::<Result<Vec<_>, String>>()?;
181+
182+
// Interpolate y value
183+
let mut y_scalar = Scalar::<Ed25519Sha512>::ZERO;
184+
for (i, coeff) in coeffs_at_x.iter().enumerate() {
185+
let point_y_scalar =
186+
SigningKey::<Ed25519Sha512>::deserialize(truncated_points[i].y.as_slice())
187+
.map_err(|e| format!("Failed to deserialize signing key: {:?}", e))?
188+
.to_scalar();
189+
y_scalar = y_scalar + *coeff * point_y_scalar;
190+
}
191+
192+
Ok(Point256 {
193+
x: x_identifier.to_scalar().to_bytes(),
194+
y: y_scalar.to_bytes(),
195+
})
196+
})
197+
.collect::<Result<Vec<Point256>, String>>()?;
198+
199+
let reshared_points = [split_points.clone(), new_points].concat();
200+
201+
Ok(ReshareResult {
202+
t,
203+
reshared_points,
204+
secret,
205+
})
206+
}

0 commit comments

Comments
 (0)