Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 82 additions & 0 deletions banderwagon/src/element.rs
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,37 @@ impl Element {
Some(element)
}

/// Deserializes a batch of compressed serialized group elements.
///
/// `bytes` must be a concatenation of 32-byte compressed encodings
/// (big-endian x-coordinates, as consumed element-wise by `from_bytes`).
/// Returns `None` if the length is not a multiple of the compressed
/// size, if any chunk is not a valid x-coordinate on the curve, or if
/// any decoded point fails the subgroup check.
///
/// Uses `batch_get_point_from_x` so the whole batch shares a single
/// field inversion instead of paying one inversion per element.
pub fn batch_from_bytes(bytes: &[u8]) -> Option<Vec<Element>> {
    let chunk_size = Self::compressed_serialized_size();
    // Reject ragged input up front. This also makes the empty slice a
    // valid input yielding an empty vector, whereas computing
    // `1 + (bytes.len() - 1) / chunk_size` would underflow on len == 0.
    if bytes.len() % chunk_size != 0 {
        return None;
    }
    let n_elements = bytes.len() / chunk_size;

    let mut xs = Vec::with_capacity(n_elements);
    for chunk in bytes.chunks(chunk_size) {
        // Switch from big endian to little endian, as arkworks library uses little endian
        let mut chunked_bytes = chunk.to_vec();
        chunked_bytes.reverse();
        let x: Fq = Fq::deserialize_compressed(&chunked_bytes[..]).ok()?;
        xs.push(x);
    }

    // Get points in the group, but possibly not in the prime subgroup
    let points = Self::batch_get_point_from_x(&xs);
    let mut elements = Vec::with_capacity(n_elements);

    for point in points {
        let point = point?; // Short-circuits if point is None
        // Extended twisted Edwards coordinates: (X, Y, T = X*Y, Z = 1).
        let element = Element(EdwardsProjective::new_unchecked(
            point.x,
            point.y,
            point.x * point.y,
            Fq::one(),
        ));
        if !element.subgroup_check() {
            return None; // Short-circuit on failed subgroup check
        }
        elements.push(element);
    }
    Some(elements)
}

/// Size in bytes of one compressed serialized element: a single
/// serialized base-field x-coordinate (see `batch_from_bytes`, which
/// consumes the input in chunks of this size).
pub const fn compressed_serialized_size() -> usize {
    32
}
Expand All @@ -125,6 +156,36 @@ impl Element {
Some(EdwardsAffine::new_unchecked(x, y))
}

/// For each x-coordinate, attempts to recover a curve point with that
/// x, sharing one field inversion across the whole batch.
///
/// From the twisted Edwards equation a*x^2 + y^2 = 1 + d*x^2*y^2 we get
/// y^2 = (a*x^2 - 1) / (d*x^2 - 1). Entries whose y^2 has no square
/// root (no point with that x exists) come back as `None`.
pub fn batch_get_point_from_x(xs: &[Fq]) -> Vec<Option<EdwardsAffine>> {
    let mut ys_squared = Vec::with_capacity(xs.len());

    for x in xs {
        // Collect the denominators: d*x^2 - 1
        ys_squared.push(BandersnatchConfig::COEFF_D * x.square() - Fq::one());
    }

    // Invert all denominators with one shared inversion:
    // ys_squared[i] = 1 / (d*x_i^2 - 1)
    batch_inversion(&mut ys_squared);

    for (x, y_squared) in xs.iter().zip(ys_squared.iter_mut()) {
        // Multiply in the numerator: ys_squared[i] = (a*x_i^2 - 1) / (d*x_i^2 - 1)
        *y_squared *= BandersnatchConfig::COEFF_A * x.square() - Fq::one();
    }

    let mut elements = Vec::with_capacity(xs.len());
    for (x, y_squared) in xs.iter().zip(ys_squared.iter()) {
        match y_squared.sqrt() {
            Some(mut y) => {
                // Canonical choice between the two roots +-y: keep the
                // "positive" one per this library's sign convention.
                if !is_positive(y) {
                    y = -y;
                }
                elements.push(Some(EdwardsAffine::new_unchecked(*x, y)));
            }
            None => elements.push(None),
        }
    }
    elements
}

/// Maps this element to a base-field value by taking the ratio x/y of
/// its projective coordinates.
// NOTE(review): this divides by y — presumably valid elements never
// have y == 0; confirm that invariant is guaranteed by construction.
fn map_to_field(&self) -> Fq {
    self.0.x / self.0.y
}
Expand Down Expand Up @@ -250,6 +311,27 @@ mod tests {
assert_eq!(expected_i, got[i]);
}
}

#[test]
fn from_batch_from_bytes() {
    // Serialize ten known subgroup elements (multiples of the
    // generator) and check that batch deserialization round-trips them.
    let mut points = Vec::new();
    for i in 0..10 {
        points.push(Element::prime_subgroup_generator() * Fr::from(i));
    }
    let mut compressed = [0u8; 320];
    // Borrow the points instead of cloning the whole vector just to
    // serialize them (clippy: redundant_clone).
    for (i, point) in points.iter().enumerate() {
        let start_index = i * 32;
        let end_index = start_index + 32;
        compressed[start_index..end_index].copy_from_slice(&point.to_bytes());
    }

    let got = Element::batch_from_bytes(&compressed).unwrap();

    // Compare the whole vectors at once rather than element by element.
    assert_eq!(points, got);
}
}

#[cfg(test)]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,27 @@ public static native byte[] updateSparse(
*/
public static native byte[] compressMany(byte[] commitments);

/**
 * Uncompresses a compressed commitment.
 *
 * <p>Converts a serialised commitment from compressed (32-byte) to uncompressed (64-byte) form.
 *
 * @param commitment compressed serialised commitment (32 bytes).
 * @return uncompressed serialised commitment (64 bytes).
 */
public static native byte[] uncompress(byte[] commitment);

/**
 * Uncompresses many compressed commitments.
 *
 * <p>Converts serialised commitments from compressed to uncompressed form. The vectorised version
 * is highly optimised, making use of Montgomery's batch inversion trick.
 *
 * @param commitments compressed serialised commitments, concatenated.
 * @return uncompressed serialised commitments, concatenated.
 */
public static native byte[] uncompressMany(byte[] commitments);

/**
* Convert a commitment to its corresponding scalar.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,45 @@ public void testCallLibraryWithMaxElements() {
assertThat(result).isEqualTo(expected);
}

@Test
public void testCompressRoundTrip() {
    // uncompress(compress(x)) might not give the same representation as the original x,
    // so we compare after compressing once more. Hashing would also have worked.
    byte[] uncompressed =
        Bytes.fromHexString(
                "0x0128b513cfb016d3d836b5fa4a8a1260395d4ca831d65027aa74b832d92e0d6d9beb8d5e42b78b99e4eb233e7eca6276c6f4bd235b35c091546e2a2119bc1455")
            .toArray();
    byte[] onceCompressed = LibIpaMultipoint.compress(uncompressed);
    byte[] roundTripped = LibIpaMultipoint.compress(LibIpaMultipoint.uncompress(onceCompressed));
    assertThat(Bytes.wrap(roundTripped)).isEqualTo(Bytes.wrap(onceCompressed));
}

@Test
public void testUncompressRoundTrip() {
    // Round-tripping through uncompress/compress should reproduce the
    // compressed encoding byte for byte.
    Bytes32 compressed =
        Bytes32.fromHexString("0x3337896554fd3960bef9a4d0ff658ee8ee470cf9ca88a3c807cbe128536c5c05");
    byte[] roundTripped = LibIpaMultipoint.compress(LibIpaMultipoint.uncompress(compressed.toArray()));
    assertThat(Bytes32.wrap(roundTripped)).isEqualTo(compressed);
}

@Test
public void testCompressManyRoundTrip() {
    // Three concatenated uncompressed commitments.
    // NOTE(review): the first one appears to be serialised in the opposite
    // byte order, hence the reverse() — confirm against the producer.
    // The previously declared (and unused) duplicate `first` local was removed.
    Bytes input = Bytes.concatenate(
        Bytes.fromHexString(
            "0x0c7f8df856f6860c9f2c6cb0f86c10228e511cca1c4a08263189d629940cb189706cbaa63c436901b6355e10a524337d97688fa5b0cf6b2b91b98e654547f728").reverse(),
        Bytes.fromHexString(
            "0x0128b513cfb016d3d836b5fa4a8a1260395d4ca831d65027aa74b832d92e0d6d9beb8d5e42b78b99e4eb233e7eca6276c6f4bd235b35c091546e2a2119bc1455"),
        Bytes.fromHexString(
            "0x0128b513cfb016d3d836b5fa4a8a1260395d4ca831d65027aa74b832d92e0d6d9beb8d5e42b78b99e4eb233e7eca6276c6f4bd235b35c091546e2a2119bc1455"));
    // compressMany(uncompressMany(compressed)) must reproduce `compressed` exactly.
    byte[] compressed = LibIpaMultipoint.compressMany(input.toArray());
    Bytes result = Bytes.wrap(LibIpaMultipoint.compressMany(LibIpaMultipoint.uncompressMany(compressed)));

    assertThat(result).isEqualTo(Bytes.wrap(compressed));
}

@Test
public void testUpdateCommitmentSparseIdentityCommitment() {
// Numbers and result is taken from:
Expand Down
86 changes: 85 additions & 1 deletion bindings/java/rust_code/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,10 @@
* SPDX-License-Identifier: Apache-2.0
*/
mod parsers;
use parsers::{parse_commitment, parse_commitments, parse_indices, parse_scalars};
use parsers::{
parse_commitment, parse_commitments, parse_compressed_commitment, parse_compressed_commitments,
parse_indices, parse_scalars,
};

mod utils;
use utils::{
Expand Down Expand Up @@ -216,6 +219,47 @@ pub extern "system" fn Java_verkle_cryptography_LibIpaMultipoint_compress<'local
result
}

/// JNI entry point for `LibIpaMultipoint.uncompress`.
///
/// Takes a 32-byte compressed commitment and returns its 64-byte
/// uncompressed serialization. On any failure an
/// `IllegalArgumentException` is thrown on the Java side and an empty
/// byte array is returned.
#[no_mangle]
pub extern "system" fn Java_verkle_cryptography_LibIpaMultipoint_uncompress<'local>(
    mut env: JNIEnv<'local>,
    _class: JClass<'_>,
    commitment: JByteArray,
) -> JByteArray<'local> {
    // Validate and copy the Java byte[] into a fixed-size 32-byte array.
    let compressed = match parse_compressed_commitment(&env, commitment) {
        Ok(v) => v,
        Err(e) => {
            env.throw_new("java/lang/IllegalArgumentException", e)
                .expect("Failed to throw exception for uncompress inputs.");
            return JByteArray::default();
        }
    };
    // Deserialize (including the subgroup check) and re-serialize uncompressed.
    let commitment = match ffi_interface::deserialize_commitment(compressed) {
        Ok(v) => v,
        Err(e) => {
            let error_message = format!(
                "Invalid compressed commitment input. Couldn't convert to a correct subgroup element: {:?}",
                e
            );
            env.throw_new("java/lang/IllegalArgumentException", error_message)
                .expect("Failed to throw exception for compressed commitment input.");
            return JByteArray::default();
        }
    };
    // Hand the bytes back to the JVM. The match is the return value
    // directly — no `let result = ...; result` (clippy: let_and_return).
    match env.byte_array_from_slice(&commitment) {
        Ok(s) => s,
        Err(e) => {
            let error_message = format!(
                "Invalid commitment output. Couldn't convert to byte array: {:?}",
                e
            );
            env.throw_new("java/lang/IllegalArgumentException", &error_message)
                .expect("Couldn't convert to byte array");
            JByteArray::default()
        }
    }
}

#[no_mangle]
pub extern "system" fn Java_verkle_cryptography_LibIpaMultipoint_compressMany<'local>(
mut env: JNIEnv<'local>,
Expand Down Expand Up @@ -249,6 +293,46 @@ pub extern "system" fn Java_verkle_cryptography_LibIpaMultipoint_compressMany<'l
result
}

/// JNI entry point for `LibIpaMultipoint.uncompressMany`.
///
/// Takes a concatenation of 32-byte compressed commitments and returns
/// the concatenation of their 64-byte uncompressed serializations,
/// using batched deserialization. On any failure an
/// `IllegalArgumentException` is thrown on the Java side and an empty
/// byte array is returned.
#[no_mangle]
pub extern "system" fn Java_verkle_cryptography_LibIpaMultipoint_uncompressMany<'local>(
    mut env: JNIEnv<'local>,
    _class: JClass<'_>,
    commitments: JByteArray,
) -> JByteArray<'local> {
    // Validate the total length (multiple of 32) and copy the Java byte[].
    let compressed = match parse_compressed_commitments(&env, commitments) {
        Ok(v) => v,
        Err(e) => {
            env.throw_new("java/lang/IllegalArgumentException", e)
                .expect("Failed to throw exception for uncompressMany inputs.");
            return JByteArray::default();
        }
    };
    // Batch-deserialize, then flatten the 64-byte arrays into one buffer.
    // `flatten()` replaces the redundant `flat_map(|array| array.into_iter())`.
    let commitments: Vec<u8> = match ffi_interface::batch_deserialize_commitment(&compressed) {
        Ok(v) => v.into_iter().flatten().collect(),
        Err(_e) => {
            env.throw_new(
                "java/lang/IllegalArgumentException",
                "Could not deserialize commitments",
            )
            .expect("Failed to throw exception for uncompressMany inputs.");
            return JByteArray::default();
        }
    };
    // Return the match expression directly (clippy: let_and_return).
    match env.byte_array_from_slice(&commitments) {
        Ok(s) => s,
        Err(e) => {
            let error_message = format!(
                "Invalid commitment output. Couldn't convert to byte array: {:?}",
                e
            );
            env.throw_new("java/lang/IllegalArgumentException", &error_message)
                .expect("Couldn't convert to byte array");
            JByteArray::default()
        }
    }
}

#[no_mangle]
pub extern "system" fn Java_verkle_cryptography_LibIpaMultipoint_hash<'local>(
mut env: JNIEnv<'local>,
Expand Down
31 changes: 30 additions & 1 deletion bindings/java/rust_code/src/parsers.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use ffi_interface::CommitmentBytes;
use ffi_interface::{CommitmentBytes, CommitmentCompressedBytes};
use jni::{objects::JByteArray, JNIEnv};
use std::convert::TryFrom;

Expand Down Expand Up @@ -47,3 +47,32 @@ pub fn parse_commitments<'a>(

Ok(commitment_bytes)
}

/// Reads a Java byte[] and converts it into a fixed-size 32-byte
/// compressed commitment, with a descriptive error if the array is not
/// exactly 32 bytes long.
pub fn parse_compressed_commitment(
    env: &JNIEnv,
    commitment: JByteArray<'_>,
) -> Result<CommitmentCompressedBytes, String> {
    let raw = env
        .convert_byte_array(commitment)
        .map_err(|_| "cannot convert byte vector to vector")?;

    // try_from enforces the exact 32-byte length; return its result directly.
    CommitmentCompressedBytes::try_from(raw)
        .map_err(|_| "Wrong commitment size: should be 32 bytes".to_string())
}

/// Reads a Java byte[] holding zero or more concatenated 32-byte
/// compressed commitments. Only the total length is validated here
/// (it must be a multiple of 32); the bytes themselves are checked
/// later during deserialization.
pub fn parse_compressed_commitments<'a>(
    env: &JNIEnv<'a>,
    commitments: JByteArray<'a>,
) -> Result<Vec<u8>, String> {
    let commitment_bytes = env
        .convert_byte_array(commitments)
        .map_err(|_| "cannot convert byte vector to vector")?;

    // Each compressed commitment is exactly 32 bytes, so reject ragged input.
    // (Also drops the stray `;` that followed the if-block.)
    if commitment_bytes.len() % 32 != 0 {
        return Err("Wrong input size: should be a multiple of 32 bytes".to_string());
    }

    Ok(commitment_bytes)
}
16 changes: 16 additions & 0 deletions bindings/java/rust_code/verkle_cryptography_LibIpaMultipoint.h

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 5 additions & 1 deletion ffi_interface/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
// TODO: we ideally don't want to export these.
// - deserialize_update_commitment_sparse should not be exported and is an abstraction leak
pub use serialization::{
deserialize_commitment, deserialize_update_commitment_sparse, serialize_commitment,
batch_deserialize_commitment, deserialize_commitment, deserialize_update_commitment_sparse,
serialize_commitment,
};

use banderwagon::Element;
Expand Down Expand Up @@ -60,6 +61,9 @@
/// A serialized uncompressed group element
pub type CommitmentBytes = [u8; 64];

/// A serialized compressed group element
pub type CommitmentCompressedBytes = [u8; 32];

/// A serialized scalar field element
pub type ScalarBytes = [u8; 32];

Expand Down Expand Up @@ -413,7 +417,7 @@

#[cfg(test)]
mod tests {
use crate::{verify_execution_witness, Context};

Check warning on line 420 in ffi_interface/src/lib.rs

View workflow job for this annotation

GitHub Actions / Test Suite

use of deprecated function `verify_execution_witness`: Parsing of the execution witness and preprocessing its input should be done by clients in the future
use banderwagon::Fr;
use ipa_multipoint::committer::Committer;
use verkle_trie::proof::golang_proof_format::{EXECUTION_WITNESS_JSON, PREVIOUS_STATE_ROOT};
Expand All @@ -422,7 +426,7 @@

#[test]
fn exec_witness_works() {
let result = verify_execution_witness(PREVIOUS_STATE_ROOT, EXECUTION_WITNESS_JSON);

Check warning on line 429 in ffi_interface/src/lib.rs

View workflow job for this annotation

GitHub Actions / Test Suite

use of deprecated function `verify_execution_witness`: Parsing of the execution witness and preprocessing its input should be done by clients in the future
assert!(result);
}

Expand Down Expand Up @@ -520,8 +524,8 @@
#[cfg(test)]
mod pedersen_hash_tests {

use banderwagon::Fr;

Check warning on line 527 in ffi_interface/src/lib.rs

View workflow job for this annotation

GitHub Actions / Test Suite

unused import: `banderwagon::Fr`
use ipa_multipoint::committer::Committer;

Check warning on line 528 in ffi_interface/src/lib.rs

View workflow job for this annotation

GitHub Actions / Test Suite

unused import: `ipa_multipoint::committer::Committer`

use crate::{
add_commitment, commit_to_scalars, get_tree_key, get_tree_key_hash, hash_commitment,
Expand Down
14 changes: 14 additions & 0 deletions ffi_interface/src/serialization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,20 @@ pub fn deserialize_commitment(serialized_commitment: [u8; 32]) -> Result<Commitm
})?;
Ok(element.to_bytes_uncompressed())
}
/// Batch version of `deserialize_commitment`: deserializes a
/// concatenation of 32-byte compressed commitments into their 64-byte
/// uncompressed forms, amortizing the field-inversion cost across the
/// whole batch.
///
/// # Errors
/// Returns `Error::CouldNotDeserializeCommitment` (carrying the raw
/// input bytes) if any commitment fails to deserialize or fails the
/// subgroup check.
pub fn batch_deserialize_commitment(
    serialized_commitments: &[u8],
) -> Result<Vec<CommitmentBytes>, Error> {
    // `serialized_commitments` is already a slice; no need to re-borrow
    // it (clippy: needless_borrow).
    let elements = Element::batch_from_bytes(serialized_commitments).ok_or_else(|| {
        Error::CouldNotDeserializeCommitment {
            bytes: serialized_commitments.to_vec(),
        }
    })?;
    Ok(elements
        .into_iter()
        .map(|element| element.to_bytes_uncompressed())
        .collect())
}

#[must_use]
pub fn deserialize_proof_query(bytes: &[u8]) -> ProverQuery {
Expand Down
Loading