Skip to content

Commit 7ea689d

Browse files
Removed sp-arithmetic (#126)
* Removed sp-arithmetic
* PR fixes
1 parent a6ef519 commit 7ea689d

File tree

11 files changed

+37
-48
lines changed

11 files changed

+37
-48
lines changed

Cargo.lock

Lines changed: 2 additions & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@ scale-info = { version = "2", default-features = false, features = ["derive"] }
1010

1111
# Polkadot SDK
1212
binary-merkle-tree = { git = "https://github.com/availproject/polkadot-sdk", tag = "polkadot-1.7.1-patch-12", default-features = false }
13-
sp-arithmetic = { git = "https://github.com/availproject/polkadot-sdk", tag = "polkadot-1.7.1-patch-12", default-features = false }
1413
sp-std = { git = "https://github.com/availproject/polkadot-sdk", tag = "polkadot-1.7.1-patch-12", default-features = false }
1514
sp-trie = { git = "https://github.com/availproject/polkadot-sdk", tag = "polkadot-1.7.1-patch-12", default-features = false }
1615
sp-runtime = { git = "https://github.com/availproject/polkadot-sdk", tag = "polkadot-1.7.1-patch-12", default-features = false }
@@ -55,6 +54,7 @@ hash-db = { version = "0.16.0", default-features = false }
5554
rayon = "1.5.2"
5655
once_cell = "1.8.0"
5756
nalgebra = { version = "0.32.2", default-features = false }
57+
num-traits = { version = "0.2.18", default-features = false }
5858

5959
# ETH
6060
ethabi-decode = { git = "https://github.com/Snowfork/ethabi-decode.git", branch = "master", default-features = false }

core/Cargo.toml

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ log.workspace = true
1717
static_assertions.workspace = true
1818
thiserror-no-std.workspace = true
1919
tiny-keccak.workspace = true
20+
num-traits = { workspace = true }
2021

2122
# Parity
2223
binary-merkle-tree = { workspace = true, optional = true }
@@ -30,7 +31,6 @@ sp-runtime-interface = { workspace = true, optional = true }
3031
sp-trie = { workspace = true, optional = true }
3132
sp-storage = { workspace = true, optional = true }
3233
sp-debug-derive = { workspace = true, optional = true }
33-
sp-arithmetic.workspace = true
3434
sp-std.workspace = true
3535

3636
blake2b_simd.workspace = true
@@ -62,7 +62,6 @@ std = [
6262
"primitive-types/std",
6363
"scale-info/std",
6464
"serde?/std",
65-
"sp-arithmetic/std",
6665
"sp-runtime-interface?/std",
6766
"sp-runtime?/std",
6867
"sp-std/std",

core/src/constants.rs

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,13 @@
11
use core::num::NonZeroU32;
2-
use sp_arithmetic::Perbill;
32
use static_assertions::{const_assert, const_assert_eq};
43

54
/// We allow `Normal` extrinsics to fill up the block up to 85%, the rest can be used
65
/// by Operational extrinsics.
7-
pub const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(85);
6+
pub const NORMAL_DISPATCH_RATIO: u8 = 85;
87

98
/// We allow `Normal` data submissions to fill up the matrix up to 100%, there is no
109
/// Operational or mandatory extrinsic that goes in the matrix.
11-
pub const DA_DISPATCH_RATIO: Perbill = Perbill::from_percent(100);
10+
pub const DA_DISPATCH_RATIO: u8 = 100;
1211

1312
const_assert!(BLOCK_CHUNK_SIZE.get() > 0);
1413
pub const BLOCK_CHUNK_SIZE: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(32) };

core/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@ use core::fmt::Debug;
55

66
use codec::{Decode, Encode, MaxEncodedLen};
77
use derive_more::{Add, Constructor, Deref, Into, Mul};
8+
use num_traits::Zero;
89
use scale_info::TypeInfo;
9-
use sp_arithmetic::traits::Zero;
1010

1111
#[cfg(feature = "runtime")]
1212
use sp_debug_derive::RuntimeDebug;

kate/Cargo.toml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@ poly-multiproof = { workspace = true, default-features = false, features = ["bls
1717

1818
# Parity & Substrate
1919
codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] }
20-
sp-arithmetic.workspace = true
2120
sp-std.workspace = true
2221

2322
# 3rd-party
@@ -61,7 +60,6 @@ std = [
6160
"rand_chacha/std",
6261
"serde",
6362
"serde_json/std",
64-
"sp-arithmetic/std",
6563
]
6664

6765
serde = [ "avail-core/serde", "dep:serde" ]

kate/benches/reconstruct.rs

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ use nalgebra::DMatrix;
1414
use rand::{prelude::IteratorRandom, Rng, SeedableRng};
1515
use rand_chacha::ChaChaRng;
1616
use serde::{Deserialize, Serialize};
17-
use sp_arithmetic::{traits::SaturatedConversion, Percent};
1817

1918
const XTS_JSON_SETS: &str = include_str!("reconstruct.data.json");
2019

@@ -81,15 +80,14 @@ fn sample_cells_from_matrix(matrix: &DMatrix<ArkScalar>, columns: Option<&[u16]>
8180
fn random_cells(
8281
max_cols: BlockLengthColumns,
8382
max_rows: BlockLengthRows,
84-
percents: Percent,
83+
percents: u8,
8584
) -> Vec<Cell> {
86-
let max_cols = max_cols.into();
87-
let max_rows = max_rows.into();
85+
let max_cols: u32 = max_cols.into();
86+
let max_rows: u32 = max_rows.into();
8887

8988
let rng = &mut ChaChaRng::from_seed([0u8; 32]);
90-
let amount: usize = percents
91-
.mul_ceil::<u32>(max_cols * max_rows)
92-
.saturated_into();
89+
let amount = (percents as u32 * (max_cols * max_rows)).div_ceil(100);
90+
let amount: usize = usize::try_from(amount).unwrap_or(usize::MAX);
9391

9492
(0..max_cols)
9593
.flat_map(move |col| {
@@ -148,7 +146,7 @@ fn reconstruct(xts: &[AppExtrinsic]) {
148146
usize::try_from(dims_cols).unwrap(),
149147
usize::try_from(dims_cols).unwrap(),
150148
);
151-
for cell in random_cells(dims.cols(), dims.rows(), Percent::one()) {
149+
for cell in random_cells(dims.cols(), dims.rows(), 100) {
152150
let row: u32 = cell.row.into();
153151

154152
let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap();

kate/recovery/Cargo.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@ poly-multiproof = { workspace = true, default-features = false, features = ["bls
1212

1313
# Parity
1414
codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] }
15-
sp-arithmetic.workspace = true
1615
sp-std.workspace = true
1716

1817
# 3rd-parties
@@ -26,6 +25,7 @@ once_cell = { workspace = true, optional = true }
2625
rand = { workspace = true, optional = true }
2726
rand_chacha = { workspace = true, optional = true }
2827
serde = { workspace = true, optional = true }
28+
num-traits = { workspace = true, optional = true }
2929

3030
[dev-dependencies]
3131
hex.workspace = true
@@ -40,8 +40,8 @@ std = [
4040
"rand/std",
4141
"rand_chacha/std",
4242
"serde",
43-
"sp-arithmetic/std",
4443
"sp-std/std",
44+
"num-traits",
4545
]
4646

4747
serde = [ "dep:serde" ]

kate/recovery/src/com.rs

Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -18,12 +18,9 @@ use super::commons::{ArkEvaluationDomain, ArkScalar};
1818
#[cfg(feature = "std")]
1919
use codec::{Decode, IoReader};
2020
#[cfg(feature = "std")]
21-
use poly_multiproof::{ark_ff::Field, ark_poly::EvaluationDomain, traits::AsBytes};
21+
pub use num_traits::{One, Zero};
2222
#[cfg(feature = "std")]
23-
pub use sp_arithmetic::{
24-
traits::{One, SaturatedConversion as _, Zero},
25-
Percent,
26-
};
23+
use poly_multiproof::{ark_ff::Field, ark_poly::EvaluationDomain, traits::AsBytes};
2724
#[cfg(feature = "std")]
2825
use static_assertions::{const_assert, const_assert_ne};
2926
#[cfg(feature = "std")]
@@ -83,14 +80,13 @@ impl std::error::Error for ReconstructionError {
8380
pub fn columns_positions<R: rand::RngCore>(
8481
dimensions: matrix::Dimensions,
8582
positions: &[matrix::Position],
86-
factor: Percent,
83+
factor: u8,
8784
rng: &mut R,
8885
) -> Vec<matrix::Position> {
8986
use rand::seq::SliceRandom;
9087

91-
let cells = factor
92-
.mul_ceil(dimensions.extended_rows())
93-
.saturated_into::<usize>();
88+
let cells = (factor as u32 * dimensions.extended_rows()).div_ceil(100);
89+
let cells = usize::try_from(cells).unwrap_or(usize::MAX);
9490

9591
let columns: HashSet<u16> = HashSet::from_iter(positions.iter().map(|position| position.col));
9692

kate/src/com.rs

Lines changed: 16 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@ use rand_chacha::{
2626
use rayon::prelude::*;
2727
#[cfg(feature = "serde")]
2828
use serde::{Deserialize, Serialize};
29-
use sp_arithmetic::traits::SaturatedConversion;
3029
use static_assertions::const_assert_eq;
3130

3231
use crate::{
@@ -291,7 +290,8 @@ fn pad_to_chunk<const CHUNK_SIZE: usize>(chunk: DataChunk) -> Vec<u8> {
291290
}
292291

293292
fn pad_iec_9797_1(mut data: Vec<u8>) -> Vec<DataChunk> {
294-
let padded_size = padded_len_of_pad_iec_9797_1(data.len().saturated_into());
293+
let data_len = u32::try_from(data.len()).unwrap_or(u32::MAX);
294+
let padded_size = padded_len_of_pad_iec_9797_1(data_len);
295295
data.resize(padded_size as usize, 0u8);
296296

297297
// Transform into `DataChunk`.
@@ -495,7 +495,8 @@ pub fn build_proof<M: Metrics>(
495495
res[PROOF_SIZE..].copy_from_slice(&point_bytes);
496496
});
497497

498-
metrics.proof_build_time(total_start.elapsed(), cells.len().saturated_into());
498+
let cells_len = u32::try_from(cells.len()).unwrap_or(u32::MAX);
499+
metrics.proof_build_time(total_start.elapsed(), cells_len);
499500

500501
if let Ok(mut errors) = locked_errors.lock() {
501502
if let Some(error) = errors.pop() {
@@ -522,7 +523,8 @@ pub fn par_build_commitments<const CHUNK_SIZE: usize, M: Metrics>(
522523
let (tx_layout, block, block_dims) =
523524
flatten_and_pad_block::<CHUNK_SIZE>(rows, cols, extrinsics_by_key, rng_seed)?;
524525

525-
metrics.block_dims_and_size(block_dims, block.len().saturated_into());
526+
let block_len = u32::try_from(block.len()).unwrap_or(u32::MAX);
527+
metrics.block_dims_and_size(block_dims, block_len);
526528

527529
let ext_matrix = par_extend_data_matrix(block_dims, &block, metrics)?;
528530

@@ -663,6 +665,7 @@ mod tests {
663665
constants::kate::{CHUNK_SIZE, COMMITMENT_SIZE, DATA_CHUNK_SIZE},
664666
DataLookup,
665667
};
668+
use core::usize;
666669
use hex_literal::hex;
667670
use kate_recovery::{
668671
com::*,
@@ -676,7 +679,6 @@ mod tests {
676679
prelude::*,
677680
};
678681
use rand::{prelude::IteratorRandom, Rng, SeedableRng};
679-
use sp_arithmetic::Percent;
680682
use std::{convert::TryInto, iter::repeat};
681683
use test_case::test_case;
682684

@@ -885,15 +887,14 @@ mod tests {
885887
fn random_cells(
886888
max_cols: BlockLengthColumns,
887889
max_rows: BlockLengthRows,
888-
percents: Percent,
890+
percents: u8,
889891
) -> Vec<Cell> {
890-
let max_cols = max_cols.into();
891-
let max_rows = max_rows.into();
892+
let max_cols: u32 = max_cols.into();
893+
let max_rows: u32 = max_rows.into();
892894

893895
let rng = &mut ChaChaRng::from_seed([0u8; 32]);
894-
let amount: usize = percents
895-
.mul_ceil::<u32>(max_cols * max_rows)
896-
.saturated_into();
896+
let amount = (percents as u32 * (max_cols * max_rows)).div_ceil(100);
897+
let amount = usize::try_from(amount).unwrap_or(usize::MAX);
897898

898899
(0..max_cols)
899900
.flat_map(move |col| {
@@ -923,7 +924,7 @@ mod tests {
923924
// let dims_cols = usize::try_from(dims.cols.0).unwrap();
924925
// let public_params = testnet::public_params(dims_cols);
925926
let public_params = couscous::multiproof_params();
926-
for cell in random_cells(dims.cols, dims.rows, Percent::one() ) {
927+
for cell in random_cells(dims.cols, dims.rows, 100 ) {
927928
let row = usize::try_from(cell.row.0).unwrap();
928929

929930
let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap();
@@ -1199,12 +1200,12 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat
11991200
#[test_case( build_extrinsics(&[]) => padded_len_group(&[], 32) ; "Empty chunk list")]
12001201
#[test_case( build_extrinsics(&[4096]) => padded_len_group(&[4096], 32) ; "4K chunk")]
12011202
fn test_padding_len(extrinsics: Vec<Vec<u8>>) -> u32 {
1202-
extrinsics
1203+
let sum = extrinsics
12031204
.into_iter()
12041205
.flat_map(pad_iec_9797_1)
12051206
.map(|chunk| pad_to_chunk::<TCHUNK_SIZE>(chunk).len())
1206-
.sum::<usize>()
1207-
.saturated_into()
1207+
.sum::<usize>();
1208+
u32::try_from(sum).unwrap_or(u32::MAX)
12081209
}
12091210

12101211
#[test]

0 commit comments

Comments (0)