
Commit aab15aa

revert!: rsmt2d#277 and rsmt2d#287 (#295)
While attempting to bump celestia-app to v0.12.0-rc2, I noticed that the `RegisterTree` design leaks an implementation detail to celestia-app: the registering and managing of `treeName`s. Celestia-app has two categories of trees:

1. erasured namespaced merkle tree in [nmt_wrapper.go](https://github.com/celestiaorg/celestia-app/blob/main/pkg/wrapper/nmt_wrapper.go)
2. EDS subtree root cacher in [nmt_caching.go](https://github.com/celestiaorg/celestia-app/blob/main/pkg/inclusion/nmt_caching.go)

Each of those categories has trees that vary by square size and NMT options, so celestia-app has to be careful to register all the appropriate trees once (and only once) before they are used (via `Compute` or `Import`).

I'd like to explore a less breaking way to give celestia-node the originally desired feature (#275). In the meantime, I think we should revert the two big breaking changes so that main remains releasable.

- Revert #277
- Revert #287

Closes #295 because it is no longer relevant if we merge this.
1 parent 50c7622 · commit aab15aa
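For context on the API difference described above, here is a minimal sketch of how a caller builds an extended data square once this revert lands. It assumes the post-revert exports (`ComputeExtendedDataSquare`, `NewLeoRSCodec`, `NewDefaultTree`) and an illustrative 64-byte share size; the pre-revert registry flow is shown only in comments.

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/celestiaorg/rsmt2d"
)

func main() {
	// Four 64-byte shares form a 2x2 original data square (contents and
	// share size are illustrative).
	shares := [][]byte{
		bytes.Repeat([]byte{1}, 64), bytes.Repeat([]byte{2}, 64),
		bytes.Repeat([]byte{3}, 64), bytes.Repeat([]byte{4}, 64),
	}

	// Pre-revert (sketch): callers first registered a named tree, e.g.
	//   _ = RegisterTree("my-tree", myTreeConstructor)
	// and then passed the name (or DefaultTreeName) as a string.
	//
	// Post-revert: the tree constructor is passed directly, so there is no
	// global registry of treeNames for celestia-app to keep in sync.
	eds, err := rsmt2d.ComputeExtendedDataSquare(shares, rsmt2d.NewLeoRSCodec(), rsmt2d.NewDefaultTree)
	if err != nil {
		panic(err)
	}
	fmt.Println("extended data square width:", eds.Width())
}
```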

8 files changed (+79 −365 lines)

default_tree.go (−48)

This file was deleted.

extendeddatacrossword_test.go (+8 −12)
@@ -42,7 +42,7 @@ func TestRepairExtendedDataSquare(t *testing.T) {
 		flattened[12], flattened[13] = nil, nil
 
 		// Re-import the data square.
-		eds, err := ImportExtendedDataSquare(flattened, codec, DefaultTreeName)
+		eds, err := ImportExtendedDataSquare(flattened, codec, NewDefaultTree)
 		if err != nil {
 			t.Errorf("ImportExtendedDataSquare failed: %v", err)
 		}
@@ -67,7 +67,7 @@ func TestRepairExtendedDataSquare(t *testing.T) {
 		flattened[12], flattened[13], flattened[14] = nil, nil, nil
 
 		// Re-import the data square.
-		eds, err := ImportExtendedDataSquare(flattened, codec, DefaultTreeName)
+		eds, err := ImportExtendedDataSquare(flattened, codec, NewDefaultTree)
 		if err != nil {
 			t.Errorf("ImportExtendedDataSquare failed: %v", err)
 		}
@@ -237,7 +237,7 @@ func BenchmarkRepair(b *testing.B) {
 
 		// Generate a new range original data square then extend it
 		square := genRandDS(originalDataWidth, shareSize)
-		eds, err := ComputeExtendedDataSquare(square, codec, DefaultTreeName)
+		eds, err := ComputeExtendedDataSquare(square, codec, NewDefaultTree)
 		if err != nil {
 			b.Error(err)
 		}
@@ -275,7 +275,7 @@ func BenchmarkRepair(b *testing.B) {
 			}
 
 			// Re-import the data square.
-			eds, _ = ImportExtendedDataSquare(flattened, codec, DefaultTreeName)
+			eds, _ = ImportExtendedDataSquare(flattened, codec, NewDefaultTree)
 
 			b.StartTimer()
 
@@ -301,7 +301,7 @@ func createTestEds(codec Codec, shareSize int) *ExtendedDataSquare {
 	eds, err := ComputeExtendedDataSquare([][]byte{
 		ones, twos,
 		threes, fours,
-	}, codec, DefaultTreeName)
+	}, codec, NewDefaultTree)
 	if err != nil {
 		panic(err)
 	}
@@ -390,14 +390,8 @@ func TestCorruptedEdsReturnsErrByzantineData_UnorderedShares(t *testing.T) {
 
 	codec := NewLeoRSCodec()
 
-	edsWidth := 4            // number of shares per row/column in the extended data square
-	odsWidth := edsWidth / 2 // number of shares per row/column in the original data square
-	err := RegisterTree("testing-tree", newConstructor(uint64(odsWidth), nmt.NamespaceIDSize(namespaceSize)))
-	assert.NoError(t, err)
-
 	// create a DA header
 	eds := createTestEdsWithNMT(t, codec, shareSize, namespaceSize, 1, 2, 3, 4)
-
 	assert.NotNil(t, eds)
 	dAHeaderRoots, err := eds.getRowRoots()
 	assert.NoError(t, err)
@@ -442,8 +436,10 @@ func createTestEdsWithNMT(t *testing.T, codec Codec, shareSize, namespaceSize in
 	for i, shareValue := range sharesValue {
 		shares[i] = bytes.Repeat([]byte{byte(shareValue)}, shareSize)
 	}
+	edsWidth := 4            // number of shares per row/column in the extended data square
+	odsWidth := edsWidth / 2 // number of shares per row/column in the original data square
 
-	eds, err := ComputeExtendedDataSquare(shares, codec, "testing-tree")
+	eds, err := ComputeExtendedDataSquare(shares, codec, newConstructor(uint64(odsWidth), nmt.NamespaceIDSize(namespaceSize)))
 	require.NoError(t, err)
 
 	return eds
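The test changes above all follow the same substitution: where a test previously passed the registry key `DefaultTreeName` (or `"testing-tree"`), it now passes a tree constructor function directly. A standalone sketch of the flatten/erase/re-import pattern those tests exercise, assuming the post-revert API and illustrative 64-byte shares:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/celestiaorg/rsmt2d"
)

func main() {
	codec := rsmt2d.NewLeoRSCodec()
	shares := [][]byte{
		bytes.Repeat([]byte{1}, 64), bytes.Repeat([]byte{2}, 64),
		bytes.Repeat([]byte{3}, 64), bytes.Repeat([]byte{4}, 64),
	}

	// Extend the 2x2 original square into a 4x4 extended square.
	eds, err := rsmt2d.ComputeExtendedDataSquare(shares, codec, rsmt2d.NewDefaultTree)
	if err != nil {
		panic(err)
	}

	// Flatten, drop a couple of shares, and re-import, as the tests above do.
	// The constructor is supplied again at import time instead of being
	// looked up from a registry by name.
	flattened := eds.Flattened()
	flattened[1], flattened[2] = nil, nil
	reimported, err := rsmt2d.ImportExtendedDataSquare(flattened, codec, rsmt2d.NewDefaultTree)
	if err != nil {
		panic(err)
	}
	fmt.Println("re-imported width:", reimported.Width())
}
```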

extendeddatasquare.go (+7 −29)
@@ -15,39 +15,29 @@ import (
 type ExtendedDataSquare struct {
 	*dataSquare
 	codec             Codec
-	treeName          string
 	originalDataWidth uint
 }
 
 func (eds *ExtendedDataSquare) MarshalJSON() ([]byte, error) {
 	return json.Marshal(&struct {
 		DataSquare [][]byte `json:"data_square"`
 		Codec      string   `json:"codec"`
-		Tree       string   `json:"tree"`
 	}{
 		DataSquare: eds.dataSquare.Flattened(),
 		Codec:      eds.codec.Name(),
-		Tree:       eds.treeName,
 	})
 }
 
 func (eds *ExtendedDataSquare) UnmarshalJSON(b []byte) error {
 	var aux struct {
 		DataSquare [][]byte `json:"data_square"`
 		Codec      string   `json:"codec"`
-		Tree       string   `json:"tree"`
 	}
 
-	err := json.Unmarshal(b, &aux)
-	if err != nil {
+	if err := json.Unmarshal(b, &aux); err != nil {
 		return err
 	}
-
-	if aux.Tree == "" {
-		aux.Tree = DefaultTreeName
-	}
-
-	importedEds, err := ImportExtendedDataSquare(aux.DataSquare, codecs[aux.Codec], aux.Tree)
+	importedEds, err := ImportExtendedDataSquare(aux.DataSquare, codecs[aux.Codec], NewDefaultTree)
 	if err != nil {
 		return err
 	}
@@ -60,7 +50,7 @@ func (eds *ExtendedDataSquare) UnmarshalJSON(b []byte) error {
 func ComputeExtendedDataSquare(
 	data [][]byte,
 	codec Codec,
-	treeName string,
+	treeCreatorFn TreeConstructorFn,
 ) (*ExtendedDataSquare, error) {
 	if len(data) > codec.MaxChunks() {
 		return nil, errors.New("number of chunks exceeds the maximum")
@@ -71,18 +61,12 @@ func ComputeExtendedDataSquare(
 	if err != nil {
 		return nil, err
 	}
-
-	treeCreatorFn, err := TreeFn(treeName)
-	if err != nil {
-		return nil, err
-	}
-
 	ds, err := newDataSquare(data, treeCreatorFn, uint(chunkSize))
 	if err != nil {
 		return nil, err
 	}
 
-	eds := ExtendedDataSquare{dataSquare: ds, codec: codec, treeName: treeName}
+	eds := ExtendedDataSquare{dataSquare: ds, codec: codec}
 	err = eds.erasureExtendSquare(codec)
 	if err != nil {
 		return nil, err
@@ -95,7 +79,7 @@ func ComputeExtendedDataSquare(
 func ImportExtendedDataSquare(
 	data [][]byte,
 	codec Codec,
-	treeName string,
+	treeCreatorFn TreeConstructorFn,
 ) (*ExtendedDataSquare, error) {
 	if len(data) > 4*codec.MaxChunks() {
 		return nil, errors.New("number of chunks exceeds the maximum")
@@ -106,18 +90,12 @@ func ImportExtendedDataSquare(
 	if err != nil {
 		return nil, err
 	}
-
-	treeCreatorFn, err := TreeFn(treeName)
-	if err != nil {
-		return nil, err
-	}
-
 	ds, err := newDataSquare(data, treeCreatorFn, uint(chunkSize))
 	if err != nil {
 		return nil, err
	}
 
-	eds := ExtendedDataSquare{dataSquare: ds, codec: codec, treeName: treeName}
+	eds := ExtendedDataSquare{dataSquare: ds, codec: codec}
 	err = validateEdsWidth(eds.width)
 	if err != nil {
 		return nil, err
@@ -248,7 +226,7 @@ func (eds *ExtendedDataSquare) erasureExtendCol(codec Codec, i uint) error {
 }
 
 func (eds *ExtendedDataSquare) deepCopy(codec Codec) (ExtendedDataSquare, error) {
-	imported, err := ImportExtendedDataSquare(eds.Flattened(), codec, eds.treeName)
+	imported, err := ImportExtendedDataSquare(eds.Flattened(), codec, eds.createTreeFn)
 	return *imported, err
 }
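One side effect visible in the `MarshalJSON`/`UnmarshalJSON` hunks: the JSON encoding no longer carries a `tree` field, and a square decoded from JSON is always rebuilt with `NewDefaultTree`. A minimal round-trip sketch under the same assumptions as the earlier examples:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"

	"github.com/celestiaorg/rsmt2d"
)

func main() {
	shares := [][]byte{
		bytes.Repeat([]byte{1}, 64), bytes.Repeat([]byte{2}, 64),
		bytes.Repeat([]byte{3}, 64), bytes.Repeat([]byte{4}, 64),
	}
	eds, err := rsmt2d.ComputeExtendedDataSquare(shares, rsmt2d.NewLeoRSCodec(), rsmt2d.NewDefaultTree)
	if err != nil {
		panic(err)
	}

	// The wire format now contains only the flattened square and the codec name.
	encoded, err := json.Marshal(eds)
	if err != nil {
		panic(err)
	}

	// UnmarshalJSON re-imports the square using NewDefaultTree, per this diff.
	var decoded rsmt2d.ExtendedDataSquare
	if err := json.Unmarshal(encoded, &decoded); err != nil {
		panic(err)
	}
	fmt.Println("round-tripped width:", decoded.Width())
}
```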
