chore!: bump to rsmt2d v0.10.0 (#2062)
Closes #2060

Marked as breaking because, after this PR, `NewDataAvailabilityHeader` may return an error.
rootulp authored and evan-forbes committed Jul 11, 2023
1 parent a850f3e commit 7a176b2
Showing 13 changed files with 70 additions and 28 deletions.
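The practical impact on downstream code is limited to the new error return: `da.NewDataAvailabilityHeader` now propagates the errors that rsmt2d v0.10.0's `RowRoots()` and `ColRoots()` can return. A minimal caller-side sketch of the migration follows; the import path and the `buildDAH` helper are illustrative assumptions, not code from this PR:

```go
package main

import (
	"log"

	// Import path assumed from this repository's layout (pkg/da); adjust if needed.
	"github.com/celestiaorg/celestia-app/pkg/da"
)

// buildDAH is a hypothetical helper illustrating the post-rsmt2d-v0.10.0 call pattern.
func buildDAH(rawShares [][]byte) (da.DataAvailabilityHeader, error) {
	eds, err := da.ExtendShares(rawShares)
	if err != nil {
		return da.DataAvailabilityHeader{}, err
	}
	// Before this PR: dah := da.NewDataAvailabilityHeader(eds)
	// After this PR the constructor is fallible, because eds.RowRoots()
	// and eds.ColRoots() return errors in rsmt2d v0.10.0.
	dah, err := da.NewDataAvailabilityHeader(eds)
	if err != nil {
		return da.DataAvailabilityHeader{}, err
	}
	return dah, nil
}

func main() {
	// Placeholder input: real callers pass a valid square of shares, so an
	// error here only demonstrates the new error path.
	if _, err := buildDAH([][]byte{}); err != nil {
		log.Println("could not build data availability header:", err)
	}
}
```

The diffs below apply the same pattern at each call site: `panic` or `reject()` in the ABCI handlers, `require.NoError` in tests.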
10 changes: 9 additions & 1 deletion app/prepare_proposal.go
@@ -56,7 +56,15 @@ func (app *App) PrepareProposal(req abci.RequestPrepareProposal) abci.ResponsePr

 	// create the new data root by creating the data availability header (merkle
 	// roots of each row and col of the erasure data).
-	dah := da.NewDataAvailabilityHeader(eds)
+	dah, err := da.NewDataAvailabilityHeader(eds)
+	if err != nil {
+		app.Logger().Error(
+			"failure to create new data availability header",
+			"error",
+			err.Error(),
+		)
+		panic(err)
+	}

 	// tendermint doesn't need to use any of the erasure data, as only the
 	// protobuf encoded version of the block data is gossiped.
6 changes: 5 additions & 1 deletion app/process_proposal.go
@@ -122,7 +122,11 @@ func (app *App) ProcessProposal(req abci.RequestProcessProposal) (resp abci.Resp
 		return reject()
 	}

-	dah := da.NewDataAvailabilityHeader(eds)
+	dah, err := da.NewDataAvailabilityHeader(eds)
+	if err != nil {
+		logInvalidPropBlockError(app.Logger(), req.Header, "failure to create new data availability header", err)
+		return reject()
+	}
 	// by comparing the hashes we know the computed IndexWrappers (with the share indexes of the PFB's blobs)
 	// are identical and that square layout is consistent. This also means that the share commitment rules
 	// have been followed and thus each blobs share commitment should be valid
3 changes: 2 additions & 1 deletion app/test/integration_test.go
@@ -358,7 +358,8 @@ func (s *IntegrationTestSuite) TestShareInclusionProof() {
 func ExtendBlobTest(t *testing.T, block *coretypes.Block) {
 	eds, err := app.ExtendBlock(block.Data, block.Header.Version.App)
 	require.NoError(t, err)
-	dah := da.NewDataAvailabilityHeader(eds)
+	dah, err := da.NewDataAvailabilityHeader(eds)
+	require.NoError(t, err)
 	if !assert.Equal(t, dah.Hash(), block.DataHash.Bytes()) {
 		// save block to json file for further debugging if this occurs
 		b, err := json.MarshalIndent(block, "", " ")
6 changes: 4 additions & 2 deletions app/test/process_proposal_test.go
@@ -288,7 +288,8 @@ func TestProcessProposal(t *testing.T) {
 	eds, err := da.ExtendShares(shares.ToBytes(dataSquare))
 	require.NoError(t, err)

-	dah := da.NewDataAvailabilityHeader(eds)
+	dah, err := da.NewDataAvailabilityHeader(eds)
+	require.NoError(t, err)
 	// replace the hash of the prepare proposal response with the hash of a data
 	// square with a tampered sequence start indicator
 	d.Hash = dah.Hash()
@@ -323,6 +324,7 @@ func calculateNewDataHash(t *testing.T, txs [][]byte) []byte {
 	require.NoError(t, err)
 	eds, err := da.ExtendShares(shares.ToBytes(dataSquare))
 	require.NoError(t, err)
-	dah := da.NewDataAvailabilityHeader(eds)
+	dah, err := da.NewDataAvailabilityHeader(eds)
+	require.NoError(t, err)
 	return dah.Hash()
 }
2 changes: 1 addition & 1 deletion go.mod
@@ -27,7 +27,7 @@ require (
 require (
 	cosmossdk.io/errors v1.0.0-beta.7
 	cosmossdk.io/math v1.0.0-rc.0
-	github.com/celestiaorg/rsmt2d v0.9.0
+	github.com/celestiaorg/rsmt2d v0.10.0
 	github.com/cosmos/cosmos-proto v1.0.0-alpha8
 	github.com/cosmos/cosmos-sdk v0.46.13
 	github.com/cosmos/gogoproto v1.4.10
4 changes: 2 additions & 2 deletions go.sum
@@ -180,8 +180,8 @@ github.com/celestiaorg/nmt v0.17.0 h1:/k8YLwJvuHgT/jQ435zXKaDX811+sYEMXL4B/vYdSL
 github.com/celestiaorg/nmt v0.17.0/go.mod h1:ZndCeAR4l9lxm7W51ouoyTo1cxhtFgK+4DpEIkxRA3A=
 github.com/celestiaorg/quantum-gravity-bridge v1.3.0 h1:9zPIp7w1FWfkPnn16y3S4FpFLnQtS7rm81CUVcHEts0=
 github.com/celestiaorg/quantum-gravity-bridge v1.3.0/go.mod h1:6WOajINTDEUXpSj5UZzod16UZ96ZVB/rFNKyM+Mt1gI=
-github.com/celestiaorg/rsmt2d v0.9.0 h1:kon78I748ZqjNzI8OAqPN+2EImuZuanj/6gTh8brX3o=
-github.com/celestiaorg/rsmt2d v0.9.0/go.mod h1:E06nDxfoeBDltWRvTR9dLviiUZI5/6mLXAuhSJzz3Iw=
+github.com/celestiaorg/rsmt2d v0.10.0 h1:8dprr6CW5mCk5YPnbiLdirojw9YsJOE+XB+GORb8sT0=
+github.com/celestiaorg/rsmt2d v0.10.0/go.mod h1:BiCZkCJfhDHUEOJKXUeu+CudjluecKvRTqHcuxKvodc=
 github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4=
 github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
 github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=
28 changes: 20 additions & 8 deletions pkg/da/data_availability_header.go
@@ -38,18 +38,27 @@ type DataAvailabilityHeader struct {
 	hash []byte
 }

-// NewDataAvailabilityHeader generates a DataAvailability header using the provided square size and shares
-func NewDataAvailabilityHeader(eds *rsmt2d.ExtendedDataSquare) DataAvailabilityHeader {
-	// generate the row and col roots using the EDS
+// NewDataAvailabilityHeader generates a DataAvailability header using the
+// provided extended data square.
+func NewDataAvailabilityHeader(eds *rsmt2d.ExtendedDataSquare) (DataAvailabilityHeader, error) {
+	rowRoots, err := eds.RowRoots()
+	if err != nil {
+		return DataAvailabilityHeader{}, err
+	}
+	colRoots, err := eds.ColRoots()
+	if err != nil {
+		return DataAvailabilityHeader{}, err
+	}
+
 	dah := DataAvailabilityHeader{
-		RowRoots:    eds.RowRoots(),
-		ColumnRoots: eds.ColRoots(),
+		RowRoots:    rowRoots,
+		ColumnRoots: colRoots,
 	}

-	// generate the hash of the data using the new roots
+	// Generate the hash of the data using the new roots
 	dah.Hash()

-	return dah
+	return dah, nil
 }

 func ExtendShares(s [][]byte) (*rsmt2d.ExtendedDataSquare, error) {
@@ -167,7 +176,10 @@ func MinDataAvailabilityHeader() DataAvailabilityHeader {
 	if err != nil {
 		panic(err)
 	}
-	dah := NewDataAvailabilityHeader(eds)
+	dah, err := NewDataAvailabilityHeader(eds)
+	if err != nil {
+		panic(err)
+	}
 	return dah
 }

15 changes: 9 additions & 6 deletions pkg/da/data_availability_header_test.go
@@ -58,10 +58,11 @@ func TestNewDataAvailabilityHeader(t *testing.T) {
 		t.Run(tt.name, func(t *testing.T) {
 			eds, err := ExtendShares(tt.shares)
 			require.NoError(t, err)
-			resdah := NewDataAvailabilityHeader(eds)
-			require.Equal(t, tt.squareSize*2, uint64(len(resdah.ColumnRoots)), tt.name)
-			require.Equal(t, tt.squareSize*2, uint64(len(resdah.RowRoots)), tt.name)
-			require.Equal(t, tt.expectedHash, resdah.hash, tt.name)
+			got, err := NewDataAvailabilityHeader(eds)
+			require.NoError(t, err)
+			require.Equal(t, tt.squareSize*2, uint64(len(got.ColumnRoots)), tt.name)
+			require.Equal(t, tt.squareSize*2, uint64(len(got.RowRoots)), tt.name)
+			require.Equal(t, tt.expectedHash, got.hash, tt.name)
 		})
 	}
 }
@@ -106,7 +107,8 @@ func TestDataAvailabilityHeaderProtoConversion(t *testing.T) {
 	shares := generateShares(appconsts.DefaultSquareSizeUpperBound * appconsts.DefaultSquareSizeUpperBound)
 	eds, err := ExtendShares(shares)
 	require.NoError(t, err)
-	bigdah := NewDataAvailabilityHeader(eds)
+	bigdah, err := NewDataAvailabilityHeader(eds)
+	require.NoError(t, err)

 	tests := []test{
 		{
@@ -143,7 +145,8 @@ func Test_DAHValidateBasic(t *testing.T) {
 	shares := generateShares(maxSize)
 	eds, err := ExtendShares(shares)
 	require.NoError(t, err)
-	bigdah := NewDataAvailabilityHeader(eds)
+	bigdah, err := NewDataAvailabilityHeader(eds)
+	require.NoError(t, err)

 	// make a mutant dah that has too many roots
 	var tooBigDah DataAvailabilityHeader
3 changes: 2 additions & 1 deletion pkg/inclusion/nmt_caching_test.go
@@ -122,7 +122,8 @@ func TestEDSSubRootCacher(t *testing.T) {
 	eds, err := rsmt2d.ComputeExtendedDataSquare(d, appconsts.DefaultCodec(), stc.Constructor)
 	require.NoError(t, err)

-	dah := da.NewDataAvailabilityHeader(eds)
+	dah, err := da.NewDataAvailabilityHeader(eds)
+	require.NoError(t, err)

 	for i := range dah.RowRoots[:squareSize] {
 		expectedSubTreeRoots := calculateSubTreeRoots(t, eds.Row(uint(i))[:squareSize], 2)
12 changes: 10 additions & 2 deletions pkg/proof/proof.go
@@ -70,10 +70,18 @@ func NewShareInclusionProof(
 		return types.ShareProof{}, err
 	}

-	edsRowRoots := eds.RowRoots()
+	edsRowRoots, err := eds.RowRoots()
+	if err != nil {
+		return types.ShareProof{}, err
+	}
+
+	edsColRoots, err := eds.ColRoots()
+	if err != nil {
+		return types.ShareProof{}, err
+	}

 	// create the binary merkle inclusion proof for all the square rows to the data root
-	_, allProofs := merkle.ProofsFromByteSlices(append(edsRowRoots, eds.ColRoots()...))
+	_, allProofs := merkle.ProofsFromByteSlices(append(edsRowRoots, edsColRoots...))
 	rowProofs := make([]*merkle.Proof, endRow-startRow+1)
 	rowRoots := make([]tmbytes.HexBytes, endRow-startRow+1)
 	for i := startRow; i <= endRow; i++ {
3 changes: 2 additions & 1 deletion pkg/proof/proof_test.go
@@ -111,7 +111,8 @@ func TestNewShareInclusionProof(t *testing.T) {

 	// create the new data root by creating the data availability header (merkle
 	// roots of each row and col of the erasure data).
-	dah := da.NewDataAvailabilityHeader(eds)
+	dah, err := da.NewDataAvailabilityHeader(eds)
+	require.NoError(t, err)
 	dataRoot := dah.Hash()

 	type test struct {
3 changes: 2 additions & 1 deletion pkg/square/square_fuzz_test.go
@@ -57,7 +57,8 @@ func FuzzSquare(f *testing.F) {
 		cacher := inclusion.NewSubtreeCacher(uint64(s.Size()))
 		eds, err := rsmt2d.ComputeExtendedDataSquare(shares.ToBytes(s), appconsts.DefaultCodec(), cacher.Constructor)
 		require.NoError(t, err)
-		dah := da.NewDataAvailabilityHeader(eds)
+		dah, err := da.NewDataAvailabilityHeader(eds)
+		require.NoError(t, err)

 		decoder := encoding.MakeConfig(app.ModuleEncodingRegisters...).TxConfig.TxDecoder()

3 changes: 2 additions & 1 deletion pkg/square/square_test.go
@@ -224,7 +224,8 @@ func TestSquareShareCommitments(t *testing.T) {
 	cacher := inclusion.NewSubtreeCacher(uint64(dataSquare.Size()))
 	eds, err := rsmt2d.ComputeExtendedDataSquare(shares.ToBytes(dataSquare), appconsts.DefaultCodec(), cacher.Constructor)
 	require.NoError(t, err)
-	dah := da.NewDataAvailabilityHeader(eds)
+	dah, err := da.NewDataAvailabilityHeader(eds)
+	require.NoError(t, err)
 	decoder := encoding.MakeConfig(app.ModuleEncodingRegisters...).TxConfig.TxDecoder()

 	for pfbIndex := 0; pfbIndex < numTxs; pfbIndex++ {
