Add fft semantics change at start #420

Open · wants to merge 4 commits into base: master
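
This PR changes how blob data is represented at the disperser boundary: on dispersal the padded blob is moved from evaluation form (31 bytes per field element) into coefficient form via an IFFT before it is stored and encoded, and on retrieval the stored coefficients are FFT'd back into evaluation form so the caller gets the original bytes. The sketch below strings together the helpers that appear in this diff (rs.ToPaddedFrArray, fft.NewFFTSettings, ConvertEvalsToCoeffs/ConvertCoeffsToEvals and the byte converters); it is an illustration of the intended round trip with simplified error handling, not code from the PR.

// Sketch only: mirrors the dispersal/retrieval conversions introduced by this PR.
package fftsemantics

import (
	"math"

	"github.com/Layr-Labs/eigenda/encoding"
	"github.com/Layr-Labs/eigenda/encoding/fft"
	"github.com/Layr-Labs/eigenda/encoding/rs"
)

// Dispersal side: evaluation form (31 bytes per element) -> coefficient form (32 bytes per element).
func evalBytesToCoeffBytes(data []byte) ([]byte, error) {
	paddedFr := rs.ToPaddedFrArray(data) // every 31 bytes becomes one field element, padded to a power of two
	n := uint8(math.Log2(float64(len(paddedFr))))
	fs := fft.NewFFTSettings(n)
	coeffs, err := fs.ConvertEvalsToCoeffs(paddedFr) // IFFT
	if err != nil {
		return nil, err
	}
	return rs.ToByteArrayWith254Bits(coeffs), nil // each coefficient now takes the full 32 bytes
}

// Retrieval side: coefficient form -> evaluation form, then back to 31-byte packing.
func coeffBytesToEvalBytes(data []byte) ([]byte, error) {
	length := uint64(len(data) / encoding.NUMBER_FR_SECURITY_BYTES)
	fs := fft.NewFFTSettings(uint8(math.Log2(float64(length))))
	evals, err := fs.ConvertCoeffsToEvals(rs.ToFrArrayWith254Bits(data)) // FFT
	if err != nil {
		return nil, err
	}
	return rs.ToByteArray(evals, length*encoding.BYTES_PER_COEFFICIENT), nil
}
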
3 changes: 1 addition & 2 deletions clients/retrieval_client.go
@@ -160,7 +160,6 @@ func (r *retrievalClient) RetrieveBlob(
if !ok {
return nil, fmt.Errorf("no assignment to operator %v", reply.OperatorID)
}

err = r.verifier.VerifyFrames(reply.Chunks, assignment.GetIndices(), blobHeader.BlobCommitments, encodingParams)
if err != nil {
r.logger.Error("failed to verify chunks from operator", "operator", reply.OperatorID, "err", err)
@@ -173,5 +172,5 @@ func (r *retrievalClient) RetrieveBlob(
indices = append(indices, assignment.GetIndices()...)
}

- return r.verifier.Decode(chunks, indices, encodingParams, uint64(blobHeader.Length)*encoding.BYTES_PER_COEFFICIENT)
+ return r.verifier.Decode(chunks, indices, encodingParams, uint64(blobHeader.Length)*encoding.NUMBER_FR_SECURITY_BYTES)
}
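
Decode's last argument is the maximum output size in bytes; because the decoded blob is now in coefficient form, it is sized with 32 bytes per field element (NUMBER_FR_SECURITY_BYTES) rather than 31 (BYTES_PER_COEFFICIENT). A small illustration, assuming NUMBER_FR_SECURITY_BYTES is 32 as the rest of this diff implies:

// Illustrative only: maximum decoded size for a blob header of 64 field elements,
// assuming encoding.NUMBER_FR_SECURITY_BYTES is 32.
const assumedNumberFrSecurityBytes = 32
const maxDecodedSize = 64 * assumedNumberFrSecurityBytes // 2048 bytes passed as Decode's last argument
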
11 changes: 6 additions & 5 deletions clients/tests/retrieval_client_test.go
@@ -15,6 +15,7 @@ import (
"github.com/Layr-Labs/eigenda/encoding/kzg"
"github.com/Layr-Labs/eigenda/encoding/kzg/prover"
"github.com/Layr-Labs/eigenda/encoding/kzg/verifier"
"github.com/Layr-Labs/eigenda/encoding/rs"
indexermock "github.com/Layr-Labs/eigenda/indexer/mock"
"github.com/Layr-Labs/eigensdk-go/logging"
"github.com/consensys/gnark-crypto/ecc/bn254"
@@ -69,6 +70,7 @@ var (

func setup(t *testing.T) {

gettysburgAddressBytesIFFT, _ := rs.ConvertByteEvalToPaddedCoeffs(gettysburgAddressBytes)
var err error
chainState, err = coremock.MakeChainDataMock(core.OperatorIndex(numOperators))
if err != nil {
@@ -120,15 +122,15 @@ func setup(t *testing.T) {
RequestHeader: core.BlobRequestHeader{
SecurityParams: securityParams,
},
- Data: gettysburgAddressBytes,
+ Data: gettysburgAddressBytesIFFT,
}
operatorState, err = indexedChainState.GetOperatorState(context.Background(), (0), []core.QuorumID{quorumID})
if err != nil {
t.Fatalf("failed to get operator state: %s", err)
}

blobSize := uint(len(blob.Data))
- blobLength := encoding.GetBlobLength(uint(blobSize))
+ blobLength := encoding.GetBlobLengthInternal(uint(blobSize))

chunkLength, err := coordinator.CalculateChunkLength(operatorState, blobLength, 0, securityParams[0])
if err != nil {
@@ -151,7 +153,7 @@

params := encoding.ParamsFromMins(chunkLength, info.TotalChunks)

- commitments, chunks, err := p.EncodeAndProve(blob.Data, params)
+ commitments, chunks, err := p.EncodeAndProveSymbols(blob.Data, params)
if err != nil {
t.Fatal(err)
}
@@ -266,7 +268,6 @@ func TestValidBlobHeader(t *testing.T) {
data, err := retrievalClient.RetrieveBlob(context.Background(), batchHeaderHash, 0, 0, batchRoot, 0)
assert.NoError(t, err)
recovered := bytes.TrimRight(data, "\x00")
- assert.Len(t, data, 1488)
+ assert.Len(t, data, 64*31) // the 48 field elements (31 bytes each) are padded to 64 by the IFFT, so the retrieved data is 64*31 bytes
assert.Equal(t, gettysburgAddressBytes, recovered)

}
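
The new expected length follows from the padding: the removed assertion implies the test payload packs into 1488/31 = 48 field elements, the IFFT pads that to the next power of two (64), and the retrieved evaluation-form data is therefore 64*31 bytes, with the original string recovered by trimming the zero padding. A small constant block spelling out that arithmetic (illustrative only):

// Illustrative arithmetic; 1488 is the value in the assertion removed above.
const (
	bytesPerElement  = 31
	originalElements = 1488 / bytesPerElement          // 48 field elements before padding
	paddedElements   = 64                              // next power of two >= 48
	retrievedBytes   = paddedElements * bytesPerElement // 1984 == 64*31, the new expected length
)
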
12 changes: 9 additions & 3 deletions core/test/core_test.go
@@ -15,6 +15,7 @@ import (
"github.com/Layr-Labs/eigenda/encoding/kzg"
"github.com/Layr-Labs/eigenda/encoding/kzg/prover"
"github.com/Layr-Labs/eigenda/encoding/kzg/verifier"
"github.com/Layr-Labs/eigenda/encoding/rs"
"github.com/gammazero/workerpool"
"github.com/hashicorp/go-multierror"
"github.com/stretchr/testify/assert"
@@ -73,11 +74,16 @@ func makeTestBlob(t *testing.T, length int, securityParams []*core.SecurityParam
t.Fatal(err)
}

dataIFFT, err := rs.ConvertByteEvalToPaddedCoeffs(data)
if err != nil {
t.Fatal(err)
}

blob := core.Blob{
RequestHeader: core.BlobRequestHeader{
SecurityParams: securityParams,
},
- Data: data,
+ Data: dataIFFT,
}
return blob
}
@@ -122,7 +128,7 @@ func prepareBatch(t *testing.T, operatorCount uint, blobs []core.Blob, bn uint)
}

blobSize := uint(len(blob.Data))
- blobLength := encoding.GetBlobLength(blobSize)
+ blobLength := encoding.GetBlobLengthInternal(blobSize)

chunkLength, err := asn.CalculateChunkLength(state, blobLength, 0, securityParam)
if err != nil {
@@ -145,7 +151,7 @@ func prepareBatch(t *testing.T, operatorCount uint, blobs []core.Blob, bn uint)

params := encoding.ParamsFromMins(chunkLength, info.TotalChunks)

- commitments, chunks, err := p.EncodeAndProve(blob.Data, params)
+ commitments, chunks, err := p.EncodeAndProveSymbols(blob.Data, params)
if err != nil {
t.Fatal(err)
}
57 changes: 56 additions & 1 deletion disperser/apiserver/server.go
@@ -4,6 +4,7 @@ import (
"context"
"errors"
"fmt"
"math"
"math/rand"
"net"
"slices"
@@ -20,6 +21,8 @@ import (
"github.com/Layr-Labs/eigenda/core/auth"
"github.com/Layr-Labs/eigenda/disperser"
"github.com/Layr-Labs/eigenda/encoding"
"github.com/Layr-Labs/eigenda/encoding/fft"
"github.com/Layr-Labs/eigenda/encoding/rs"
"github.com/Layr-Labs/eigensdk-go/logging"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/crypto"
@@ -49,6 +52,8 @@ type DispersalServer struct {

metrics *disperser.Metrics

transformer map[uint64]*fft.FFTSettings // eval to coeff

logger logging.Logger
}

@@ -87,6 +92,7 @@ func NewDispersalServer(
logger: logger,
ratelimiter: ratelimiter,
authenticator: authenticator,
transformer: make(map[uint64]*fft.FFTSettings),
mu: &sync.RWMutex{},
quorumConfig: QuorumConfig{},
}
@@ -246,6 +252,31 @@ func (s *DispersalServer) disperseBlob(ctx context.Context, blob *core.Blob, aut
}
}

// convert the blob data into padded field elements, with every 31 bytes as one element
paddedBlobFr := rs.ToPaddedFrArray(blob.Data)
blobLengthPowOf2 := uint64(len(paddedBlobFr))

// get the FFT domain for this length, creating it if it does not exist yet
tf, ok := s.transformer[blobLengthPowOf2]
if !ok {
n := uint8(math.Log2(float64(blobLengthPowOf2)))
s.transformer[blobLengthPowOf2] = fft.NewFFTSettings(n)
tf = s.transformer[blobLengthPowOf2]
}

// convert eval to coeff
coeffsFr, err := tf.ConvertEvalsToCoeffs(paddedBlobFr)
if err != nil {
return nil, api.NewInvalidArgError(err.Error())
}

// the input bytes to be fed into the EigenDA encoding streamer; every element takes the full
// 32 bytes, because after the FFT transformation the resulting field elements will most likely
// have a non-zero leading bit
coeffsBytes := rs.ToByteArrayWith254Bits(coeffsFr)

blob.Data = coeffsBytes

requestedAt := uint64(time.Now().UnixNano())
metadataKey, err := s.blobStore.StoreBlob(ctx, blob, requestedAt)
if err != nil {
@@ -398,6 +429,7 @@ func (s *DispersalServer) checkRateLimitsAndAddRatesToHeader(ctx context.Context
requestParams := make([]common.RequestParams, 0)

blobSize := len(blob.Data)
// before the IFFT, we still treat every 31 bytes as one field element
length := encoding.GetBlobLength(uint(blobSize))

for i, param := range blob.RequestHeader.SecurityParams {
@@ -677,10 +709,33 @@ func (s *DispersalServer) RetrieveBlob(ctx context.Context, req *pb.RetrieveBlob
return nil, api.NewInternalError("failed to get blob data, please retry")
}

// in disperseBlob, every Fr element stored in shared storage takes the full 32 bytes
blobLengthPowOf2 := uint64(len(data) / encoding.NUMBER_FR_SECURITY_BYTES)

tf, ok := s.transformer[blobLengthPowOf2]
if !ok {
n := uint8(math.Log2(float64(blobLengthPowOf2)))
s.transformer[blobLengthPowOf2] = fft.NewFFTSettings(n)
tf = s.transformer[blobLengthPowOf2]
}

// convert bytes to gnark Fr elements
coeffsFr := rs.ToFrArrayWith254Bits(data)

evalFr, err := tf.ConvertCoeffsToEvals(coeffsFr)
if err != nil {
s.logger.Error("Failed to IFFT during retrieve blob", "err", err)
s.metrics.HandleFailedRequest(codes.Internal.String(), "", len(data), "RetrieveBlob")
return nil, api.NewInternalError("failed to get blob data, please retry")
}

// back to the original data in evaluation form, whose field elements have a zero leading bit
evalBytes := rs.ToByteArray(evalFr, blobLengthPowOf2*encoding.BYTES_PER_COEFFICIENT)

s.metrics.HandleSuccessfulRequest("", len(data), "RetrieveBlob")

return &pb.RetrieveBlobReply{
- Data: data,
+ Data: evalBytes,
}, nil
}

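
Both disperseBlob and RetrieveBlob look up FFT settings from the new transformer map, keyed by the power-of-two element count, so a domain is built only once per blob size. A stripped-down sketch of that lazy lookup (assuming fft.NewFFTSettings(n) builds a domain of size 2^n, and ignoring any locking the concurrent gRPC handlers may need):

// Sketch of the lazy FFTSettings lookup used by disperseBlob and RetrieveBlob;
// the server holds transformer as a map[uint64]*fft.FFTSettings.
package fftcache

import (
	"math"

	"github.com/Layr-Labs/eigenda/encoding/fft"
)

func fftSettingsFor(transformer map[uint64]*fft.FFTSettings, lengthPowOf2 uint64) *fft.FFTSettings {
	if tf, ok := transformer[lengthPowOf2]; ok {
		return tf
	}
	n := uint8(math.Log2(float64(lengthPowOf2)))
	tf := fft.NewFFTSettings(n)
	transformer[lengthPowOf2] = tf
	return tf
}
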
2 changes: 1 addition & 1 deletion disperser/apiserver/server_test.go
@@ -325,7 +325,7 @@ func TestRetrieveBlob(t *testing.T) {
retrieveData, err := retrieveBlob(t, dispersalServer, requestID, 1)
assert.NoError(t, err)

- assert.Equal(t, data, retrieveData)
+ assert.Equal(t, data, retrieveData[:len(data)])
}

}
3 changes: 2 additions & 1 deletion disperser/batcher/encoding_streamer.go
@@ -297,7 +297,8 @@ func (e *EncodingStreamer) RequestEncodingForBlob(ctx context.Context, metadata
continue
}

- blobLength := encoding.GetBlobLength(metadata.RequestMetadata.BlobSize)
+ // After the API server, every 32 bytes is one field element, so use the internal length function
+ blobLength := encoding.GetBlobLengthInternal(metadata.RequestMetadata.BlobSize)

chunkLength, err := e.assignmentCoordinator.CalculateChunkLength(state.OperatorState, blobLength, e.StreamerConfig.TargetNumChunks, quorum)
if err != nil {
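
The two helpers encode the two packings used on either side of the API server: GetBlobLength counts the user payload in 31-byte field elements, while GetBlobLengthInternal counts the stored, IFFT'd data in full 32-byte elements. A hypothetical sketch of what the two conventions mean (the real implementations live in the encoding package and may differ):

// Hypothetical illustrations of the two length conventions; not the actual
// encoding.GetBlobLength / encoding.GetBlobLengthInternal implementations.
package lengthsketch

func blobLengthEval(blobSize uint) uint { // user payload: 31 bytes per field element
	return (blobSize + 30) / 31
}

func blobLengthInternal(blobSize uint) uint { // stored coefficients: 32 bytes per field element
	return (blobSize + 31) / 32
}
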
6 changes: 3 additions & 3 deletions disperser/batcher/encoding_streamer_test.go
@@ -441,7 +441,7 @@ func TestPartialBlob(t *testing.T) {
assert.NotNil(t, encodedBlob1.BlobHeader.BlobCommitments)
assert.NotNil(t, encodedBlob1.BlobHeader.BlobCommitments.Commitment)
assert.NotNil(t, encodedBlob1.BlobHeader.BlobCommitments.LengthProof)
- assert.Equal(t, encodedBlob1.BlobHeader.BlobCommitments.Length, uint(48))
+ assert.Equal(t, encodedBlob1.BlobHeader.BlobCommitments.Length, uint(46)) // the length is now counted in 32-byte field elements, since the encoder accepts 32 bytes as a unit
assert.Len(t, encodedBlob1.BlobHeader.QuorumInfos, 1)
assert.ElementsMatch(t, encodedBlob1.BlobHeader.QuorumInfos, []*core.BlobQuorumInfo{{
SecurityParam: core.SecurityParam{
@@ -669,7 +669,7 @@ func TestGetBatch(t *testing.T) {
assert.NotNil(t, encodedBlob1.BlobHeader.BlobCommitments)
assert.NotNil(t, encodedBlob1.BlobHeader.BlobCommitments.Commitment)
assert.NotNil(t, encodedBlob1.BlobHeader.BlobCommitments.LengthProof)
- assert.Equal(t, encodedBlob1.BlobHeader.BlobCommitments.Length, uint(48))
+ assert.Equal(t, encodedBlob1.BlobHeader.BlobCommitments.Length, uint(46))
assert.Len(t, encodedBlob1.BlobHeader.QuorumInfos, 2)
assert.ElementsMatch(t, encodedBlob1.BlobHeader.QuorumInfos, []*core.BlobQuorumInfo{
{
@@ -702,7 +702,7 @@
assert.NotNil(t, encodedBlob2.BlobHeader.BlobCommitments)
assert.NotNil(t, encodedBlob2.BlobHeader.BlobCommitments.Commitment)
assert.NotNil(t, encodedBlob2.BlobHeader.BlobCommitments.LengthProof)
- assert.Equal(t, encodedBlob2.BlobHeader.BlobCommitments.Length, uint(48))
+ assert.Equal(t, encodedBlob2.BlobHeader.BlobCommitments.Length, uint(46))
assert.Len(t, encodedBlob2.BlobHeader.QuorumInfos, 1)
assert.ElementsMatch(t, encodedBlob2.BlobHeader.QuorumInfos, []*core.BlobQuorumInfo{{
SecurityParam: core.SecurityParam{
2 changes: 1 addition & 1 deletion disperser/encoder/server.go
@@ -82,7 +82,7 @@ func (s *Server) handleEncoding(ctx context.Context, req *pb.EncodeBlobRequest)
NumChunks: uint64(req.EncodingParams.NumChunks),
}

- commits, chunks, err := s.prover.EncodeAndProve(req.Data, encodingParams)
+ commits, chunks, err := s.prover.EncodeAndProveSymbols(req.Data, encodingParams)

if err != nil {
return nil, err
28 changes: 17 additions & 11 deletions disperser/encoder/server_test.go
@@ -17,6 +17,7 @@ import (

"github.com/Layr-Labs/eigenda/encoding/kzg"
encmock "github.com/Layr-Labs/eigenda/encoding/mock"
"github.com/Layr-Labs/eigenda/encoding/rs"
"github.com/Layr-Labs/eigensdk-go/logging"

"github.com/Layr-Labs/eigenda/core"
@@ -27,7 +28,8 @@
)

var (
gettysburgAddressBytes = []byte("Fourscore and seven years ago our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.")
gettysburgAddressBytes = []byte("Fourscore and seven years ago our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.")
gettysburgAddressBytesIFFT = []byte{}
)

var logger = logging.NewNoopLogger()
@@ -66,11 +68,13 @@ func getTestData() (core.Blob, encoding.EncodingParams) {
},
}

// use the IFFT of the data as input to the tests
gettysburgAddressBytesIFFT, _ = rs.ConvertByteEvalToPaddedCoeffs(gettysburgAddressBytes)
testBlob := core.Blob{
RequestHeader: core.BlobRequestHeader{
SecurityParams: securityParams,
},
- Data: gettysburgAddressBytes,
+ Data: gettysburgAddressBytesIFFT,
}

indexedChainState, _ := coremock.MakeChainDataMock(core.OperatorIndex(10))
@@ -136,12 +140,13 @@ func TestEncodeBlob(t *testing.T) {
}
assert.NotNil(t, chunksData)

- // Indices obtained from Encoder_Test
- indices := []encoding.ChunkNumber{
- 0, 1, 2, 3, 4, 5, 6, 7,
+ // 32 indices: powerof2(64 * 10 / 55) = 16, and the core assignment logic multiplies that by 2
+ indices := make([]encoding.ChunkNumber, 32)
+ for i := 0; i < 32; i++ {
+ indices[i] = encoding.ChunkNumber(i)
}

- maxInputSize := uint64(len(gettysburgAddressBytes)) + 10
+ maxInputSize := uint64(len(gettysburgAddressBytesIFFT))
decoded, err := testProver.Decode(chunksData, indices, testEncodingParams, maxInputSize)
assert.Nil(t, err)
recovered := bytes.TrimRight(decoded, "\x00")
@@ -188,7 +193,7 @@ func TestThrottling(t *testing.T) {
Length: 10,
}

encoder.On("EncodeAndProve", mock.Anything, mock.Anything).Return(blobCommitment, []*encoding.Frame{}, nil)
encoder.On("EncodeAndProveSymbols", mock.Anything, mock.Anything).Return(blobCommitment, []*encoding.Frame{}, nil)
encoderServerConfig := ServerConfig{
GrpcPort: "3000",
MaxConcurrentRequests: concurrentRequests,
@@ -275,12 +280,13 @@ func TestEncoderPointsLoading(t *testing.T) {
}
assert.NotNil(t, chunksData)

- // Indices obtained from Encoder_Test
- indices := []encoding.ChunkNumber{
- 0, 1, 2, 3, 4, 5, 6, 7,
+ // 32 indices: powerof2(64 * 10 / 55) = 16, and the core assignment logic multiplies that by 2
+ indices := make([]encoding.ChunkNumber, 32)
+ for i := 0; i < 32; i++ {
+ indices[i] = encoding.ChunkNumber(i)
}

- maxInputSize := uint64(len(gettysburgAddressBytes)) + 10
+ maxInputSize := uint64(len(gettysburgAddressBytesIFFT))
decoded, err := testProver.Decode(chunksData, indices, testEncodingParams, maxInputSize)
assert.Nil(t, err)
recovered := bytes.TrimRight(decoded, "\x00")
2 changes: 1 addition & 1 deletion disperser/local_encoder_client.go
@@ -24,7 +24,7 @@ func NewLocalEncoderClient(prover encoding.Prover) *LocalEncoderClient {
func (m *LocalEncoderClient) EncodeBlob(ctx context.Context, data []byte, encodingParams encoding.EncodingParams) (*encoding.BlobCommitments, []*encoding.Frame, error) {
m.mu.Lock()
defer m.mu.Unlock()
- commits, chunks, err := m.prover.EncodeAndProve(data, encodingParams)
+ commits, chunks, err := m.prover.EncodeAndProveSymbols(data, encodingParams)
if err != nil {
return nil, nil, err
}
3 changes: 3 additions & 0 deletions encoding/encoding.go
@@ -11,6 +11,9 @@ type Prover interface {
// for any number M such that M*params.ChunkLength > BlobCommitments.Length, then any set of M chunks will be sufficient to
// reconstruct the blob.
EncodeAndProve(data []byte, params EncodingParams) (BlobCommitments, []*Frame, error)

// EncodeAndProveSymbols behaves the same way as EncodeAndProve, except it takes bytes where every 32 bytes is a symbol as input
EncodeAndProveSymbols(data []byte, params EncodingParams) (BlobCommitments, []*Frame, error)
}

type Verifier interface {
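
For callers, the distinction between the two Prover methods is only in how the input bytes are interpreted: EncodeAndProve keeps the original 31-bytes-per-element packing, while EncodeAndProveSymbols treats every 32 bytes as one symbol and is what the disperser now uses after the IFFT. A usage sketch (prover construction, data preparation and error details omitted; the helper below is hypothetical):

// Hypothetical caller showing the two entry points side by side; not part of the PR.
package proversketch

import "github.com/Layr-Labs/eigenda/encoding"

func encodeBoth(p encoding.Prover, rawPayload, coeffBytes []byte, params encoding.EncodingParams) error {
	// Old path: raw payload bytes, every 31 bytes interpreted as one field element.
	if _, _, err := p.EncodeAndProve(rawPayload, params); err != nil {
		return err
	}
	// New path, used after the API server's IFFT: every 32 bytes is one symbol.
	if _, _, err := p.EncodeAndProveSymbols(coeffBytes, params); err != nil {
		return err
	}
	return nil
}
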