Documentation
¶
Overview ¶
Package da provides Data Availability layer infrastructure for the Lux blockchain. DA is consensus-critical: validators must certify availability before block finalization.
Index ¶
- Constants
- Variables
- func ComputeDARoot(commitments []*DACommitment) []byte
- type BlockDAInfo
- type Chunk
- type DABlob
- type DACert
- type DACommitment
- type ErasureCodedBlob
- type ErasureConfig
- type Sample
- type Sampler
- type SamplingConfig
- type SamplingResult
- type Store
- func (s *Store) DeleteBlob(ctx context.Context, blobID ids.ID) error
- func (s *Store) GetBlob(ctx context.Context, blobID ids.ID) (*DABlob, error)
- func (s *Store) GetCert(ctx context.Context, blobID ids.ID) (*DACert, error)
- func (s *Store) GetChunks(ctx context.Context, blobID ids.ID, indices []uint32) ([]*Chunk, error)
- func (s *Store) PruneExpired(ctx context.Context) (int, error)
- func (s *Store) Stats() map[string]interface{}
- func (s *Store) StoreBlob(ctx context.Context, blob *DABlob) error
- func (s *Store) StoreCert(ctx context.Context, cert *DACert) error
- type StoreConfig
- type Validator
- func (v *Validator) CalculateConfidence(sampleCount int, successRate float64) float64
- func (v *Validator) CertifyAvailability(ctx context.Context, commitment *DACommitment, signature []byte) (*DACert, error)
- func (v *Validator) SampleAndVerify(ctx context.Context, blobID ids.ID, seed []byte) (*SamplingResult, error)
- func (v *Validator) ValidateBlob(ctx context.Context, blob *DABlob) (*DACommitment, error)
- func (v *Validator) VerifyCert(ctx context.Context, cert *DACert) (bool, error)
- type ValidatorConfig
Constants ¶
const (
	// DefaultChunkSize is the default size of each data chunk
	DefaultChunkSize = 512

	// DefaultFieldModulus is the modulus for finite field operations (BLS12-381)
	DefaultFieldModulus = "52435875175126190479447740508185965837690552500527637822603658699938581184513"

	// MaxBlobSize is the maximum size of a blob
	MaxBlobSize = 128 * 1024 // 128KB

	// MinSampleCount is the minimum number of samples for availability verification
	MinSampleCount = 16
)
Variables ¶
var (
	// ErrBlobTooLarge indicates the blob exceeds maximum size
	ErrBlobTooLarge = errors.New("blob exceeds maximum size")

	// ErrInvalidProof indicates an invalid availability proof
	ErrInvalidProof = errors.New("invalid availability proof")

	// ErrInvalidCommitment indicates an invalid KZG commitment
	ErrInvalidCommitment = errors.New("invalid commitment")

	// ErrChunkNotFound indicates a chunk was not found
	ErrChunkNotFound = errors.New("chunk not found")

	// ErrInsufficientSamples indicates not enough samples were provided
	ErrInsufficientSamples = errors.New("insufficient samples for availability verification")
)
var (
	// ErrBlobNotFound indicates blob was not found
	ErrBlobNotFound = errors.New("blob not found")

	// ErrCertNotFound indicates certificate was not found
	ErrCertNotFound = errors.New("DA certificate not found")
)
Functions ¶
func ComputeDARoot ¶
func ComputeDARoot(commitments []*DACommitment) []byte
ComputeDARoot computes the DA root from commitments
Types ¶
type BlockDAInfo ¶
type BlockDAInfo struct {
BlobCommitments [][]byte `json:"blobCommitments"` // Commitments for all blobs
DARoot []byte `json:"daRoot"` // Root of DA commitments
WitnessRoot []byte `json:"witnessRoot"` // Root of witnesses/proofs
BlobCount uint32 `json:"blobCount"` // Number of blobs in block
TotalDataSize uint64 `json:"totalDataSize"` // Total data size in bytes
}
BlockDAInfo contains DA-related information for a block header
type Chunk ¶
type Chunk struct {
Index uint32 `json:"index"`
Data []byte `json:"data"`
Proof []byte `json:"proof"` // KZG proof for this chunk
Commitment []byte `json:"commitment"`
}
Chunk represents a chunk of data with its proof
type DABlob ¶
type DABlob struct {
ID ids.ID `json:"id"`
Data []byte `json:"data"`
Commitment []byte `json:"commitment"` // KZG commitment
Chunks []*Chunk `json:"chunks"`
ChunkCount uint32 `json:"chunkCount"`
ChunkSize uint32 `json:"chunkSize"`
Height uint64 `json:"height"` // Block height where blob was included
Timestamp time.Time `json:"timestamp"`
Submitter ids.ID `json:"submitter"` // Who submitted the blob
}
DABlob represents a data availability blob
func (*DABlob) CreateCommitment ¶
func (b *DABlob) CreateCommitment() *DACommitment
CreateCommitment creates a DACommitment from the blob
func (*DABlob) VerifyChunk ¶
VerifyChunk verifies a chunk against the commitment
type DACert ¶
type DACert struct {
Commitment *DACommitment `json:"commitment"`
Signatures [][]byte `json:"signatures"` // Validator signatures
SignerBitmap []byte `json:"signerBitmap"` // Bitmap of signing validators
Threshold uint32 `json:"threshold"` // Required signature threshold
Timestamp int64 `json:"timestamp"`
}
DACert represents a Data Availability Certificate
type DACommitment ¶
type DACommitment struct {
BlobID ids.ID `json:"blobId"`
Commitment []byte `json:"commitment"` // KZG commitment
ChunkCount uint32 `json:"chunkCount"`
DataRoot []byte `json:"dataRoot"` // Merkle root of chunks
ErasureRoot []byte `json:"erasureRoot"` // Erasure coding root
Height uint64 `json:"height"`
ValidatorSigs []byte `json:"validatorSigs"` // Aggregated validator signatures
}
DACommitment represents a commitment to data availability
type ErasureCodedBlob ¶
type ErasureCodedBlob struct {
*DABlob
DataShards [][]byte `json:"dataShards"`
ParityShards [][]byte `json:"parityShards"`
ErasureRoot []byte `json:"erasureRoot"`
}
ErasureCodedBlob represents an erasure-coded blob
func ApplyErasureCoding ¶
func ApplyErasureCoding(blob *DABlob, config *ErasureConfig) (*ErasureCodedBlob, error)
ApplyErasureCoding applies erasure coding to a blob
func (*ErasureCodedBlob) CanRecover ¶
func (e *ErasureCodedBlob) CanRecover(availableIndices []uint32) bool
CanRecover checks if the blob can be recovered from available shards
type ErasureConfig ¶
type ErasureConfig struct {
DataShards int `json:"dataShards"` // Original data shards
ParityShards int `json:"parityShards"` // Parity shards for recovery
}
ErasureConfig configures erasure coding parameters
func DefaultErasureConfig ¶
func DefaultErasureConfig() *ErasureConfig
DefaultErasureConfig returns default erasure coding configuration
type Sample ¶
type Sample struct {
ChunkIndex uint32 `json:"chunkIndex"`
Data []byte `json:"data"`
Proof []byte `json:"proof"`
}
Sample represents a random sample for availability verification
type Sampler ¶
type Sampler struct {
// contains filtered or unexported fields
}
Sampler handles data availability sampling
func NewSampler ¶
func NewSampler(config *SamplingConfig) *Sampler
NewSampler creates a new DA sampler
func (*Sampler) GenerateSampleIndices ¶
GenerateSampleIndices generates random sample indices for a blob
func (*Sampler) VerifySamples ¶
func (s *Sampler) VerifySamples(blob *DABlob, samples []*Sample) *SamplingResult
VerifySamples verifies sampled chunks and returns sampling result
type SamplingConfig ¶
type SamplingConfig struct {
SampleCount int `json:"sampleCount"` // Number of samples to request
Threshold float64 `json:"threshold"` // Required success rate (0-1)
Timeout int `json:"timeout"` // Sampling timeout in seconds
RetryCount int `json:"retryCount"` // Number of retries per sample
}
SamplingConfig configures DA sampling parameters
func DefaultSamplingConfig ¶
func DefaultSamplingConfig() *SamplingConfig
DefaultSamplingConfig returns default sampling configuration
type SamplingResult ¶
type SamplingResult struct {
BlobID ids.ID `json:"blobId"`
Samples []*Sample `json:"samples"`
SampleCount int `json:"sampleCount"`
Available bool `json:"available"`
Confidence float64 `json:"confidence"` // Confidence level (0-1)
Timestamp time.Time `json:"timestamp"`
}
SamplingResult represents the result of data availability sampling
type Store ¶
type Store struct {
// contains filtered or unexported fields
}
Store manages DA blob storage and retrieval
func NewStore ¶
func NewStore(db database.Database, config *StoreConfig) *Store
NewStore creates a new DA store
func (*Store) DeleteBlob ¶
func (s *Store) DeleteBlob(ctx context.Context, blobID ids.ID) error
DeleteBlob deletes a DA blob
func (*Store) PruneExpired ¶
func (s *Store) PruneExpired(ctx context.Context) (int, error)
PruneExpired removes expired blobs
type StoreConfig ¶
type StoreConfig struct {
MaxBlobCache int `json:"maxBlobCache"` // Maximum blobs to cache in memory
RetentionPeriod int64 `json:"retentionPeriod"` // Blob retention in seconds
EnableErasure bool `json:"enableErasure"` // Enable erasure coding
ErasureDataRatio int `json:"erasureDataRatio"` // Data to parity ratio
}
StoreConfig configures the DA store
func DefaultStoreConfig ¶
func DefaultStoreConfig() *StoreConfig
DefaultStoreConfig returns default store configuration
type Validator ¶
type Validator struct {
// contains filtered or unexported fields
}
Validator handles DA validation and certification
func NewValidator ¶
func NewValidator(store *Store, config *ValidatorConfig) *Validator
NewValidator creates a new DA validator
func (*Validator) CalculateConfidence ¶
func (v *Validator) CalculateConfidence(sampleCount int, successRate float64) float64
CalculateConfidence calculates the confidence level from the sample count and success rate
func (*Validator) CertifyAvailability ¶
func (v *Validator) CertifyAvailability(ctx context.Context, commitment *DACommitment, signature []byte) (*DACert, error)
CertifyAvailability certifies data is available
func (*Validator) SampleAndVerify ¶
func (v *Validator) SampleAndVerify(ctx context.Context, blobID ids.ID, seed []byte) (*SamplingResult, error)
SampleAndVerify performs DA sampling and verification
func (*Validator) ValidateBlob ¶
func (v *Validator) ValidateBlob(ctx context.Context, blob *DABlob) (*DACommitment, error)
ValidateBlob validates a DA blob and returns commitment
type ValidatorConfig ¶
type ValidatorConfig struct {
MinSignatures int `json:"minSignatures"` // Minimum validator signatures for cert
SamplingEnabled bool `json:"samplingEnabled"` // Enable light client sampling
ConfidenceTarget float64 `json:"confidenceTarget"` // Target confidence level
}
ValidatorConfig configures the DA validator
func DefaultValidatorConfig ¶
func DefaultValidatorConfig() *ValidatorConfig
DefaultValidatorConfig returns default validator configuration