Documentation

Index
- Constants
- Variables
- func EstimateBatchProcessing(start, end uint64, batchSize uint, sleepAfterEachBatchCommit time.Duration, ...) (batchCount uint64, totalDuration time.Duration)
- func LoopPruneExecutionDataFromRootToLatestSealed(ctx context.Context, log zerolog.Logger, metrics module.ExecutionMetrics, ...) error
- func NewChunkDataPackPruningEngine(log zerolog.Logger, metrics module.ExecutionMetrics, state protocol.State, ...) *component.ComponentManager
- type ChunkDataPackPruner
- type LatestPrunable
- type PruningConfig
 
Constants

const NextHeightForUnprunedExecutionDataPackKey = "NextHeightForUnprunedExecutionDataPackKey"
Variables
var DefaultConfig = PruningConfig{
	Threshold:                 30 * 60 * 60 * 24 * 1.2,
	BatchSize:                 1200,
	SleepAfterEachBatchCommit: 12 * time.Second,
	SleepAfterEachIteration:   math.MaxInt64,
}
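A rough reading of the default Threshold expression, assuming the 1.2 factor is a blocks-per-second rate and the remaining factors express 30 days in seconds (this interpretation is an assumption, not stated by the package):

	// Hypothetical breakdown of the default Threshold (assumed interpretation):
	// keep roughly 30 days' worth of blocks at about 1.2 blocks per second.
	const (
		days            = 30
		secondsPerDay   = 60 * 60 * 24
		blocksPerSecond = 1.2 // assumed block production rate
	)

	var approxDefaultThreshold = uint64(days * secondsPerDay * blocksPerSecond) // 3,110,400 blocks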
Functions

func EstimateBatchProcessing
func EstimateBatchProcessing(
	start, end uint64,
	batchSize uint,
	sleepAfterEachBatchCommit time.Duration,
	commitDuration time.Duration,
) (
	batchCount uint64,
	totalDuration time.Duration,
)
EstimateBatchProcessing estimates the number of batches and the total duration of a pruning run. start and end are both inclusive.
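A minimal usage sketch (the heights, batch size, and commit duration below are illustrative assumptions, and the package is assumed to be imported as pruner):

	batchCount, totalDuration := pruner.EstimateBatchProcessing(
		0, 1_000_000,   // start and end heights, both inclusive
		1200,           // batch size in blocks
		12*time.Second, // sleep after each batch commit
		time.Second,    // assumed duration of a single batch commit
	)
	fmt.Printf("%d batches, roughly %s in total\n", batchCount, totalDuration)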
func LoopPruneExecutionDataFromRootToLatestSealed
func LoopPruneExecutionDataFromRootToLatestSealed(
	ctx context.Context,
	log zerolog.Logger,
	metrics module.ExecutionMetrics,
	state protocol.State,
	protocolDB storage.DB,
	headers storage.Headers,
	chunkDataPacks storage.ChunkDataPacks,
	results storage.ExecutionResults,
	chunkDataPacksDB *pebble.DB,
	config PruningConfig,
) error
func NewChunkDataPackPruningEngine
func NewChunkDataPackPruningEngine(
	log zerolog.Logger,
	metrics module.ExecutionMetrics,
	state protocol.State,
	protocolDB storage.DB,
	headers storage.Headers,
	chunkDataPacks storage.ChunkDataPacks,
	results storage.ExecutionResults,
	chunkDataPacksDB *pebble.DB,
	config PruningConfig,
) *component.ComponentManager
NewChunkDataPackPruningEngine creates a component that prunes chunk data packs from root to the latest sealed block.
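A minimal wiring sketch, assuming the node already provides the logger, metrics, protocol state, storages, and databases (every lowercase variable below is a placeholder from that surrounding setup, not something this package defines), and assuming the package is imported as pruner:

	engine := pruner.NewChunkDataPackPruningEngine(
		log,              // zerolog.Logger
		metrics,          // module.ExecutionMetrics
		state,            // protocol.State
		protocolDB,       // storage.DB
		headers,          // storage.Headers
		chunkDataPacks,   // storage.ChunkDataPacks
		results,          // storage.ExecutionResults
		chunkDataPacksDB, // *pebble.DB
		pruner.DefaultConfig,
	)

	// The returned component manager follows the usual component lifecycle:
	// start it with the node's signaler context and wait for readiness.
	engine.Start(signalerCtx)
	<-engine.Ready()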
Types

type ChunkDataPackPruner
type ChunkDataPackPruner struct {
	*pruners.ChunkDataPackPruner
}
func NewChunkDataPackPruner
func NewChunkDataPackPruner(chunkDataPacks storage.ChunkDataPacks, results storage.ExecutionResults) *ChunkDataPackPruner
func (*ChunkDataPackPruner) ExecuteByBlockID
func (c *ChunkDataPackPruner) ExecuteByBlockID(blockID flow.Identifier, batch storage.ReaderBatchWriter) (exception error)
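A minimal sketch of using the pruner type directly, assuming chunkDataPacks, results, blockID, and batch already exist in the caller's context; the batch must be a storage.ReaderBatchWriter obtained from the caller's storage layer, and none of these placeholders are provided by this package:

	p := pruner.NewChunkDataPackPruner(chunkDataPacks, results)

	// Prune the chunk data pack associated with one executed block inside the
	// given write batch. Per the signature, any returned error is an exception
	// rather than an expected benign condition.
	if err := p.ExecuteByBlockID(blockID, batch); err != nil {
		return fmt.Errorf("failed to prune chunk data pack for block %v: %w", blockID, err)
	}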
type LatestPrunable
type LatestPrunable struct {
	*latest.LatestSealedAndExecuted
	// contains filtered or unexported fields
}
LatestPrunable decides which blocks are prunable. We don't want to prune all the sealed blocks, but keep a certain number of them so that the data is still available for querying.
type PruningConfig
type PruningConfig struct {
	Threshold                 uint64        // The threshold is the number of blocks that we want to keep in the database.
	BatchSize                 uint          // The batch size is the number of blocks that we want to delete in one batch.
	SleepAfterEachBatchCommit time.Duration // The sleep time after each batch commit.
	SleepAfterEachIteration   time.Duration // The sleep time after each iteration.
}
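A minimal sketch of a custom configuration (all values are illustrative assumptions, not recommendations), assuming the package is imported as pruner:

	cfg := pruner.PruningConfig{
		Threshold:                 100_000,          // keep the most recent 100,000 blocks
		BatchSize:                 500,              // delete 500 blocks per batch
		SleepAfterEachBatchCommit: 5 * time.Second,  // throttle between batch commits
		SleepAfterEachIteration:   10 * time.Minute, // pause between full pruning passes
	}

Compare with DefaultConfig above, which sets SleepAfterEachIteration to math.MaxInt64.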