Documentation
¶
Overview ¶
Package warplib provides the core download engine for WarpDL. This file contains utilities for file operations, particularly for cross-device file moves which are common when the download temp directory and final destination are on different filesystems/drives.
Package warplib provides core structures and utilities for managing download items and their associated metadata in the WarpDL application. Item is a struct that represents a download; it contains all the necessary information about that download.
Index ¶
- Constants
- Variables
- func GetPath(directory, file string) string
- func IsLongPath(path string) bool
- func NewHTTPClientFromEnvironment() (*http.Client, error)
- func NewHTTPClientWithProxy(proxyURL string) (*http.Client, error)
- func NewHTTPClientWithProxyAndTimeout(proxyURL string, timeoutMs int) (*http.Client, error)
- func NewHasher(algo ChecksumAlgorithm) (hash.Hash, error)
- func NormalizePath(path string) string
- func ParseSpeedLimit(s string) (int64, error)
- func Place[t any](src []t, e t, index int) (dst []t)
- func RedirectPolicy(maxRedirects int) func(*http.Request, []*http.Request) error
- func SanitizeFilename(name string) string
- func SetConfigDir(dir string) error
- func SortInt64s(x []int64)
- func StripURLCredentials(rawURL string) string
- func SupportedSchemes(r *SchemeRouter) []string
- func ValidateDownloadDirectory(path string) error
- func ValidateItemParts(parts map[int64]*ItemPart) error
- func ValidateProtocol(p Protocol) error
- func WarpChmod(path string, perm os.FileMode) error
- func WarpCreate(path string) (*os.File, error)
- func WarpMkdir(path string, perm os.FileMode) error
- func WarpMkdirAll(path string, perm os.FileMode) error
- func WarpOpen(path string) (*os.File, error)
- func WarpOpenFile(path string, flag int, perm os.FileMode) (*os.File, error)
- func WarpRemove(path string) error
- func WarpRemoveAll(path string) error
- func WarpRename(src, dst string) error
- func WarpStat(path string) (os.FileInfo, error)
- type AddDownloadOpts
- type AsyncCallbackProxyReader
- type CallbackProxyReader
- type ChecksumAlgorithm
- type ChecksumConfig
- type ChecksumProgressHandlerFunc
- type ChecksumResult
- type ChecksumValidationHandlerFunc
- type CompileCompleteHandlerFunc
- type CompileProgressHandlerFunc
- type CompileSkippedHandlerFunc
- type CompileStartHandlerFunc
- type ContentLength
- type DownloadCapabilities
- type DownloadCompleteHandlerFunc
- type DownloadError
- type DownloadProgressHandlerFunc
- type DownloadStoppedHandlerFunc
- type Downloader
- func (d *Downloader) Close() error
- func (d *Downloader) GetContentLength() ContentLength
- func (d *Downloader) GetContentLengthAsInt() int64
- func (d *Downloader) GetContentLengthAsString() string
- func (d *Downloader) GetDownloadDirectory() string
- func (d *Downloader) GetFileName() string
- func (d *Downloader) GetHash() string
- func (d *Downloader) GetMaxConnections() int32
- func (d *Downloader) GetMaxParts() int32
- func (d *Downloader) GetSavePath() (svPath string)
- func (d *Downloader) IsStopped() bool
- func (d *Downloader) Log(s string, a ...any)
- func (d *Downloader) NumConnections() int32
- func (d *Downloader) Resume(parts map[int64]*ItemPart) (err error)
- func (d *Downloader) Start() (err error)
- func (d *Downloader) Stop()
- type DownloaderFactory
- type DownloaderOpts
- type DownloaderOptsFunc
- type ErrorCategory
- type ErrorHandlerFunc
- type ExpectedChecksum
- type Handlers
- type Header
- type Headers
- type Int64Slice
- type Item
- func (i *Item) CloseDownloader() error
- func (i *Item) GetAbsolutePath() (aPath string)
- func (i *Item) GetDownloaded() ContentLength
- func (i *Item) GetMaxConnections() (int32, error)
- func (i *Item) GetMaxParts() (int32, error)
- func (i *Item) GetPercentage() int64
- func (i *Item) GetSavePath() (svPath string)
- func (i *Item) GetTotalSize() ContentLength
- func (i *Item) HasParts() bool
- func (i *Item) IsDownloading() bool
- func (i *Item) IsStopped() bool
- func (i *Item) Resume() error
- func (i *Item) Start() error
- func (i *Item) StopDownload() error
- type ItemPart
- type ItemSlice
- type ItemsMap
- type Manager
- func (m *Manager) AddDownload(d *Downloader, opts *AddDownloadOpts) (err error)
- func (m *Manager) AddProtocolDownload(pd ProtocolDownloader, probe ProbeResult, cleanURL string, proto Protocol, ...) error
- func (m *Manager) Close() error
- func (m *Manager) Flush() error
- func (m *Manager) FlushOne(hash string) error
- func (m *Manager) GetCompletedItems() []*Item
- func (m *Manager) GetIncompleteItems() []*Item
- func (m *Manager) GetItem(hash string) (item *Item)
- func (m *Manager) GetItems() []*Item
- func (m *Manager) GetPublicItems() []*Item
- func (m *Manager) GetQueue() *QueueManager
- func (m *Manager) GetScheduledItems() []*Item
- func (m *Manager) ResumeDownload(client *http.Client, hash string, opts *ResumeDownloadOpts) (item *Item, err error)
- func (m *Manager) SetMaxConcurrentDownloads(maxConcurrent int, onStartDownload func(hash string))
- func (m *Manager) SetSchemeRouter(r *SchemeRouter)
- func (m *Manager) UpdateItem(item *Item)
- type ManagerData
- type Part
- type Priority
- type ProbeResult
- type Protocol
- type ProtocolDownloader
- type ProxyConfig
- type QueueManager
- func (qm *QueueManager) ActiveCount() int
- func (qm *QueueManager) Add(hash string, priority Priority)
- func (qm *QueueManager) GetActiveHashes() []string
- func (qm *QueueManager) GetState() QueueState
- func (qm *QueueManager) GetWaitingItems() []QueuedItemState
- func (qm *QueueManager) IsPaused() bool
- func (qm *QueueManager) LoadState(state QueueState)
- func (qm *QueueManager) MaxConcurrent() int
- func (qm *QueueManager) Move(hash string, position int) error
- func (qm *QueueManager) OnComplete(hash string)
- func (qm *QueueManager) Pause()
- func (qm *QueueManager) Resume()
- func (qm *QueueManager) WaitingCount() int
- type QueueState
- type QueuedItemState
- type RateLimitedReadCloser
- type RateLimitedReader
- type RespawnPartHandlerFunc
- type ResumeDownloadOpts
- type ResumeProgressHandlerFunc
- type RetryConfig
- type RetryExhaustedHandlerFunc
- type RetryHandlerFunc
- type RetryState
- type ScheduleState
- type SchemeRouter
- type SizeOption
- type SpawnPartHandlerFunc
- type VMap
- func (vm *VMap[kT, vT]) Delete(key kT)
- func (vm *VMap[kT, vT]) Dump() (keys []kT, vals []vT)
- func (vm *VMap[kT, vT]) Get(key kT) (val vT)
- func (vm *VMap[kT, vT]) GetUnsafe(key kT) (val vT)
- func (vm *VMap[kT, vT]) Make()
- func (vm *VMap[kT, vT]) Range(f func(key kT, val vT) bool)
- func (vm *VMap[kT, vT]) Set(key kT, val vT)
- type WorkStealHandlerFunc
Constants ¶
const ( LongPathThreshold = 240 // Threshold before applying prefix LongPathPrefix = `\\?\` // Extended-length path prefix UNCPrefix = `\\` // UNC path prefix UNCLongPathPrefix = `\\?\UNC\` // Extended UNC prefix )
Constants for Windows long path support
const ( // B represents one byte. B int64 = 1 // KB represents one kilobyte (1024 bytes). KB = 1024 * B // MB represents one megabyte (1024 kilobytes). MB = 1024 * KB // GB represents one gigabyte (1024 megabytes). GB = 1024 * MB // TB represents one terabyte (1024 gigabytes). TB = 1024 * GB )
Size unit constants for byte conversions.
const ( DEF_MAX_CONNS = 1 DEF_CHUNK_SIZE = 32 * KB DEF_USER_AGENT = "Warp/1.0" MIN_PART_SIZE = 512 * KB // DEF_MAX_FILE_SIZE is the default maximum file size limit (100GB). // Set to -1 to disable the limit. DEF_MAX_FILE_SIZE = 100 * GB )
const ( // DefaultFileMode is the permission mode for created files. // Owner can read/write, group and others can only read. DefaultFileMode = 0644 // DefaultDirMode is the permission mode for created directories. // Owner has full access, group and others can read and traverse. DefaultDirMode = 0755 )
const ( DEF_MAX_RETRIES = 5 DEF_BASE_DELAY = 500 * time.Millisecond DEF_MAX_DELAY = 30 * time.Second DEF_JITTER_FACTOR = 0.5 DEF_BACKOFF_FACTOR = 2.0 )
Default retry configuration values
const ( // WORK_STEAL_SPEED_THRESHOLD is the minimum download speed (bytes/sec) // a part must achieve to be considered "fast" enough to steal work. // Parts completing faster than this may steal work from slower adjacent parts. WORK_STEAL_SPEED_THRESHOLD = 10 * MB // >10MB/s // WORK_STEAL_MIN_REMAINING is the minimum remaining bytes in an adjacent // part to be eligible for work stealing. This prevents excessive overhead // from stealing very small work amounts. WORK_STEAL_MIN_REMAINING = 5 * MB // >5MB )
Work stealing constants define thresholds for dynamic part merging.
const ConfigDirEnv = "WARPDL_CONFIG_DIR"
ConfigDirEnv is the environment variable name used to override the default configuration directory.
const ( // DefaultMaxRedirects is the maximum number of redirect hops allowed. // Matches Go's default http.Client behavior. DefaultMaxRedirects = 10 )
const MAIN_HASH = "main"
MAIN_HASH is the identifier used for the main download hash.
const (
// Header keys
USER_AGENT_KEY = "User-Agent"
)
Variables ¶
var ( // ErrFileNameNotFound is returned when a download is attempted without specifying a file name. ErrFileNameNotFound = errors.New("file name can't be empty") // ErrContentLengthInvalid is returned when the content length header contains an invalid value. ErrContentLengthInvalid = errors.New("content length is invalid") // ErrContentLengthNotImplemented is returned when attempting to download a file with unknown size. ErrContentLengthNotImplemented = errors.New("unknown size downloads not implemented yet") // ErrNotSupported is returned when the file type or download method is not supported. ErrNotSupported = errors.New("file you're trying to download is not supported yet") // ErrItemDownloaderNotFound is returned when a downloader instance cannot be found for an item. ErrItemDownloaderNotFound = errors.New("item downloader not found") // ErrDownloadNotFound is returned when the requested download item does not exist in the manager. ErrDownloadNotFound = errors.New("item you are trying to download is not found") // ErrDownloadNotResumable is returned when attempting to resume a download that does not support resumption. ErrDownloadNotResumable = errors.New("item you are trying to download is not resumable") // ErrFlushHashNotFound is returned when attempting to flush a download item that does not exist. ErrFlushHashNotFound = errors.New("item you are trying to flush is not found") // ErrFlushItemDownloading is returned when attempting to flush a download item that is currently active. ErrFlushItemDownloading = errors.New("item you are trying to flush is currently downloading") // ErrDownloadDataMissing is returned when download data files are missing or corrupted. // User must run 'warpdl flush <hash>' to remove the corrupt entry. ErrDownloadDataMissing = errors.New("download data is missing or corrupted, run 'warpdl flush <hash>' to remove") // ErrMaxRetriesExceeded is returned when all retry attempts have been exhausted. 
ErrMaxRetriesExceeded = errors.New("maximum retry attempts exceeded") // ErrPrematureEOF is returned when EOF occurs before expected bytes are received. ErrPrematureEOF = errors.New("premature EOF: connection closed before download complete") // ErrFileExists is returned when attempting to download to a path where a file already exists. ErrFileExists = errors.New("file already exists at destination path") // ErrCrossDeviceMove is returned when a file move operation fails due to // source and destination being on different filesystems/drives. ErrCrossDeviceMove = errors.New("cross-device move not supported by rename, use copy+delete") // ErrInsufficientDiskSpace is returned when there is not enough disk space available // to download the file. ErrInsufficientDiskSpace = errors.New("insufficient disk space") // ErrFileTooLarge is returned when the file size exceeds the maximum allowed file size. ErrFileTooLarge = errors.New("file size exceeds maximum allowed limit") // ErrItemPartNil is returned when an ItemPart in the parts map is nil. ErrItemPartNil = errors.New("item part is nil") // ErrItemPartInvalidRange is returned when ItemPart has FinalOffset <= start offset. ErrItemPartInvalidRange = errors.New("item part has invalid offset range") // ErrPartDesync indicates memPart and Parts maps are out of sync. ErrPartDesync = errors.New("memPart/Parts desync: hash exists but offset not in parts") // ErrChecksumMismatch is returned when the downloaded file's checksum // does not match the expected checksum from the server. ErrChecksumMismatch = errors.New("checksum mismatch") // ErrChecksumUnavailable is returned when checksum validation is requested // but no checksum is provided by the server. ErrChecksumUnavailable = errors.New("no checksum available from server") // ErrChecksumAlgorithmUnsupported is returned when the server provides // a checksum algorithm that is not supported. 
ErrChecksumAlgorithmUnsupported = errors.New("unsupported checksum algorithm") // ErrDirectoryNotFound is returned when the specified download directory does not exist. ErrDirectoryNotFound = errors.New("download directory does not exist") // ErrNotADirectory is returned when the specified path is not a directory. ErrNotADirectory = errors.New("path is not a directory") // ErrDirectoryNotWritable is returned when the download directory is not writable. ErrDirectoryNotWritable = errors.New("download directory is not writable") // ErrQueueHashNotFound is returned when attempting to move a download that is not in the waiting queue. ErrQueueHashNotFound = errors.New("download not found in waiting queue") // ErrCannotMoveActive is returned when attempting to move an active download in the queue. ErrCannotMoveActive = errors.New("cannot move active download, only waiting downloads can be moved") )
var ( // ConfigDir is the absolute path to the warp configuration directory. ConfigDir string // DlDataDir is the absolute path to the download data directory where segment files are stored. DlDataDir string )
var ( ErrEmptyProxyURL = errors.New("proxy URL cannot be empty") ErrUnsupportedScheme = errors.New("unsupported proxy scheme") ErrInvalidProxyURL = errors.New("invalid proxy URL") )
var ( // ErrTooManyRedirects is returned when a redirect chain exceeds the configured max hops. ErrTooManyRedirects = errors.New("redirect loop detected") // ErrCrossProtocolRedirect is returned when a redirect crosses from HTTP/HTTPS // to a non-HTTP protocol (e.g., FTP). ErrCrossProtocolRedirect = errors.New("cross-protocol redirect not supported") )
var ( // SizeOptionBy is a SizeOption configured for bytes. SizeOptionBy = SizeOption{B, "Bytes"} // SizeOptionKB is a SizeOption configured for kilobytes. SizeOptionKB = SizeOption{KB, "KB"} // SizeOptionMB is a SizeOption configured for megabytes. SizeOptionMB = SizeOption{MB, "MB"} // SizeOptionGB is a SizeOption configured for gigabytes. SizeOptionGB = SizeOption{GB, "GB"} // SizeOptionTB is a SizeOption configured for terabytes. SizeOptionTB = SizeOption{TB, "TB"} )
var ErrProbeRequired = fmt.Errorf("Probe must be called before Download or Resume")
ErrProbeRequired is returned when Download or Resume is called without first calling Probe.
var ErrUnsupportedDownloadScheme = fmt.Errorf("unsupported scheme")
ErrUnsupportedDownloadScheme is returned when a URL has an unregistered download scheme. The full error message includes which schemes are supported.
var KnownHostsPath = filepath.Join(ConfigDir, "known_hosts")
KnownHostsPath is the path to WarpDL's TOFU known_hosts file. Isolated from system ~/.ssh/known_hosts to avoid polluting system SSH state. The path is updated when SetConfigDir is called (via ConfigDir).
Functions ¶
func IsLongPath ¶ added in v1.3.18
IsLongPath returns true if path length exceeds threshold
func NewHTTPClientFromEnvironment ¶ added in v1.3.11
NewHTTPClientFromEnvironment creates an HTTP client using proxy settings from environment variables. It checks HTTP_PROXY, http_proxy, HTTPS_PROXY, https_proxy, and ALL_PROXY.
func NewHTTPClientWithProxy ¶ added in v1.3.11
NewHTTPClientWithProxy creates an HTTP client configured to use the specified proxy. If proxyURL is empty, returns a default HTTP client without proxy. The returned client always has CheckRedirect set to enforce redirect policy.
func NewHTTPClientWithProxyAndTimeout ¶ added in v1.3.11
NewHTTPClientWithProxyAndTimeout creates an HTTP client with proxy and custom timeout. Timeout is specified in milliseconds.
func NewHasher ¶ added in v1.3.22
func NewHasher(algo ChecksumAlgorithm) (hash.Hash, error)
NewHasher creates a hash.Hash for the specified algorithm
func NormalizePath ¶ added in v1.3.18
NormalizePath applies Windows long path prefix if needed. On non-Windows platforms, it still normalizes slashes for Windows-style paths but does not add the \\?\ prefix.
func ParseSpeedLimit ¶ added in v1.3.24
ParseSpeedLimit parses a human-readable speed limit string. Returns bytes per second. 0 means unlimited.
Supported formats:
- Plain bytes: "100", "1024"
- With B suffix: "100B", "1024B"
- Kilobytes: "512KB", "512kb"
- Megabytes: "1MB", "1.5mb"
- Gigabytes: "1GB", "2.5gb"
Returns an error for invalid formats.
func Place ¶
Place inserts element e at the specified index in src and returns a new slice. The original slice is not modified.
func RedirectPolicy ¶ added in v1.4.1
RedirectPolicy returns a CheckRedirect function that: 1. Enforces a maximum number of redirect hops 2. Rejects cross-protocol redirects (HTTP/HTTPS -> non-HTTP) 3. Strips sensitive/custom headers on cross-origin redirects
For cross-origin header stripping, Go 1.24+ already strips the Authorization header automatically (CVE-2024-45336 fix). This function additionally strips custom user headers while preserving safe standard headers.
func SanitizeFilename ¶ added in v1.3.13
SanitizeFilename removes or replaces characters invalid on Windows/Unix filesystems. It preserves the file extension and handles URL-encoded characters.
func SetConfigDir ¶ added in v1.1.0
SetConfigDir sets the configuration directory to the specified path. It creates the directory and its subdirectories if they do not exist.
func SortInt64s ¶
func SortInt64s(x []int64)
SortInt64s sorts a slice of int64 values in increasing order.
func StripURLCredentials ¶ added in v1.4.1
StripURLCredentials removes userinfo (username:password) from a URL string. Returns the cleaned URL string. If parsing fails (should not happen for already-validated URLs), returns the original URL unchanged. Exported because internal/api calls it cross-package in Plan 03-03.
func SupportedSchemes ¶ added in v1.4.1
func SupportedSchemes(r *SchemeRouter) []string
SupportedSchemes returns a sorted list of all registered schemes.
func ValidateDownloadDirectory ¶ added in v1.3.22
ValidateDownloadDirectory checks if the given path is a valid, writable directory. Returns nil if valid, or a specific error: - ErrDirectoryNotFound if path doesn't exist - ErrNotADirectory if path is a file - ErrDirectoryNotWritable if directory is not writable
func ValidateItemParts ¶ added in v1.3.22
ValidateItemParts validates a map of ItemParts for nil values and invalid ranges.
func ValidateProtocol ¶ added in v1.4.1
ValidateProtocol returns an error if p is not a known Protocol value. Manager.InitManager calls this after decoding ManagerData to detect files from a newer warpdl version with protocol values not yet supported.
func WarpCreate ¶ added in v1.3.18
WarpCreate creates a file with secure default permissions (0644). This replaces os.Create which uses 0666 by default.
func WarpMkdir ¶ added in v1.3.18
WarpMkdir creates a single directory (pass-through on non-Windows)
func WarpMkdirAll ¶ added in v1.3.18
WarpMkdirAll creates a directory path (pass-through on non-Windows)
func WarpOpenFile ¶ added in v1.3.18
WarpOpenFile opens a file with flags and permissions (pass-through on non-Windows)
func WarpRemove ¶ added in v1.3.18
WarpRemove removes a file or directory (pass-through on non-Windows)
func WarpRemoveAll ¶ added in v1.3.18
WarpRemoveAll removes a path and any children (pass-through on non-Windows)
func WarpRename ¶ added in v1.3.18
WarpRename renames a file or directory (pass-through on non-Windows)
Types ¶
type AddDownloadOpts ¶
type AddDownloadOpts struct {
IsHidden bool
IsChildren bool
ChildHash string
AbsoluteLocation string
Priority Priority
SkipQueue bool
// SSHKeyPath is the SSH key path to persist in Item for SFTP resume.
// Empty means default key paths are tried on resume.
SSHKeyPath string
}
AddDownloadOpts contains optional parameters for AddDownload.
type AsyncCallbackProxyReader ¶ added in v1.1.0
type AsyncCallbackProxyReader struct {
// contains filtered or unexported fields
}
AsyncCallbackProxyReader wraps an io.Reader and invokes a callback function asynchronously in a goroutine after each read operation with the number of bytes read.
func NewAsyncCallbackProxyReader ¶ added in v1.1.0
func NewAsyncCallbackProxyReader(reader io.Reader, callback func(n int), logger *log.Logger) *AsyncCallbackProxyReader
NewAsyncCallbackProxyReader creates a new AsyncCallbackProxyReader that wraps the given reader and calls the callback function asynchronously in a goroutine after each read with the byte count.
func (*AsyncCallbackProxyReader) Read ¶ added in v1.1.0
func (p *AsyncCallbackProxyReader) Read(b []byte) (n int, err error)
Read reads data from the underlying reader into b and invokes the callback asynchronously in a goroutine with the number of bytes read.
func (*AsyncCallbackProxyReader) Wait ¶ added in v1.3.19
func (p *AsyncCallbackProxyReader) Wait()
Wait blocks until all async callback goroutines have completed.
type CallbackProxyReader ¶ added in v1.1.0
type CallbackProxyReader struct {
// contains filtered or unexported fields
}
CallbackProxyReader wraps an io.Reader and invokes a callback function synchronously after each read operation with the number of bytes read.
func NewCallbackProxyReader ¶ added in v1.1.0
func NewCallbackProxyReader(reader io.Reader, callback func(n int)) *CallbackProxyReader
NewCallbackProxyReader creates a new CallbackProxyReader that wraps the given reader and calls the callback function synchronously after each read with the byte count.
type ChecksumAlgorithm ¶ added in v1.3.22
type ChecksumAlgorithm string
ChecksumAlgorithm represents supported hash algorithms
const ( ChecksumMD5 ChecksumAlgorithm = "md5" ChecksumSHA256 ChecksumAlgorithm = "sha256" ChecksumSHA512 ChecksumAlgorithm = "sha512" )
func SelectBestAlgorithm ¶ added in v1.3.22
func SelectBestAlgorithm(checksums []ExpectedChecksum) ChecksumAlgorithm
SelectBestAlgorithm returns the strongest algorithm from the list Priority: SHA-512 > SHA-256 > MD5
type ChecksumConfig ¶ added in v1.3.22
ChecksumConfig configures checksum validation behavior
func DefaultChecksumConfig ¶ added in v1.3.22
func DefaultChecksumConfig() ChecksumConfig
DefaultChecksumConfig returns the default checksum configuration
type ChecksumProgressHandlerFunc ¶ added in v1.3.22
type ChecksumProgressHandlerFunc func(bytesHashed int64)
ChecksumProgressHandlerFunc is called periodically during hash computation. bytesHashed is the total number of bytes hashed so far.
type ChecksumResult ¶ added in v1.3.22
type ChecksumResult struct {
Algorithm ChecksumAlgorithm
Expected []byte
Actual []byte
Match bool
}
ChecksumResult contains the result of checksum validation
type ChecksumValidationHandlerFunc ¶ added in v1.3.22
type ChecksumValidationHandlerFunc func(result ChecksumResult)
ChecksumValidationHandlerFunc is called when checksum validation completes. result contains the validation outcome including expected vs actual checksums.
type CompileCompleteHandlerFunc ¶
CompileCompleteHandlerFunc is a function that handles the completion of a compile. It takes a hash string and the total number of bytes read as arguments.
type CompileProgressHandlerFunc ¶
CompileProgressHandlerFunc is a function that handles the progress of a compile. It takes a hash string and the number of bytes read as arguments.
type CompileSkippedHandlerFunc ¶
CompileSkippedHandlerFunc is a function that handles the skipping of a compile. It takes a hash string and the total number of bytes read as arguments.
type CompileStartHandlerFunc ¶
type CompileStartHandlerFunc func(hash string)
CompileStartHandlerFunc is a function that handles the start of a compile. It takes a hash string as an argument.
type ContentLength ¶
type ContentLength int64
ContentLength represents the size of a download item. It is used to store the total size of the download item and the amount of data that has been downloaded.
func (ContentLength) Format ¶
func (c ContentLength) Format(sep string, sizeOpts ...SizeOption) (clen string)
Format returns the formatted string representation of the ContentLength.
func (*ContentLength) IsUnknown ¶
func (c *ContentLength) IsUnknown() (unknown bool)
IsUnknown returns whether the ContentLength is unknown.
func (ContentLength) String ¶
func (c ContentLength) String() (clen string)
String returns the string representation of the ContentLength.
type DownloadCapabilities ¶ added in v1.4.1
type DownloadCapabilities struct {
// SupportsParallel indicates the protocol can download multiple segments concurrently.
SupportsParallel bool
// SupportsResume indicates the protocol can resume a partially downloaded file.
SupportsResume bool
}
DownloadCapabilities describes what optional features a protocol downloader supports. Zero value is safe: no capabilities assumed for unknown protocols.
type DownloadCompleteHandlerFunc ¶
DownloadCompleteHandlerFunc is a function that handles the completion of a download. It takes a hash string and the total number of bytes read as arguments.
type DownloadError ¶ added in v1.4.1
type DownloadError struct {
// Protocol identifies the protocol that produced the error (e.g., "http", "ftp").
Protocol string
// Op is the operation that failed (e.g., "probe", "download", "connect").
Op string
// Cause is the underlying error.
Cause error
// contains filtered or unexported fields
}
DownloadError is a structured error from a protocol downloader. Use errors.As to extract and inspect download errors.
func NewPermanentError ¶ added in v1.4.1
func NewPermanentError(protocol, op string, cause error) *DownloadError
NewPermanentError creates a DownloadError that should not be retried.
func NewTransientError ¶ added in v1.4.1
func NewTransientError(protocol, op string, cause error) *DownloadError
NewTransientError creates a DownloadError that may be retried.
func (*DownloadError) Error ¶ added in v1.4.1
func (e *DownloadError) Error() string
Error implements the error interface. Format: "protocol op: cause"
func (*DownloadError) IsTransient ¶ added in v1.4.1
func (e *DownloadError) IsTransient() bool
IsTransient returns true if this error is transient and may be retried.
func (*DownloadError) Unwrap ¶ added in v1.4.1
func (e *DownloadError) Unwrap() error
Unwrap returns the underlying cause, enabling errors.Is/As chaining.
type DownloadProgressHandlerFunc ¶
DownloadProgressHandlerFunc is a function that handles the progress of a download. It takes a hash string and the number of bytes read as arguments.
type DownloadStoppedHandlerFunc ¶ added in v1.1.0
type DownloadStoppedHandlerFunc func()
DownloadStoppedHandlerFunc is a function that handles the stopping of a download.
type Downloader ¶
type Downloader struct {
// contains filtered or unexported fields
}
Downloader is a struct that manages the download process of a single file. It includes information such as the download URL, file name, download location, download progress, and download handlers.
func NewDownloader ¶
func NewDownloader(client *http.Client, url string, opts *DownloaderOpts, optFuncs ...DownloaderOptsFunc) (d *Downloader, err error)
NewDownloader creates a new downloader with provided arguments. Use downloader.Start() to download the file.
func (*Downloader) Close ¶ added in v1.3.18
func (d *Downloader) Close() error
Close releases all resources held by the Downloader. This includes the log file writer and any open files. It should be called when the downloader is no longer needed, especially if Start() or Resume() was never called.
func (*Downloader) GetContentLength ¶
func (d *Downloader) GetContentLength() ContentLength
GetContentLength returns the content length (size of the downloading item).
func (*Downloader) GetContentLengthAsInt ¶
func (d *Downloader) GetContentLengthAsInt() int64
GetContentLengthAsInt returns the content length as int64.
func (*Downloader) GetContentLengthAsString ¶
func (d *Downloader) GetContentLengthAsString() string
GetContentLengthAsString returns the content length as a string.
func (*Downloader) GetDownloadDirectory ¶
func (d *Downloader) GetDownloadDirectory() string
GetDownloadDirectory returns the download directory.
func (*Downloader) GetFileName ¶
func (d *Downloader) GetFileName() string
GetFileName returns the file name of this download.
func (*Downloader) GetHash ¶ added in v1.1.0
func (d *Downloader) GetHash() string
GetHash returns the unique identifier hash for this download.
func (*Downloader) GetMaxConnections ¶ added in v1.1.0
func (d *Downloader) GetMaxConnections() int32
GetMaxConnections returns the maximum number of possible connections.
func (*Downloader) GetMaxParts ¶ added in v1.1.0
func (d *Downloader) GetMaxParts() int32
GetMaxParts returns the maximum number of possible parts.
func (*Downloader) GetSavePath ¶
func (d *Downloader) GetSavePath() (svPath string)
GetSavePath returns the final location of the file being downloaded.
func (*Downloader) IsStopped ¶ added in v1.3.31
func (d *Downloader) IsStopped() bool
IsStopped returns true if the download was intentionally stopped.
func (*Downloader) Log ¶
func (d *Downloader) Log(s string, a ...any)
Log adds the provided string to download's log file. It can't be used once download is complete.
func (*Downloader) NumConnections ¶
func (d *Downloader) NumConnections() int32
NumConnections returns the number of connections running currently.
func (*Downloader) Resume ¶
func (d *Downloader) Resume(parts map[int64]*ItemPart) (err error)
Resume resumes the download of the file with provided parts. It blocks the current goroutine until the download is complete.
func (*Downloader) Start ¶
func (d *Downloader) Start() (err error)
Start downloads the file and blocks current goroutine until the downloading is complete.
func (*Downloader) Stop ¶ added in v1.1.0
func (d *Downloader) Stop()
Stop stops the download process. Note: This only signals stop and cancels context. It does NOT wait for goroutines to finish because Stop() may be called from within a callback (e.g., progress handler) running inside a download goroutine. Use Close() for full cleanup after Start()/Resume() returns.
type DownloaderFactory ¶ added in v1.4.1
type DownloaderFactory func(rawURL string, opts *DownloaderOpts) (ProtocolDownloader, error)
DownloaderFactory creates a ProtocolDownloader for a given URL. The factory is responsible for all protocol-specific initialization.
type DownloaderOpts ¶
type DownloaderOpts struct {
ForceParts bool
NumBaseParts int32
// FileName is used to set name of to-be-downloaded
// file explicitly.
//
// Note: Warplib sets the file name sent by server
// if file name not set explicitly.
FileName string
// DownloadDirectory sets the download directory for
// file to be downloaded.
DownloadDirectory string
// MaxConnections sets the maximum number of parallel
// network connections to be used for downloading the file.
MaxConnections int32
// MaxSegments sets the maximum number of file segments
// to be created for downloading the file.
MaxSegments int32
Headers Headers
Handlers *Handlers
SkipSetup bool
// RetryConfig configures retry behavior for transient errors.
// If nil, DefaultRetryConfig() is used.
RetryConfig *RetryConfig
// Overwrite allows replacing an existing file at the destination path.
// If false and the file exists, the download will fail with ErrFileExists.
Overwrite bool
// ProxyURL specifies the proxy server URL to use for the download.
// Supported schemes: http, https, socks5.
// Example: "http://proxy.example.com:8080" or "socks5://localhost:1080"
ProxyURL string
// RequestTimeout specifies the timeout for individual HTTP requests.
// If zero, no per-request timeout is applied.
RequestTimeout time.Duration
// MaxFileSize specifies the maximum allowed file size for downloads.
// If zero, uses DEF_MAX_FILE_SIZE (100GB).
// If negative (-1), no limit is enforced.
MaxFileSize int64
// ChecksumConfig configures checksum validation behavior.
// If nil, uses DefaultChecksumConfig().
// Set Enabled=false to disable validation entirely.
ChecksumConfig *ChecksumConfig
// SpeedLimit specifies the maximum download speed in bytes per second.
// If zero or negative, no limit is applied.
// The limit is distributed equally among active download parts.
SpeedLimit int64
// DisableWorkStealing disables dynamic work stealing where fast parts
// take over remaining work from slow adjacent parts.
// When false (default), work stealing is enabled.
DisableWorkStealing bool
// SSHKeyPath specifies a custom SSH private key file path for SFTP downloads.
// If empty, default paths (~/.ssh/id_ed25519, ~/.ssh/id_rsa) are tried.
// Not used for HTTP or FTP protocols.
SSHKeyPath string
}
DownloaderOpts holds the optional configuration fields for a Downloader.
type DownloaderOptsFunc ¶ added in v1.3.10
type DownloaderOptsFunc func(*Downloader)
DownloaderOptsFunc is a functional option for configuring a Downloader.
func WithOverwrite ¶ added in v1.3.10
func WithOverwrite(overwrite bool) DownloaderOptsFunc
WithOverwrite sets whether to overwrite existing files at the destination path.
type ErrorCategory ¶ added in v1.3.9
type ErrorCategory int
ErrorCategory classifies errors for retry decisions
const ( ErrCategoryFatal ErrorCategory = iota // Non-retryable errors (404, canceled) ErrCategoryRetryable // Transient errors (EOF, timeout, reset) ErrCategoryThrottled // Rate limiting errors (429, 503) )
func ClassifyError ¶ added in v1.3.9
func ClassifyError(err error) ErrorCategory
ClassifyError determines how an error should be handled for retry purposes
type ErrorHandlerFunc ¶
ErrorHandlerFunc is a function that handles errors. It takes a hash string and an error as arguments.
type ExpectedChecksum ¶ added in v1.3.22
type ExpectedChecksum struct {
Algorithm ChecksumAlgorithm
Value []byte // raw bytes (decoded from base64)
}
ExpectedChecksum contains the expected hash value and algorithm
func ExtractChecksums ¶ added in v1.3.22
func ExtractChecksums(h http.Header) []ExpectedChecksum
ExtractChecksums extracts checksums from HTTP headers. It checks both the Digest and Content-MD5 headers.
func ParseContentMD5Header ¶ added in v1.3.22
func ParseContentMD5Header(header string) (*ExpectedChecksum, error)
ParseContentMD5Header parses the RFC 2616 Content-MD5 header. Format: Base64-encoded MD5.
func ParseDigestHeader ¶ added in v1.3.22
func ParseDigestHeader(header string) ([]ExpectedChecksum, error)
ParseDigestHeader parses the RFC 3230 Digest header. Format: "sha-256=BASE64VALUE" or "sha-512=BASE64,sha-256=BASE64".
type Handlers ¶
type Handlers struct {
SpawnPartHandler SpawnPartHandlerFunc
RespawnPartHandler RespawnPartHandlerFunc
DownloadProgressHandler DownloadProgressHandlerFunc
ResumeProgressHandler ResumeProgressHandlerFunc
ErrorHandler ErrorHandlerFunc
DownloadCompleteHandler DownloadCompleteHandlerFunc
CompileStartHandler CompileStartHandlerFunc
CompileProgressHandler CompileProgressHandlerFunc
CompileSkippedHandler CompileSkippedHandlerFunc
CompileCompleteHandler CompileCompleteHandlerFunc
DownloadStoppedHandler DownloadStoppedHandlerFunc
RetryHandler RetryHandlerFunc
RetryExhaustedHandler RetryExhaustedHandlerFunc
ChecksumValidationHandler ChecksumValidationHandlerFunc
ChecksumProgressHandler ChecksumProgressHandlerFunc
// WorkStealHandler is called when work stealing occurs between parts.
WorkStealHandler WorkStealHandlerFunc
}
Handlers holds callback functions for various download lifecycle events. Each handler is invoked at the corresponding stage of the download process.
type Header ¶
Header represents a key-value pair.
func (*Header) RedactedValue ¶ added in v1.4.1
RedactedValue returns the header value for safe logging. Cookie and Set-Cookie values are replaced with [REDACTED].
type Headers ¶
type Headers []Header
Headers represents a list of headers.
func StripUnsafeFromHeaders ¶ added in v1.4.1
StripUnsafeFromHeaders removes non-safe headers from a Headers slice. This is called when a cross-origin redirect is detected and d.headers needs to be cleaned so that subsequent requests (prepareDownloader, segment downloads) to the new origin don't leak credentials. Returns a new Headers slice containing only safe headers.
func (Headers) Get ¶
Get returns the index of the header with the given key. If the header is not found, the second return value is false.
func (*Headers) InitOrUpdate ¶
InitOrUpdate initializes or updates the header with the given key and value. If the header is already present, it is not updated.
func (Headers) LogSafe ¶ added in v1.4.1
LogSafe returns a slice of "Key: Value" strings with sensitive headers redacted. This is safe for debug logging — Cookie and Set-Cookie values are replaced with [REDACTED].
type Int64Slice ¶
type Int64Slice []int64
Int64Slice attaches the methods of sort.Interface to []int64, sorting in increasing order.
func (Int64Slice) Len ¶
func (x Int64Slice) Len() int
Len returns the number of elements in the slice.
func (Int64Slice) Less ¶
func (x Int64Slice) Less(i, j int) bool
Less reports whether the element at index i should sort before the element at index j.
func (Int64Slice) Swap ¶
func (x Int64Slice) Swap(i, j int)
Swap exchanges the elements at indices i and j.
type Item ¶
type Item struct {
// Hash is the unique identifier of the download item.
Hash string `json:"hash"`
// Name is the name of the download item.
Name string `json:"name"`
// Url is the download url of the download item.
Url string `json:"url"`
// Headers used for the download
Headers Headers `json:"headers"`
// DateAdded is the time when the download item was added.
DateAdded time.Time `json:"date_added"`
// TotalSize is the total size of the download item.
TotalSize ContentLength `json:"total_size"`
// Downloaded is the total size of the download item that has been downloaded.
Downloaded ContentLength `json:"downloaded"`
// DownloadLocation is the location where the download item is saved.
DownloadLocation string `json:"download_location"`
// AbsoluteLocation is the absolute path where the download item is saved.
AbsoluteLocation string `json:"absolute_location"`
// ChildHash is a hash representing the child item, if applicable.
ChildHash string `json:"child_hash"`
// Hidden is a flag indicating whether the item is hidden.
Hidden bool `json:"hidden"`
// Children is a flag indicating whether this item is a child of any other download item.
Children bool `json:"children"`
// Parts is a map of download parts, where each part is represented by an ItemPart.
Parts map[int64]*ItemPart `json:"parts"`
// Resumable is a flag indicating whether the download can be resumed.
Resumable bool `json:"resumable"`
// Protocol identifies which download protocol to use when resuming this item.
// Zero value is ProtoHTTP (0), ensuring backward compatibility with GOB files
// encoded before Phase 2 added this field — GOB zero-initializes missing fields.
// INVARIANT: ProtoHTTP must remain iota=0 or all pre-Phase-2 files will break.
Protocol Protocol `json:"protocol"`
// SSHKeyPath is the path to the SSH private key used for SFTP downloads.
// Persisted so resume uses the same key as the initial download.
// Empty means default key paths (~/.ssh/id_ed25519, ~/.ssh/id_rsa) are tried.
// GOB backward-compatible: missing field decodes as empty string (zero value).
SSHKeyPath string `json:"ssh_key_path,omitempty"`
// ScheduledAt is the absolute trigger time for one-shot scheduled downloads.
// Zero value means not scheduled. GOB backward-compatible (zero value safe).
ScheduledAt time.Time `json:"scheduled_at,omitempty"`
// CronExpr is the cron expression for recurring downloads (e.g., "0 2 * * *").
// Empty string means one-shot (not recurring). GOB backward-compatible.
CronExpr string `json:"cron_expr,omitempty"`
// ScheduleState tracks the lifecycle of a scheduled download.
// Zero value (ScheduleStateNone) means not scheduled. GOB backward-compatible.
ScheduleState ScheduleState `json:"schedule_state,omitempty"`
// CookieSourcePath is the path to the cookie file or "auto" for auto-detection.
// Persisted so cookies can be re-imported on resume/retry/recurring (FR-024).
// Cookie VALUES are never persisted (FR-023). Empty means no cookies.
// GOB backward-compatible: missing field decodes as empty string (zero value).
CookieSourcePath string `json:"cookie_source_path,omitempty"`
// contains filtered or unexported fields
}
Item represents a download item with its associated metadata and state. It includes information such as the item's unique identifier, name, URL, headers, size, download progress, and storage location.
func (*Item) CloseDownloader ¶ added in v1.3.18
CloseDownloader closes the downloader and releases all file handles. Use this when a download is aborted before Start()/Resume() completes.
func (*Item) GetAbsolutePath ¶
GetAbsolutePath returns the absolute path for the download item.
func (*Item) GetDownloaded ¶ added in v1.4.1
func (i *Item) GetDownloaded() ContentLength
GetDownloaded returns the downloaded byte count with proper synchronization. Safe to call from any goroutine. Returns the raw field value when mu is nil (e.g., items constructed in tests without a Manager).
func (*Item) GetMaxConnections ¶ added in v1.1.0
GetMaxConnections returns the maximum number of connections for the download item.
func (*Item) GetMaxParts ¶ added in v1.1.0
GetMaxParts returns the maximum number of parts for the download item.
func (*Item) GetPercentage ¶
GetPercentage returns the download progress as a percentage. Uses mu for thread-safe access to Downloaded and TotalSize.
func (*Item) GetSavePath ¶
GetSavePath returns the save path for the download item.
func (*Item) GetTotalSize ¶ added in v1.4.1
func (i *Item) GetTotalSize() ContentLength
GetTotalSize returns the total size with proper synchronization. Safe to call from any goroutine. Returns the raw field value when mu is nil.
func (*Item) HasParts ¶ added in v1.4.3
HasParts reports whether the item has any persisted part state.
func (*Item) IsDownloading ¶ added in v1.2.0
IsDownloading returns true if the item is currently being downloaded.
func (*Item) IsStopped ¶ added in v1.3.31
IsStopped returns true if the download was intentionally stopped.
func (*Item) Resume ¶
Resume resumes the download of the item. Fixed Race 2: Takes snapshot of Parts under Item lock before calling Resume. For FTP/SFTP: passes stored resumeHandlers to ProtocolDownloader.Resume(). For HTTP: resumeHandlers is nil, preserving patchHandlers-installed struct field handlers.
func (*Item) Start ¶ added in v1.4.3
Start begins a fresh download using the currently assigned downloader.
func (*Item) StopDownload ¶ added in v1.1.0
StopDownload pauses the download of the item.
type ItemPart ¶
type ItemPart struct {
// Hash is the unique identifier for this part of the download.
Hash string `json:"hash"`
// FinalOffset is the ending byte offset of this part in the download.
FinalOffset int64 `json:"final_offset"`
// Compiled indicates whether this part has been successfully compiled or merged.
Compiled bool `json:"compiled"`
}
ItemPart represents a part of a download item. It contains metadata about a specific segment of the download, including its unique hash, final offset, and compilation status.
type ItemSlice ¶ added in v1.1.0
type ItemSlice []*Item
ItemSlice attaches the methods of sort.Interface to []*Item, sorting by DateAdded in chronological order.
type ItemsMap ¶
ItemsMap is a map of download items, where each item is indexed by its unique identifier.
type Manager ¶
type Manager struct {
// contains filtered or unexported fields
}
Manager is a struct that manages the download items and their respective downloaders.
func InitManager ¶
InitManager creates a new manager instance.
func (*Manager) AddDownload ¶
func (m *Manager) AddDownload(d *Downloader, opts *AddDownloadOpts) (err error)
AddDownload adds a new download item entry. If the queue is enabled, the download is registered with the queue. The queue's onStart callback will be invoked when a slot is available (immediately if under capacity, or when another download completes). The *Downloader is wrapped in an httpProtocolDownloader adapter and stored in item.dAlloc as a ProtocolDownloader.
func (*Manager) AddProtocolDownload ¶ added in v1.4.1
func (m *Manager) AddProtocolDownload(pd ProtocolDownloader, probe ProbeResult, cleanURL string, proto Protocol, handlers *Handlers, opts *AddDownloadOpts) error
AddProtocolDownload adds a new download item for a non-HTTP protocol downloader. cleanURL is the URL with credentials stripped — safe for GOB persistence. proto identifies the protocol (ProtoFTP, ProtoFTPS, ProtoSFTP).
func (*Manager) FlushOne ¶
FlushOne flushes away the download item with the given hash. Fixed Race 6: Uses write lock for entire operation to prevent TOCTOU.
func (*Manager) GetCompletedItems ¶
GetCompletedItems returns all the completed items in the manager. Uses thread-safe getters for Downloaded/TotalSize to avoid data races.
func (*Manager) GetIncompleteItems ¶
GetIncompleteItems returns all the incomplete items in the manager. Uses thread-safe getters for Downloaded/TotalSize to avoid data races.
func (*Manager) GetItem ¶
GetItem returns the item with the given hash from the manager. It returns nil if the item does not exist.
func (*Manager) GetPublicItems ¶
GetPublicItems returns all the public items in the manager. It excludes child items from the result.
func (*Manager) GetQueue ¶ added in v1.3.36
func (m *Manager) GetQueue() *QueueManager
GetQueue returns the QueueManager if enabled, or nil if disabled.
func (*Manager) GetScheduledItems ¶ added in v1.4.1
GetScheduledItems returns all items with ScheduleState == "scheduled". Thread-safe: acquires read lock on the manager.
func (*Manager) ResumeDownload ¶
func (m *Manager) ResumeDownload(client *http.Client, hash string, opts *ResumeDownloadOpts) (item *Item, err error)
ResumeDownload resumes a download item. For HTTP items, it validates segment-file integrity and creates an HTTP downloader. For FTP/FTPS/SFTP items, it skips segment-file checks (single-stream to dest file) and dispatches through SchemeRouter to create a protocol-specific downloader.
func (*Manager) SetMaxConcurrentDownloads ¶ added in v1.3.36
SetMaxConcurrentDownloads enables the download queue with a concurrency limit. When a slot becomes available for a queued download, onStartDownload is called with the hash. The callback should start the download (e.g., via ResumeDownload or by getting the item's downloader and calling Start). If maxConcurrent is 0 or negative, the queue is disabled. If queue state was persisted, it will be restored (waiting items preserved).
func (*Manager) SetSchemeRouter ¶ added in v1.4.1
func (m *Manager) SetSchemeRouter(r *SchemeRouter)
SetSchemeRouter sets the scheme router for protocol dispatch during resume. Used by daemon startup to provide the router to the Manager.
func (*Manager) UpdateItem ¶
UpdateItem updates the item in the manager's items map.
type ManagerData ¶ added in v1.3.36
type ManagerData struct {
Items ItemsMap
QueueState *QueueState
}
ManagerData is the persistent state of the Manager. It wraps items and optional queue state for GOB encoding.
type Priority ¶ added in v1.3.36
type Priority int
Priority represents the priority level for queued downloads.
type ProbeResult ¶ added in v1.4.1
type ProbeResult struct {
// FileName is the suggested file name from the server.
// Empty if the server did not provide one.
FileName string
// ContentLength is the total size in bytes.
// -1 means unknown/streaming.
ContentLength int64
// Resumable indicates whether the download can be resumed after interruption.
Resumable bool
// Checksums holds any expected checksums provided by the server.
Checksums []ExpectedChecksum
}
ProbeResult holds metadata discovered during the Probe phase.
type Protocol ¶ added in v1.4.1
type Protocol uint8
Protocol identifies the download protocol for an Item. It is stored as uint8 in GOB-encoded userdata.warp files. The zero value (ProtoHTTP = 0) is the default and ensures backward compatibility with all pre-Phase-2 GOB files that lack this field.
IMPORTANT: Do NOT reorder these constants. The iota values are persisted in GOB-encoded files and must remain stable forever.
const ( // ProtoHTTP is the default HTTP/HTTPS protocol. // MUST be 0 (iota start) for GOB backward compatibility. ProtoHTTP Protocol = iota // 0 — default, matches zero value in old GOB files // ProtoFTP is the FTP protocol (plain text). ProtoFTP // 1 // ProtoFTPS is the FTP-over-TLS protocol. ProtoFTPS // 2 // ProtoSFTP is the SSH File Transfer Protocol. ProtoSFTP // 3 )
type ProtocolDownloader ¶ added in v1.4.1
type ProtocolDownloader interface {
// Probe fetches file metadata from the server without downloading content.
// Must be called before Download or Resume. Safe to call multiple times.
Probe(ctx context.Context) (ProbeResult, error)
// Download starts a fresh download. Probe must have been called first.
// handlers receives event callbacks during download.
Download(ctx context.Context, handlers *Handlers) error
// Resume continues a previously interrupted download.
// Probe must have been called first.
// parts contains the partially-downloaded segment state from the Item.
Resume(ctx context.Context, parts map[int64]*ItemPart, handlers *Handlers) error
// Capabilities returns what optional features this downloader supports.
// Safe to call before Probe (returns safe zero values).
Capabilities() DownloadCapabilities
// Close releases all resources held by this downloader.
Close() error
// Stop signals the download to stop. Non-blocking.
Stop()
// IsStopped returns true if Stop was called or the download is complete.
IsStopped() bool
// GetMaxConnections returns the configured maximum parallel connections.
GetMaxConnections() int32
// GetMaxParts returns the configured maximum parallel segments.
GetMaxParts() int32
// GetHash returns the unique identifier for this download.
GetHash() string
// GetFileName returns the file name for the download.
GetFileName() string
// GetDownloadDirectory returns the directory where the file will be saved.
GetDownloadDirectory() string
// GetSavePath returns the full path where the file will be saved.
GetSavePath() string
// GetContentLength returns the total size of the download.
GetContentLength() ContentLength
}
ProtocolDownloader is the abstraction layer between the manager/item infrastructure and any concrete download protocol (HTTP, FTP, SFTP, etc.).
Lifecycle:
- Create via a DownloaderFactory or SchemeRouter.NewDownloader
- Call Probe to fetch file metadata (required before Download/Resume)
- Call Download (new) or Resume (existing) to transfer data
- Call Close to release resources when done
type ProxyConfig ¶ added in v1.3.11
ProxyConfig holds the parsed proxy configuration.
func ParseProxyURL ¶ added in v1.3.11
func ParseProxyURL(proxyURL string) (*ProxyConfig, error)
ParseProxyURL parses and validates a proxy URL string.
func (*ProxyConfig) URL ¶ added in v1.3.11
func (p *ProxyConfig) URL() string
URL returns the proxy URL as a string.
type QueueManager ¶ added in v1.3.36
type QueueManager struct {
// contains filtered or unexported fields
}
QueueManager manages concurrent download limits. Downloads beyond maxConcurrent are queued and started when slots free up.
func NewQueueManager ¶ added in v1.3.36
func NewQueueManager(maxConcurrent int, onStart func(hash string)) *QueueManager
NewQueueManager creates a new QueueManager with the given concurrency limit. onStart is called when a download is activated (can be nil).
func (*QueueManager) ActiveCount ¶ added in v1.3.36
func (qm *QueueManager) ActiveCount() int
ActiveCount returns the number of currently active downloads.
func (*QueueManager) Add ¶ added in v1.3.36
func (qm *QueueManager) Add(hash string, priority Priority)
Add adds a download to the queue. If under capacity, it becomes active immediately. Otherwise, it's queued based on priority.
func (*QueueManager) GetActiveHashes ¶ added in v1.3.36
func (qm *QueueManager) GetActiveHashes() []string
GetActiveHashes returns a copy of the active download hashes.
func (*QueueManager) GetState ¶ added in v1.3.36
func (qm *QueueManager) GetState() QueueState
GetState returns the current queue state for persistence. Active items are not included (they'll be re-queued on restart).
func (*QueueManager) GetWaitingItems ¶ added in v1.3.36
func (qm *QueueManager) GetWaitingItems() []QueuedItemState
GetWaitingItems returns a copy of the waiting queue items with their positions.
func (*QueueManager) IsPaused ¶ added in v1.3.36
func (qm *QueueManager) IsPaused() bool
IsPaused returns whether the queue is paused.
func (*QueueManager) LoadState ¶ added in v1.3.36
func (qm *QueueManager) LoadState(state QueueState)
LoadState restores queue state from persistence. Active items are reset to empty (previously active items should be re-queued).
func (*QueueManager) MaxConcurrent ¶ added in v1.3.36
func (qm *QueueManager) MaxConcurrent() int
MaxConcurrent returns the maximum number of concurrent downloads.
func (*QueueManager) Move ¶ added in v1.3.36
func (qm *QueueManager) Move(hash string, position int) error
Move reorders a waiting item to a new position in the queue. Active downloads cannot be moved. Position is clamped to valid range [0, len-1].
func (*QueueManager) OnComplete ¶ added in v1.3.36
func (qm *QueueManager) OnComplete(hash string)
OnComplete marks a download as complete and starts the next waiting download if available.
func (*QueueManager) Pause ¶ added in v1.3.36
func (qm *QueueManager) Pause()
Pause pauses the queue, preventing auto-start of waiting items.
func (*QueueManager) Resume ¶ added in v1.3.36
func (qm *QueueManager) Resume()
Resume resumes the queue, enabling auto-start and starting waiting items up to capacity.
func (*QueueManager) WaitingCount ¶ added in v1.3.36
func (qm *QueueManager) WaitingCount() int
WaitingCount returns the number of downloads waiting in the queue.
type QueueState ¶ added in v1.3.36
type QueueState struct {
MaxConcurrent int
Waiting []QueuedItemState
Paused bool
}
QueueState holds the persistent state of the queue. Active items are not persisted (they'll be re-queued on restart).
type QueuedItemState ¶ added in v1.3.36
QueuedItemState is the exported version of queuedItem for GOB persistence.
type RateLimitedReadCloser ¶ added in v1.3.24
type RateLimitedReadCloser struct {
*RateLimitedReader
// contains filtered or unexported fields
}
RateLimitedReadCloser wraps an io.ReadCloser with rate limiting.
func NewRateLimitedReadCloser ¶ added in v1.3.24
func NewRateLimitedReadCloser(rc io.ReadCloser, limit int64) *RateLimitedReadCloser
NewRateLimitedReadCloser creates a rate-limited ReadCloser. limit is in bytes per second. 0 or negative means unlimited.
func (*RateLimitedReadCloser) Close ¶ added in v1.3.24
func (r *RateLimitedReadCloser) Close() error
Close closes the underlying ReadCloser.
type RateLimitedReader ¶ added in v1.3.24
type RateLimitedReader struct {
// contains filtered or unexported fields
}
RateLimitedReader wraps an io.Reader and limits the read rate. A limit of 0 or negative means unlimited (no throttling).
func NewRateLimitedReader ¶ added in v1.3.24
func NewRateLimitedReader(r io.Reader, limit int64) *RateLimitedReader
NewRateLimitedReader creates a rate-limited reader. limit is in bytes per second. 0 or negative means unlimited.
func (*RateLimitedReader) GetLimit ¶ added in v1.3.24
func (r *RateLimitedReader) GetLimit() int64
GetLimit returns the current rate limit in bytes per second.
func (*RateLimitedReader) Read ¶ added in v1.3.24
func (r *RateLimitedReader) Read(b []byte) (n int, err error)
Read implements io.Reader with rate limiting using a token bucket algorithm.
func (*RateLimitedReader) SetLimit ¶ added in v1.3.24
func (r *RateLimitedReader) SetLimit(limit int64)
SetLimit updates the rate limit dynamically. 0 or negative means unlimited.
type RespawnPartHandlerFunc ¶
RespawnPartHandlerFunc is a function that handles the respawning of a part. It takes a hash string, the initial offset of the part, the new initial offset and the new final offset as arguments. This handler is called when a part is respawned with new part size.
type ResumeDownloadOpts ¶
type ResumeDownloadOpts struct {
ForceParts bool
// MaxConnections sets the maximum number of parallel
// network connections to be used for downloading the file.
MaxConnections int32
// MaxSegments sets the maximum number of file segments
// to be created for downloading the file.
MaxSegments int32
Headers Headers
Handlers *Handlers
// RetryConfig configures retry behavior for transient errors.
// If nil, DefaultRetryConfig() is used.
RetryConfig *RetryConfig
// RequestTimeout specifies the timeout for individual HTTP requests.
// If zero, no per-request timeout is applied.
RequestTimeout time.Duration
// SpeedLimit specifies the maximum download speed in bytes per second.
// If zero or negative, no limit is applied.
SpeedLimit int64
}
ResumeDownloadOpts contains optional parameters for ResumeDownload.
type ResumeProgressHandlerFunc ¶
ResumeProgressHandlerFunc is a function that handles the progress of a resume. It takes a hash string and the number of bytes read as arguments.
type RetryConfig ¶ added in v1.3.9
type RetryConfig struct {
MaxRetries int // Maximum number of retry attempts (0 = unlimited)
BaseDelay time.Duration // Initial delay before first retry
MaxDelay time.Duration // Maximum delay between retries
JitterFactor float64 // Random jitter factor (0-1)
BackoffFactor float64 // Exponential backoff multiplier
}
RetryConfig holds configuration for retry behavior
func DefaultRetryConfig ¶ added in v1.3.9
func DefaultRetryConfig() RetryConfig
DefaultRetryConfig returns a RetryConfig with sensible defaults
func (*RetryConfig) CalculateBackoff ¶ added in v1.3.9
func (c *RetryConfig) CalculateBackoff(attempt int) time.Duration
CalculateBackoff computes the delay before the next retry attempt
func (*RetryConfig) ShouldRetry ¶ added in v1.3.9
func (c *RetryConfig) ShouldRetry(state *RetryState, err error) bool
ShouldRetry determines if another retry attempt should be made
func (*RetryConfig) WaitForRetry ¶ added in v1.3.9
func (c *RetryConfig) WaitForRetry(ctx context.Context, state *RetryState, category ErrorCategory) error
WaitForRetry blocks until the retry delay has elapsed or context is canceled
type RetryExhaustedHandlerFunc ¶ added in v1.3.9
RetryExhaustedHandlerFunc is called when all retries are exhausted for a part.
type RetryHandlerFunc ¶ added in v1.3.9
RetryHandlerFunc is called when a part retry is attempted. Parameters: hash (part id), attempt (current attempt number), maxAttempts, delay (wait time), err (triggering error)
type RetryState ¶ added in v1.3.9
type RetryState struct {
Attempts int // Number of attempts made
LastError error // Most recent error encountered
LastAttempt time.Time // Time of last attempt
TotalDelayed time.Duration // Cumulative time spent waiting between retries
}
RetryState tracks the state of retry attempts
type ScheduleState ¶ added in v1.4.1
type ScheduleState string
ScheduleState represents the lifecycle state of a scheduled download. The zero value ("") means the item is not scheduled (normal download).
const ( // ScheduleStateNone is the zero value — item is not scheduled. ScheduleStateNone ScheduleState = "" // ScheduleStateScheduled means the item is waiting for its trigger time. ScheduleStateScheduled ScheduleState = "scheduled" // ScheduleStateTriggered means the trigger time was reached and the item // has been enqueued for download. ScheduleStateTriggered ScheduleState = "triggered" // ScheduleStateMissed means the trigger time passed while the daemon was down. // Missed items are enqueued immediately on daemon restart. ScheduleStateMissed ScheduleState = "missed" // ScheduleStateCancelled means the user cancelled the schedule before it fired. // This is a terminal state — no transitions out. ScheduleStateCancelled ScheduleState = "cancelled" )
type SchemeRouter ¶ added in v1.4.1
type SchemeRouter struct {
// contains filtered or unexported fields
}
SchemeRouter maps URL schemes to DownloaderFactory implementations. It is the central dispatch point for protocol-agnostic download creation. The zero value is not usable; use NewSchemeRouter to create one.
func NewSchemeRouter ¶ added in v1.4.1
func NewSchemeRouter(client *http.Client) *SchemeRouter
NewSchemeRouter creates a SchemeRouter pre-configured with HTTP and HTTPS factories that use the provided HTTP client.
func (*SchemeRouter) NewDownloader ¶ added in v1.4.1
func (r *SchemeRouter) NewDownloader(rawURL string, opts *DownloaderOpts) (ProtocolDownloader, error)
NewDownloader creates a ProtocolDownloader for the given raw URL. The scheme is extracted from the URL (case-insensitive: HTTP:// is treated as http://). Returns an error if the scheme is unsupported or the URL is invalid.
func (*SchemeRouter) Register ¶ added in v1.4.1
func (r *SchemeRouter) Register(scheme string, factory DownloaderFactory)
Register adds or replaces the factory for the given scheme. scheme must be lowercase (e.g., "ftp", "sftp").
type SizeOption ¶
type SizeOption struct {
// contains filtered or unexported fields
}
SizeOption provides size unit conversion and formatting utilities. It holds a value representing the unit size and a format string for display.
func (*SizeOption) Get ¶
func (s *SizeOption) Get(l ContentLength) (siz, rem int64)
Get divides the ContentLength by the unit size and returns the quotient and remainder.
func (*SizeOption) GetFrom ¶
func (s *SizeOption) GetFrom(l int64) (siz, rem int64)
GetFrom divides the given int64 value by the unit size and returns the quotient and remainder.
func (*SizeOption) String ¶
func (s *SizeOption) String(l ContentLength) string
String returns the ContentLength formatted as a string with the unit suffix.
func (*SizeOption) StringFrom ¶
func (s *SizeOption) StringFrom(l int64) string
StringFrom returns the given int64 value formatted as a string with the unit suffix.
type SpawnPartHandlerFunc ¶
SpawnPartHandlerFunc is a function that handles the spawning of a part. It takes a hash string, the initial offset and the final offset as arguments.
type VMap ¶
type VMap[kT comparable, vT any] struct { // contains filtered or unexported fields }
VMap is a thread-safe generic map with read-write mutex protection. It provides concurrent access to key-value pairs of any comparable key type.
func NewVMap ¶
func NewVMap[kT comparable, vT any]() VMap[kT, vT]
NewVMap creates and returns a new empty VMap instance with an initialized internal map.
func (*VMap[kT, vT]) Delete ¶ added in v1.3.25
func (vm *VMap[kT, vT]) Delete(key kT)
Delete removes a key from the map with write lock protection. If the key does not exist, this is a no-op.
func (*VMap[kT, vT]) Dump ¶
func (vm *VMap[kT, vT]) Dump() (keys []kT, vals []vT)
Dump returns all keys and values as separate slices with write lock protection. RACE FIX: Acquire lock BEFORE reading len(vm.kv) to prevent concurrent modification.
func (*VMap[kT, vT]) Get ¶
func (vm *VMap[kT, vT]) Get(key kT) (val vT)
Get retrieves a value for the given key with read lock protection.
func (*VMap[kT, vT]) GetUnsafe ¶
func (vm *VMap[kT, vT]) GetUnsafe(key kT) (val vT)
GetUnsafe retrieves a value without lock protection. Use only when already holding a lock.
func (*VMap[kT, vT]) Make ¶
func (vm *VMap[kT, vT]) Make()
Make initializes the internal map. Call this to reset the map or if using a zero-value VMap.
type WorkStealHandlerFunc ¶ added in v1.3.25
WorkStealHandlerFunc is called when a fast part steals work from a slower part. Parameters:
- stealerHash: the hash of the part that finished fast and is stealing work
- victimHash: the hash of the part being stolen from
- stolenIoff: the starting offset of the stolen byte range
- stolenFoff: the ending offset of the stolen byte range (inclusive)
Source Files
¶
- checksum.go
- clength.go
- dir_validation.go
- diskspace_unix.go
- dloader.go
- errors.go
- file.go
- file_unix.go
- handlers.go
- header.go
- integrity.go
- item.go
- known_hosts.go
- longpath.go
- longpath_other.go
- manager.go
- misc.go
- parts.go
- protocol.go
- protocol_ftp.go
- protocol_http.go
- protocol_router.go
- protocol_sftp.go
- proxy.go
- queue.go
- ratelimiter.go
- reader.go
- redirect.go
- retry.go
- retry_errno_unix.go
- safego.go
- sizeopt.go
- sorter.go
- stall.go
- vmap.go
- worksteal.go