Documentation
¶
Index ¶
- Constants
- func ApplyDisplayTitle(ctx context.Context, movie *models.Movie, titleSource *models.Movie, ...)
- func EnrichActressesFromDB(movie *models.Movie, actressRepo *database.ActressRepository, ...) int
- func GenerateCroppedPoster(ctx context.Context, movie *models.Movie, ...) (croppedURL string, err error)
- func GenerateTempPoster(ctx context.Context, jobID string, movie *models.Movie, ...) (tempRelativeURL string, err error)
- func LooksLikeTemplatedTitle(title, id string) bool
- type BaseTask
- type BatchJob
- func (job *BatchJob) AllFilesExcluded() bool
- func (job *BatchJob) AtomicUpdateFileResult(filePath string, updateFn func(*FileResult) (*FileResult, error)) error
- func (job *BatchJob) Cancel()
- func (job *BatchJob) ExcludeFile(filePath string)
- func (job *BatchJob) GetCompleted() int
- func (job *BatchJob) GetDestination() string
- func (job *BatchJob) GetFailed() int
- func (job *BatchJob) GetFileMatchInfo(filePath string) (FileMatchInfo, bool)
- func (job *BatchJob) GetFiles() []string
- func (job *BatchJob) GetID() string
- func (job *BatchJob) GetJobStatus() JobStatus
- func (job *BatchJob) GetOperationModeOverride() string
- func (job *BatchJob) GetPersistError() string
- func (job *BatchJob) GetProgress() float64
- func (job *BatchJob) GetStatus() *BatchJob
- func (job *BatchJob) GetStatusSlim() *BatchJobSlim
- func (job *BatchJob) GetTempDir() string
- func (job *BatchJob) GetTotalFiles() int
- func (job *BatchJob) IsDeleted() bool
- func (job *BatchJob) IsExcluded(filePath string) bool
- func (job *BatchJob) Lock()
- func (job *BatchJob) MarkCancelled()
- func (job *BatchJob) MarkCompleted()
- func (job *BatchJob) MarkFailed()
- func (job *BatchJob) MarkOrganized()
- func (job *BatchJob) MarkReverted()
- func (job *BatchJob) MarkStarted()
- func (job *BatchJob) RLock()
- func (job *BatchJob) RUnlock()
- func (job *BatchJob) SetCancelFunc(cancelFunc context.CancelFunc)
- func (job *BatchJob) SetDestination(dest string)
- func (job *BatchJob) SetFileMatchInfo(filePath string, info FileMatchInfo)
- func (job *BatchJob) SetOperationModeOverride(mode string)
- func (job *BatchJob) SetPersistError(msg string)
- func (job *BatchJob) TemplateEngine() *template.Engine
- func (job *BatchJob) Unlock()
- func (job *BatchJob) UpdateFileResult(filePath string, result *FileResult)
- type BatchJobSlim
- type BatchScrapeOptions
- type BatchScrapeTask
- type DownloadTask
- type FileMatchInfo
- type FileResult
- type FileResultSlim
- type JobQueue
- func (jq *JobQueue) CreateJob(files []string) *BatchJob
- func (jq *JobQueue) DeleteJob(id string, tempDir string) error
- func (jq *JobQueue) GetJob(id string) (*BatchJob, bool)
- func (jq *JobQueue) GetJobPointer(id string) (*BatchJob, bool)
- func (jq *JobQueue) ListJobs() []*BatchJob
- func (jq *JobQueue) PersistJob(job *BatchJob)
- func (jq *JobQueue) StartCleanup()
- func (jq *JobQueue) StopCleanup()
- type JobStatus
- type NFOTask
- type OrganizeTask
- type OrganizeTaskOption
- type Pool
- type PoolStats
- type ProcessFileOption
- type ProcessFileOptions
- type ProcessFileTask
- type ProgressStats
- type ProgressTracker
- func (pt *ProgressTracker) Cancel(taskID string)
- func (pt *ProgressTracker) Clear()
- func (pt *ProgressTracker) Complete(taskID string, message string)
- func (pt *ProgressTracker) Fail(taskID string, err error)
- func (pt *ProgressTracker) Get(taskID string) (*TaskProgress, bool)
- func (pt *ProgressTracker) GetAll() []*TaskProgress
- func (pt *ProgressTracker) GetByStatus(status TaskStatus) []*TaskProgress
- func (pt *ProgressTracker) GetByType(taskType TaskType) []*TaskProgress
- func (pt *ProgressTracker) Remove(taskID string)
- func (pt *ProgressTracker) SetTotal(taskID string, bytesTotal int64)
- func (pt *ProgressTracker) Start(taskID string, taskType TaskType, message string)
- func (pt *ProgressTracker) Stats() ProgressStats
- func (pt *ProgressTracker) Update(taskID string, progress float64, message string, bytesDone int64)
- type ProgressUpdate
- type RefererResolver
- type ScrapeTask
- type Task
- type TaskProgress
- type TaskResult
- type TaskStatus
- type TaskType
Constants ¶
const (
DataTypeMovie = "movie"
)
Variables ¶
This section is empty.
Functions ¶
func ApplyDisplayTitle ¶
func EnrichActressesFromDB ¶
func GenerateCroppedPoster ¶
func GenerateCroppedPoster( ctx context.Context, movie *models.Movie, httpClient httpclientiface.HTTPClient, userAgent string, referer string, refererResolver RefererResolver, ) (croppedURL string, err error)
GenerateCroppedPoster downloads and crops a poster, then persists it to disk. Returns the API URL for the persisted cropped poster. Updates movie.ShouldCropPoster to false since the image is already cropped.
Parameters:
- ctx: Context for cancellation support
- movie: Movie model containing poster/cover URLs
- httpClient: Pre-configured HTTP client with proxy and timeout settings
- userAgent: User-Agent header value from config
- referer: Referer header value from config (for CDN compatibility)
- refererResolver: resolves the effective Referer for a given download URL (injected so poster generation does not hardcode host rules)
Returns:
- croppedURL: API URL path like "/api/v1/posters/{movieID}.jpg"
- error: Any error encountered during download, cropping, or persistence
Note: Caller is responsible for updating the database with the returned croppedURL
func GenerateTempPoster ¶
func GenerateTempPoster( ctx context.Context, jobID string, movie *models.Movie, httpClient httpclientiface.HTTPClient, userAgent string, referer string, refererResolver RefererResolver, tempDir string, ) (tempRelativeURL string, err error)
GenerateTempPoster downloads and crops a poster temporarily for the review page. Returns the relative API URL path for the temp poster. Updates movie.ShouldCropPoster to false since the temp image is already cropped.
Parameters:
- ctx: Context for cancellation support
- jobID: Batch job ID for organizing temp files by job
- movie: Movie model containing poster/cover URLs
- httpClient: Pre-configured HTTP client with proxy and timeout settings
- userAgent: User-Agent header value from config
- referer: Referer header value from config (for CDN compatibility)
- refererResolver: resolves the effective Referer for a given download URL (injected so poster generation does not hardcode host rules)
- tempDir: Base temp directory (e.g., "data/temp"); posters stored at {tempDir}/posters/{jobID}/
Returns:
- tempRelativeURL: API URL path like "/api/v1/temp/posters/{jobID}/{movieID}.jpg"
- error: Any error encountered during download or cropping
func LooksLikeTemplatedTitle ¶
LooksLikeTemplatedTitle checks whether a title appears to already be template-generated by matching the pattern [ID] at the start. It checks for the exact bracket-enclosed ID (e.g., "[ABC-123]") followed by a non-alphanumeric separator or end-of-string, to avoid false positives where the ID is a prefix of a different ID (e.g., ABP-96 vs [ABP-960]).
Types ¶
type BaseTask ¶
type BaseTask struct {
// contains filtered or unexported fields
}
BaseTask provides common task functionality
func (*BaseTask) Description ¶
type BatchJob ¶
type BatchJob struct {
ID string `json:"id"`
Status JobStatus `json:"status"`
TotalFiles int `json:"total_files"`
Completed int `json:"completed"`
Failed int `json:"failed"`
Excluded map[string]bool `json:"excluded"`
Files []string `json:"files"`
Results map[string]*FileResult `json:"results"`
FileMatchInfo map[string]FileMatchInfo `json:"file_match_info,omitempty"`
Progress float64 `json:"progress"`
Destination string `json:"destination"`
TempDir string `json:"temp_dir"`
StartedAt time.Time `json:"started_at"`
CompletedAt *time.Time `json:"completed_at,omitempty"`
OrganizedAt *time.Time `json:"organized_at,omitempty"`
RevertedAt *time.Time `json:"reverted_at,omitempty"`
OperationModeOverride string `json:"operation_mode_override,omitempty"`
PersistError string `json:"persist_error,omitempty"`
CancelFunc context.CancelFunc `json:"-"`
Done chan struct{} `json:"-"`
// contains filtered or unexported fields
}
BatchJob represents a batch processing job
func (*BatchJob) AllFilesExcluded ¶
func (*BatchJob) AtomicUpdateFileResult ¶
func (job *BatchJob) AtomicUpdateFileResult(filePath string, updateFn func(*FileResult) (*FileResult, error)) error
AtomicUpdateFileResult performs an atomic read-modify-write on a FileResult. The updateFn receives a deep copy of the current FileResult and must return the updated version. This prevents lost-update races by ensuring all modifications happen under the job's lock.
func (*BatchJob) ExcludeFile ¶
ExcludeFile marks a file as excluded from organization (thread-safe)
func (*BatchJob) GetCompleted ¶
GetCompleted returns the completed count (thread-safe)
func (*BatchJob) GetDestination ¶
GetDestination returns the destination path (thread-safe)
func (*BatchJob) GetFileMatchInfo ¶
func (job *BatchJob) GetFileMatchInfo(filePath string) (FileMatchInfo, bool)
GetFileMatchInfo retrieves the FileMatchInfo for a file path (thread-safe). Returns the info and true if found, zero value and false if not found.
func (*BatchJob) GetJobStatus ¶
func (*BatchJob) GetOperationModeOverride ¶
GetOperationModeOverride returns the operation mode override (thread-safe)
func (*BatchJob) GetPersistError ¶
func (*BatchJob) GetProgress ¶
GetProgress returns the current progress percentage (thread-safe). This is a lightweight accessor that avoids copying the entire job state.
func (*BatchJob) GetStatusSlim ¶
func (job *BatchJob) GetStatusSlim() *BatchJobSlim
GetStatusSlim returns a lightweight snapshot of the job status without movie Data. This is the recommended method for polling endpoints that only need status/progress.
func (*BatchJob) GetTempDir ¶
GetTempDir returns the job's temporary directory path in a thread-safe manner. This is the recommended way to access TempDir for concurrent safety.
func (*BatchJob) GetTotalFiles ¶
GetTotalFiles returns the total files count (thread-safe)
func (*BatchJob) IsDeleted ¶
IsDeleted returns the tombstone flag. Caller must hold at least RLock to safely read this value.
func (*BatchJob) IsExcluded ¶
IsExcluded checks if a file is excluded from organization (thread-safe)
func (*BatchJob) Lock ¶
func (job *BatchJob) Lock()
Lock acquires the job's write lock for exclusive access. Use this when performing mutations that require atomic state validation.
func (*BatchJob) MarkCancelled ¶
func (job *BatchJob) MarkCancelled()
MarkCancelled marks the job as cancelled
func (*BatchJob) MarkCompleted ¶
func (job *BatchJob) MarkCompleted()
MarkCompleted marks the job as completed
func (*BatchJob) MarkOrganized ¶
func (job *BatchJob) MarkOrganized()
MarkOrganized marks the job as organized
func (*BatchJob) MarkReverted ¶
func (job *BatchJob) MarkReverted()
MarkReverted marks the job as reverted
func (*BatchJob) MarkStarted ¶
func (job *BatchJob) MarkStarted()
MarkStarted marks the job as started
func (*BatchJob) RLock ¶
func (job *BatchJob) RLock()
RLock acquires the job's read lock for shared access
func (*BatchJob) SetCancelFunc ¶
func (job *BatchJob) SetCancelFunc(cancelFunc context.CancelFunc)
SetCancelFunc sets the cancel function for the job (thread-safe)
func (*BatchJob) SetDestination ¶
SetDestination sets the destination path (thread-safe)
func (*BatchJob) SetFileMatchInfo ¶
func (job *BatchJob) SetFileMatchInfo(filePath string, info FileMatchInfo)
SetFileMatchInfo stores the FileMatchInfo for a file path (thread-safe)
func (*BatchJob) SetOperationModeOverride ¶
SetOperationModeOverride sets the operation mode override (thread-safe)
func (*BatchJob) SetPersistError ¶
func (*BatchJob) TemplateEngine ¶
TemplateEngine returns the shared template engine (thread-safe, read-only after construction). Lazily initializes if nil (for tests that create BatchJob directly).
func (*BatchJob) UpdateFileResult ¶
func (job *BatchJob) UpdateFileResult(filePath string, result *FileResult)
UpdateFileResult updates the result for a specific file in the job
type BatchJobSlim ¶
type BatchJobSlim struct {
ID string `json:"id"`
Status JobStatus `json:"status"`
TotalFiles int `json:"total_files"`
Completed int `json:"completed"`
Failed int `json:"failed"`
Excluded map[string]bool `json:"excluded"`
Files []string `json:"files"`
Results map[string]*FileResultSlim `json:"results"`
FileMatchInfo map[string]FileMatchInfo `json:"file_match_info,omitempty"`
Progress float64 `json:"progress"`
Destination string `json:"destination"`
TempDir string `json:"temp_dir"`
StartedAt time.Time `json:"started_at"`
CompletedAt *time.Time `json:"completed_at,omitempty"`
OrganizedAt *time.Time `json:"organized_at,omitempty"`
RevertedAt *time.Time `json:"reverted_at,omitempty"`
OperationModeOverride string `json:"operation_mode_override,omitempty"`
PersistError string `json:"persist_error,omitempty"`
}
BatchJobSlim is a lightweight BatchJob snapshot that uses FileResultSlim to avoid deep-copying movie Data on every poll.
type BatchScrapeOptions ¶
type BatchScrapeOptions struct {
TaskID string
FilePath string
FileIndex int
Job *BatchJob
Registry *models.ScraperRegistry
Aggregator *aggregator.Aggregator
MovieRepo *database.MovieRepository
Matcher *matcher.Matcher
ProgressTracker *ProgressTracker
Force bool
UpdateMode bool
SelectedScrapers []string
HTTPClient httpclientiface.HTTPClient
UserAgent string
Referer string
ProcessedMovieIDs map[string]bool
Cfg *config.Config
ScalarStrategy string
ArrayStrategy string
}
BatchScrapeOptions encapsulates all parameters for creating a BatchScrapeTask.
type BatchScrapeTask ¶
type BatchScrapeTask struct {
BaseTask
// contains filtered or unexported fields
}
BatchScrapeTask represents a task for scraping metadata for a single file in a batch operation
func NewBatchScrapeTask ¶
func NewBatchScrapeTask(opts *BatchScrapeOptions) *BatchScrapeTask
NewBatchScrapeTask creates a new batch scrape task
type DownloadTask ¶
type DownloadTask struct {
BaseTask
// contains filtered or unexported fields
}
DownloadTask downloads media for a movie
func NewDownloadTask ¶
func NewDownloadTask( movie *models.Movie, targetDir string, dl *downloader.Downloader, progressTracker *ProgressTracker, dryRun bool, multipart *downloader.MultipartInfo, ) *DownloadTask
NewDownloadTask creates a new download task
type FileMatchInfo ¶
type FileMatchInfo struct {
MovieID string `json:"movie_id"`
IsMultiPart bool `json:"is_multi_part"`
PartNumber int `json:"part_number"`
PartSuffix string `json:"part_suffix"`
}
FileMatchInfo stores match metadata for a file (populated during discovery)
type FileResult ¶
type FileResult struct {
FilePath string `json:"file_path"`
MovieID string `json:"movie_id"`
Revision uint64 `json:"revision"`
Status JobStatus `json:"status"`
Error string `json:"error,omitempty"`
PosterError *string `json:"poster_error,omitempty"`
TranslationWarning *string `json:"translation_warning,omitempty"`
FieldSources map[string]string `json:"field_sources,omitempty"`
ActressSources map[string]string `json:"actress_sources,omitempty"`
DataType string `json:"data_type,omitempty"`
Data interface{} `json:"data,omitempty"`
StartedAt time.Time `json:"started_at"`
EndedAt *time.Time `json:"ended_at,omitempty"`
IsMultiPart bool `json:"is_multi_part,omitempty"`
PartNumber int `json:"part_number,omitempty"`
PartSuffix string `json:"part_suffix,omitempty"`
}
FileResult represents the result of processing a single file
func RunBatchScrapeOnce ¶
func RunBatchScrapeOnce( ctx context.Context, job *BatchJob, filePath string, fileIndex int, queryOverride string, registry *models.ScraperRegistry, agg *aggregator.Aggregator, movieRepo *database.MovieRepository, fileMatcher *matcher.Matcher, httpClient httpclientiface.HTTPClient, userAgent string, referer string, force bool, updateMode bool, selectedScrapers []string, scraperPriorityOverride []string, processedMovieIDs map[string]bool, cfg *config.Config, scalarStrategy string, arrayStrategy string, ) (*models.Movie, *FileResult, error)
func (*FileResult) MarshalJSON ¶
func (fr *FileResult) MarshalJSON() ([]byte, error)
func (*FileResult) UnmarshalJSON ¶
func (fr *FileResult) UnmarshalJSON(data []byte) error
type FileResultSlim ¶
type FileResultSlim struct {
FilePath string `json:"file_path"`
MovieID string `json:"movie_id"`
Revision uint64 `json:"revision"`
Status JobStatus `json:"status"`
Error string `json:"error,omitempty"`
PosterError *string `json:"poster_error,omitempty"`
TranslationWarning *string `json:"translation_warning,omitempty"`
FieldSources map[string]string `json:"field_sources,omitempty"`
ActressSources map[string]string `json:"actress_sources,omitempty"`
DataType string `json:"data_type,omitempty"`
StartedAt time.Time `json:"started_at"`
EndedAt *time.Time `json:"ended_at,omitempty"`
IsMultiPart bool `json:"is_multi_part,omitempty"`
PartNumber int `json:"part_number,omitempty"`
PartSuffix string `json:"part_suffix,omitempty"`
}
FileResultSlim is a lightweight FileResult that omits the Data field for efficient status polling without deep-copying movie objects.
type JobQueue ¶
type JobQueue struct {
// contains filtered or unexported fields
}
JobQueue manages batch jobs
func NewJobQueue ¶
func (*JobQueue) DeleteJob ¶
DeleteJob removes a job from the queue and cleans up associated temp files. Cancels the job first and waits for it to fully finish before removing files. tempDir is the base temp directory (e.g., "data/temp"). Returns an error if the job is not found, the job is running, or database deletion fails.
func (*JobQueue) GetJob ¶
GetJob retrieves a thread-safe copy of a job by ID. Returns a deep copy to prevent external mutations of internal state.
func (*JobQueue) GetJobPointer ¶
GetJobPointer retrieves the actual job pointer for internal mutations. WARNING: This exposes the internal job - use only when mutations are required. Callers must respect the job's internal mutex (job.mu) when modifying state.
func (*JobQueue) ListJobs ¶
ListJobs returns thread-safe copies of all jobs. Returns deep copies to prevent external mutations of internal state.
func (*JobQueue) PersistJob ¶
PersistJob saves a job to the database
func (*JobQueue) StartCleanup ¶
func (jq *JobQueue) StartCleanup()
StartCleanup starts a background goroutine that cleans up old organized jobs every hour
func (*JobQueue) StopCleanup ¶
func (jq *JobQueue) StopCleanup()
type JobStatus ¶
type JobStatus string
JobStatus represents the status of a job
const ( JobStatusPending JobStatus = "pending" JobStatusRunning JobStatus = "running" JobStatusCompleted JobStatus = "completed" JobStatusFailed JobStatus = "failed" JobStatusCancelled JobStatus = "cancelled" JobStatusOrganized JobStatus = "organized" JobStatusReverted JobStatus = "reverted" )
Job State Machine:
Normal flow:
Pending → Running → Completed → Organized
State descriptions:
- Pending: Job created, waiting to start scraping
- Running: Scraping in progress (files being processed)
- Completed: Scraping finished, metadata available for review/editing
- Failed: Job failed during scraping (terminal state)
- Cancelled: Job was cancelled by user (terminal state)
- Organized: Files successfully organized (terminal state)
- Reverted: Files reverted to original state (terminal state)
Organization retry flow:
Completed → Running (organize) → Completed (if failed > 0) Completed → Running (organize) → Organized (if failed == 0)
Revert flow:
Organized → Reverted
Key rules:
- Only "Completed" jobs can be organized
- If organization has any failures, job stays "Completed" to enable retry
- If organization fully succeeds (failed == 0), job transitions to "Organized"
- "Organized" jobs cannot be organized again (terminal state)
- "Reverted" jobs are never deleted by cleanup (no time limit on revert)
Terminal states (no further transitions):
- Failed, Cancelled, Organized, Reverted
type NFOTask ¶
type NFOTask struct {
BaseTask
// contains filtered or unexported fields
}
NFOTask generates an NFO file
type OrganizeTask ¶
type OrganizeTask struct {
BaseTask
// contains filtered or unexported fields
}
OrganizeTask organizes a video file
func NewOrganizeTask ¶
func NewOrganizeTask( match matcher.MatchResult, movie *models.Movie, destPath string, moveFiles bool, forceUpdate bool, org *organizer.Organizer, progressTracker *ProgressTracker, dryRun bool, linkModes ...organizer.LinkMode, ) *OrganizeTask
NewOrganizeTask creates a new organize task
func NewOrganizeTaskWithOptions ¶
func NewOrganizeTaskWithOptions( match matcher.MatchResult, movie *models.Movie, destPath string, moveFiles bool, forceUpdate bool, org *organizer.Organizer, progressTracker *ProgressTracker, dryRun bool, linkMode organizer.LinkMode, options ...OrganizeTaskOption, ) *OrganizeTask
NewOrganizeTaskWithOptions creates a new organize task with optional configuration.
type OrganizeTaskOption ¶
type OrganizeTaskOption func(*OrganizeTask)
OrganizeTaskOption configures optional behavior for organize tasks.
func WithNFOConfig ¶
func WithNFOConfig(cfg *nfo.Config) OrganizeTaskOption
WithNFOConfig sets the NFO config for filename resolution.
func WithSnapshotCapture ¶
func WithSnapshotCapture(batchJobID string, repo database.BatchFileOperationRepositoryInterface) OrganizeTaskOption
WithSnapshotCapture enables snapshot capture for the organize task.
type Pool ¶
type Pool struct {
// contains filtered or unexported fields
}
Pool manages a pool of workers that execute tasks concurrently
func NewPool ¶
func NewPool(maxWorkers int, timeout time.Duration, progress *ProgressTracker) *Pool
NewPool creates a new worker pool.
func NewPoolWithContext ¶
func NewPoolWithContext(parentCtx context.Context, maxWorkers int, timeout time.Duration, progress *ProgressTracker) *Pool
NewPoolWithContext creates a new worker pool with a parent context The pool will be cancelled when the parent context is cancelled
func (*Pool) ActiveWorkers ¶
ActiveWorkers returns the number of currently active workers. Note: This is an approximation based on running tasks in the progress tracker.
func (*Pool) Stop ¶
func (p *Pool) Stop()
Stop cancels all running tasks and waits for them to finish
type PoolStats ¶
type PoolStats struct {
MaxWorkers int
Timeout time.Duration
TotalTasks int
Pending int
Running int
Success int
Failed int
Canceled int
Errors int
OverallProgress float64
}
PoolStats holds statistics about the worker pool
type ProcessFileOption ¶
type ProcessFileOption func(*ProcessFileOptions)
ProcessFileOption configures optional behavior for a process file task.
func WithLinkMode ¶
func WithLinkMode(mode organizer.LinkMode) ProcessFileOption
WithLinkMode sets copy link behavior for organize operations.
func WithTemplateEngine ¶
func WithTemplateEngine(engine *template.Engine) ProcessFileOption
func WithUpdateMerge ¶
func WithUpdateMerge(enabled bool, scalarStrategy, arrayStrategy string, cfg *config.Config) ProcessFileOption
WithUpdateMerge enables update-mode merge logic and merge strategy options.
type ProcessFileOptions ¶
type ProcessFileOptions struct {
LinkMode organizer.LinkMode
UpdateMode bool
ScalarStrategy string
ArrayStrategy string
Config *config.Config
TemplateEngine *template.Engine
}
ProcessFileOptions holds optional settings for process file tasks.
type ProcessFileTask ¶
type ProcessFileTask struct {
BaseTask
// contains filtered or unexported fields
}
ProcessFileTask is a composite task that processes a single file. It executes scrape, download, organize, and NFO tasks sequentially.
func NewProcessFileTask ¶
func NewProcessFileTask( match matcher.MatchResult, registry *models.ScraperRegistry, agg *aggregator.Aggregator, movieRepo *database.MovieRepository, dl *downloader.Downloader, org *organizer.Organizer, nfoGen *nfo.Generator, destPath string, moveFiles bool, forceUpdate bool, forceRefresh bool, progressTracker *ProgressTracker, dryRun bool, scrapeEnabled bool, downloadEnabled bool, organizeEnabled bool, nfoEnabled bool, customScraperPriority []string, options ...ProcessFileOption, ) *ProcessFileTask
NewProcessFileTask creates a new composite task for processing a file
type ProgressStats ¶
type ProgressStats struct {
Total int
Pending int
Running int
Success int
Failed int
Canceled int
TotalBytes int64
DoneBytes int64
OverallProgress float64
}
ProgressStats holds statistics about all tasks
type ProgressTracker ¶
type ProgressTracker struct {
// contains filtered or unexported fields
}
ProgressTracker tracks progress for all tasks
func NewProgressTracker ¶
func NewProgressTracker(notify chan<- ProgressUpdate) *ProgressTracker
NewProgressTracker creates a new progress tracker
func (*ProgressTracker) Cancel ¶
func (pt *ProgressTracker) Cancel(taskID string)
Cancel marks a task as canceled
func (*ProgressTracker) Complete ¶
func (pt *ProgressTracker) Complete(taskID string, message string)
Complete marks a task as completed successfully
func (*ProgressTracker) Fail ¶
func (pt *ProgressTracker) Fail(taskID string, err error)
Fail marks a task as failed
func (*ProgressTracker) Get ¶
func (pt *ProgressTracker) Get(taskID string) (*TaskProgress, bool)
Get retrieves the progress for a task
func (*ProgressTracker) GetAll ¶
func (pt *ProgressTracker) GetAll() []*TaskProgress
GetAll retrieves all task progress
func (*ProgressTracker) GetByStatus ¶
func (pt *ProgressTracker) GetByStatus(status TaskStatus) []*TaskProgress
GetByStatus retrieves tasks with a specific status
func (*ProgressTracker) GetByType ¶
func (pt *ProgressTracker) GetByType(taskType TaskType) []*TaskProgress
GetByType retrieves tasks of a specific type
func (*ProgressTracker) Remove ¶
func (pt *ProgressTracker) Remove(taskID string)
Remove removes a specific task
func (*ProgressTracker) SetTotal ¶
func (pt *ProgressTracker) SetTotal(taskID string, bytesTotal int64)
SetTotal sets the total bytes for a task
func (*ProgressTracker) Start ¶
func (pt *ProgressTracker) Start(taskID string, taskType TaskType, message string)
Start marks a task as started
func (*ProgressTracker) Stats ¶
func (pt *ProgressTracker) Stats() ProgressStats
Stats returns statistics about all tasks
type ProgressUpdate ¶
type ProgressUpdate struct {
TaskID string
Type TaskType
Status TaskStatus
Progress float64
Message string
BytesDone int64
Error error
Timestamp time.Time
}
ProgressUpdate represents a progress update event
type RefererResolver ¶
RefererResolver resolves an effective Referer for a download URL. This is injected by callers so poster generation does not hardcode host rules.
type ScrapeTask ¶
type ScrapeTask struct {
BaseTask
// contains filtered or unexported fields
}
ScrapeTask scrapes metadata for a JAV ID
func NewScrapeTask ¶
func NewScrapeTask( javID string, registry *models.ScraperRegistry, agg *aggregator.Aggregator, movieRepo *database.MovieRepository, progressTracker *ProgressTracker, dryRun bool, forceRefresh bool, customScraperPriority []string, ) *ScrapeTask
NewScrapeTask creates a new scrape task
type Task ¶
type Task interface {
// ID returns a unique identifier for this task
ID() string
// Type returns the type of task
Type() TaskType
// Execute runs the task and returns an error if it fails
Execute(ctx context.Context) error
// Description returns a human-readable description
Description() string
}
Task represents a unit of work to be executed
type TaskProgress ¶
type TaskProgress struct {
ID string
Type TaskType
Status TaskStatus
Progress float64 // 0.0 to 1.0
Message string
BytesTotal int64
BytesDone int64
StartTime time.Time
UpdatedAt time.Time
Error error
}
TaskProgress tracks the progress of a single task
type TaskResult ¶
type TaskResult struct {
TaskID string
Type TaskType
Status TaskStatus
Error error
StartTime time.Time
EndTime time.Time
Duration time.Duration
BytesTotal int64
BytesDone int64
Description string
}
TaskResult holds the result of a completed task
type TaskStatus ¶
type TaskStatus string
TaskStatus represents the status of a task
const ( TaskStatusPending TaskStatus = "pending" TaskStatusRunning TaskStatus = "running" TaskStatusSuccess TaskStatus = "success" TaskStatusFailed TaskStatus = "failed" TaskStatusCanceled TaskStatus = "canceled" )