worker

package
v0.3.3-alpha Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 2, 2026 License: MIT Imports: 32 Imported by: 0

Documentation

Index

Constants

View Source
const (
	DataTypeMovie = "movie"
)

Variables

This section is empty.

Functions

func ApplyDisplayTitle

func ApplyDisplayTitle(ctx context.Context, movie *models.Movie, titleSource *models.Movie, displayTitleTmpl string, templateEngine *template.Engine)

func EnrichActressesFromDB

func EnrichActressesFromDB(movie *models.Movie, actressRepo *database.ActressRepository, cfg *config.Config) int

func GenerateCroppedPoster

func GenerateCroppedPoster(
	ctx context.Context,
	movie *models.Movie,
	httpClient httpclientiface.HTTPClient,
	userAgent string,
	referer string,
	refererResolver RefererResolver,
) (croppedURL string, err error)

GenerateCroppedPoster downloads and crops a poster, then persists it to disk. Returns the API URL for the persisted cropped poster. Updates movie.ShouldCropPoster to false since the image is already cropped.

Parameters:

  • ctx: Context for cancellation support
  • movie: Movie model containing poster/cover URLs
  • httpClient: Pre-configured HTTP client with proxy and timeout settings
  • userAgent: User-Agent header value from config
  • referer: Referer header value from config (for CDN compatibility)
  • refererResolver: Resolves the effective Referer for the download URL (see RefererResolver)

Returns:

  • croppedURL: API URL path like "/api/v1/posters/{movieID}.jpg"
  • error: Any error encountered during download, cropping, or persistence

Note: Caller is responsible for updating the database with the returned croppedURL

func GenerateTempPoster

func GenerateTempPoster(
	ctx context.Context,
	jobID string,
	movie *models.Movie,
	httpClient httpclientiface.HTTPClient,
	userAgent string,
	referer string,
	refererResolver RefererResolver,
	tempDir string,
) (tempRelativeURL string, err error)

GenerateTempPoster downloads and crops a poster temporarily for the review page. Returns the relative API URL path for the temp poster. Updates movie.ShouldCropPoster to false since the temp image is already cropped.

Parameters:

  • ctx: Context for cancellation support
  • jobID: Batch job ID for organizing temp files by job
  • movie: Movie model containing poster/cover URLs
  • httpClient: Pre-configured HTTP client with proxy and timeout settings
  • userAgent: User-Agent header value from config
  • referer: Referer header value from config (for CDN compatibility)
  • refererResolver: Resolves the effective Referer for the download URL (see RefererResolver)
  • tempDir: Base temp directory (e.g., "data/temp"); posters stored at {tempDir}/posters/{jobID}/

Returns:

  • tempRelativeURL: API URL path like "/api/v1/temp/posters/{jobID}/{movieID}.jpg"
  • error: Any error encountered during download or cropping

func LooksLikeTemplatedTitle

func LooksLikeTemplatedTitle(title, id string) bool

LooksLikeTemplatedTitle checks whether a title appears to already be template-generated by matching the pattern [ID] at the start. It checks for the exact bracket-enclosed ID (e.g., "[ABC-123]") followed by a non-alphanumeric separator or end-of-string, to avoid false positives where the ID is a prefix of a different ID (e.g., ABP-96 vs [ABP-960]).

Types

type BaseTask

type BaseTask struct {
	// contains filtered or unexported fields
}

BaseTask provides common task functionality

func (*BaseTask) Description

func (t *BaseTask) Description() string

func (*BaseTask) ID

func (t *BaseTask) ID() string

func (*BaseTask) Type

func (t *BaseTask) Type() TaskType

type BatchJob

type BatchJob struct {
	ID                    string                   `json:"id"`
	Status                JobStatus                `json:"status"`
	TotalFiles            int                      `json:"total_files"`
	Completed             int                      `json:"completed"`
	Failed                int                      `json:"failed"`
	Excluded              map[string]bool          `json:"excluded"`
	Files                 []string                 `json:"files"`
	Results               map[string]*FileResult   `json:"results"`
	FileMatchInfo         map[string]FileMatchInfo `json:"file_match_info,omitempty"`
	Progress              float64                  `json:"progress"`
	Destination           string                   `json:"destination"`
	TempDir               string                   `json:"temp_dir"`
	StartedAt             time.Time                `json:"started_at"`
	CompletedAt           *time.Time               `json:"completed_at,omitempty"`
	OrganizedAt           *time.Time               `json:"organized_at,omitempty"`
	RevertedAt            *time.Time               `json:"reverted_at,omitempty"`
	OperationModeOverride string                   `json:"operation_mode_override,omitempty"`
	Update                bool                     `json:"update"`
	PersistError          string                   `json:"persist_error,omitempty"`
	CancelFunc            context.CancelFunc       `json:"-"`
	Done                  chan struct{}            `json:"-"`
	// contains filtered or unexported fields
}

BatchJob represents a batch processing job

func (*BatchJob) AllFilesExcluded

func (job *BatchJob) AllFilesExcluded() bool

func (*BatchJob) AtomicUpdateFileResult

func (job *BatchJob) AtomicUpdateFileResult(filePath string, updateFn func(*FileResult) (*FileResult, error)) error

AtomicUpdateFileResult performs an atomic read-modify-write on a FileResult. The updateFn receives a deep copy of the current FileResult and must return the updated version. This prevents lost-update races by ensuring all modifications happen under the job's lock.

func (*BatchJob) Cancel

func (job *BatchJob) Cancel()

Cancel cancels the job

func (*BatchJob) ExcludeFile

func (job *BatchJob) ExcludeFile(filePath string)

ExcludeFile marks a file as excluded from organization (thread-safe)

func (*BatchJob) GetCompleted

func (job *BatchJob) GetCompleted() int

GetCompleted returns the completed count (thread-safe)

func (*BatchJob) GetDestination

func (job *BatchJob) GetDestination() string

GetDestination returns the destination path (thread-safe)

func (*BatchJob) GetFailed

func (job *BatchJob) GetFailed() int

GetFailed returns the failed count (thread-safe)

func (*BatchJob) GetFileMatchInfo

func (job *BatchJob) GetFileMatchInfo(filePath string) (FileMatchInfo, bool)

GetFileMatchInfo retrieves the FileMatchInfo for a file path (thread-safe). Returns the info and true if found, zero value and false if not found.

func (*BatchJob) GetFiles

func (job *BatchJob) GetFiles() []string

GetFiles returns a copy of the files list (thread-safe)

func (*BatchJob) GetID

func (job *BatchJob) GetID() string

func (*BatchJob) GetJobStatus

func (job *BatchJob) GetJobStatus() JobStatus

func (*BatchJob) GetOperationModeOverride

func (job *BatchJob) GetOperationModeOverride() string

GetOperationModeOverride returns the operation mode override (thread-safe)

func (*BatchJob) GetPersistError

func (job *BatchJob) GetPersistError() string

func (*BatchJob) GetProgress

func (job *BatchJob) GetProgress() float64

GetProgress returns the current progress percentage (thread-safe). This is a lightweight accessor that avoids copying the entire job state.

func (*BatchJob) GetStatus

func (job *BatchJob) GetStatus() *BatchJob

GetStatus returns a thread-safe copy of the job status

func (*BatchJob) GetStatusSlim

func (job *BatchJob) GetStatusSlim() *BatchJobSlim

GetStatusSlim returns a lightweight snapshot of the job status without movie Data. This is the recommended method for polling endpoints that only need status/progress.

func (*BatchJob) GetTempDir

func (job *BatchJob) GetTempDir() string

GetTempDir returns the job's temporary directory path in a thread-safe manner. This is the recommended way to access TempDir for concurrent safety.

func (*BatchJob) GetTotalFiles

func (job *BatchJob) GetTotalFiles() int

GetTotalFiles returns the total files count (thread-safe)

func (*BatchJob) GetUpdate

func (job *BatchJob) GetUpdate() bool

func (*BatchJob) IsDeleted

func (job *BatchJob) IsDeleted() bool

IsDeleted returns the tombstone flag. Caller must hold at least RLock to safely read this value.

func (*BatchJob) IsExcluded

func (job *BatchJob) IsExcluded(filePath string) bool

IsExcluded checks if a file is excluded from organization (thread-safe)

func (*BatchJob) Lock

func (job *BatchJob) Lock()

Lock acquires the job's write lock for exclusive access Use this when performing mutations that require atomic state validation

func (*BatchJob) MarkCancelled

func (job *BatchJob) MarkCancelled()

MarkCancelled marks the job as cancelled

func (*BatchJob) MarkCompleted

func (job *BatchJob) MarkCompleted()

MarkCompleted marks the job as completed

func (*BatchJob) MarkFailed

func (job *BatchJob) MarkFailed()

MarkFailed marks the job as failed

func (*BatchJob) MarkOrganized

func (job *BatchJob) MarkOrganized()

MarkOrganized marks the job as organized

func (*BatchJob) MarkReverted

func (job *BatchJob) MarkReverted()

MarkReverted marks the job as reverted

func (*BatchJob) MarkStarted

func (job *BatchJob) MarkStarted()

MarkStarted marks the job as started

func (*BatchJob) RLock

func (job *BatchJob) RLock()

RLock acquires the job's read lock for shared access

func (*BatchJob) RUnlock

func (job *BatchJob) RUnlock()

RUnlock releases the job's read lock

func (*BatchJob) SetCancelFunc

func (job *BatchJob) SetCancelFunc(cancelFunc context.CancelFunc)

SetCancelFunc sets the cancel function for the job (thread-safe)

func (*BatchJob) SetDestination

func (job *BatchJob) SetDestination(dest string)

SetDestination sets the destination path (thread-safe)

func (*BatchJob) SetFileMatchInfo

func (job *BatchJob) SetFileMatchInfo(filePath string, info FileMatchInfo)

SetFileMatchInfo stores the FileMatchInfo for a file path (thread-safe)

func (*BatchJob) SetOperationModeOverride

func (job *BatchJob) SetOperationModeOverride(mode string)

SetOperationModeOverride sets the operation mode override (thread-safe)

func (*BatchJob) SetPersistError

func (job *BatchJob) SetPersistError(msg string)

func (*BatchJob) SetUpdate

func (job *BatchJob) SetUpdate(update bool)

func (*BatchJob) TemplateEngine

func (job *BatchJob) TemplateEngine() *template.Engine

TemplateEngine returns the shared template engine (thread-safe, read-only after construction). Lazily initializes if nil (for tests that create BatchJob directly).

func (*BatchJob) Unlock

func (job *BatchJob) Unlock()

Unlock releases the job's write lock

func (*BatchJob) UpdateFileResult

func (job *BatchJob) UpdateFileResult(filePath string, result *FileResult)

UpdateFileResult updates the result for a specific file in the job

type BatchJobSlim

type BatchJobSlim struct {
	ID                    string                     `json:"id"`
	Status                JobStatus                  `json:"status"`
	TotalFiles            int                        `json:"total_files"`
	Completed             int                        `json:"completed"`
	Failed                int                        `json:"failed"`
	Excluded              map[string]bool            `json:"excluded"`
	Files                 []string                   `json:"files"`
	Results               map[string]*FileResultSlim `json:"results"`
	FileMatchInfo         map[string]FileMatchInfo   `json:"file_match_info,omitempty"`
	Progress              float64                    `json:"progress"`
	Destination           string                     `json:"destination"`
	TempDir               string                     `json:"temp_dir"`
	StartedAt             time.Time                  `json:"started_at"`
	CompletedAt           *time.Time                 `json:"completed_at,omitempty"`
	OrganizedAt           *time.Time                 `json:"organized_at,omitempty"`
	RevertedAt            *time.Time                 `json:"reverted_at,omitempty"`
	OperationModeOverride string                     `json:"operation_mode_override,omitempty"`
	Update                bool                       `json:"update"`
	PersistError          string                     `json:"persist_error,omitempty"`
}

BatchJobSlim is a lightweight BatchJob snapshot that uses FileResultSlim to avoid deep-copying movie Data on every poll.

type BatchScrapeOptions

type BatchScrapeOptions struct {
	TaskID            string
	FilePath          string
	FileIndex         int
	Job               *BatchJob
	Registry          *models.ScraperRegistry
	Aggregator        *aggregator.Aggregator
	MovieRepo         *database.MovieRepository
	Matcher           *matcher.Matcher
	ProgressTracker   *ProgressTracker
	Force             bool
	UpdateMode        bool
	SelectedScrapers  []string
	HTTPClient        httpclientiface.HTTPClient
	UserAgent         string
	Referer           string
	ProcessedMovieIDs map[string]bool
	Cfg               *config.Config
	ScalarStrategy    string
	ArrayStrategy     string
}

BatchScrapeOptions encapsulates all parameters for creating a BatchScrapeTask.

type BatchScrapeTask

type BatchScrapeTask struct {
	BaseTask
	// contains filtered or unexported fields
}

BatchScrapeTask represents a task for scraping metadata for a single file in a batch operation

func NewBatchScrapeTask

func NewBatchScrapeTask(opts *BatchScrapeOptions) *BatchScrapeTask

NewBatchScrapeTask creates a new batch scrape task

func (*BatchScrapeTask) Execute

func (t *BatchScrapeTask) Execute(ctx context.Context) error

Execute implements the Task interface

type DownloadTask

type DownloadTask struct {
	BaseTask
	// contains filtered or unexported fields
}

DownloadTask downloads media for a movie

func NewDownloadTask

func NewDownloadTask(
	movie *models.Movie,
	targetDir string,
	dl *downloader.Downloader,
	progressTracker *ProgressTracker,
	dryRun bool,
	multipart *downloader.MultipartInfo,
) *DownloadTask

NewDownloadTask creates a new download task

func (*DownloadTask) Execute

func (t *DownloadTask) Execute(ctx context.Context) error

type FileMatchInfo

type FileMatchInfo struct {
	MovieID     string `json:"movie_id"`
	IsMultiPart bool   `json:"is_multi_part"`
	PartNumber  int    `json:"part_number"`
	PartSuffix  string `json:"part_suffix"`
}

FileMatchInfo stores match metadata for a file (populated during discovery)

type FileResult

type FileResult struct {
	FilePath           string            `json:"file_path"`
	MovieID            string            `json:"movie_id"`
	Revision           uint64            `json:"revision"`
	Status             JobStatus         `json:"status"`
	Error              string            `json:"error,omitempty"`
	PosterError        *string           `json:"poster_error,omitempty"`
	TranslationWarning *string           `json:"translation_warning,omitempty"`
	FieldSources       map[string]string `json:"field_sources,omitempty"`
	ActressSources     map[string]string `json:"actress_sources,omitempty"`
	DataType           string            `json:"data_type,omitempty"`
	Data               interface{}       `json:"data,omitempty"`
	StartedAt          time.Time         `json:"started_at"`
	EndedAt            *time.Time        `json:"ended_at,omitempty"`
	IsMultiPart        bool              `json:"is_multi_part,omitempty"`
	PartNumber         int               `json:"part_number,omitempty"`
	PartSuffix         string            `json:"part_suffix,omitempty"`
}

FileResult represents the result of processing a single file

func RunBatchScrapeOnce

func RunBatchScrapeOnce(
	ctx context.Context,
	job *BatchJob,
	filePath string,
	fileIndex int,
	queryOverride string,
	registry *models.ScraperRegistry,
	agg *aggregator.Aggregator,
	movieRepo *database.MovieRepository,
	fileMatcher *matcher.Matcher,
	httpClient httpclientiface.HTTPClient,
	userAgent string,
	referer string,
	force bool,
	updateMode bool,
	selectedScrapers []string,
	scraperPriorityOverride []string,
	processedMovieIDs map[string]bool,
	cfg *config.Config,
	scalarStrategy string,
	arrayStrategy string,
) (*models.Movie, *FileResult, error)

func (*FileResult) MarshalJSON

func (fr *FileResult) MarshalJSON() ([]byte, error)

func (*FileResult) UnmarshalJSON

func (fr *FileResult) UnmarshalJSON(data []byte) error

type FileResultSlim

type FileResultSlim struct {
	FilePath           string            `json:"file_path"`
	MovieID            string            `json:"movie_id"`
	Revision           uint64            `json:"revision"`
	Status             JobStatus         `json:"status"`
	Error              string            `json:"error,omitempty"`
	PosterError        *string           `json:"poster_error,omitempty"`
	TranslationWarning *string           `json:"translation_warning,omitempty"`
	FieldSources       map[string]string `json:"field_sources,omitempty"`
	ActressSources     map[string]string `json:"actress_sources,omitempty"`
	DataType           string            `json:"data_type,omitempty"`
	StartedAt          time.Time         `json:"started_at"`
	EndedAt            *time.Time        `json:"ended_at,omitempty"`
	IsMultiPart        bool              `json:"is_multi_part,omitempty"`
	PartNumber         int               `json:"part_number,omitempty"`
	PartSuffix         string            `json:"part_suffix,omitempty"`
}

FileResultSlim is a lightweight FileResult that omits the Data field for efficient status polling without deep-copying movie objects.

type JobQueue

type JobQueue struct {
	// contains filtered or unexported fields
}

JobQueue manages batch jobs

func NewJobQueue

func NewJobQueue(jobRepo database.JobRepositoryInterface, tempDir string, engine *template.Engine) *JobQueue

func (*JobQueue) CreateJob

func (jq *JobQueue) CreateJob(files []string) *BatchJob

CreateJob creates a new batch job

func (*JobQueue) DeleteJob

func (jq *JobQueue) DeleteJob(id string, tempDir string) error

DeleteJob removes a job from the queue and cleans up associated temp files. Cancels the job first and waits for it to fully finish before removing files. tempDir is the base temp directory (e.g., "data/temp"). Returns an error if the job is not found, the job is running, or database deletion fails.

func (*JobQueue) GetJob

func (jq *JobQueue) GetJob(id string) (*BatchJob, bool)

GetJob retrieves a thread-safe copy of a job by ID. Returns a deep copy to prevent external mutations of internal state.

func (*JobQueue) GetJobPointer

func (jq *JobQueue) GetJobPointer(id string) (*BatchJob, bool)

GetJobPointer retrieves the actual job pointer for internal mutations. WARNING: This exposes the internal job - use only when mutations are required. Callers must respect the job's internal mutex (job.mu) when modifying state.

func (*JobQueue) ListJobs

func (jq *JobQueue) ListJobs() []*BatchJob

ListJobs returns thread-safe copies of all jobs. Returns deep copies to prevent external mutations of internal state.

func (*JobQueue) PersistJob

func (jq *JobQueue) PersistJob(job *BatchJob)

PersistJob saves a job to the database

func (*JobQueue) StartCleanup

func (jq *JobQueue) StartCleanup()

StartCleanup starts a background goroutine that cleans up old organized jobs every hour

func (*JobQueue) StopCleanup

func (jq *JobQueue) StopCleanup()

type JobStatus

type JobStatus string

JobStatus represents the status of a job

const (
	JobStatusPending   JobStatus = "pending"
	JobStatusRunning   JobStatus = "running"
	JobStatusCompleted JobStatus = "completed"
	JobStatusFailed    JobStatus = "failed"
	JobStatusCancelled JobStatus = "cancelled"
	JobStatusOrganized JobStatus = "organized"
	JobStatusReverted  JobStatus = "reverted"
)

Job State Machine:

Normal flow:

Pending → Running → Completed → Organized

State descriptions:

  • Pending: Job created, waiting to start scraping
  • Running: Scraping in progress (files being processed)
  • Completed: Scraping finished, metadata available for review/editing
  • Failed: Job failed during scraping (terminal state)
  • Cancelled: Job was cancelled by user (terminal state)
  • Organized: Files successfully organized (terminal state)
  • Reverted: Files reverted to original state (terminal state)

Organization retry flow:

Completed → Running (organize) → Completed (if failed > 0)
Completed → Running (organize) → Organized (if failed == 0)

Revert flow:

Organized → Reverted

Key rules:

  • Only "Completed" jobs can be organized
  • If organization has any failures, job stays "Completed" to enable retry
  • If organization fully succeeds (failed == 0), job transitions to "Organized"
  • "Organized" jobs cannot be organized again (terminal state)
  • "Reverted" jobs are never deleted by cleanup (no time limit on revert)

Terminal states (no further transitions):

  • Failed, Cancelled, Organized, Reverted

type NFOTask

type NFOTask struct {
	BaseTask
	// contains filtered or unexported fields
}

NFOTask generates an NFO file

func NewNFOTask

func NewNFOTask(
	movie *models.Movie,
	targetDir string,
	gen *nfo.Generator,
	progressTracker *ProgressTracker,
	dryRun bool,
	partSuffix string,
	videoFilePath string,
) *NFOTask

NewNFOTask creates a new NFO generation task

func (*NFOTask) Execute

func (t *NFOTask) Execute(ctx context.Context) error

type OrganizeTask

type OrganizeTask struct {
	BaseTask
	// contains filtered or unexported fields
}

OrganizeTask organizes a video file

func NewOrganizeTask

func NewOrganizeTask(
	match matcher.MatchResult,
	movie *models.Movie,
	destPath string,
	moveFiles bool,
	forceUpdate bool,
	org *organizer.Organizer,
	progressTracker *ProgressTracker,
	dryRun bool,
	linkModes ...organizer.LinkMode,
) *OrganizeTask

NewOrganizeTask creates a new organize task

func NewOrganizeTaskWithOptions

func NewOrganizeTaskWithOptions(
	match matcher.MatchResult,
	movie *models.Movie,
	destPath string,
	moveFiles bool,
	forceUpdate bool,
	org *organizer.Organizer,
	progressTracker *ProgressTracker,
	dryRun bool,
	linkMode organizer.LinkMode,
	options ...OrganizeTaskOption,
) *OrganizeTask

NewOrganizeTaskWithOptions creates a new organize task with optional configuration.

func (*OrganizeTask) Execute

func (t *OrganizeTask) Execute(ctx context.Context) error

type OrganizeTaskOption

type OrganizeTaskOption func(*OrganizeTask)

OrganizeTaskOption configures optional behavior for organize tasks.

func WithNFOConfig

func WithNFOConfig(cfg *nfo.Config) OrganizeTaskOption

WithNFOConfig sets the NFO config for filename resolution.

func WithSnapshotCapture

func WithSnapshotCapture(batchJobID string, repo database.BatchFileOperationRepositoryInterface) OrganizeTaskOption

WithSnapshotCapture enables snapshot capture for the organize task.

type Pool

type Pool struct {
	// contains filtered or unexported fields
}

Pool manages a pool of workers that execute tasks concurrently

func NewPool

func NewPool(maxWorkers int, timeout time.Duration, progress *ProgressTracker) *Pool

NewPool creates a new worker pool

func NewPoolWithContext

func NewPoolWithContext(parentCtx context.Context, maxWorkers int, timeout time.Duration, progress *ProgressTracker) *Pool

NewPoolWithContext creates a new worker pool with a parent context The pool will be cancelled when the parent context is cancelled

func (*Pool) ActiveWorkers

func (p *Pool) ActiveWorkers() int

ActiveWorkers returns the number of currently active workers. Note: This is an approximation based on running tasks in the progress tracker.

func (*Pool) Errors

func (p *Pool) Errors() []error

Errors returns all errors encountered during task execution

func (*Pool) Stats

func (p *Pool) Stats() *PoolStats

Stats returns statistics about the pool

func (*Pool) Stop

func (p *Pool) Stop()

Stop cancels all running tasks and waits for them to finish

func (*Pool) Submit

func (p *Pool) Submit(task Task) error

Submit submits a task to the pool for execution. Blocks if the pool is at capacity.

func (*Pool) Wait

func (p *Pool) Wait() error

Wait waits for all tasks to complete

type PoolStats

type PoolStats struct {
	MaxWorkers      int
	Timeout         time.Duration
	TotalTasks      int
	Pending         int
	Running         int
	Success         int
	Failed          int
	Canceled        int
	Errors          int
	OverallProgress float64
}

PoolStats holds statistics about the worker pool

type ProcessFileOption

type ProcessFileOption func(*ProcessFileOptions)

ProcessFileOption configures optional behavior for a process file task.

func WithLinkMode

func WithLinkMode(mode organizer.LinkMode) ProcessFileOption

WithLinkMode sets copy link behavior for organize operations.

func WithTemplateEngine

func WithTemplateEngine(engine *template.Engine) ProcessFileOption

func WithUpdateMerge

func WithUpdateMerge(enabled bool, scalarStrategy, arrayStrategy string, cfg *config.Config) ProcessFileOption

WithUpdateMerge enables update-mode merge logic and merge strategy options.

type ProcessFileOptions

type ProcessFileOptions struct {
	LinkMode       organizer.LinkMode
	UpdateMode     bool
	ScalarStrategy string
	ArrayStrategy  string
	Config         *config.Config
	TemplateEngine *template.Engine
}

ProcessFileOptions holds optional settings for process file tasks.

type ProcessFileTask

type ProcessFileTask struct {
	BaseTask
	// contains filtered or unexported fields
}

ProcessFileTask is a composite task that processes a single file. It executes scrape, download, organize, and NFO tasks sequentially.

func NewProcessFileTask

func NewProcessFileTask(
	match matcher.MatchResult,
	registry *models.ScraperRegistry,
	agg *aggregator.Aggregator,
	movieRepo *database.MovieRepository,
	dl *downloader.Downloader,
	org *organizer.Organizer,
	nfoGen *nfo.Generator,
	destPath string,
	moveFiles bool,
	forceUpdate bool,
	forceRefresh bool,
	progressTracker *ProgressTracker,
	dryRun bool,
	scrapeEnabled bool,
	downloadEnabled bool,
	organizeEnabled bool,
	nfoEnabled bool,
	customScraperPriority []string,
	options ...ProcessFileOption,
) *ProcessFileTask

NewProcessFileTask creates a new composite task for processing a file

func (*ProcessFileTask) Execute

func (t *ProcessFileTask) Execute(ctx context.Context) error

type ProgressStats

type ProgressStats struct {
	Total           int
	Pending         int
	Running         int
	Success         int
	Failed          int
	Canceled        int
	TotalBytes      int64
	DoneBytes       int64
	OverallProgress float64
}

ProgressStats holds statistics about all tasks

type ProgressTracker

type ProgressTracker struct {
	// contains filtered or unexported fields
}

ProgressTracker tracks progress for all tasks

func NewProgressTracker

func NewProgressTracker(notify chan<- ProgressUpdate) *ProgressTracker

NewProgressTracker creates a new progress tracker

func (*ProgressTracker) Cancel

func (pt *ProgressTracker) Cancel(taskID string)

Cancel marks a task as canceled

func (*ProgressTracker) Clear

func (pt *ProgressTracker) Clear()

Clear removes all task progress

func (*ProgressTracker) Complete

func (pt *ProgressTracker) Complete(taskID string, message string)

Complete marks a task as completed successfully

func (*ProgressTracker) Fail

func (pt *ProgressTracker) Fail(taskID string, err error)

Fail marks a task as failed

func (*ProgressTracker) Get

func (pt *ProgressTracker) Get(taskID string) (*TaskProgress, bool)

Get retrieves the progress for a task

func (*ProgressTracker) GetAll

func (pt *ProgressTracker) GetAll() []*TaskProgress

GetAll retrieves all task progress

func (*ProgressTracker) GetByStatus

func (pt *ProgressTracker) GetByStatus(status TaskStatus) []*TaskProgress

GetByStatus retrieves tasks with a specific status

func (*ProgressTracker) GetByType

func (pt *ProgressTracker) GetByType(taskType TaskType) []*TaskProgress

GetByType retrieves tasks of a specific type

func (*ProgressTracker) Remove

func (pt *ProgressTracker) Remove(taskID string)

Remove removes a specific task

func (*ProgressTracker) SetTotal

func (pt *ProgressTracker) SetTotal(taskID string, bytesTotal int64)

SetTotal sets the total bytes for a task

func (*ProgressTracker) Start

func (pt *ProgressTracker) Start(taskID string, taskType TaskType, message string)

Start marks a task as started

func (*ProgressTracker) Stats

func (pt *ProgressTracker) Stats() ProgressStats

Stats returns statistics about all tasks

func (*ProgressTracker) Update

func (pt *ProgressTracker) Update(taskID string, progress float64, message string, bytesDone int64)

Update updates the progress of a task

type ProgressUpdate

type ProgressUpdate struct {
	TaskID    string
	Type      TaskType
	Status    TaskStatus
	Progress  float64
	Message   string
	BytesDone int64
	Error     error
	Timestamp time.Time
}

ProgressUpdate represents a progress update event

type RefererResolver

type RefererResolver func(downloadURL, configuredReferer string) string

RefererResolver resolves an effective Referer for a download URL. This is injected by callers so poster generation does not hardcode host rules.

type ScrapeTask

type ScrapeTask struct {
	BaseTask
	// contains filtered or unexported fields
}

ScrapeTask scrapes metadata for a JAV ID

func NewScrapeTask

func NewScrapeTask(
	javID string,
	registry *models.ScraperRegistry,
	agg *aggregator.Aggregator,
	movieRepo *database.MovieRepository,
	progressTracker *ProgressTracker,
	dryRun bool,
	forceRefresh bool,
	customScraperPriority []string,
) *ScrapeTask

NewScrapeTask creates a new scrape task

func (*ScrapeTask) Execute

func (t *ScrapeTask) Execute(ctx context.Context) (*models.Movie, error)

type Task

type Task interface {
	// ID returns a unique identifier for this task
	ID() string

	// Type returns the type of task
	Type() TaskType

	// Execute runs the task and returns an error if it fails
	Execute(ctx context.Context) error

	// Description returns a human-readable description
	Description() string
}

Task represents a unit of work to be executed

type TaskProgress

type TaskProgress struct {
	ID         string
	Type       TaskType
	Status     TaskStatus
	Progress   float64 // 0.0 to 1.0
	Message    string
	BytesTotal int64
	BytesDone  int64
	StartTime  time.Time
	UpdatedAt  time.Time
	Error      error
}

TaskProgress tracks the progress of a single task

type TaskResult

type TaskResult struct {
	TaskID      string
	Type        TaskType
	Status      TaskStatus
	Error       error
	StartTime   time.Time
	EndTime     time.Time
	Duration    time.Duration
	BytesTotal  int64
	BytesDone   int64
	Description string
}

TaskResult holds the result of a completed task

type TaskStatus

type TaskStatus string

TaskStatus represents the status of a task

const (
	TaskStatusPending  TaskStatus = "pending"
	TaskStatusRunning  TaskStatus = "running"
	TaskStatusSuccess  TaskStatus = "success"
	TaskStatusFailed   TaskStatus = "failed"
	TaskStatusCanceled TaskStatus = "canceled"
)

type TaskType

type TaskType string

TaskType represents the type of task

const (
	TaskTypeScan        TaskType = "scan"
	TaskTypeScrape      TaskType = "scrape"
	TaskTypeBatchScrape TaskType = "batch_scrape"
	TaskTypeDownload    TaskType = "download"
	TaskTypeOrganize    TaskType = "organize"
	TaskTypeNFO         TaskType = "nfo"
)

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL