optimizers

package
v0.35.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jun 29, 2025 License: MIT Imports: 16 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

View Source
var AutoRunSettings = map[RunMode]struct {
	NumTrials int
	ValSize   int
}{
	LightMode:  {NumTrials: 7, ValSize: 100},
	MediumMode: {NumTrials: 25, ValSize: 300},
	HeavyMode:  {NumTrials: 50, ValSize: 1000},
}

AutoRunSettings defines default configurations for different run modes.

Functions

This section is empty.

Types

type BootstrapFewShot

type BootstrapFewShot struct {
	Metric          func(example map[string]interface{}, prediction map[string]interface{}, ctx context.Context) bool
	MaxBootstrapped int
}

func NewBootstrapFewShot

func NewBootstrapFewShot(metric func(example map[string]interface{}, prediction map[string]interface{}, ctx context.Context) bool, maxBootstrapped int) *BootstrapFewShot

func (*BootstrapFewShot) Compile

func (b *BootstrapFewShot) Compile(ctx context.Context, student, teacher core.Program, trainset []map[string]interface{}) (core.Program, error)

type CandidateResult added in v0.29.0

type CandidateResult struct {
	Program     core.Program `json:"-"`
	Score       float64      `json:"score"`
	Step        int          `json:"step"`
	Temperature float64      `json:"temperature"`
	CreatedAt   time.Time    `json:"created_at"`
}

CandidateResult represents a candidate program and its performance.

type Copro added in v0.1.0

type Copro struct {
	Metric          func(example, prediction map[string]interface{}, ctx context.Context) bool
	MaxBootstrapped int
	SubOptimizer    core.Optimizer
}

func NewCopro added in v0.1.0

func NewCopro(metric func(example, prediction map[string]interface{}, ctx context.Context) bool, maxBootstrapped int, subOptimizer core.Optimizer) *Copro

func (*Copro) Compile added in v0.1.0

func (c *Copro) Compile(ctx context.Context, program core.Program, dataset core.Dataset, metric core.Metric) (core.Program, error)

type InstructionGenerator added in v0.28.0

type InstructionGenerator struct {
	PromptModel   core.LLM
	MaxCandidates int
	Temperature   float64
}

InstructionGenerator handles the generation of instruction candidates.

func (*InstructionGenerator) GenerateCandidates added in v0.28.0

func (g *InstructionGenerator) GenerateCandidates(
	ctx context.Context,
	program core.Program,
	demos []core.Example,
) (map[int][]string, error)

GenerateCandidates creates instruction candidates for each predictor.

type IntrospectionResult added in v0.29.0

type IntrospectionResult struct {
	Analysis             string   `json:"analysis"`
	Recommendations      []string `json:"recommendations"`
	Confidence           float64  `json:"confidence"`
	IdentifiedPatterns   []string `json:"identified_patterns"`
	SuggestedAdjustments []string `json:"suggested_adjustments"`
}

IntrospectionResult contains self-analysis and advice.

type MIPRO added in v0.1.0

type MIPRO struct {
	// contains filtered or unexported fields
}

MIPRO is the main optimizer implementing multi-step interactive prompt optimization.

func NewMIPRO added in v0.1.0

func NewMIPRO(
	metric func(example, prediction map[string]interface{}, ctx context.Context) float64,
	opts ...MIPROOption,
) *MIPRO

NewMIPRO creates a new MIPRO optimizer instance.

func (*MIPRO) Compile added in v0.1.0

func (m *MIPRO) Compile(
	ctx context.Context,
	program core.Program,
	dataset core.Dataset,
	metric core.Metric,
) (core.Program, error)

Compile implements the main optimization loop.

type MIPROConfig added in v0.28.0

type MIPROConfig struct {
	Mode           RunMode
	NumTrials      int
	ValSize        int
	MiniBatchSize  int
	AdaptiveParams bool
	ScalingFactors struct {
		TrialsPerVariable float64
		BatchSizeScaling  float64
	}
	TeacherSettings map[string]interface{}

	// TPE specific configuration
	TPEGamma        float64
	TPEGenerations  int
	Seed            int64
	NumModules      int // Number of modules to optimize (can be inferred from program)
	MaxLabeledDemos int // Maximum number of labeled demonstrations to use
}

MIPROConfig contains all configuration options for the optimizer.

type MIPROMetrics added in v0.28.0

type MIPROMetrics struct {
	TeacherPerformance  float64
	StudentPerformance  float64
	PromptEffectiveness map[string]float64
	OptimizationHistory []OptimizationStep
	TokenUsage          *core.TokenInfo
}

MIPROMetrics tracks comprehensive optimization metrics.

type MIPROOption added in v0.1.0

type MIPROOption func(*MIPRO)

MIPROOption defines a function type for configuring MIPRO.

func WithMaxLabeledDemos added in v0.1.0

func WithMaxLabeledDemos(maxDemos int) MIPROOption

WithMaxLabeledDemos sets the maximum number of labeled demos to use.

func WithMiniBatchSize added in v0.1.0

func WithMiniBatchSize(size int) MIPROOption

func WithMode added in v0.28.0

func WithMode(mode RunMode) MIPROOption

WithMode sets the optimization mode.

func WithModels added in v0.28.0

func WithModels(promptModel, taskModel core.LLM) MIPROOption

WithModels explicitly sets the prompt and task models for MIPRO.

func WithNumCandidates added in v0.1.0

func WithNumCandidates(num int) MIPROOption

func WithNumModules added in v0.28.0

func WithNumModules(numModules int) MIPROOption

WithNumModules explicitly sets the number of modules to optimize.

func WithNumTrials added in v0.1.0

func WithNumTrials(trials int) MIPROOption

WithNumTrials sets the number of optimization trials.

func WithRandomSeed added in v0.28.0

func WithRandomSeed(seed int64) MIPROOption

WithRandomSeed sets a specific random seed for reproducibility.

func WithSearchStrategy added in v0.28.0

func WithSearchStrategy(strategy SearchStrategy) MIPROOption

WithSearchStrategy sets a custom search strategy.

func WithTPEGamma added in v0.28.0

func WithTPEGamma(gamma float64) MIPROOption

WithTPEGamma sets the gamma parameter for the TPE optimizer.

func WithTPEGenerations added in v0.28.0

func WithTPEGenerations(generations int) MIPROOption

WithTPEGenerations sets the number of candidates to generate for each TPE optimization step.

func WithTeacherSettings added in v0.28.0

func WithTeacherSettings(settings map[string]interface{}) MIPROOption

WithTeacherSettings configures the teacher model settings.

type OptimizationState added in v0.28.0

type OptimizationState struct {
	SuccessfulPatterns []string
	PromptEvolution    []PromptVersion
	TeacherScores      map[string]float64
	CurrentIteration   int
	BestScore          float64
	Convergence        float64
}

OptimizationState tracks the progress of optimization.

type OptimizationStep added in v0.28.0

type OptimizationStep struct {
	Trial         int
	Performance   float64
	Improvements  []string
	FailurePoints []string
}

OptimizationStep represents a single step in the optimization process.

type PromptComponent added in v0.28.0

type PromptComponent struct {
	Type    string
	Content string
	Score   float64
}

PromptComponent represents a specific part of a prompt.

type PromptVersion added in v0.28.0

type PromptVersion struct {
	Template    string
	Performance float64
	Components  []PromptComponent
}

PromptVersion represents a specific version of a prompt template.

type RunMode added in v0.28.0

type RunMode string

RunMode defines different optimization intensities for MIPRO.

const (
	LightMode  RunMode = "light"
	MediumMode RunMode = "medium"
	HeavyMode  RunMode = "heavy"
)

type SIMBA added in v0.29.0

type SIMBA struct {
	// contains filtered or unexported fields
}

SIMBA implements the Stochastic Introspective Mini-Batch Ascent optimizer.

func NewSIMBA added in v0.29.0

func NewSIMBA(opts ...SIMBAOption) *SIMBA

NewSIMBA creates a new SIMBA optimizer.

func (*SIMBA) Compile added in v0.29.0

func (s *SIMBA) Compile(ctx context.Context, program core.Program, dataset core.Dataset, metric core.Metric) (core.Program, error)

Compile implements the core.Optimizer interface for SIMBA.

func (*SIMBA) GetConfig added in v0.29.0

func (s *SIMBA) GetConfig() SIMBAConfig

GetConfig returns the current configuration.

func (*SIMBA) GetState added in v0.29.0

func (s *SIMBA) GetState() SIMBAState

GetState returns the current optimization state (thread-safe).

type SIMBAConfig added in v0.29.0

type SIMBAConfig struct {
	// Mini-batch configuration
	BatchSize     int `json:"batch_size"`     // Default: 32
	MaxSteps      int `json:"max_steps"`      // Default: 8
	NumCandidates int `json:"num_candidates"` // Default: 6

	// Temperature controls
	SamplingTemperature float64 `json:"sampling_temperature"` // Default: 0.2

	// Introspective learning
	IntrospectionFrequency int `json:"introspection_frequency"` // Default: 2

	// Performance thresholds
	ConvergenceThreshold float64 `json:"convergence_threshold"` // Default: 0.001
	MinImprovementRatio  float64 `json:"min_improvement_ratio"` // Default: 0.05

	// Concurrency and resources
	MaxGoroutines int `json:"max_goroutines"` // Default: 10
}

SIMBAConfig contains configuration options for SIMBA optimizer.

type SIMBAOption added in v0.29.0

type SIMBAOption func(*SIMBA)

SIMBAOption defines functional options for SIMBA configuration.

func WithSIMBABatchSize added in v0.29.0

func WithSIMBABatchSize(size int) SIMBAOption

WithSIMBABatchSize sets the mini-batch size.

func WithSIMBAMaxSteps added in v0.29.0

func WithSIMBAMaxSteps(steps int) SIMBAOption

WithSIMBAMaxSteps sets the maximum optimization steps.

func WithSIMBANumCandidates added in v0.29.0

func WithSIMBANumCandidates(num int) SIMBAOption

WithSIMBANumCandidates sets the number of candidate programs per iteration.

func WithSamplingTemperature added in v0.29.0

func WithSamplingTemperature(temperature float64) SIMBAOption

WithSamplingTemperature sets the sampling temperature.

type SIMBAState added in v0.29.0

type SIMBAState struct {
	CurrentStep      int
	BestScore        float64
	BestProgram      core.Program
	CandidateHistory []CandidateResult
	PerformanceLog   []StepResult
	IntrospectionLog []string
	StartTime        time.Time
}

SIMBAState tracks optimization progress and history.

type SearchConfig added in v0.28.0

type SearchConfig struct {
	ParamSpace  map[string][]interface{}
	MaxTrials   int
	Seed        int64
	Constraints map[string]interface{}
}

SearchConfig contains configuration for search strategies.

type SearchStrategy added in v0.28.0

type SearchStrategy interface {
	SuggestParams(ctx context.Context) (map[string]interface{}, error)
	UpdateResults(params map[string]interface{}, score float64) error
	GetBestParams() (map[string]interface{}, float64)
	Initialize(config SearchConfig) error
}

SearchStrategy defines the interface for optimization search algorithms.

func NewTPEOptimizer added in v0.28.0

func NewTPEOptimizer(config TPEConfig) SearchStrategy

NewTPEOptimizer creates a new TPE optimizer instance.

type StepResult added in v0.29.0

type StepResult struct {
	Step            int           `json:"step"`
	BestScore       float64       `json:"best_score"`
	CandidateScores []float64     `json:"candidate_scores"`
	Temperature     float64       `json:"temperature"`
	BatchSize       int           `json:"batch_size"`
	Introspection   string        `json:"introspection,omitempty"`
	Duration        time.Duration `json:"duration"`
	Improvement     float64       `json:"improvement"`
}

StepResult captures metrics for each optimization step.

type TPEConfig added in v0.28.0

type TPEConfig struct {
	// Gamma is the percentile split between good and bad observations (default: 0.25)
	Gamma float64
	// Seed is used for random number generation
	Seed int64
	// NumEIGenerations is the number of random points to evaluate EI on
	NumEIGenerations int
	// Prior distributions for each parameter (optional)
	PriorWeight float64
	// Kernel bandwidth factor
	BandwidthFactor float64
}

TPEConfig contains configuration for Tree-structured Parzen Estimators.

type TPEOptimizer added in v0.28.0

type TPEOptimizer struct {
	// contains filtered or unexported fields
}

TPEOptimizer implements the Tree-structured Parzen Estimator for Bayesian optimization.

func (*TPEOptimizer) GetBestParams added in v0.28.0

func (t *TPEOptimizer) GetBestParams() (map[string]interface{}, float64)

GetBestParams returns the best parameters found so far and their score.

func (*TPEOptimizer) Initialize added in v0.28.0

func (t *TPEOptimizer) Initialize(config SearchConfig) error

Initialize sets up the search space and constraints.

func (*TPEOptimizer) SuggestParams added in v0.28.0

func (t *TPEOptimizer) SuggestParams(ctx context.Context) (map[string]interface{}, error)

SuggestParams suggests the next set of parameters to try.

func (*TPEOptimizer) UpdateResults added in v0.28.0

func (t *TPEOptimizer) UpdateResults(params map[string]interface{}, score float64) error

UpdateResults updates the internal state with the results of the last trial.

type TeacherStudentOptimizer added in v0.28.0

type TeacherStudentOptimizer struct {
	Teacher         core.LLM
	Student         core.LLM
	TeacherSettings map[string]interface{}
	MaxExamples     int
	// contains filtered or unexported fields
}

TeacherStudentOptimizer handles the teacher-student learning dynamic.

func (*TeacherStudentOptimizer) GenerateDemonstration added in v0.28.0

func (t *TeacherStudentOptimizer) GenerateDemonstration(ctx context.Context, input core.Example) (core.Example, error)

GenerateDemonstration creates a high-quality demonstration using the teacher.

func (*TeacherStudentOptimizer) Initialize added in v0.28.0

func (t *TeacherStudentOptimizer) Initialize(ctx context.Context, program core.Program, dataset core.Dataset) error

Initialize sets up the teacher-student optimization.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL