optim

package
v0.5.1 Latest
Published: Dec 1, 2025 License: Apache-2.0 Imports: 3 Imported by: 0

Documentation

Overview

Package optim provides optimization algorithms for training neural networks.

This package contains:

  • SGD: Stochastic Gradient Descent with momentum
  • Adam: Adaptive Moment Estimation with bias correction
  • Optimizer interface for custom optimizers

Basic Usage

import (
    "github.com/born-ml/born/backend/cpu"
    "github.com/born-ml/born/nn"
    "github.com/born-ml/born/optim"
)

func main() {
    backend := cpu.New()
    model := nn.NewLinear(784, 10, backend)

    // Create the optimizer over the model's parameters.
    optimizer := optim.NewAdam(
        model.Parameters(),
        optim.AdamConfig{
            LR:    0.001,
            Betas: [2]float32{0.9, 0.999},
        },
        backend,
    )

    // Training loop. criterion (the loss function) and the input/target
    // tensors x and y are assumed to be set up elsewhere.
    for epoch := range 10 {
        // Forward pass
        loss := criterion.Forward(model.Forward(x), y)

        // Backward pass
        optimizer.ZeroGrad()
        grads := backend.Backward(loss.Raw())
        optimizer.Step(grads)
    }
}

Optimizers

SGD (Stochastic Gradient Descent):

optimizer := optim.NewSGD(
    model.Parameters(),
    optim.SGDConfig{
        LR:       0.01,
        Momentum: 0.9,
    },
    backend,
)
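
With Momentum greater than zero, each parameter keeps a velocity buffer that smooths successive gradients. The following scalar sketch shows the classic momentum update for one parameter element; it is illustrative only, not the package's internal code:

func sgdMomentumStep(param, grad, velocity, lr, momentum float32) (newParam, newVelocity float32) {
    // Decaying accumulation of past gradients.
    newVelocity = momentum*velocity + grad
    // Move the parameter against the smoothed gradient.
    newParam = param - lr*newVelocity
    return newParam, newVelocity
}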

Adam (Adaptive Moment Estimation):

optimizer := optim.NewAdam(
    model.Parameters(),
    optim.AdamConfig{
        LR:      0.001,
        Betas:   [2]float32{0.9, 0.999},
        Epsilon: 1e-8,
    },
    backend,
)
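
Betas set the exponential decay rates for the first and second moment estimates, and Epsilon keeps the denominator away from zero. A scalar sketch of the standard bias-corrected Adam update for one parameter element follows (illustrative only; the package's actual implementation is not shown on this page):

import "math"

// adamStep applies one bias-corrected Adam update to a single value.
// m and v persist across calls; t is the 1-based step count.
func adamStep(param, grad, m, v float64, t int, lr, beta1, beta2, eps float64) (newParam, newM, newV float64) {
    newM = beta1*m + (1-beta1)*grad      // decaying mean of gradients
    newV = beta2*v + (1-beta2)*grad*grad // decaying mean of squared gradients
    // Both moments start at zero, so early estimates are biased low;
    // dividing by 1-beta^t rescales them.
    mHat := newM / (1 - math.Pow(beta1, float64(t)))
    vHat := newV / (1 - math.Pow(beta2, float64(t)))
    newParam = param - lr*mHat/(math.Sqrt(vHat)+eps)
    return newParam, newM, newV
}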

Training Loop Pattern

// model, criterion, backend, dataLoader, and numEpochs are assumed to be
// defined elsewhere; dataLoader is anything range-able that yields
// batches with Input and Target tensors.
for epoch := range numEpochs {
    for batch := range dataLoader {
        // 1. Zero gradients
        optimizer.ZeroGrad()

        // 2. Forward pass
        output := model.Forward(batch.Input)
        loss := criterion.Forward(output, batch.Target)

        // 3. Backward pass
        grads := backend.Backward(loss.Raw())

        // 4. Update parameters
        optimizer.Step(grads)
    }
}
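
These four steps are the same for SGD and Adam; only the constructor and config differ, so the optimizer can be swapped without touching the loop (see the sketch under type Optimizer below).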

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type Adam

type Adam[B tensor.Backend] = optim.Adam[B]

Adam represents the Adam optimizer.

func NewAdam

func NewAdam[B tensor.Backend](params []*nn.Parameter[B], config AdamConfig, backend B) *Adam[B]

NewAdam creates a new Adam optimizer with bias correction.

Example:

backend := cpu.New()
model := nn.NewLinear(784, 10, backend)
optimizer := optim.NewAdam(
    model.Parameters(),
    optim.AdamConfig{
        LR:      0.001,
        Betas:   [2]float32{0.9, 0.999},
        Epsilon: 1e-8,
    },
    backend,
)

type AdamConfig

type AdamConfig = optim.AdamConfig

AdamConfig contains configuration for the Adam optimizer.
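
The field list is not expanded on this page; the fields exercised by the examples above suggest a shape along these lines (a reconstruction, not the authoritative declaration; the real struct may embed Config or carry additional fields):

// Reconstructed from the examples on this page.
type AdamConfig struct {
    LR      float32    // learning rate
    Betas   [2]float32 // decay rates for the first and second moment estimates
    Epsilon float32    // small constant added to the denominator for stability
}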

type Config

type Config = optim.Config

Config represents the base configuration for optimizers.

type Optimizer

type Optimizer = optim.Optimizer

Optimizer defines the common interface for all optimizers.
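
Since both constructors are used identically in the examples above, a helper can return either algorithm behind the common interface. A minimal sketch, assuming *SGD[B] and *Adam[B] both satisfy Optimizer and that the tensor package lives at the path guessed below:

import (
    "github.com/born-ml/born/nn"
    "github.com/born-ml/born/optim"
    "github.com/born-ml/born/tensor" // path assumed from the tensor.Backend references on this page
)

// chooseOptimizer picks SGD or Adam at runtime behind optim.Optimizer.
func chooseOptimizer[B tensor.Backend](params []*nn.Parameter[B], useAdam bool, backend B) optim.Optimizer {
    if useAdam {
        return optim.NewAdam(params, optim.AdamConfig{
            LR:    0.001,
            Betas: [2]float32{0.9, 0.999},
        }, backend)
    }
    return optim.NewSGD(params, optim.SGDConfig{
        LR:       0.01,
        Momentum: 0.9,
    }, backend)
}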

type SGD

type SGD[B tensor.Backend] = optim.SGD[B]

SGD represents the SGD optimizer with optional momentum.

func NewSGD

func NewSGD[B tensor.Backend](params []*nn.Parameter[B], config SGDConfig, backend B) *SGD[B]

NewSGD creates a new SGD optimizer.

Example:

backend := cpu.New()
model := nn.NewLinear(784, 10, backend)
optimizer := optim.NewSGD(
    model.Parameters(),
    optim.SGDConfig{
        LR:       0.01,
        Momentum: 0.9,
    },
    backend,
)

type SGDConfig

type SGDConfig = optim.SGDConfig

SGDConfig contains configuration for the SGD optimizer.
