llm

package
v1.1.14 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Nov 26, 2025 License: MIT Imports: 9 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type GenerateOption

type GenerateOption func(*GenerateOptions)

GenerateOption is a function that modifies GenerateOptions.

func WithMaxTokens

func WithMaxTokens(tokens int) GenerateOption

WithMaxTokens sets the maximum number of tokens to generate.

func WithStopWords

func WithStopWords(words []string) GenerateOption

WithStopWords sets the stop words that end generation when encountered.

func WithTemperature

func WithTemperature(temp float64) GenerateOption

WithTemperature sets the sampling temperature.

type GenerateOptions

type GenerateOptions struct {
	Temperature   float64
	MaxTokens     int
	StopWords     []string
	StopSequences []string
	TopP          float64
	TopK          int
	Stream        bool
}

GenerateOptions contains options for text generation.

type OllamaLLMProvider

type OllamaLLMProvider struct {
	// contains filtered or unexported fields
}

OllamaLLMProvider implements Provider interface for Ollama

func NewOllamaLLMProvider

func NewOllamaLLMProvider(cfg config.LLMConfig) (*OllamaLLMProvider, error)

NewOllamaLLMProvider creates a new Ollama provider with separate chat and embedding models

func (*OllamaLLMProvider) Embed

func (p *OllamaLLMProvider) Embed(ctx context.Context, text string) ([]float64, error)

Embed generates embeddings using Ollama embedding model

func (*OllamaLLMProvider) Generate

func (p *OllamaLLMProvider) Generate(ctx context.Context, prompt string, opts ...GenerateOption) (string, error)

Generate generates text using Ollama chat model

func (*OllamaLLMProvider) GenerateStream

func (p *OllamaLLMProvider) GenerateStream(ctx context.Context, prompt string, opts ...GenerateOption) (<-chan string, <-chan error)

GenerateStream generates streaming text using Ollama chat model

func (*OllamaLLMProvider) Name

func (p *OllamaLLMProvider) Name() string

Name returns the provider name

type Provider

type Provider interface {
	// Generate generates text completion
	Generate(ctx context.Context, prompt string, opts ...GenerateOption) (string, error)

	// GenerateStream generates text completion with streaming
	GenerateStream(ctx context.Context, prompt string, opts ...GenerateOption) (<-chan string, <-chan error)

	// Embed generates embeddings for the given text
	Embed(ctx context.Context, text string) ([]float64, error)

	// Name returns the provider name
	Name() string
}

Provider represents an LLM provider interface

func NewProvider

func NewProvider(cfg *config.LLMConfig) (Provider, error)

NewProvider creates a new LLM provider based on configuration

type RetryableProvider

type RetryableProvider struct {
	// contains filtered or unexported fields
}

RetryableProvider wraps a provider with retry logic

func NewRetryableProvider

func NewRetryableProvider(provider Provider, maxRetries int, timeout time.Duration) *RetryableProvider

NewRetryableProvider creates a new retryable provider

func (*RetryableProvider) Close

func (r *RetryableProvider) Close() error

Close implements io.Closer

func (*RetryableProvider) Embed

func (r *RetryableProvider) Embed(ctx context.Context, text string) ([]float64, error)

Embed generates embeddings with retry logic

func (*RetryableProvider) Generate

func (r *RetryableProvider) Generate(ctx context.Context, prompt string, opts ...GenerateOption) (string, error)

Generate generates text with retry logic

func (*RetryableProvider) GenerateStream

func (r *RetryableProvider) GenerateStream(ctx context.Context, prompt string, opts ...GenerateOption) (<-chan string, <-chan error)

GenerateStream generates streaming text with retry logic

func (*RetryableProvider) Name

func (r *RetryableProvider) Name() string

Name returns the provider name

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL