llm

package
v0.0.0-alpha.13 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Mar 12, 2026 License: MIT Imports: 11 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type AzureOpenAIProvider

type AzureOpenAIProvider struct {
	// contains filtered or unexported fields
}

AzureOpenAIProvider implements the Provider interface for Azure OpenAI

func (*AzureOpenAIProvider) Chat

func (ap *AzureOpenAIProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)

Chat performs a chat completion using Azure OpenAI

func (*AzureOpenAIProvider) Complete

func (ap *AzureOpenAIProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)

Complete generates a text completion using Azure OpenAI

func (*AzureOpenAIProvider) GetModel

func (ap *AzureOpenAIProvider) GetModel() *ModelInfo

GetModel returns the model information

func (*AzureOpenAIProvider) GetProvider

func (ap *AzureOpenAIProvider) GetProvider() ProviderType

GetProvider returns the provider type

func (*AzureOpenAIProvider) IsHealthy

func (ap *AzureOpenAIProvider) IsHealthy(ctx context.Context) (bool, error)

IsHealthy checks if the Azure OpenAI service is available by sending a minimal chat completion request. The legacy /completions endpoint is not supported on modern Azure GPT-3.5/4/4o deployments, so we always use /chat/completions here.

type CompletionOptions

type CompletionOptions struct {
	Temperature   *float32
	MaxTokens     *int64
	TopP          *float32
	StopSequences []string
}

CompletionOptions contains options for LLM completions

type Config

type Config struct {
	Provider    ProviderType
	Model       string
	Temperature float32
	MaxTokens   int
	Timeout     time.Duration

	// Ollama specific
	OllamaURL string

	// Azure specific
	AzureOpenAIEndpoint string
	AzureOpenAIKey      string
	AzureOpenAIVersion  string

	// OpenAI specific
	OpenAIAPIKey string
}

Config holds configuration for LLM providers

func LoadConfig

func LoadConfig() *Config

LoadConfig loads configuration from environment and .env files

func (*Config) Validate

func (c *Config) Validate() error

Validate checks if the configuration is valid

type Message

type Message struct {
	Role    string // "system", "user", "assistant"
	Content string
}

Message represents a chat message

type ModelInfo

type ModelInfo struct {
	Name            string
	Provider        ProviderType
	ContextSize     int
	MaxTokens       int
	CostPer1kTokens float64 // in USD
	Capabilities    []string
}

ModelInfo contains information about an LLM model

type OllamaProvider

type OllamaProvider struct {
	// contains filtered or unexported fields
}

OllamaProvider implements the Provider interface for Ollama

func (*OllamaProvider) Chat

func (op *OllamaProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)

Chat performs a chat completion using Ollama

func (*OllamaProvider) Complete

func (op *OllamaProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)

Complete generates a text completion using Ollama

func (*OllamaProvider) GetAvailableModels

func (op *OllamaProvider) GetAvailableModels(ctx context.Context) ([]string, error)

GetAvailableModels lists available models in Ollama

func (*OllamaProvider) GetModel

func (op *OllamaProvider) GetModel() *ModelInfo

GetModel returns the model information

func (*OllamaProvider) GetProvider

func (op *OllamaProvider) GetProvider() ProviderType

GetProvider returns the provider type

func (*OllamaProvider) IsHealthy

func (op *OllamaProvider) IsHealthy(ctx context.Context) (bool, error)

IsHealthy checks if the Ollama service is available

func (*OllamaProvider) PullModel

func (op *OllamaProvider) PullModel(ctx context.Context, modelName string) error

PullModel pulls a model from Ollama registry

type OpenAIProvider

type OpenAIProvider struct {
	// contains filtered or unexported fields
}

OpenAIProvider implements the Provider interface for OpenAI

func (*OpenAIProvider) Chat

func (op *OpenAIProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)

Chat performs a chat completion using OpenAI

func (*OpenAIProvider) Complete

func (op *OpenAIProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)

Complete generates a text completion using OpenAI

func (*OpenAIProvider) GetModel

func (op *OpenAIProvider) GetModel() *ModelInfo

GetModel returns the model information

func (*OpenAIProvider) GetProvider

func (op *OpenAIProvider) GetProvider() ProviderType

GetProvider returns the provider type

func (*OpenAIProvider) IsHealthy

func (op *OpenAIProvider) IsHealthy(ctx context.Context) (bool, error)

IsHealthy checks if the OpenAI API is accessible

type Provider

type Provider interface {
	// Complete generates text completion
	Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)

	// Chat performs a chat completion with message history
	Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)

	// GetModel returns information about the configured model
	GetModel() *ModelInfo

	// IsHealthy checks if the provider is available and healthy
	IsHealthy(ctx context.Context) (bool, error)

	// GetProvider returns the provider type
	GetProvider() ProviderType
}

Provider is the interface for LLM providers

func NewAzureOpenAIProvider

func NewAzureOpenAIProvider(cfg *Config) (Provider, error)

NewAzureOpenAIProvider creates a new Azure OpenAI provider

func NewOllamaProvider

func NewOllamaProvider(cfg *Config) (Provider, error)

NewOllamaProvider creates a new Ollama provider

func NewOpenAIProvider

func NewOpenAIProvider(cfg *Config) (Provider, error)

NewOpenAIProvider creates a new OpenAI provider

func NewProvider

func NewProvider(cfg *Config) (Provider, error)

NewProvider creates a new LLM provider based on config

type ProviderChain

type ProviderChain struct {
	// contains filtered or unexported fields
}

ProviderChain implements fallback chain for providers

func NewProviderChain

func NewProviderChain(providers ...Provider) *ProviderChain

NewProviderChain creates a new provider chain with fallbacks

func (*ProviderChain) Chat

func (pc *ProviderChain) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)

Chat tries each provider in sequence until one succeeds

func (*ProviderChain) Complete

func (pc *ProviderChain) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)

Complete tries each provider in sequence until one succeeds

func (*ProviderChain) GetModel

func (pc *ProviderChain) GetModel() *ModelInfo

GetModel returns the first available provider's model

func (*ProviderChain) GetProvider

func (pc *ProviderChain) GetProvider() ProviderType

GetProvider returns the first available provider's type

func (*ProviderChain) IsHealthy

func (pc *ProviderChain) IsHealthy(ctx context.Context) (bool, error)

IsHealthy checks if at least one provider is healthy

type ProviderType

type ProviderType string

ProviderType represents the LLM provider

const (
	ProviderOllama ProviderType = "ollama"
	ProviderAzure  ProviderType = "azure"
	ProviderOpenAI ProviderType = "openai"
)

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL