llm

package
v0.1.2 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Mar 17, 2026 License: GPL-3.0 Imports: 11 Imported by: 0

Documentation

Overview

Package llm provides abstractions for Large Language Model providers.

Index

Constants

This section is empty.

Variables

View Source
var (
	ErrProviderNotConfigured = fmt.Errorf("llm provider not configured")
	ErrInvalidProvider       = fmt.Errorf("invalid llm provider")
	ErrRateLimited           = fmt.Errorf("llm rate limited")
	ErrContextCanceled       = fmt.Errorf("context canceled")
	ErrInvalidResponse       = fmt.Errorf("invalid llm response")
	ErrTokenLimitExceeded    = fmt.Errorf("token limit exceeded")
)

Errors

Functions

This section is empty.

Types

type ClaudeConfig

type ClaudeConfig struct {
	APIKey     string
	Model      string
	Timeout    time.Duration
	MaxRetries int
}

ClaudeConfig holds configuration for Claude provider.

type ClaudeProvider

type ClaudeProvider struct {
	// contains filtered or unexported fields
}

ClaudeProvider implements the Provider interface for Anthropic's Claude.

func NewClaudeProvider

func NewClaudeProvider(cfg ClaudeConfig) (*ClaudeProvider, error)

NewClaudeProvider creates a new Claude provider.

func (*ClaudeProvider) Complete

Complete sends a prompt to Claude and returns the completion.

func (*ClaudeProvider) Model

func (p *ClaudeProvider) Model() string

Model returns the model being used.

func (*ClaudeProvider) Name

func (p *ClaudeProvider) Name() string

Name returns the provider name.

func (*ClaudeProvider) Validate

func (p *ClaudeProvider) Validate() error

Validate checks if the configuration is valid.

type CompletionRequest

type CompletionRequest struct {
	// SystemPrompt is the system/instruction prompt.
	SystemPrompt string

	// UserPrompt is the user's input prompt.
	UserPrompt string

	// MaxTokens is the maximum tokens in the response.
	MaxTokens int

	// Temperature controls randomness (0.0-1.0).
	Temperature float64

	// JSONMode requests structured JSON output.
	JSONMode bool

	// Metadata for tracking.
	Metadata map[string]string
}

CompletionRequest represents a request to the LLM.

type CompletionResponse

type CompletionResponse struct {
	// Content is the generated text.
	Content string

	// PromptTokens is the number of tokens in the prompt.
	PromptTokens int

	// CompletionTokens is the number of tokens in the response.
	CompletionTokens int

	// TotalTokens is PromptTokens + CompletionTokens.
	TotalTokens int

	// Model is the actual model used (may differ from requested).
	Model string

	// FinishReason indicates why the response ended.
	FinishReason string

	// StopReason is provider-specific stop information.
	StopReason string
}

CompletionResponse represents a response from the LLM.

type Factory

type Factory struct {
	// contains filtered or unexported fields
}

Factory creates LLM providers based on configuration.

func NewFactory

func NewFactory(cfg config.AITriageConfig) *Factory

NewFactory creates a new LLM provider factory.

func NewFactoryWithEncryption

func NewFactoryWithEncryption(cfg config.AITriageConfig, encryptor crypto.Encryptor) *Factory

NewFactoryWithEncryption creates a new LLM provider factory with encryption support. SECURITY: This factory requires encrypted API keys (enc:v1: prefix) by default.

func NewFactoryWithEncryptionLegacy

func NewFactoryWithEncryptionLegacy(cfg config.AITriageConfig, encryptor crypto.Encryptor) *Factory

NewFactoryWithEncryptionLegacy creates a factory that allows plaintext keys (for migration). Deprecated: Use NewFactoryWithEncryption after migrating all API keys to encrypted format.

func (*Factory) CreateProvider

func (f *Factory) CreateProvider(aiSettings tenant.AISettings) (Provider, error)

CreateProvider creates an LLM provider based on tenant settings. If tenant uses platform AI, uses platform config. If tenant uses BYOK, uses tenant's API key.

func (*Factory) IsPlatformEnabled

func (f *Factory) IsPlatformEnabled() bool

IsPlatformEnabled checks if platform AI is both enabled and has at least one LLM provider configured.

type GeminiConfig

type GeminiConfig struct {
	APIKey     string
	Model      string
	Timeout    time.Duration
	MaxRetries int
}

GeminiConfig holds configuration for Gemini provider.

type GeminiProvider

type GeminiProvider struct {
	// contains filtered or unexported fields
}

GeminiProvider implements the Provider interface for Google's Gemini.

func NewGeminiProvider

func NewGeminiProvider(cfg GeminiConfig) (*GeminiProvider, error)

NewGeminiProvider creates a new Gemini provider.

func (*GeminiProvider) Complete

Complete sends a prompt to Gemini and returns the completion.

func (*GeminiProvider) Model

func (p *GeminiProvider) Model() string

Model returns the model being used.

func (*GeminiProvider) Name

func (p *GeminiProvider) Name() string

Name returns the provider name.

func (*GeminiProvider) Validate

func (p *GeminiProvider) Validate() error

Validate checks if the configuration is valid.

type OpenAIConfig

type OpenAIConfig struct {
	APIKey     string
	Model      string
	Timeout    time.Duration
	MaxRetries int
}

OpenAIConfig holds configuration for OpenAI provider.

type OpenAIProvider

type OpenAIProvider struct {
	// contains filtered or unexported fields
}

OpenAIProvider implements the Provider interface for OpenAI.

func NewOpenAIProvider

func NewOpenAIProvider(cfg OpenAIConfig) (*OpenAIProvider, error)

NewOpenAIProvider creates a new OpenAI provider.

func (*OpenAIProvider) Complete

Complete sends a prompt to OpenAI and returns the completion.

func (*OpenAIProvider) Model

func (p *OpenAIProvider) Model() string

Model returns the model being used.

func (*OpenAIProvider) Name

func (p *OpenAIProvider) Name() string

Name returns the provider name.

func (*OpenAIProvider) Validate

func (p *OpenAIProvider) Validate() error

Validate checks if the configuration is valid.

type Provider

type Provider interface {
	// Complete sends a prompt and returns the completion.
	Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)

	// Name returns the provider name for logging.
	Name() string

	// Model returns the model being used.
	Model() string

	// Validate checks if the configuration is valid.
	Validate() error
}

Provider is the interface for LLM providers (Claude, OpenAI, etc.).

type ProviderConfig

type ProviderConfig struct {
	Type          ProviderType
	APIKey        string
	Model         string
	AzureEndpoint string // Only for Azure OpenAI
	Timeout       int    // Timeout in seconds
	MaxRetries    int
}

ProviderConfig holds configuration for creating a provider.

type ProviderType

type ProviderType string

ProviderType represents supported LLM provider types.

const (
	ProviderTypeClaude      ProviderType = "claude"
	ProviderTypeOpenAI      ProviderType = "openai"
	ProviderTypeAzureOpenAI ProviderType = "azure_openai"
	ProviderTypeGemini      ProviderType = "gemini"
)

func (ProviderType) IsValid

func (p ProviderType) IsValid() bool

IsValid checks if the provider type is valid.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL