llms

package
v0.72.2 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Dec 5, 2025 License: MIT Imports: 21 Imported by: 1

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func AnthropicProviderFactory added in v0.37.0

func AnthropicProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

AnthropicProviderFactory creates AnthropicLLM instances.

func EnsureFactory added in v0.16.1

func EnsureFactory()

func FastChatProviderFactory added in v0.43.0

func FastChatProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

FastChatProviderFactory creates FastChat instances from provider config.

func GeminiProviderFactory added in v0.37.0

func GeminiProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

GeminiProviderFactory creates GeminiLLM instances.

func LiteLLMProviderFactory added in v0.43.0

func LiteLLMProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

LiteLLMProviderFactory creates LiteLLM instances from provider config.

func LlamacppProviderFactory added in v0.37.0

func LlamacppProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

LlamacppProviderFactory creates LlamacppLLM instances.

func LocalAIProviderFactory added in v0.43.0

func LocalAIProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

LocalAIProviderFactory creates LocalAI instances from provider config.

func NewLLM

func NewLLM(apiKey string, modelID core.ModelID) (core.LLM, error)

NewLLM creates a new LLM instance based on the provided model ID. This function now uses the registry system for dynamic model creation.

func OllamaProviderFactory added in v0.37.0

func OllamaProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

OllamaProviderFactory creates OllamaLLM instances.

func OpenAIProviderFactory added in v0.43.0

func OpenAIProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

OpenAIProviderFactory creates OpenAILLM instances.

Types

type AnthropicLLM

type AnthropicLLM struct {
	*core.BaseLLM
	// contains filtered or unexported fields
}

AnthropicLLM implements the core.LLM interface for Anthropic's models.

func NewAnthropicLLM

func NewAnthropicLLM(apiKey string, model anthropic.Model) (*AnthropicLLM, error)

NewAnthropicLLM creates a new AnthropicLLM instance.

func NewAnthropicLLMFromConfig added in v0.37.0

func NewAnthropicLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*AnthropicLLM, error)

NewAnthropicLLMFromConfig creates a new AnthropicLLM instance from configuration.

func (*AnthropicLLM) CreateEmbedding added in v0.16.1

func (a *AnthropicLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

func (*AnthropicLLM) CreateEmbeddings added in v0.16.1

func (a *AnthropicLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

func (*AnthropicLLM) Generate

func (a *AnthropicLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

Generate implements the core.LLM interface.

func (*AnthropicLLM) GenerateWithContent added in v0.71.0

func (a *AnthropicLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)

GenerateWithContent generates a response with multimodal content.

func (*AnthropicLLM) GenerateWithFunctions added in v0.20.0

func (a *AnthropicLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

func (*AnthropicLLM) GenerateWithJSON

func (a *AnthropicLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements the core.LLM interface.

func (*AnthropicLLM) StreamGenerate added in v0.23.0

func (a *AnthropicLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements streaming text generation using the official SDK's iterator pattern.

func (*AnthropicLLM) StreamGenerateWithContent added in v0.71.0

func (a *AnthropicLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerateWithContent generates a streaming response with multimodal content.

type DefaultLLMFactory added in v0.16.1

type DefaultLLMFactory struct{}

func (*DefaultLLMFactory) CreateLLM added in v0.16.1

func (f *DefaultLLMFactory) CreateLLM(apiKey string, modelID core.ModelID) (core.LLM, error)

CreateLLM implements the LLMFactory interface.

type GeminiLLM added in v0.14.0

type GeminiLLM struct {
	*core.BaseLLM
	// contains filtered or unexported fields
}

GeminiLLM implements the core.LLM interface for Google's Gemini model.

func NewGeminiLLM added in v0.14.0

func NewGeminiLLM(apiKey string, model core.ModelID) (*GeminiLLM, error)

NewGeminiLLM creates a new GeminiLLM instance.

func NewGeminiLLMFromConfig added in v0.37.0

func NewGeminiLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*GeminiLLM, error)

NewGeminiLLMFromConfig creates a new GeminiLLM instance from configuration.

func (*GeminiLLM) CreateEmbedding added in v0.16.1

func (g *GeminiLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

CreateEmbedding implements the embedding generation for a single input.

func (*GeminiLLM) CreateEmbeddings added in v0.16.1

func (g *GeminiLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

CreateEmbeddings implements batch embedding generation.

func (*GeminiLLM) Generate added in v0.14.0

func (g *GeminiLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

Generate implements the core.LLM interface.

func (*GeminiLLM) GenerateWithContent added in v0.38.0

func (g *GeminiLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)

GenerateWithContent implements multimodal content generation for Gemini.

func (*GeminiLLM) GenerateWithFunctions added in v0.20.0

func (g *GeminiLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithFunctions implements function calling for GeminiLLM.

func (*GeminiLLM) GenerateWithJSON added in v0.14.0

func (g *GeminiLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements the core.LLM interface.

func (*GeminiLLM) StreamGenerate added in v0.23.0

func (g *GeminiLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements streaming text generation for Gemini.

func (*GeminiLLM) StreamGenerateWithContent added in v0.38.0

func (g *GeminiLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerateWithContent implements multimodal streaming for Gemini.

type LlamacppLLM added in v0.1.0

type LlamacppLLM struct {
	*core.BaseLLM
}

LlamacppLLM implements the core.LLM interface for Llamacpp-hosted models.

func NewLlamacppLLM added in v0.1.0

func NewLlamacppLLM(endpoint string) (*LlamacppLLM, error)

NewLlamacppLLM creates a new LlamacppLLM instance.

func NewLlamacppLLMFromConfig added in v0.37.0

func NewLlamacppLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*LlamacppLLM, error)

NewLlamacppLLMFromConfig creates a new LlamacppLLM instance from configuration.

func (*LlamacppLLM) CreateEmbedding added in v0.16.1

func (o *LlamacppLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

func (*LlamacppLLM) CreateEmbeddings added in v0.16.1

func (o *LlamacppLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

CreateEmbeddings implements batch embedding creation.

func (*LlamacppLLM) Generate added in v0.1.0

func (o *LlamacppLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

func (*LlamacppLLM) GenerateWithFunctions added in v0.20.0

func (o *LlamacppLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

func (*LlamacppLLM) GenerateWithJSON added in v0.1.0

func (o *LlamacppLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements the core.LLM interface.

func (*LlamacppLLM) StreamGenerate added in v0.23.0

func (o *LlamacppLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements streaming for LlamaCPP.

type OllamaConfig added in v0.43.0

type OllamaConfig struct {
	UseOpenAIAPI bool   `yaml:"use_openai_api" json:"use_openai_api"` // Default: true (modern Ollama)
	BaseURL      string `yaml:"base_url" json:"base_url"`             // Default: http://localhost:11434
	APIKey       string `yaml:"api_key" json:"api_key"`               // Optional for auth
	Timeout      int    `yaml:"timeout" json:"timeout"`               // Default: 60
}

OllamaConfig holds configuration for Ollama provider.

type OllamaLLM

type OllamaLLM struct {
	*core.BaseLLM
	// contains filtered or unexported fields
}

OllamaLLM implements the core.LLM interface for Ollama-hosted models with dual-mode support.

func NewOllamaLLM

func NewOllamaLLM(modelID core.ModelID, options ...OllamaOption) (*OllamaLLM, error)

NewOllamaLLM creates a new OllamaLLM instance with modern defaults.

func NewOllamaLLMFromConfig added in v0.37.0

func NewOllamaLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*OllamaLLM, error)

NewOllamaLLMFromConfig creates a new OllamaLLM instance from configuration.

func (*OllamaLLM) CreateEmbedding added in v0.16.1

func (o *OllamaLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

CreateEmbedding implements embedding generation with OpenAI-compatible mode support.

func (*OllamaLLM) CreateEmbeddings added in v0.16.1

func (o *OllamaLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

CreateEmbeddings generates embeddings for multiple inputs.

func (*OllamaLLM) Generate

func (o *OllamaLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

Generate implements the core.LLM interface with dual-mode support.

func (*OllamaLLM) GenerateWithContent added in v0.43.0

func (o *OllamaLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)

GenerateWithContent implements multimodal content generation.

func (*OllamaLLM) GenerateWithFunctions added in v0.20.0

func (o *OllamaLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithFunctions is not yet implemented for Ollama.

func (*OllamaLLM) GenerateWithJSON

func (o *OllamaLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements JSON mode generation.

func (*OllamaLLM) StreamGenerate added in v0.23.0

func (o *OllamaLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements streaming with dual-mode support.

func (*OllamaLLM) StreamGenerateWithContent added in v0.43.0

func (o *OllamaLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerateWithContent implements multimodal streaming content generation.

type OllamaOption added in v0.43.0

type OllamaOption func(*OllamaConfig)

OllamaOption is a functional option for configuring the Ollama provider.

func WithAuth added in v0.43.0

func WithAuth(apiKey string) OllamaOption

WithAuth sets authentication for Ollama (some deployments require it).

func WithBaseURL added in v0.43.0

func WithBaseURL(url string) OllamaOption

WithBaseURL sets the base URL for Ollama.

func WithNativeAPI added in v0.43.0

func WithNativeAPI() OllamaOption

WithNativeAPI configures Ollama to use native API mode.

func WithOpenAIAPI added in v0.43.0

func WithOpenAIAPI() OllamaOption

WithOpenAIAPI configures Ollama to use OpenAI-compatible API mode.

func WithTimeout added in v0.43.0

func WithTimeout(timeout int) OllamaOption

WithTimeout sets the timeout for requests.

type OpenAIConfig added in v0.43.0

type OpenAIConfig struct {
	// contains filtered or unexported fields
}

OpenAIConfig holds configuration for OpenAI provider.

type OpenAILLM added in v0.43.0

type OpenAILLM struct {
	*core.BaseLLM
	// contains filtered or unexported fields
}

OpenAILLM implements the core.LLM interface for OpenAI's models.

func NewFastChat added in v0.43.0

func NewFastChat(modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)

NewFastChat is a convenience constructor for a FastChat-backed OpenAILLM.

func NewLiteLLM added in v0.43.0

func NewLiteLLM(modelID core.ModelID, apiKey string, opts ...OpenAIOption) (*OpenAILLM, error)

NewLiteLLM is a convenience constructor for a LiteLLM-backed OpenAILLM.

func NewLocalAI added in v0.43.0

func NewLocalAI(modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)

NewLocalAI is a convenience constructor for a LocalAI-backed OpenAILLM.

func NewOpenAI added in v0.43.0

func NewOpenAI(modelID core.ModelID, apiKey string) (*OpenAILLM, error)

NewOpenAI is a convenience constructor for the standard OpenAI service.

func NewOpenAICompatible added in v0.43.0

func NewOpenAICompatible(provider string, modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)

NewOpenAICompatible is a generic constructor for any OpenAI-compatible provider.

func NewOpenAILLM added in v0.43.0

func NewOpenAILLM(modelID core.ModelID, opts ...OpenAIOption) (*OpenAILLM, error)

NewOpenAILLM creates a new OpenAILLM instance with functional options.

func NewOpenAILLMFromConfig added in v0.43.0

func NewOpenAILLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*OpenAILLM, error)

NewOpenAILLMFromConfig creates a new OpenAILLM instance from configuration.

func (*OpenAILLM) CreateEmbedding added in v0.43.0

func (o *OpenAILLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

CreateEmbedding implements the core.LLM interface.

func (*OpenAILLM) CreateEmbeddings added in v0.43.0

func (o *OpenAILLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

CreateEmbeddings implements the core.LLM interface.

func (*OpenAILLM) Generate added in v0.43.0

func (o *OpenAILLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

Generate implements the core.LLM interface.

func (*OpenAILLM) GenerateWithFunctions added in v0.43.0

func (o *OpenAILLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithFunctions implements the core.LLM interface.

func (*OpenAILLM) GenerateWithJSON added in v0.43.0

func (o *OpenAILLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements the core.LLM interface.

func (*OpenAILLM) StreamGenerate added in v0.43.0

func (o *OpenAILLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements the core.LLM interface.

type OpenAIOption added in v0.43.0

type OpenAIOption func(*OpenAIConfig)

OpenAIOption is a functional option for configuring OpenAI provider.

func WithAPIKey added in v0.43.0

func WithAPIKey(apiKey string) OpenAIOption

WithAPIKey sets the API key.

func WithHTTPClient added in v0.43.0

func WithHTTPClient(client *http.Client) OpenAIOption

WithHTTPClient sets a custom HTTP client.

func WithHeader added in v0.43.0

func WithHeader(key, value string) OpenAIOption

WithHeader sets a custom header.

func WithOpenAIBaseURL added in v0.43.0

func WithOpenAIBaseURL(baseURL string) OpenAIOption

WithOpenAIBaseURL sets the base URL.

func WithOpenAIPath added in v0.43.0

func WithOpenAIPath(path string) OpenAIOption

WithOpenAIPath sets the endpoint path.

func WithOpenAITimeout added in v0.43.0

func WithOpenAITimeout(timeout time.Duration) OpenAIOption

WithOpenAITimeout sets the request timeout.

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL