llms

package
v0.0.13
Published: Dec 7, 2025 License: MIT Imports: 21 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func AnthropicProviderFactory

func AnthropicProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

AnthropicProviderFactory creates AnthropicLLM instances.

func EnsureFactory

func EnsureFactory()

func FastChatProviderFactory

func FastChatProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

FastChatProviderFactory creates FastChat instances from provider config.

func GeminiProviderFactory

func GeminiProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

GeminiProviderFactory creates GeminiLLM instances.

func LiteLLMProviderFactory

func LiteLLMProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

LiteLLMProviderFactory creates LiteLLM instances from provider config.

func LlamacppProviderFactory

func LlamacppProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

LlamacppProviderFactory creates LlamacppLLM instances.

func LocalAIProviderFactory

func LocalAIProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

LocalAIProviderFactory creates LocalAI instances from provider config.

func NewLLM

func NewLLM(apiKey string, modelID core.ModelID) (core.LLM, error)

NewLLM creates a new LLM instance based on the provided model ID, using the registry system for dynamic model creation.
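
A minimal usage sketch. The import paths and the model ID below are placeholders, and the cast assumes core.ModelID is string-backed; substitute this module's real paths and a registered model ID:

    package main

    import (
        "context"
        "fmt"
        "log"
        "os"

        "example.com/yourmodule/pkg/core" // placeholder import path
        "example.com/yourmodule/pkg/llms" // placeholder import path
    )

    func main() {
        // The model ID is a placeholder; pass any ID the registry knows.
        llm, err := llms.NewLLM(os.Getenv("PROVIDER_API_KEY"), core.ModelID("claude-3-5-sonnet"))
        if err != nil {
            log.Fatal(err)
        }
        resp, err := llm.Generate(context.Background(), "Say hello in five words.")
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%+v\n", resp) // inspect *core.LLMResponse for the generated text
    }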

func OllamaProviderFactory

func OllamaProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

OllamaProviderFactory creates OllamaLLM instances.

func OpenAIProviderFactory

func OpenAIProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)

OpenAIProviderFactory creates OpenAILLM instances.

Types

type AnthropicLLM

type AnthropicLLM struct {
	*core.BaseLLM
	// contains filtered or unexported fields
}

AnthropicLLM implements the core.LLM interface for Anthropic's models.

func NewAnthropicLLM

func NewAnthropicLLM(apiKey string, model anthropic.Model) (*AnthropicLLM, error)

NewAnthropicLLM creates a new AnthropicLLM instance.
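
For example (a fragment: the anthropic.Model constant is illustrative, so use one your SDK version actually defines):

    // The model constant name is an assumption for illustration.
    llm, err := llms.NewAnthropicLLM(os.Getenv("ANTHROPIC_API_KEY"), anthropic.ModelClaude3_5SonnetLatest)
    if err != nil {
        log.Fatal(err)
    }
    resp, err := llm.Generate(context.Background(), "Summarize Go's context package in one sentence.")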

func NewAnthropicLLMFromConfig

func NewAnthropicLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*AnthropicLLM, error)

NewAnthropicLLMFromConfig creates a new AnthropicLLM instance from configuration.

func (*AnthropicLLM) CreateEmbedding

func (a *AnthropicLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

func (*AnthropicLLM) CreateEmbeddings

func (a *AnthropicLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

func (*AnthropicLLM) Generate

func (a *AnthropicLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

Generate implements the core.LLM interface.

func (*AnthropicLLM) GenerateWithContent

func (a *AnthropicLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)

GenerateWithContent generates a response with multimodal content.

func (*AnthropicLLM) GenerateWithFunctions

func (a *AnthropicLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

func (*AnthropicLLM) GenerateWithJSON

func (a *AnthropicLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements the core.LLM interface.

func (*AnthropicLLM) StreamGenerate

func (a *AnthropicLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements streaming text generation using the official SDK's iterator pattern.
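
A consumption sketch, assuming llm and ctx are in scope. The chunk-channel field names below are assumptions about core.StreamResponse; check the core package for the actual shape:

    stream, err := llm.StreamGenerate(ctx, "Tell me a short story.")
    if err != nil {
        log.Fatal(err)
    }
    for chunk := range stream.ChunkChannel { // assumed field: a channel of stream chunks
        if chunk.Error != nil { // assumed field
            log.Fatal(chunk.Error)
        }
        if chunk.Done { // assumed field
            break
        }
        fmt.Print(chunk.Content) // assumed field
    }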

func (*AnthropicLLM) StreamGenerateWithContent

func (a *AnthropicLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerateWithContent generates a streaming response with multimodal content.

type DefaultLLMFactory

type DefaultLLMFactory struct{}

func (*DefaultLLMFactory) CreateLLM

func (f *DefaultLLMFactory) CreateLLM(apiKey string, modelID core.ModelID) (core.LLM, error)

CreateLLM implements the LLMFactory interface.

type GeminiLLM

type GeminiLLM struct {
	*core.BaseLLM
	// contains filtered or unexported fields
}

GeminiLLM implements the core.LLM interface for Google's Gemini model.

func NewGeminiLLM

func NewGeminiLLM(apiKey string, model core.ModelID) (*GeminiLLM, error)

NewGeminiLLM creates a new GeminiLLM instance.
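
For example (a fragment: the model ID is a placeholder, and the cast assumes core.ModelID is string-backed):

    llm, err := llms.NewGeminiLLM(os.Getenv("GEMINI_API_KEY"), core.ModelID("gemini-1.5-flash")) // placeholder ID
    if err != nil {
        log.Fatal(err)
    }
    emb, err := llm.CreateEmbedding(context.Background(), "hello world")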

func NewGeminiLLMFromConfig

func NewGeminiLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*GeminiLLM, error)

NewGeminiLLMFromConfig creates a new GeminiLLM instance from configuration.

func (*GeminiLLM) CreateEmbedding

func (g *GeminiLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

CreateEmbedding implements the embedding generation for a single input.

func (*GeminiLLM) CreateEmbeddings

func (g *GeminiLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

CreateEmbeddings implements batch embedding generation.

func (*GeminiLLM) Generate

func (g *GeminiLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

Generate implements the core.LLM interface.

func (*GeminiLLM) GenerateWithContent

func (g *GeminiLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)

GenerateWithContent implements multimodal content generation for Gemini.

func (*GeminiLLM) GenerateWithFunctions

func (g *GeminiLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithFunctions implements function calling for GeminiLLM.

func (*GeminiLLM) GenerateWithJSON

func (g *GeminiLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements the core.LLM interface.

func (*GeminiLLM) StreamGenerate

func (g *GeminiLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements streaming text generation for Gemini.

func (*GeminiLLM) StreamGenerateWithContent

func (g *GeminiLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerateWithContent implements multimodal streaming for Gemini.

type LlamacppLLM

type LlamacppLLM struct {
	*core.BaseLLM
}

LlamacppLLM implements the core.LLM interface for Llamacpp-hosted models.

func NewLlamacppLLM

func NewLlamacppLLM(endpoint string) (*LlamacppLLM, error)

NewLlamacppLLM creates a new LlamacppLLM instance.
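
For example, against a local llama.cpp server (8080 is llama.cpp's default server port):

    llm, err := llms.NewLlamacppLLM("http://localhost:8080")
    if err != nil {
        log.Fatal(err)
    }
    resp, err := llm.Generate(context.Background(), "Complete this sentence: The sky is")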

func NewLlamacppLLMFromConfig

func NewLlamacppLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*LlamacppLLM, error)

NewLlamacppLLMFromConfig creates a new LlamacppLLM instance from configuration.

func (*LlamacppLLM) CreateEmbedding

func (o *LlamacppLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

func (*LlamacppLLM) CreateEmbeddings

func (o *LlamacppLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

CreateEmbeddings implements batch embedding creation.

func (*LlamacppLLM) Generate

func (o *LlamacppLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

func (*LlamacppLLM) GenerateWithFunctions

func (o *LlamacppLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

func (*LlamacppLLM) GenerateWithJSON

func (o *LlamacppLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements the core.LLM interface.

func (*LlamacppLLM) StreamGenerate

func (o *LlamacppLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements streaming for LlamaCPP.

type OllamaConfig

type OllamaConfig struct {
	UseOpenAIAPI bool   `yaml:"use_openai_api" json:"use_openai_api"` // Default: true (modern Ollama)
	BaseURL      string `yaml:"base_url" json:"base_url"`             // Default: http://localhost:11434
	APIKey       string `yaml:"api_key" json:"api_key"`               // Optional for auth
	Timeout      int    `yaml:"timeout" json:"timeout"`               // Default: 60
}

OllamaConfig holds configuration for the Ollama provider.
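
A literal showing the documented defaults (the yaml/json tags let the same block be loaded from a config file; the timeout unit is assumed to be seconds):

    cfg := llms.OllamaConfig{
        UseOpenAIAPI: true,                     // modern Ollama default
        BaseURL:      "http://localhost:11434", // default endpoint
        APIKey:       "",                       // optional; only for deployments requiring auth
        Timeout:      60,                       // default; unit assumed seconds
    }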

type OllamaLLM

type OllamaLLM struct {
	*core.BaseLLM
	// contains filtered or unexported fields
}

OllamaLLM implements the core.LLM interface for Ollama-hosted models with dual-mode support.

func NewOllamaLLM

func NewOllamaLLM(modelID core.ModelID, options ...OllamaOption) (*OllamaLLM, error)

NewOllamaLLM creates a new OllamaLLM instance with modern defaults.
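
For example (the model tag is a placeholder for whatever your Ollama host serves, and the cast assumes core.ModelID is string-backed):

    llm, err := llms.NewOllamaLLM(
        core.ModelID("llama3"), // placeholder model tag
        llms.WithBaseURL("http://localhost:11434"),
        llms.WithOpenAIAPI(), // the documented default mode
        llms.WithTimeout(120),
    )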

func NewOllamaLLMFromConfig

func NewOllamaLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*OllamaLLM, error)

NewOllamaLLMFromConfig creates a new OllamaLLM instance from configuration.

func (*OllamaLLM) CreateEmbedding

func (o *OllamaLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

CreateEmbedding implements embedding generation with OpenAI-compatible mode support.

func (*OllamaLLM) CreateEmbeddings

func (o *OllamaLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

CreateEmbeddings generates embeddings for multiple inputs.

func (*OllamaLLM) Generate

func (o *OllamaLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

Generate implements the core.LLM interface with dual-mode support.

func (*OllamaLLM) GenerateWithContent

func (o *OllamaLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)

GenerateWithContent implements multimodal content generation.

func (*OllamaLLM) GenerateWithFunctions

func (o *OllamaLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithFunctions is not yet implemented for Ollama.

func (*OllamaLLM) GenerateWithJSON

func (o *OllamaLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements JSON mode generation.
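
A sketch of JSON-mode usage, assuming llm and ctx are in scope; the returned map's shape depends entirely on the prompt:

    out, err := llm.GenerateWithJSON(ctx, `Return a JSON object {"city": string, "population": number} for Tokyo.`)
    if err != nil {
        log.Fatal(err)
    }
    city, ok := out["city"].(string) // keys are whatever the model emitted
    if !ok {
        log.Fatal("missing or non-string city")
    }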

func (*OllamaLLM) StreamGenerate

func (o *OllamaLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements streaming with dual-mode support.

func (*OllamaLLM) StreamGenerateWithContent

func (o *OllamaLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerateWithContent implements multimodal streaming content generation.

type OllamaOption

type OllamaOption func(*OllamaConfig)

OllamaOption is a functional option for configuring the Ollama provider.
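
Because OllamaConfig's fields are exported, callers can also define their own options; WithLocalDefaults below is a hypothetical example, not part of this package:

    // WithLocalDefaults is a hypothetical custom option.
    func WithLocalDefaults() llms.OllamaOption {
        return func(c *llms.OllamaConfig) {
            c.BaseURL = "http://127.0.0.1:11434"
            c.Timeout = 30
        }
    }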

func WithAuth

func WithAuth(apiKey string) OllamaOption

WithAuth sets authentication for Ollama (some deployments require it).

func WithBaseURL

func WithBaseURL(url string) OllamaOption

WithBaseURL sets the base URL for Ollama.

func WithNativeAPI

func WithNativeAPI() OllamaOption

WithNativeAPI configures Ollama to use native API mode.

func WithOpenAIAPI

func WithOpenAIAPI() OllamaOption

WithOpenAIAPI configures Ollama to use OpenAI-compatible API mode.

func WithTimeout

func WithTimeout(timeout int) OllamaOption

WithTimeout sets the timeout for requests.

type OpenAIConfig

type OpenAIConfig struct {
	// contains filtered or unexported fields
}

OpenAIConfig holds configuration for OpenAI provider.

type OpenAILLM

type OpenAILLM struct {
	*core.BaseLLM
	// contains filtered or unexported fields
}

OpenAILLM implements the core.LLM interface for OpenAI's models.

func NewFastChat

func NewFastChat(modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)

NewFastChat is a convenience constructor for FastChat endpoints.

func NewLiteLLM

func NewLiteLLM(modelID core.ModelID, apiKey string, opts ...OpenAIOption) (*OpenAILLM, error)

NewLiteLLM is a convenience constructor for LiteLLM.

func NewLocalAI

func NewLocalAI(modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)

NewLocalAI is a convenience constructor for LocalAI endpoints.

func NewOpenAI

func NewOpenAI(modelID core.ModelID, apiKey string) (*OpenAILLM, error)

NewOpenAI is a convenience constructor for the standard OpenAI API.

func NewOpenAICompatible

func NewOpenAICompatible(provider string, modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)

NewOpenAICompatible is a generic constructor for OpenAI-compatible providers.
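
For example, pointing at a self-hosted OpenAI-compatible server (the provider label, model ID, and URL are illustrative; use WithOpenAIPath if your server expects a different endpoint path):

    llm, err := llms.NewOpenAICompatible(
        "vllm",                              // arbitrary provider label
        core.ModelID("mistral-7b-instruct"), // placeholder model ID
        "http://localhost:8000",             // placeholder base URL
    )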

func NewOpenAILLM

func NewOpenAILLM(modelID core.ModelID, opts ...OpenAIOption) (*OpenAILLM, error)

NewOpenAILLM creates a new OpenAILLM instance with functional options.
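
For example (a fragment: the model ID is a placeholder):

    llm, err := llms.NewOpenAILLM(
        core.ModelID("gpt-4o"), // placeholder model ID
        llms.WithAPIKey(os.Getenv("OPENAI_API_KEY")),
        llms.WithOpenAITimeout(60*time.Second),
    )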

func NewOpenAILLMFromConfig

func NewOpenAILLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*OpenAILLM, error)

NewOpenAILLMFromConfig creates a new OpenAILLM instance from configuration.

func (*OpenAILLM) CreateEmbedding

func (o *OpenAILLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)

CreateEmbedding implements the core.LLM interface.

func (*OpenAILLM) CreateEmbeddings

func (o *OpenAILLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)

CreateEmbeddings implements the core.LLM interface.

func (*OpenAILLM) Generate

func (o *OpenAILLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)

Generate implements the core.LLM interface.

func (*OpenAILLM) GenerateWithFunctions

func (o *OpenAILLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithFunctions implements the core.LLM interface.
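
A sketch of building the functions argument, assuming llm and ctx are in scope. The map layout follows OpenAI's function-calling schema, which is an assumption about what this method forwards:

    functions := []map[string]interface{}{
        {
            "name":        "get_weather",
            "description": "Get the current weather for a city",
            "parameters": map[string]interface{}{
                "type": "object",
                "properties": map[string]interface{}{
                    "city": map[string]interface{}{"type": "string"},
                },
                "required": []string{"city"},
            },
        },
    }
    result, err := llm.GenerateWithFunctions(ctx, "What's the weather in Paris?", functions)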

func (*OpenAILLM) GenerateWithJSON

func (o *OpenAILLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)

GenerateWithJSON implements the core.LLM interface.

func (*OpenAILLM) StreamGenerate

func (o *OpenAILLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)

StreamGenerate implements the core.LLM interface.

type OpenAIOption

type OpenAIOption func(*OpenAIConfig)

OpenAIOption is a functional option for configuring OpenAI provider.

func WithAPIKey

func WithAPIKey(apiKey string) OpenAIOption

WithAPIKey sets the API key.

func WithHTTPClient

func WithHTTPClient(client *http.Client) OpenAIOption

WithHTTPClient sets a custom HTTP client.

func WithHeader

func WithHeader(key, value string) OpenAIOption

WithHeader sets a custom header.

func WithOpenAIBaseURL

func WithOpenAIBaseURL(baseURL string) OpenAIOption

WithOpenAIBaseURL sets the base URL.

func WithOpenAIPath

func WithOpenAIPath(path string) OpenAIOption

WithOpenAIPath sets the endpoint path.

func WithOpenAITimeout

func WithOpenAITimeout(timeout time.Duration) OpenAIOption

WithOpenAITimeout sets the request timeout.
