Documentation ¶
Index ¶
- func AnthropicProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
- func EnsureFactory()
- func FastChatProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
- func GeminiProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
- func LiteLLMProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
- func LlamacppProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
- func LocalAIProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
- func NewLLM(apiKey string, modelID core.ModelID) (core.LLM, error)
- func OllamaProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
- func OpenAIProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
- type AnthropicLLM
- func (a *AnthropicLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
- func (a *AnthropicLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
- func (a *AnthropicLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
- func (a *AnthropicLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, ...) (*core.LLMResponse, error)
- func (a *AnthropicLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, ...) (map[string]interface{}, error)
- func (a *AnthropicLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
- func (a *AnthropicLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
- func (a *AnthropicLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, ...) (*core.StreamResponse, error)
- type DefaultLLMFactory
- type GeminiLLM
- func (g *GeminiLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
- func (g *GeminiLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
- func (g *GeminiLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
- func (g *GeminiLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, ...) (*core.LLMResponse, error)
- func (g *GeminiLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, ...) (map[string]interface{}, error)
- func (g *GeminiLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
- func (g *GeminiLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
- func (g *GeminiLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, ...) (*core.StreamResponse, error)
- type LlamacppLLM
- func (o *LlamacppLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
- func (o *LlamacppLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
- func (o *LlamacppLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
- func (o *LlamacppLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, ...) (map[string]interface{}, error)
- func (o *LlamacppLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
- func (o *LlamacppLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
- type OllamaConfig
- type OllamaLLM
- func (o *OllamaLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
- func (o *OllamaLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
- func (o *OllamaLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
- func (o *OllamaLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, ...) (*core.LLMResponse, error)
- func (o *OllamaLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, ...) (map[string]interface{}, error)
- func (o *OllamaLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
- func (o *OllamaLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
- func (o *OllamaLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, ...) (*core.StreamResponse, error)
- type OllamaOption
- type OpenAIConfig
- type OpenAILLM
- func NewFastChat(modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)
- func NewLiteLLM(modelID core.ModelID, apiKey string, opts ...OpenAIOption) (*OpenAILLM, error)
- func NewLocalAI(modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)
- func NewOpenAI(modelID core.ModelID, apiKey string) (*OpenAILLM, error)
- func NewOpenAICompatible(provider string, modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)
- func NewOpenAILLM(modelID core.ModelID, opts ...OpenAIOption) (*OpenAILLM, error)
- func NewOpenAILLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*OpenAILLM, error)
- func (o *OpenAILLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
- func (o *OpenAILLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
- func (o *OpenAILLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
- func (o *OpenAILLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, ...) (map[string]interface{}, error)
- func (o *OpenAILLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
- func (o *OpenAILLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
- type OpenAIOption
- func WithAPIKey(apiKey string) OpenAIOption
- func WithHTTPClient(client *http.Client) OpenAIOption
- func WithHeader(key, value string) OpenAIOption
- func WithOpenAIBaseURL(baseURL string) OpenAIOption
- func WithOpenAIPath(path string) OpenAIOption
- func WithOpenAITimeout(timeout time.Duration) OpenAIOption
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func AnthropicProviderFactory ¶
func AnthropicProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
AnthropicProviderFactory creates AnthropicLLM instances.
func EnsureFactory ¶
func EnsureFactory()
func FastChatProviderFactory ¶
func FastChatProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
FastChatProviderFactory creates FastChat instances from provider config.
func GeminiProviderFactory ¶
func GeminiProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
GeminiProviderFactory creates GeminiLLM instances.
func LiteLLMProviderFactory ¶
func LiteLLMProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
LiteLLMProviderFactory creates LiteLLM instances from provider config.
func LlamacppProviderFactory ¶
func LlamacppProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
LlamacppProviderFactory creates LlamacppLLM instances.
func LocalAIProviderFactory ¶
func LocalAIProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
LocalAIProviderFactory creates LocalAI instances from provider config.
func NewLLM ¶
func NewLLM(apiKey string, modelID core.ModelID) (core.LLM, error)
NewLLM creates a new LLM instance based on the provided model ID. It uses the registry system for dynamic model creation.
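An illustrative sketch of constructing a model through NewLLM and issuing a single call. The import paths, the model ID string, and the Content field on core.LLMResponse are assumptions made for the example; check the core package for the values your build actually registers.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/XiaoConstantine/dspy-go/pkg/core" // import path assumed for illustration
	"github.com/XiaoConstantine/dspy-go/pkg/llms" // import path assumed for illustration
)

func main() {
	// The model ID string is illustrative; use a ModelID the registry knows.
	llm, err := llms.NewLLM(os.Getenv("ANTHROPIC_API_KEY"), core.ModelID("claude-3-5-sonnet-20241022"))
	if err != nil {
		log.Fatal(err)
	}
	resp, err := llm.Generate(context.Background(), "In one sentence, what are functional options in Go?")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Content) // Content is an assumed field name on core.LLMResponse
}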
func OllamaProviderFactory ¶
func OllamaProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
func OpenAIProviderFactory ¶
func OpenAIProviderFactory(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (core.LLM, error)
Types ¶
type AnthropicLLM ¶
AnthropicLLM implements the core.LLM interface for Anthropic's models.
func NewAnthropicLLM ¶
func NewAnthropicLLM(apiKey string, model anthropic.Model) (*AnthropicLLM, error)
NewAnthropicLLM creates a new AnthropicLLM instance.
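A minimal construction sketch, assuming the model constants exported by the official anthropic-sdk-go; substitute whichever anthropic.Model value you need.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	anthropic "github.com/anthropics/anthropic-sdk-go" // SDK import path assumed
	"github.com/XiaoConstantine/dspy-go/pkg/llms"      // import path assumed
)

func main() {
	// The model constant is illustrative; any anthropic.Model value works here.
	llm, err := llms.NewAnthropicLLM(os.Getenv("ANTHROPIC_API_KEY"), anthropic.ModelClaude3_5SonnetLatest)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := llm.Generate(context.Background(), "Write a haiku about goroutines.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Content) // Content is an assumed field name on core.LLMResponse
}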
func NewAnthropicLLMFromConfig ¶
func NewAnthropicLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*AnthropicLLM, error)
NewAnthropicLLMFromConfig creates a new AnthropicLLM instance from configuration.
func (*AnthropicLLM) CreateEmbedding ¶
func (a *AnthropicLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
func (*AnthropicLLM) CreateEmbeddings ¶
func (a *AnthropicLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
func (*AnthropicLLM) Generate ¶
func (a *AnthropicLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
Generate implements the core.LLM interface.
func (*AnthropicLLM) GenerateWithContent ¶
func (a *AnthropicLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)
GenerateWithContent generates a response with multimodal content.
func (*AnthropicLLM) GenerateWithFunctions ¶
func (a *AnthropicLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)
func (*AnthropicLLM) GenerateWithJSON ¶
func (a *AnthropicLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
GenerateWithJSON implements the core.LLM interface.
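Because GenerateWithJSON returns an untyped map, callers should type-assert the fields they expect rather than trusting the model's output shape. A hedged sketch (the prompt and keys are illustrative):

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	anthropic "github.com/anthropics/anthropic-sdk-go" // SDK import path assumed
	"github.com/XiaoConstantine/dspy-go/pkg/llms"      // import path assumed
)

func main() {
	llm, err := llms.NewAnthropicLLM(os.Getenv("ANTHROPIC_API_KEY"), anthropic.ModelClaude3_5SonnetLatest)
	if err != nil {
		log.Fatal(err)
	}
	out, err := llm.GenerateWithJSON(context.Background(),
		`Return a JSON object {"city": string, "country": string} for the Eiffel Tower.`)
	if err != nil {
		log.Fatal(err)
	}
	// The result is an untyped map; assert fields defensively.
	if city, ok := out["city"].(string); ok {
		fmt.Println("city:", city)
	}
}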
func (*AnthropicLLM) StreamGenerate ¶
func (a *AnthropicLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
StreamGenerate implements streaming text generation using the official SDK's iterator pattern.
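A consumption sketch for the streaming API. The ChunkChannel, Content, and Done names below are assumptions about core.StreamResponse and its chunk type; verify the actual field names in the core package.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	anthropic "github.com/anthropics/anthropic-sdk-go" // SDK import path assumed
	"github.com/XiaoConstantine/dspy-go/pkg/llms"      // import path assumed
)

func main() {
	llm, err := llms.NewAnthropicLLM(os.Getenv("ANTHROPIC_API_KEY"), anthropic.ModelClaude3_5SonnetLatest)
	if err != nil {
		log.Fatal(err)
	}
	stream, err := llm.StreamGenerate(context.Background(), "Tell a two-sentence story.")
	if err != nil {
		log.Fatal(err)
	}
	// Field names below are assumed; core.StreamResponse defines the real shape.
	for chunk := range stream.ChunkChannel {
		if chunk.Done {
			break
		}
		fmt.Print(chunk.Content)
	}
}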
func (*AnthropicLLM) StreamGenerateWithContent ¶
func (a *AnthropicLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)
StreamGenerateWithContent generates a streaming response with multimodal content.
type DefaultLLMFactory ¶
type DefaultLLMFactory struct{}
type GeminiLLM ¶
GeminiLLM implements the core.LLM interface for Google's Gemini model.
func NewGeminiLLM ¶
NewGeminiLLM creates a new GeminiLLM instance.
func NewGeminiLLMFromConfig ¶
func NewGeminiLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*GeminiLLM, error)
NewGeminiLLMFromConfig creates a new GeminiLLM instance from configuration.
func (*GeminiLLM) CreateEmbedding ¶
func (g *GeminiLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
CreateEmbedding implements the embedding generation for a single input.
func (*GeminiLLM) CreateEmbeddings ¶
func (g *GeminiLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
CreateEmbeddings implements batch embedding generation.
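A batch-embedding sketch using the generic constructor, so no Gemini-specific construction details need to be assumed; the model ID string is illustrative, and the result is printed whole rather than guessing at core.BatchEmbeddingResult's fields.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/XiaoConstantine/dspy-go/pkg/core" // import path assumed
	"github.com/XiaoConstantine/dspy-go/pkg/llms" // import path assumed
)

func main() {
	// The Gemini model ID string is illustrative.
	llm, err := llms.NewLLM(os.Getenv("GEMINI_API_KEY"), core.ModelID("gemini-2.0-flash"))
	if err != nil {
		log.Fatal(err)
	}
	res, err := llm.CreateEmbeddings(context.Background(), []string{"first document", "second document"})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", res) // inspect core.BatchEmbeddingResult for the per-input vectors
}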
func (*GeminiLLM) Generate ¶
func (g *GeminiLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
Generate implements the core.LLM interface.
func (*GeminiLLM) GenerateWithContent ¶
func (g *GeminiLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)
GenerateWithContent implements multimodal content generation for Gemini.
func (*GeminiLLM) GenerateWithFunctions ¶
func (g *GeminiLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)
GenerateWithFunctions implements function calling for GeminiLLM.
func (*GeminiLLM) GenerateWithJSON ¶
func (g *GeminiLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
GenerateWithJSON implements the core.LLM interface.
func (*GeminiLLM) StreamGenerate ¶
func (g *GeminiLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
StreamGenerate implements streaming text generation for Gemini.
func (*GeminiLLM) StreamGenerateWithContent ¶
func (g *GeminiLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)
StreamGenerateWithContent implements multimodal streaming for Gemini.
type LlamacppLLM ¶
LlamacppLLM implements the core.LLM interface for Llamacpp-hosted models.
func NewLlamacppLLM ¶
func NewLlamacppLLM(endpoint string) (*LlamacppLLM, error)
NewLlamacppLLM creates a new LlamacppLLM instance.
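Construction against a local llama.cpp server. Port 8080 is a common llama.cpp default and is illustrative, as is the Content field on core.LLMResponse.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/XiaoConstantine/dspy-go/pkg/llms" // import path assumed
)

func main() {
	llm, err := llms.NewLlamacppLLM("http://localhost:8080") // endpoint illustrative
	if err != nil {
		log.Fatal(err)
	}
	resp, err := llm.Generate(context.Background(), "Name three Go concurrency primitives.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Content) // Content is an assumed field name on core.LLMResponse
}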
func NewLlamacppLLMFromConfig ¶
func NewLlamacppLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*LlamacppLLM, error)
NewLlamacppLLMFromConfig creates a new LlamacppLLM instance from configuration.
func (*LlamacppLLM) CreateEmbedding ¶
func (o *LlamacppLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
func (*LlamacppLLM) CreateEmbeddings ¶
func (o *LlamacppLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
CreateEmbeddings implements batch embedding creation.
func (*LlamacppLLM) Generate ¶
func (o *LlamacppLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
func (*LlamacppLLM) GenerateWithFunctions ¶
func (o *LlamacppLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)
func (*LlamacppLLM) GenerateWithJSON ¶
func (o *LlamacppLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
GenerateWithJSON implements the core.LLM interface.
func (*LlamacppLLM) StreamGenerate ¶
func (o *LlamacppLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
StreamGenerate implements streaming generation for llama.cpp.
type OllamaConfig ¶
type OllamaConfig struct {
	UseOpenAIAPI bool   `yaml:"use_openai_api" json:"use_openai_api"` // Default: true (modern Ollama)
	BaseURL      string `yaml:"base_url" json:"base_url"`             // Default: http://localhost:11434
	APIKey       string `yaml:"api_key" json:"api_key"`               // Optional for auth
	Timeout      int    `yaml:"timeout" json:"timeout"`               // Default: 60
}
OllamaConfig holds configuration for the Ollama provider.
type OllamaLLM ¶
OllamaLLM implements the core.LLM interface for Ollama-hosted models with dual-mode support.
func NewOllamaLLM ¶
func NewOllamaLLM(modelID core.ModelID, options ...OllamaOption) (*OllamaLLM, error)
NewOllamaLLM creates a new OllamaLLM instance with modern defaults.
func NewOllamaLLMFromConfig ¶
func NewOllamaLLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*OllamaLLM, error)
NewOllamaLLMFromConfig creates a new OllamaLLM instance from configuration.
func (*OllamaLLM) CreateEmbedding ¶
func (o *OllamaLLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
CreateEmbedding implements embedding generation with OpenAI-compatible mode support.
func (*OllamaLLM) CreateEmbeddings ¶
func (o *OllamaLLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
CreateEmbeddings generates embeddings for multiple inputs.
func (*OllamaLLM) Generate ¶
func (o *OllamaLLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
Generate implements the core.LLM interface with dual-mode support.
func (*OllamaLLM) GenerateWithContent ¶
func (o *OllamaLLM) GenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.LLMResponse, error)
GenerateWithContent implements multimodal content generation.
func (*OllamaLLM) GenerateWithFunctions ¶
func (o *OllamaLLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)
GenerateWithFunctions is not yet implemented for Ollama.
func (*OllamaLLM) GenerateWithJSON ¶
func (o *OllamaLLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
GenerateWithJSON implements JSON mode generation.
func (*OllamaLLM) StreamGenerate ¶
func (o *OllamaLLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
StreamGenerate implements streaming with dual-mode support.
func (*OllamaLLM) StreamGenerateWithContent ¶
func (o *OllamaLLM) StreamGenerateWithContent(ctx context.Context, content []core.ContentBlock, options ...core.GenerateOption) (*core.StreamResponse, error)
StreamGenerateWithContent implements multimodal streaming content generation.
type OllamaOption ¶
type OllamaOption func(*OllamaConfig)
OllamaOption is a functional option for configuring the Ollama provider.
func WithAuth ¶
func WithAuth(apiKey string) OllamaOption
WithAuth sets authentication for Ollama (some deployments require it).
func WithBaseURL ¶
func WithBaseURL(url string) OllamaOption
WithBaseURL sets the base URL for Ollama.
func WithNativeAPI ¶
func WithNativeAPI() OllamaOption
WithNativeAPI configures Ollama to use native API mode.
func WithOpenAIAPI ¶
func WithOpenAIAPI() OllamaOption
WithOpenAIAPI configures Ollama to use OpenAI-compatible API mode.
func WithTimeout ¶
func WithTimeout(timeout int) OllamaOption
WithTimeout sets the timeout for requests.
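Composing OllamaOption values at construction, using the option functions documented above. The model name and base URL are illustrative, as is the Content field on core.LLMResponse.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/XiaoConstantine/dspy-go/pkg/core" // import path assumed
	"github.com/XiaoConstantine/dspy-go/pkg/llms" // import path assumed
)

func main() {
	llm, err := llms.NewOllamaLLM(
		core.ModelID("llama3.2"), // model name illustrative
		llms.WithBaseURL("http://localhost:11434"),
		llms.WithTimeout(120),
		llms.WithNativeAPI(), // or llms.WithOpenAIAPI() for OpenAI-compatible mode
	)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := llm.Generate(context.Background(), "Why is the sky blue, in one line?")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Content) // Content is an assumed field name on core.LLMResponse
}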
type OpenAIConfig ¶
type OpenAIConfig struct {
// contains filtered or unexported fields
}
OpenAIConfig holds configuration for the OpenAI provider.
type OpenAILLM ¶
OpenAILLM implements the core.LLM interface for OpenAI's models.
func NewFastChat ¶
func NewFastChat(modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)
NewFastChat is a convenience constructor for FastChat.
func NewLiteLLM ¶
func NewLiteLLM(modelID core.ModelID, apiKey string, opts ...OpenAIOption) (*OpenAILLM, error)
NewLiteLLM is a convenience constructor for LiteLLM.
func NewLocalAI ¶
func NewLocalAI(modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)
NewLocalAI is a convenience constructor for LocalAI.
func NewOpenAI ¶
func NewOpenAI(modelID core.ModelID, apiKey string) (*OpenAILLM, error)
func NewOpenAICompatible ¶
func NewOpenAICompatible(provider string, modelID core.ModelID, baseURL string, opts ...OpenAIOption) (*OpenAILLM, error)
NewOpenAICompatible is a generic constructor for OpenAI-compatible providers.
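A sketch targeting any OpenAI-compatible server (a local vLLM instance here). The provider label, model name, base URL, and endpoint path are all assumptions for illustration; how the package joins the base URL and path may differ.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/XiaoConstantine/dspy-go/pkg/core" // import path assumed
	"github.com/XiaoConstantine/dspy-go/pkg/llms" // import path assumed
)

func main() {
	llm, err := llms.NewOpenAICompatible(
		"vllm", // provider label illustrative
		core.ModelID("meta-llama/Llama-3.1-8B-Instruct"), // model name illustrative
		"http://localhost:8000",                     // base URL illustrative
		llms.WithOpenAIPath("/v1/chat/completions"), // path illustrative
	)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := llm.Generate(context.Background(), "Reply with OK.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Content) // Content is an assumed field name on core.LLMResponse
}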
func NewOpenAILLM ¶
func NewOpenAILLM(modelID core.ModelID, opts ...OpenAIOption) (*OpenAILLM, error)
NewOpenAILLM creates a new OpenAILLM instance with functional options.
func NewOpenAILLMFromConfig ¶
func NewOpenAILLMFromConfig(ctx context.Context, config core.ProviderConfig, modelID core.ModelID) (*OpenAILLM, error)
NewOpenAILLMFromConfig creates a new OpenAILLM instance from configuration.
func (*OpenAILLM) CreateEmbedding ¶
func (o *OpenAILLM) CreateEmbedding(ctx context.Context, input string, options ...core.EmbeddingOption) (*core.EmbeddingResult, error)
CreateEmbedding implements the core.LLM interface.
func (*OpenAILLM) CreateEmbeddings ¶
func (o *OpenAILLM) CreateEmbeddings(ctx context.Context, inputs []string, options ...core.EmbeddingOption) (*core.BatchEmbeddingResult, error)
CreateEmbeddings implements the core.LLM interface.
func (*OpenAILLM) Generate ¶
func (o *OpenAILLM) Generate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.LLMResponse, error)
Generate implements the core.LLM interface.
func (*OpenAILLM) GenerateWithFunctions ¶
func (o *OpenAILLM) GenerateWithFunctions(ctx context.Context, prompt string, functions []map[string]interface{}, options ...core.GenerateOption) (map[string]interface{}, error)
GenerateWithFunctions implements the core.LLM interface.
func (*OpenAILLM) GenerateWithJSON ¶
func (o *OpenAILLM) GenerateWithJSON(ctx context.Context, prompt string, options ...core.GenerateOption) (map[string]interface{}, error)
GenerateWithJSON implements the core.LLM interface.
func (*OpenAILLM) StreamGenerate ¶
func (o *OpenAILLM) StreamGenerate(ctx context.Context, prompt string, options ...core.GenerateOption) (*core.StreamResponse, error)
StreamGenerate implements the core.LLM interface.
type OpenAIOption ¶
type OpenAIOption func(*OpenAIConfig)
OpenAIOption is a functional option for configuring OpenAI provider.
func WithAPIKey ¶
func WithAPIKey(apiKey string) OpenAIOption
func WithHTTPClient ¶
func WithHTTPClient(client *http.Client) OpenAIOption
WithHTTPClient sets a custom HTTP client.
func WithHeader ¶
func WithHeader(key, value string) OpenAIOption
func WithOpenAIBaseURL ¶
func WithOpenAIBaseURL(baseURL string) OpenAIOption
WithOpenAIBaseURL sets the base URL.
func WithOpenAIPath ¶
func WithOpenAIPath(path string) OpenAIOption
WithOpenAIPath sets the endpoint path.
func WithOpenAITimeout ¶
func WithOpenAITimeout(timeout time.Duration) OpenAIOption
WithOpenAITimeout sets the request timeout.
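Composing OpenAIOption values with NewOpenAILLM, using the options documented above. The model name is illustrative, as is the Content field on core.LLMResponse.

package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"time"

	"github.com/XiaoConstantine/dspy-go/pkg/core" // import path assumed
	"github.com/XiaoConstantine/dspy-go/pkg/llms" // import path assumed
)

func main() {
	llm, err := llms.NewOpenAILLM(
		core.ModelID("gpt-4o-mini"), // model name illustrative
		llms.WithAPIKey(os.Getenv("OPENAI_API_KEY")),
		llms.WithOpenAITimeout(30*time.Second),
		llms.WithHeader("X-Request-Source", "docs-example"),
	)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := llm.Generate(context.Background(), "Reply with OK.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Content) // Content is an assumed field name on core.LLMResponse
}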