Documentation
¶
Overview ¶
Package llmfactory provides factories and configuration for LLM model instantiation, supporting multiple providers (OpenAI, Azure, etc.) and model selection strategies.
Index ¶
Constants ¶
This section is empty.
Variables ¶
var NewLLM = CreateLLM
NewLLM is a wrapper around CreateLLM that allows callers to override the default implementation.
Functions ¶
Types ¶
type Config ¶
type Config struct {
// Providers specifies the list of providers to use
Providers []*ProviderConfig `json:"providers" yaml:"providers"`
// DefaultProvider specifies the default provider to use
DefaultProvider string `json:"default_provider" yaml:"default_provider"`
// ToolModels specifies the mapping of tools to models.
// key is the tool name, value is the list of preferred model names.
// Use `default: <model_name>` as the default model for tools.
ToolModels map[string][]string `json:"tool_models" yaml:"tool_models"`
// AssistantModels specifies the mapping of assistants to models.
// key is the assistant name, value is the list of preferred model names.
// Use `default: <model_name>` as the default model for assistants.
AssistantModels map[string][]string `json:"assistant_models" yaml:"assistant_models"`
}
type Factory ¶
type Factory interface {
// DefaultModel returns the default LLM model.
DefaultModel() (llms.Model, error)
// ModelByType returns an LLM model by its type, e.g.
// OPENAI, AZURE, AZURE_AD, CLOUDFLARE, ANTHROPIC, GOOGLEAI, BEDROCK, PERPLEXITY
ModelByType(providerType llms.ProviderType) (llms.Model, error)
// ModelByName returns an LLM model by its name,
// if the model is not found, it will return the default model.
ModelByName(preferredModels ...string) (llms.Model, error)
// ToolModel returns a tool model by its name.
ToolModel(toolName string, preferredModels ...string) (llms.Model, error)
// AssistantModel returns an assistant model by its name.
AssistantModel(assistantName string, preferredModels ...string) (llms.Model, error)
}
Factory is the interface for creating and managing LLM models.
type HTTPClient ¶ added in v0.16.114
HTTPClient is primarily used to describe an *http.Client, but also supports custom implementations.
For bespoke implementations, prefer using an *http.Client with a custom transport. See http.RoundTripper for further information.
type OpenAIConfig ¶
type OpenAIConfig struct {
BaseURL string `json:"base_url,omitempty" yaml:"base_url,omitempty"`
APIVersion string `json:"api_version,omitempty" yaml:"api_version,omitempty"`
// APIType specifies the type of API to use:
// OPENAI|AZURE|AZURE_AD|CLOUDFLARE|ANTHROPIC|GOOGLEAI|BEDROCK|PERPLEXITY
APIType string `json:"api_type,omitempty" yaml:"api_type,omitempty"`
// OrgID specifies which organization's quota and billing should be used when making API requests.
OrgID string `json:"org_id,omitempty" yaml:"org_id,omitempty"`
AssistantVersion string `json:"assistant_version,omitempty" yaml:"assistant_version,omitempty"`
}
OpenAIConfig specifies the OpenAI-specific configuration options (base URL, API version/type, organization, and assistant version).
type Option ¶ added in v0.16.114
type Option func(*Options)
func WithAWSConfigFactory ¶ added in v0.16.114
func WithHTTPClient ¶ added in v0.16.114
func WithHTTPClient(client HTTPClient) Option
WithHTTPClient allows setting a custom HTTP client. If not set, the default value is http.DefaultClient.
type Options ¶ added in v0.16.114
type Options struct {
// HTTPClient is used to create a new HTTP client.
HTTPClient HTTPClient
// AwsConfigFactory is used to create a new AWS config.
AwsConfigFactory func() (*aws.Config, error)
}
func NewOptions ¶ added in v0.16.114
type ProviderConfig ¶
type ProviderConfig struct {
Name string `json:"name" yaml:"name"`
Token string `json:"token,omitempty" yaml:"token,omitempty"`
DefaultModel string `json:"default_model,omitempty" yaml:"default_model,omitempty"`
AvailableModels []string `json:"available_models,omitempty" yaml:"available_models,omitempty"`
OpenAI OpenAIConfig `json:"open_ai" yaml:"open_ai"`
}
ProviderConfig holds the configuration for a single LLM provider, including its OpenAI-specific settings.
func (*ProviderConfig) FindModel ¶ added in v0.7.36
func (c *ProviderConfig) FindModel(models ...string) string