Documentation ¶
Overview ¶
Package llmresolver selects the most appropriate backend LLM instance based on requirements.
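A typical resolution of a chat client might look like the following sketch. The model name and context length are illustrative, the getModels function is assumed to come from the host application, and imports of context, log, and this package are omitted:

func resolveChat(ctx context.Context, getModels llmresolver.ProviderFromRuntimeState) error {
	req := llmresolver.Request{
		ModelNames:    []string{"llama3"}, // hypothetical model name
		ContextLength: 8192,               // minimum context window required
	}
	client, modelName, err := llmresolver.Chat(ctx, req, getModels, llmresolver.Randomly)
	if err != nil {
		return err
	}
	log.Printf("resolved chat client for model %q", modelName)
	_ = client // use the chat client here
	return nil
}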
Index ¶
- Constants
- Variables
- func Chat(ctx context.Context, req Request, getModels ProviderFromRuntimeState, ...) (libmodelprovider.LLMChatClient, string, error)
- func Embed(ctx context.Context, embedReq EmbedRequest, getModels ProviderFromRuntimeState, ...) (libmodelprovider.LLMEmbedClient, error)
- func HighestContext(candidates []libmodelprovider.Provider) (libmodelprovider.Provider, string, error)
- func NormalizeModelName(modelName string) string
- func PromptExecute(ctx context.Context, reqExec PromptRequest, getModels ProviderFromRuntimeState, ...) (libmodelprovider.LLMPromptExecClient, error)
- func Randomly(candidates []libmodelprovider.Provider) (libmodelprovider.Provider, string, error)
- func Stream(ctx context.Context, req Request, getModels ProviderFromRuntimeState, ...) (libmodelprovider.LLMStreamClient, error)
- type EmbedRequest
- type Policy
- type PromptRequest
- type ProviderFromRuntimeState
- type Request
Constants ¶
const (
	StrategyRandom      = "random"
	StrategyAuto        = "auto"
	StrategyLowLatency  = "low-latency"
	StrategyLowPriority = "low-prio"
)
Variables ¶
var (
	ErrNoAvailableModels        = errors.New("no models found in runtime state")
	ErrNoSatisfactoryModel      = errors.New("no model matched the requirements")
	ErrUnknownModelCapabilities = errors.New("capabilities not known for this model")
)
var DefaultProviderType string = "ollama"
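Callers can distinguish these failure modes with errors.Is, as in the sketch below (ctx, req, and getModels are assumed to be set up as in the overview example):

_, _, err := llmresolver.Chat(ctx, req, getModels, llmresolver.Randomly)
switch {
case errors.Is(err, llmresolver.ErrNoAvailableModels):
	// the runtime state reported no models at all
case errors.Is(err, llmresolver.ErrNoSatisfactoryModel):
	// models exist, but none satisfied the request's requirements
case err != nil:
	// some other resolution failure
}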
Functions ¶
func Chat ¶
func Chat(
	ctx context.Context,
	req Request,
	getModels ProviderFromRuntimeState,
	resolver Policy,
) (libmodelprovider.LLMChatClient, string, error)
func Embed ¶
func Embed(
	ctx context.Context,
	embedReq EmbedRequest,
	getModels ProviderFromRuntimeState,
	resolver Policy,
) (libmodelprovider.LLMEmbedClient, error)
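A sketch of resolving an embedding client. The fields of EmbedRequest are not shown in this documentation, so the request below is left at its zero value and would be populated with your model and provider requirements; ctx and getModels are assumed from the overview example:

var embedReq llmresolver.EmbedRequest // populate with model/provider requirements
embedClient, err := llmresolver.Embed(ctx, embedReq, getModels, llmresolver.Randomly)
if err != nil {
	return err
}
_ = embedClient // use the embedding client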
func HighestContext ¶
func HighestContext(candidates []libmodelprovider.Provider) (libmodelprovider.Provider, string, error)
func NormalizeModelName ¶
func NormalizeModelName(modelName string) string
NormalizeModelName standardizes model names for comparison.
func PromptExecute ¶
func PromptExecute(
	ctx context.Context,
	reqExec PromptRequest,
	getModels ProviderFromRuntimeState,
	resolver Policy,
) (libmodelprovider.LLMPromptExecClient, error)
func Randomly ¶
func Randomly(candidates []libmodelprovider.Provider) (libmodelprovider.Provider, string, error)
func Stream ¶
func Stream(
	ctx context.Context,
	req Request,
	getModels ProviderFromRuntimeState,
	resolver Policy,
) (libmodelprovider.LLMStreamClient, error)
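A sketch of resolving a streaming-capable client, here combined with the HighestContext policy (ctx, req, and getModels are assumed from the overview example):

streamClient, err := llmresolver.Stream(ctx, req, getModels, llmresolver.HighestContext)
if err != nil {
	return err
}
_ = streamClient // consume streamed responses from the selected backend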
Types ¶
type EmbedRequest ¶
type Policy ¶
type Policy func(candidates []libmodelprovider.Provider) (libmodelprovider.Provider, string, error)
func PolicyFromString ¶
PolicyFromString maps string names to resolver policies
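Because Policy is a plain function type, callers can also supply their own selection logic or compose the built-in policies. A minimal sketch that wraps HighestContext with logging (imports of log and the libmodelprovider package are assumed):

var loggingPolicy llmresolver.Policy = func(candidates []libmodelprovider.Provider) (libmodelprovider.Provider, string, error) {
	provider, modelName, err := llmresolver.HighestContext(candidates)
	if err != nil {
		return provider, modelName, err
	}
	log.Printf("selected model %q from %d candidate providers", modelName, len(candidates))
	return provider, modelName, nil
}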
type PromptRequest ¶
type PromptRequest struct {
ModelNames []string
ProviderTypes []string // Optional. Empty uses default.
Tracker activitytracker.ActivityTracker
ContextLength int
}
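A sketch of building a PromptRequest and resolving a prompt-execution client. The model name and context length are illustrative, the Tracker field is left unset, and ctx and getModels are assumed from the overview example:

promptReq := llmresolver.PromptRequest{
	ModelNames:    []string{"llama3"}, // hypothetical model name
	ProviderTypes: []string{"ollama"}, // optional; empty falls back to DefaultProviderType
	ContextLength: 4096,
}
execClient, err := llmresolver.PromptExecute(ctx, promptReq, getModels, llmresolver.Randomly)
if err != nil {
	return err
}
_ = execClient // execute prompts against the selected backend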
type ProviderFromRuntimeState ¶
type ProviderFromRuntimeState func(ctx context.Context, backendTypes ...string) ([]libmodelprovider.Provider, error)
ProviderFromRuntimeState retrieves available model providers
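Any function with this signature can be passed to the resolver functions. The sketch below adapts an application's own runtime state; runtimeState and its Providers method are hypothetical placeholders for however the host application tracks its backends:

getModels := llmresolver.ProviderFromRuntimeState(
	func(ctx context.Context, backendTypes ...string) ([]libmodelprovider.Provider, error) {
		// Hypothetical lookup into the host application's runtime state.
		return runtimeState.Providers(ctx, backendTypes...)
	},
)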
type Request ¶
type Request struct {
ProviderTypes []string // Optional: if empty, uses all default providers
ModelNames []string // Optional: if empty, any model is considered
ContextLength int // Minimum required context length
Tracker activitytracker.ActivityTracker
}
Request contains requirements for selecting a model provider.