Documentation
¶
Overview ¶
Package provider defines the core interfaces that external LLM providers must implement. External provider packages should import this package to implement the Provider interface.
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type ChatCompletionChoice ¶
type ChatCompletionChoice struct {
Index int `json:"index"`
Message Message `json:"message"`
Delta *Message `json:"delta,omitempty"`
FinishReason *string `json:"finish_reason"`
Logprobs any `json:"logprobs,omitempty"`
}
ChatCompletionChoice represents a single choice in the response.
type ChatCompletionChunk ¶
type ChatCompletionChunk struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
SystemFingerprint *string `json:"system_fingerprint,omitempty"`
Choices []ChatCompletionChoice `json:"choices"`
Usage *Usage `json:"usage,omitempty"`
ProviderMetadata map[string]any `json:"provider_metadata,omitempty"` // Provider-specific metadata
}
ChatCompletionChunk represents a chunk in a streaming response.
type ChatCompletionRequest ¶
type ChatCompletionRequest struct {
Model string `json:"model"`
Messages []Message `json:"messages"`
MaxTokens *int `json:"max_tokens,omitempty"`
Temperature *float64 `json:"temperature,omitempty"`
TopP *float64 `json:"top_p,omitempty"`
Stream *bool `json:"stream,omitempty"`
Stop []string `json:"stop,omitempty"`
PresencePenalty *float64 `json:"presence_penalty,omitempty"`
FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
LogitBias map[string]int `json:"logit_bias,omitempty"`
User *string `json:"user,omitempty"`
Tools []Tool `json:"tools,omitempty"`
ToolChoice any `json:"tool_choice,omitempty"`
}
ChatCompletionRequest represents a request for chat completion.
type ChatCompletionResponse ¶
type ChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
SystemFingerprint *string `json:"system_fingerprint,omitempty"`
Choices []ChatCompletionChoice `json:"choices"`
Usage Usage `json:"usage"`
ProviderMetadata map[string]any `json:"provider_metadata,omitempty"` // Provider-specific metadata
}
ChatCompletionResponse represents a response from chat completion.
type ChatCompletionStream ¶
type ChatCompletionStream interface {
// Recv receives the next chunk from the stream
Recv() (*ChatCompletionChunk, error)
// Close closes the stream
Close() error
}
ChatCompletionStream represents a streaming chat completion response.
type Message ¶
type Message struct {
Role Role `json:"role"`
Content string `json:"content"`
Name *string `json:"name,omitempty"`
ToolCallID *string `json:"tool_call_id,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
}
Message represents a chat message.
type Provider ¶
type Provider interface {
// CreateChatCompletion creates a new chat completion
CreateChatCompletion(ctx context.Context, req *ChatCompletionRequest) (*ChatCompletionResponse, error)
// CreateChatCompletionStream creates a streaming chat completion
CreateChatCompletionStream(ctx context.Context, req *ChatCompletionRequest) (ChatCompletionStream, error)
// Close closes the provider and cleans up resources
Close() error
// Name returns the provider name
Name() string
}
Provider defines the interface that all LLM providers must implement. External packages can implement this interface and inject via omnillm.ClientConfig.CustomProvider.
Example usage in external package:
import "github.com/agentplexus/omnillm/provider"
func NewMyProvider(apiKey string) provider.Provider {
return &myProvider{apiKey: apiKey}
}
type ToolCall ¶
type ToolCall struct {
ID string `json:"id"`
Type string `json:"type"`
Function ToolFunction `json:"function"`
}
ToolCall represents a tool function call.
type ToolFunction ¶
ToolFunction represents the function being called.