Documentation
¶
Index ¶
- func ProcessStreamContentWithThinks(processor *ThinkProcessor, chunk string) (regularContent, thinkContent string)
- type ClaudeProvider
- func (p *ClaudeProvider) DefaultModel() string
- func (p *ClaudeProvider) GetModelContextWindow(model string) int
- func (p *ClaudeProvider) IsAvailable() bool
- func (p *ClaudeProvider) Name() string
- func (p *ClaudeProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
- func (p *ClaudeProvider) SupportedModels() []string
- func (p *ClaudeProvider) ValidateConfig() error
- type Config
- type Function
- type FunctionCall
- type Manager
- func (m *Manager) Close()
- func (m *Manager) GetAvailableProviders() []string
- func (m *Manager) GetCurrentModel() string
- func (m *Manager) GetCurrentProvider() (Provider, error)
- func (m *Manager) GetProvider(name string) (Provider, error)
- func (m *Manager) GetProviderStatus() map[string]ProviderStatus
- func (m *Manager) GetSupportedModels() map[string][]string
- func (m *Manager) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
- func (m *Manager) StreamChatWithFallback(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
- func (m *Manager) StreamChatWithProvider(ctx context.Context, providerName string, messages []Message, ...) (<-chan StreamResponse, error)
- func (m *Manager) SwitchModel(modelName string) error
- func (m *Manager) SwitchProvider(name string) error
- type Message
- type OllamaProvider
- func (p *OllamaProvider) DefaultModel() string
- func (p *OllamaProvider) GetModelContextWindow(model string) int
- func (p *OllamaProvider) IsAvailable() bool
- func (p *OllamaProvider) Name() string
- func (p *OllamaProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
- func (p *OllamaProvider) SupportedModels() []string
- func (p *OllamaProvider) ValidateConfig() error
- type OpenAIProvider
- func (p *OpenAIProvider) DefaultModel() string
- func (p *OpenAIProvider) GetModelContextWindow(model string) int
- func (p *OpenAIProvider) IsAvailable() bool
- func (p *OpenAIProvider) Name() string
- func (p *OpenAIProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
- func (p *OpenAIProvider) SupportedModels() []string
- func (p *OpenAIProvider) ValidateConfig() error
- type OpenRouterProvider
- func (p *OpenRouterProvider) DefaultModel() string
- func (p *OpenRouterProvider) GetModelContextWindow(model string) int
- func (p *OpenRouterProvider) IsAvailable() bool
- func (p *OpenRouterProvider) Name() string
- func (p *OpenRouterProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
- func (p *OpenRouterProvider) SupportedModels() []string
- func (p *OpenRouterProvider) ValidateConfig() error
- type PlaceholderProvider
- func (p *PlaceholderProvider) DefaultModel() string
- func (p *PlaceholderProvider) GetModelContextWindow(model string) int
- func (p *PlaceholderProvider) IsAvailable() bool
- func (p *PlaceholderProvider) Name() string
- func (p *PlaceholderProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
- func (p *PlaceholderProvider) SupportedModels() []string
- func (p *PlaceholderProvider) ValidateConfig() error
- type Provider
- type ProviderConfig
- type ProviderError
- type ProviderStatus
- type ProviderType
- type StreamOptions
- type StreamResponse
- type ThinkProcessor
- type ToolCall
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func ProcessStreamContentWithThinks ¶
func ProcessStreamContentWithThinks(processor *ThinkProcessor, chunk string) (regularContent, thinkContent string)
ProcessStreamContentWithThinks processes streaming content and separates think tags
Types ¶
type ClaudeProvider ¶
type ClaudeProvider struct {
// contains filtered or unexported fields
}
ClaudeProvider implements the Claude provider
func NewClaudeProvider ¶
func NewClaudeProvider(config ProviderConfig) (*ClaudeProvider, error)
NewClaudeProvider creates a new Claude provider
func (*ClaudeProvider) DefaultModel ¶
func (p *ClaudeProvider) DefaultModel() string
DefaultModel returns the default model for this provider
func (*ClaudeProvider) GetModelContextWindow ¶
func (p *ClaudeProvider) GetModelContextWindow(model string) int
GetModelContextWindow returns the context window size for a given model
func (*ClaudeProvider) IsAvailable ¶
func (p *ClaudeProvider) IsAvailable() bool
IsAvailable checks if the provider is available
func (*ClaudeProvider) Name ¶
func (p *ClaudeProvider) Name() string
Name returns the provider name
func (*ClaudeProvider) StreamChat ¶
func (p *ClaudeProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
StreamChat streams a chat completion
func (*ClaudeProvider) SupportedModels ¶
func (p *ClaudeProvider) SupportedModels() []string
SupportedModels returns the list of supported models
func (*ClaudeProvider) ValidateConfig ¶
func (p *ClaudeProvider) ValidateConfig() error
ValidateConfig validates the provider configuration
type Config ¶
type Config struct {
DefaultProvider string `yaml:"default_provider"`
Providers map[string]ProviderConfig `yaml:"providers"`
FallbackProviders []string `yaml:"fallback_providers"`
}
Config represents AI provider configuration
type Function ¶
type Function struct {
Name string `json:"name"`
Description string `json:"description"`
Parameters map[string]interface{} `json:"parameters"`
}
Function represents a function definition
type FunctionCall ¶
type FunctionCall struct {
Name string `json:"name"`
Arguments string `json:"arguments"` // JSON string of arguments
}
FunctionCall represents a function call
type Manager ¶
type Manager struct {
// contains filtered or unexported fields
}
Manager manages multiple AI providers
func NewManager ¶
NewManager creates a new AI provider manager
func (*Manager) Close ¶
func (m *Manager) Close()
Close closes the manager and stops health checking
func (*Manager) GetAvailableProviders ¶
func (m *Manager) GetAvailableProviders() []string
GetAvailableProviders returns a list of available providers
func (*Manager) GetCurrentModel ¶
func (m *Manager) GetCurrentModel() string
GetCurrentModel returns the current model for the active provider
func (*Manager) GetCurrentProvider ¶
func (m *Manager) GetCurrentProvider() (Provider, error)
GetCurrentProvider returns the current provider
func (*Manager) GetProvider ¶
func (m *Manager) GetProvider(name string) (Provider, error)
GetProvider returns a specific provider by name
func (*Manager) GetProviderStatus ¶
func (m *Manager) GetProviderStatus() map[string]ProviderStatus
GetProviderStatus returns the status of all providers
func (*Manager) GetSupportedModels ¶
func (m *Manager) GetSupportedModels() map[string][]string
GetSupportedModels returns all supported models across all providers
func (*Manager) StreamChat ¶
func (m *Manager) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
StreamChat streams a chat completion using the current provider
func (*Manager) StreamChatWithFallback ¶
func (m *Manager) StreamChatWithFallback(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
StreamChatWithFallback streams a chat completion with automatic fallback
func (*Manager) StreamChatWithProvider ¶
func (m *Manager) StreamChatWithProvider(ctx context.Context, providerName string, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
StreamChatWithProvider streams a chat completion using a specific provider
func (*Manager) SwitchModel ¶
func (m *Manager) SwitchModel(modelName string) error
SwitchModel switches the current provider to use a different model
func (*Manager) SwitchProvider ¶
func (m *Manager) SwitchProvider(name string) error
SwitchProvider switches to a different provider
type Message ¶
type Message struct {
Role string `json:"role"` // "user", "assistant", "system"
Content string `json:"content"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
ToolCallId string `json:"tool_call_id,omitempty"`
}
Message represents a chat message
type OllamaProvider ¶
type OllamaProvider struct {
// contains filtered or unexported fields
}
OllamaProvider implements the Ollama provider
func NewOllamaProvider ¶
func NewOllamaProvider(config ProviderConfig) (*OllamaProvider, error)
NewOllamaProvider creates a new Ollama provider
func (*OllamaProvider) DefaultModel ¶
func (p *OllamaProvider) DefaultModel() string
DefaultModel returns the default model for this provider
func (*OllamaProvider) GetModelContextWindow ¶
func (p *OllamaProvider) GetModelContextWindow(model string) int
GetModelContextWindow returns the context window size for a given model
func (*OllamaProvider) IsAvailable ¶
func (p *OllamaProvider) IsAvailable() bool
IsAvailable checks if the provider is available
func (*OllamaProvider) Name ¶
func (p *OllamaProvider) Name() string
Name returns the provider name
func (*OllamaProvider) StreamChat ¶
func (p *OllamaProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
StreamChat streams a chat completion
func (*OllamaProvider) SupportedModels ¶
func (p *OllamaProvider) SupportedModels() []string
SupportedModels returns the list of supported models
func (*OllamaProvider) ValidateConfig ¶
func (p *OllamaProvider) ValidateConfig() error
ValidateConfig validates the provider configuration
type OpenAIProvider ¶
type OpenAIProvider struct {
// contains filtered or unexported fields
}
OpenAIProvider implements the OpenAI provider
func NewOpenAIProvider ¶
func NewOpenAIProvider(config ProviderConfig) (*OpenAIProvider, error)
NewOpenAIProvider creates a new OpenAI provider
func (*OpenAIProvider) DefaultModel ¶
func (p *OpenAIProvider) DefaultModel() string
DefaultModel returns the default model for this provider
func (*OpenAIProvider) GetModelContextWindow ¶
func (p *OpenAIProvider) GetModelContextWindow(model string) int
GetModelContextWindow returns the context window size for a given model
func (*OpenAIProvider) IsAvailable ¶
func (p *OpenAIProvider) IsAvailable() bool
IsAvailable checks if the provider is available
func (*OpenAIProvider) Name ¶
func (p *OpenAIProvider) Name() string
Name returns the provider name
func (*OpenAIProvider) StreamChat ¶
func (p *OpenAIProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
StreamChat streams a chat completion
func (*OpenAIProvider) SupportedModels ¶
func (p *OpenAIProvider) SupportedModels() []string
SupportedModels returns the list of supported models
func (*OpenAIProvider) ValidateConfig ¶
func (p *OpenAIProvider) ValidateConfig() error
ValidateConfig validates the provider configuration
type OpenRouterProvider ¶
type OpenRouterProvider struct {
// contains filtered or unexported fields
}
OpenRouterProvider implements the OpenRouter provider
func NewOpenRouterProvider ¶
func NewOpenRouterProvider(config ProviderConfig) (*OpenRouterProvider, error)
NewOpenRouterProvider creates a new OpenRouter provider
func (*OpenRouterProvider) DefaultModel ¶
func (p *OpenRouterProvider) DefaultModel() string
DefaultModel returns the default model for this provider
func (*OpenRouterProvider) GetModelContextWindow ¶
func (p *OpenRouterProvider) GetModelContextWindow(model string) int
GetModelContextWindow returns the context window size for a given model
func (*OpenRouterProvider) IsAvailable ¶
func (p *OpenRouterProvider) IsAvailable() bool
IsAvailable checks if the provider is available
func (*OpenRouterProvider) Name ¶
func (p *OpenRouterProvider) Name() string
Name returns the provider name
func (*OpenRouterProvider) StreamChat ¶
func (p *OpenRouterProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
StreamChat streams a chat completion
func (*OpenRouterProvider) SupportedModels ¶
func (p *OpenRouterProvider) SupportedModels() []string
SupportedModels returns the list of supported models
func (*OpenRouterProvider) ValidateConfig ¶
func (p *OpenRouterProvider) ValidateConfig() error
ValidateConfig validates the provider configuration
type PlaceholderProvider ¶
type PlaceholderProvider struct {
// contains filtered or unexported fields
}
PlaceholderProvider represents a provider that couldn't be initialized
func NewPlaceholderProvider ¶
func NewPlaceholderProvider(name, errorMsg string) *PlaceholderProvider
NewPlaceholderProvider creates a new placeholder provider
func (*PlaceholderProvider) DefaultModel ¶
func (p *PlaceholderProvider) DefaultModel() string
DefaultModel returns empty string
func (*PlaceholderProvider) GetModelContextWindow ¶
func (p *PlaceholderProvider) GetModelContextWindow(model string) int
GetModelContextWindow returns a default context window size
func (*PlaceholderProvider) IsAvailable ¶
func (p *PlaceholderProvider) IsAvailable() bool
IsAvailable always returns false
func (*PlaceholderProvider) Name ¶
func (p *PlaceholderProvider) Name() string
Name returns the provider name
func (*PlaceholderProvider) StreamChat ¶
func (p *PlaceholderProvider) StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
StreamChat returns an error explaining why the provider is unavailable
func (*PlaceholderProvider) SupportedModels ¶
func (p *PlaceholderProvider) SupportedModels() []string
SupportedModels returns an empty list
func (*PlaceholderProvider) ValidateConfig ¶
func (p *PlaceholderProvider) ValidateConfig() error
ValidateConfig returns the initialization error
type Provider ¶
type Provider interface {
// Name returns the provider name
Name() string
// StreamChat streams a chat completion
StreamChat(ctx context.Context, messages []Message, options StreamOptions) (<-chan StreamResponse, error)
// SupportedModels returns the list of supported models
SupportedModels() []string
// ValidateConfig validates the provider configuration
ValidateConfig() error
// IsAvailable checks if the provider is available
IsAvailable() bool
// GetModelContextWindow returns the context window size for a given model
GetModelContextWindow(model string) int
// DefaultModel returns the default model for this provider
DefaultModel() string
}
Provider represents an AI provider interface
type ProviderConfig ¶
type ProviderConfig struct {
APIKey string `yaml:"api_key"`
Endpoint string `yaml:"endpoint"`
DefaultModel string `yaml:"default_model"`
MaxTokens int `yaml:"max_tokens"`
Temperature float64 `yaml:"temperature"`
Timeout time.Duration `yaml:"timeout"`
}
ProviderConfig represents configuration for a specific provider
type ProviderError ¶
ProviderError represents an error from a provider
func NewProviderError ¶
func NewProviderError(provider, message, code string) *ProviderError
NewProviderError creates a new provider error
func (*ProviderError) Error ¶
func (e *ProviderError) Error() string
Error implements the error interface
type ProviderStatus ¶
type ProviderStatus struct {
Name string `json:"name"`
Available bool `json:"available"`
LastCheck time.Time `json:"last_check"`
Error string `json:"error,omitempty"`
Models []string `json:"models"`
}
ProviderStatus represents the status of a provider
type ProviderType ¶
type ProviderType string
ProviderType represents the type of AI provider
const (
	ProviderTypeOpenAI     ProviderType = "openai"
	ProviderTypeClaude     ProviderType = "claude"
	ProviderTypeOllama     ProviderType = "ollama"
	ProviderTypeOpenRouter ProviderType = "openrouter"
)
type StreamOptions ¶
type StreamOptions struct {
MaxTokens int `json:"max_tokens,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
Model string `json:"model,omitempty"`
Functions []Function `json:"functions,omitempty"`
}
StreamOptions contains options for streaming chat
type StreamResponse ¶
type StreamResponse struct {
Content string `json:"content"` // Regular content to display
ThinkContent string `json:"think_content"` // Reasoning content in collapsible section
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
Finished bool `json:"finished"`
Error error `json:"error,omitempty"`
}
StreamResponse represents a streaming response chunk
type ThinkProcessor ¶
type ThinkProcessor struct {
// contains filtered or unexported fields
}
ThinkProcessor tracks state for processing think tags in streams
func NewThinkProcessor ¶
func NewThinkProcessor() *ThinkProcessor
NewThinkProcessor creates a new think processor
func (*ThinkProcessor) ProcessChunk ¶
func (tp *ThinkProcessor) ProcessChunk(chunk string) (regularContent, thinkContent string)
ProcessChunk processes a content chunk and separates think content from regular content
type ToolCall ¶
type ToolCall struct {
ID string `json:"id"`
Type string `json:"type"` // "function"
Function FunctionCall `json:"function"`
}
ToolCall represents a function call made by the AI