Documentation
¶
Overview ¶
Package llmproviders provides LLM provider integration for TokMan agents.
Index ¶
- type AnthropicProvider
- func (p *AnthropicProvider) Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)
- func (p *AnthropicProvider) Embed(ctx context.Context, text string) ([]float64, error)
- func (p *AnthropicProvider) Name() string
- func (p *AnthropicProvider) StreamComplete(ctx context.Context, req CompletionRequest) (<-chan StreamChunk, error)
- type CompletionRequest
- type CompletionResponse
- type FunctionDefinition
- type Message
- type OllamaProvider
- func (p *OllamaProvider) Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)
- func (p *OllamaProvider) Embed(ctx context.Context, text string) ([]float64, error)
- func (p *OllamaProvider) Name() string
- func (p *OllamaProvider) StreamComplete(ctx context.Context, req CompletionRequest) (<-chan StreamChunk, error)
- type OpenAIProvider
- func (p *OpenAIProvider) Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)
- func (p *OpenAIProvider) Embed(ctx context.Context, text string) ([]float64, error)
- func (p *OpenAIProvider) Name() string
- func (p *OpenAIProvider) StreamComplete(ctx context.Context, req CompletionRequest) (<-chan StreamChunk, error)
- type Provider
- type ProviderConfig
- type ProviderFactory
- type StreamChunk
- type ToolDefinition
- type UsageInfo
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AnthropicProvider ¶
type AnthropicProvider struct {
// contains filtered or unexported fields
}
AnthropicProvider implements the Provider interface for Anthropic.
func NewAnthropicProvider ¶
func NewAnthropicProvider(apiKey, model string) *AnthropicProvider
NewAnthropicProvider creates a new Anthropic provider.
func (*AnthropicProvider) Complete ¶
func (p *AnthropicProvider) Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)
func (*AnthropicProvider) Embed ¶
func (p *AnthropicProvider) Embed(ctx context.Context, text string) ([]float64, error)
func (*AnthropicProvider) Name ¶
func (p *AnthropicProvider) Name() string
func (*AnthropicProvider) StreamComplete ¶
func (p *AnthropicProvider) StreamComplete(ctx context.Context, req CompletionRequest) (<-chan StreamChunk, error)
type CompletionRequest ¶
type CompletionRequest struct {
Model string
Messages []Message
Temperature float64
MaxTokens int
TopP float64
Stop []string
Tools []ToolDefinition
}
CompletionRequest holds the parameters for a completion request: the model, conversation messages, sampling settings, and optional tools.
type CompletionResponse ¶
CompletionResponse holds the result of a completion request.
type FunctionDefinition ¶
type FunctionDefinition struct {
Name string `json:"name"`
Description string `json:"description"`
Parameters map[string]interface{} `json:"parameters"`
}
FunctionDefinition describes a function that can be offered to the model for function calling.
type Message ¶
type Message struct {
Role string `json:"role"`
Content string `json:"content"`
Name string `json:"name,omitempty"`
}
Message represents a chat message.
type OllamaProvider ¶
type OllamaProvider struct {
// contains filtered or unexported fields
}
OllamaProvider implements the Provider interface for local Ollama.
func NewOllamaProvider ¶
func NewOllamaProvider(model string) *OllamaProvider
NewOllamaProvider creates a new Ollama provider.
func (*OllamaProvider) Complete ¶
func (p *OllamaProvider) Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)
func (*OllamaProvider) Embed ¶
func (p *OllamaProvider) Embed(ctx context.Context, text string) ([]float64, error)
func (*OllamaProvider) Name ¶
func (p *OllamaProvider) Name() string
func (*OllamaProvider) StreamComplete ¶
func (p *OllamaProvider) StreamComplete(ctx context.Context, req CompletionRequest) (<-chan StreamChunk, error)
type OpenAIProvider ¶
type OpenAIProvider struct {
// contains filtered or unexported fields
}
OpenAIProvider implements the Provider interface for OpenAI.
func NewOpenAIProvider ¶
func NewOpenAIProvider(apiKey, model string) *OpenAIProvider
NewOpenAIProvider creates a new OpenAI provider.
func (*OpenAIProvider) Complete ¶
func (p *OpenAIProvider) Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)
func (*OpenAIProvider) Embed ¶
func (p *OpenAIProvider) Embed(ctx context.Context, text string) ([]float64, error)
func (*OpenAIProvider) Name ¶
func (p *OpenAIProvider) Name() string
func (*OpenAIProvider) StreamComplete ¶
func (p *OpenAIProvider) StreamComplete(ctx context.Context, req CompletionRequest) (<-chan StreamChunk, error)
type Provider ¶
type Provider interface {
Name() string
Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)
StreamComplete(ctx context.Context, req CompletionRequest) (<-chan StreamChunk, error)
Embed(ctx context.Context, text string) ([]float64, error)
}
Provider defines an LLM provider interface.
type ProviderConfig ¶
ProviderConfig holds provider configuration.
type ProviderFactory ¶
type ProviderFactory struct {
// contains filtered or unexported fields
}
ProviderFactory creates providers by type.
func NewProviderFactory ¶
func NewProviderFactory() *ProviderFactory
NewProviderFactory creates a new factory.
func (*ProviderFactory) Create ¶
func (pf *ProviderFactory) Create(name string) (Provider, error)
Create creates a provider by name.
func (*ProviderFactory) Register ¶
func (pf *ProviderFactory) Register(name string, config ProviderConfig)
Register registers a provider configuration.
type StreamChunk ¶
StreamChunk represents a single chunk of a streaming completion response.
type ToolDefinition ¶
type ToolDefinition struct {
Type string `json:"type"`
Function FunctionDefinition `json:"function"`
}
ToolDefinition represents a tool definition for function calling.