Documentation
¶
Index ¶
- type ChatCompletionRequest
- type ChatCompletionResponse
- type ChatCompletionStream
- type Choice
- type ClaudeLLM
- type DeepSeekLLM
- type Function
- type GeminiLLM
- type GeminiOptions
- type LLM
- type LLMProvider
- type Message
- type OllamaLLM
- type OpenAILLM
- type OpenRouterLLM
- type Role
- type StreamChoice
- type StreamDelta
- type Tool
- type ToolCall
- type ToolCallFunction
- type Usage
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type ChatCompletionRequest ¶
type ChatCompletionRequest struct {
Model string `json:"model"`
Messages []Message `json:"messages"`
Temperature float32 `json:"temperature,omitempty"`
TopP float32 `json:"top_p,omitempty"`
N int `json:"n,omitempty"`
Stop []string `json:"stop,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
PresencePenalty float32 `json:"presence_penalty,omitempty"`
FrequencyPenalty float32 `json:"frequency_penalty,omitempty"`
User string `json:"user,omitempty"`
Tools []Tool `json:"tools,omitempty"`
Stream bool `json:"stream,omitempty"`
}
ChatCompletionRequest represents a generic request for chat completion
type ChatCompletionResponse ¶
type ChatCompletionResponse struct {
ID string `json:"id"`
Choices []Choice `json:"choices"`
Usage Usage `json:"usage"`
}
ChatCompletionResponse represents a generic response from chat completion
type ChatCompletionStream ¶
type ChatCompletionStream interface {
Recv() (ChatCompletionResponse, error)
Close() error
}
ChatCompletionStream represents a streaming response
type Choice ¶
type Choice struct {
Index int `json:"index"`
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
}
Choice represents a completion choice
type ClaudeLLM ¶
type ClaudeLLM struct {
// contains filtered or unexported fields
}
ClaudeLLM implements the LLM interface for Anthropic's Claude
func NewClaudeLLM ¶
NewClaudeLLM creates a new Claude LLM client
func (*ClaudeLLM) CreateChatCompletion ¶
func (c *ClaudeLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)
CreateChatCompletion implements the LLM interface for Claude
func (*ClaudeLLM) CreateChatCompletionStream ¶
func (c *ClaudeLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)
CreateChatCompletionStream implements the LLM interface for Claude streaming
type DeepSeekLLM ¶
type DeepSeekLLM struct {
// contains filtered or unexported fields
}
DeepSeekLLM implements the LLM interface for DeepSeek
func NewDeepSeekLLM ¶
func NewDeepSeekLLM(apiKey string) *DeepSeekLLM
NewDeepSeekLLM creates a new DeepSeek LLM client
func (*DeepSeekLLM) CreateChatCompletion ¶
func (l *DeepSeekLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)
CreateChatCompletion implements the LLM interface for DeepSeek
func (*DeepSeekLLM) CreateChatCompletionStream ¶
func (l *DeepSeekLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)
CreateChatCompletionStream implements the LLM interface for DeepSeek streaming
type Function ¶
type Function struct {
Name string `json:"name"`
Description string `json:"description"`
Parameters map[string]interface{} `json:"parameters"`
}
Function represents a function definition
type GeminiLLM ¶
type GeminiLLM struct {
// contains filtered or unexported fields
}
GeminiLLM implements the LLM interface for Google's Gemini
func NewGeminiLLM ¶
func NewGeminiLLM(apiKey string, opts ...GeminiOptions) (*GeminiLLM, error)
NewGeminiLLM creates a new Gemini LLM client
func (*GeminiLLM) CreateChatCompletion ¶
func (g *GeminiLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)
CreateChatCompletion implements the LLM interface for Gemini
func (*GeminiLLM) CreateChatCompletionStream ¶
func (g *GeminiLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)
CreateChatCompletionStream implements the LLM interface for Gemini streaming
type GeminiOptions ¶
type GeminiOptions struct {
Model string
HarmThreshold genai.HarmBlockThreshold
SafetySettings []*genai.SafetySetting
}
GeminiOptions contains configuration options for the Gemini model
type LLM ¶
type LLM interface {
CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)
CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)
}
LLM defines the interface that all LLM providers must implement
type LLMProvider ¶
type LLMProvider string
LLMProvider represents different LLM providers
const (
	OpenAI          LLMProvider = "OPEN_AI"
	Azure           LLMProvider = "AZURE"
	AzureAD         LLMProvider = "AZURE_AD"
	CloudflareAzure LLMProvider = "CLOUDFLARE_AZURE"
	Gemini          LLMProvider = "GEMINI"
	Claude          LLMProvider = "CLAUDE"
	Ollama          LLMProvider = "OLLAMA"
	DeepSeek        LLMProvider = "DEEPSEEK"
	OpenRouter      LLMProvider = "OPENROUTER"
)
type Message ¶
type Message struct {
Role Role `json:"role"`
Content string `json:"content"`
Name string `json:"name,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
}
Message represents a single message in a chat conversation
type OllamaLLM ¶
type OllamaLLM struct {
// contains filtered or unexported fields
}
OllamaLLM implements the LLM interface for Ollama
func NewOllamaLLM ¶
NewOllamaLLM creates a new Ollama LLM client
func NewOllamaLLMWithURL ¶
NewOllamaLLMWithURL creates a new Ollama LLM client with a custom URL
func (*OllamaLLM) CreateChatCompletion ¶
func (o *OllamaLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)
CreateChatCompletion implements the LLM interface for Ollama
func (*OllamaLLM) CreateChatCompletionStream ¶
func (o *OllamaLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)
CreateChatCompletionStream implements the LLM interface for Ollama streaming
type OpenAILLM ¶
type OpenAILLM struct {
// contains filtered or unexported fields
}
OpenAILLM implements the LLM interface for OpenAI
func NewOpenAILLM ¶
NewOpenAILLM creates a new OpenAI LLM client
func NewOpenAILLMWithHost ¶
func (*OpenAILLM) CreateChatCompletion ¶
func (o *OpenAILLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)
CreateChatCompletion implements the LLM interface for OpenAI
func (*OpenAILLM) CreateChatCompletionStream ¶
func (o *OpenAILLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)
CreateChatCompletionStream implements the LLM interface for OpenAI streaming
type OpenRouterLLM ¶
type OpenRouterLLM struct {
// contains filtered or unexported fields
}
OpenRouterLLM implements the LLM interface for OpenRouter
func NewOpenRouterLLM ¶
func NewOpenRouterLLM(apiKey string) *OpenRouterLLM
NewOpenRouterLLM creates a new OpenRouter LLM client
func NewOpenRouterLLMWithHost ¶
func NewOpenRouterLLMWithHost(apiKey string, host string) *OpenRouterLLM
func (*OpenRouterLLM) CreateChatCompletion ¶
func (o *OpenRouterLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)
CreateChatCompletion implements the LLM interface for OpenRouter
func (*OpenRouterLLM) CreateChatCompletionStream ¶
func (o *OpenRouterLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)
CreateChatCompletionStream implements the LLM interface for OpenRouter streaming
type StreamChoice ¶
type StreamChoice struct {
Index int `json:"index"`
Delta StreamDelta `json:"delta"`
FinishReason string `json:"finish_reason"`
}
StreamChoice represents a choice in a streaming response
type StreamDelta ¶
type StreamDelta struct {
Role Role `json:"role"`
Content string `json:"content"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
ToolCallID string `json:"tool_call_id,omitempty"`
}
StreamDelta represents a delta in a streaming response
type ToolCall ¶
type ToolCall struct {
ID string `json:"id"`
Type string `json:"type"`
Function ToolCallFunction `json:"function"`
}
ToolCall represents a tool/function call from the LLM