Documentation
¶
Index ¶
- type ChatRequest
- type ChatResponse
- type Config
- type ContentPart
- type ImageURL
- type Message
- type Provider
- func NewGemini(cfg Config) Provider
- func NewGroq(cfg Config) Provider
- func NewLMStudio(cfg Config) Provider
- func NewOllama(cfg Config) Provider
- func NewOpenAI(cfg Config) Provider
- func NewOpenAICompat(cfg Config) Provider
- func NewOpenRouter(cfg Config) Provider
- func NewProvider(cfg Config) (Provider, error)
- func NewXAI(cfg Config) Provider
- type VisionChatRequest
- type VisionMessage
- type VisionProvider
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type ChatRequest ¶
type ChatRequest struct {
Model string `json:"model"`
Messages []Message `json:"messages"`
Temperature float64 `json:"temperature,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
// ResponseFormat can be set to "json_object" for JSON mode.
ResponseFormat string `json:"response_format,omitempty"`
}
ChatRequest is a chat completion request.
type ChatResponse ¶
type ChatResponse struct {
Content string `json:"content"`
Model string `json:"model"`
FinishReason string `json:"finish_reason"`
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
TotalTokens int `json:"total_tokens"`
}
ChatResponse is the response from a chat completion.
type Config ¶
type Config struct {
Provider string `json:"provider"` // ollama, lmstudio, openrouter, openai, groq, xai, gemini, custom
Model string `json:"model"`
BaseURL string `json:"base_url"`
APIKey string `json:"api_key"`
}
Config configures an LLM provider.
type ContentPart ¶
type ContentPart struct {
Type string `json:"type"` // "text" or "image_url"
Text string `json:"text,omitempty"`
ImageURL *ImageURL `json:"image_url,omitempty"`
}
ContentPart is either text or an image in a vision message.
type ImageURL ¶
type ImageURL struct {
URL string `json:"url"`
}
ImageURL references an image either as a base64-encoded data URI or as a remote URL.
type Provider ¶
type Provider interface {
// Chat sends a chat completion request.
Chat(ctx context.Context, req ChatRequest) (*ChatResponse, error)
// Embed generates embeddings for a batch of texts.
Embed(ctx context.Context, texts []string) ([][]float32, error)
}
Provider is the interface for LLM interactions.
func NewLMStudio ¶
NewLMStudio creates a provider for LM Studio.
func NewOpenAICompat ¶
NewOpenAICompat creates a generic OpenAI-compatible provider.
func NewOpenRouter ¶
NewOpenRouter creates a provider for OpenRouter.
func NewProvider ¶
NewProvider creates an LLM provider from the given configuration, selecting the implementation named by Config.Provider.
type VisionChatRequest ¶
type VisionChatRequest struct {
Model string `json:"model"`
Messages []VisionMessage `json:"messages"`
Temperature float64 `json:"temperature,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
}
VisionChatRequest is a chat request with image content.
type VisionMessage ¶
type VisionMessage struct {
Role string `json:"role"`
Content []ContentPart `json:"content"`
}
VisionMessage represents a chat message that may contain images.
type VisionProvider ¶
type VisionProvider interface {
Provider
// ChatWithImages sends a chat request that includes images.
ChatWithImages(ctx context.Context, req VisionChatRequest) (*ChatResponse, error)
}
VisionProvider extends Provider with image understanding.