Documentation
¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type Anthropic ¶
type Anthropic struct {
// contains filtered or unexported fields
}
Anthropic is a client for Anthropic's streaming Messages API.
func NewAnthropic ¶
func (*Anthropic) Stream ¶
func (a *Anthropic) Stream(ctx context.Context, msgs []ChatMessage, tools []ToolSpec) (<-chan StreamChunk, error)
Stream implements the Anthropic streaming Messages API.
type ChatMessage ¶
type ChatMessage struct {
Role string `json:"role"`
Content string `json:"content,omitempty"`
Name string `json:"name,omitempty"`
ToolCallID string `json:"tool_call_id,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
}
ChatMessage represents a message sent to or received from the model.
type Client ¶
type Client interface {
// All clients must support streaming (required for responses API)
Stream(ctx context.Context, msgs []ChatMessage, tools []ToolSpec) (<-chan StreamChunk, error)
}
Client defines the interface for language model backends using the streaming responses API.
func FromManifest ¶
func FromManifest(m config.ModelManifest) (Client, error)
FromManifest creates a Client from a config.ModelManifest.
type Completion ¶
type Completion struct {
Content string
ToolCalls []ToolCall
InputTokens int // Actual input tokens from API
OutputTokens int // Actual output tokens from API
ModelName string // The provider/model name used (e.g., "openai/gpt-4")
}
Completion holds either final content or tool calls.
type Mock ¶
type Mock struct {
// contains filtered or unexported fields
}
Mock is a model that cycles through one tool call and then returns a final message.
func (*Mock) Stream ¶
func (m *Mock) Stream(ctx context.Context, msgs []ChatMessage, tools []ToolSpec) (<-chan StreamChunk, error)
type OpenAI ¶
type OpenAI struct {
Temperature *float64
// contains filtered or unexported fields
}
OpenAI is a client implemented against the /v1/responses API (legacy chat completions support has been removed).
func (*OpenAI) Stream ¶
func (o *OpenAI) Stream(ctx context.Context, msgs []ChatMessage, tools []ToolSpec) (<-chan StreamChunk, error)
type StreamChunk ¶
type StreamChunk struct {
ContentDelta string
Done bool
Err error
// Populated only on final chunk when available
InputTokens int
OutputTokens int
ToolCalls []ToolCall
ModelName string // provider/model identifier for accurate cost tracking
}
StreamChunk represents an incremental model output segment.
type StreamingClient ¶
type StreamingClient interface {
Stream(ctx context.Context, msgs []ChatMessage, tools []ToolSpec) (<-chan StreamChunk, error)
}
StreamingClient provides incremental output chunks. Channel must be closed after final chunk (Done=true) or on Err.