Documentation
¶
Index ¶
- Constants
- Variables
- func ApplyConfig(config *FullConfig, token *string, maxRound *int, baseUrl *string, ...) error
- func Main(args []string, opts Options) error
- type ChatHandler
- type ChatOptions
- type ClientUnion
- type Config
- type FullConfig
- type Message
- type MessageHistoryUnion
- type Messages
- func (messages Messages) ToAnthropic() (msgs []anthropic.MessageParam, systemPrompts []string, err error)
- func (messages Messages) ToGemini() (msgs []*genai.Content, systemPrompts []string, err error)
- func (messages Messages) ToOpenAI(keepSystemPrompts bool) (msgs []openai.ChatCompletionMessageParamUnion, systemPrompts []string, ...)
- func (messages Messages) ToOpenAI2() []openai.ChatCompletionMessageParamUnion
- type MessagesUnion
- type MsgType
- type Number
- type Options
- type ResolvedOptions
- type ResponseResultAnthropic
- type ResponseResultGemini
- type ResponseResultOpenAI
- type Role
- type StringOrList
- type TokenCost
- type TokenCostInputBreakdown
- type TokenUsage
- type TokenUsageCost
- type TokenUsageInputBreakdown
- type TokenUsageOutputBreakdown
- type ToolInfo
- type ToolInfoMapping
Constants ¶
View Source
const ( MsgType_Msg = "msg" MsgType_ToolCall = "tool_call" MsgType_ToolResult = "tool_result" MsgType_TokenUsage = "token_usage" MsgType_StopReason = "stop_reason" // anthropic specific )
View Source
const ( Role_User = "user" Role_Assistant = "assistant" Role_System = "system" )
Variables ¶
View Source
var ConfigDef string
View Source
var ExampleConfig string
Functions ¶
func ApplyConfig ¶ added in v0.0.12
func ApplyConfig(config *FullConfig, token *string, maxRound *int, baseUrl *string, model *string, systemPrompt *string, tools *[]string, toolCustomFiles *[]string, toolCustomJSONs *[]string, toolDefaultCwd *string, recordFile *string, noCache *bool, showUsage *bool, ignoreDuplicateMsg *bool, logRequest *bool, logChatFlag **bool, verbose *bool, mcpServers *[]string) error
ApplyConfig applies configuration values to the provided variables, giving precedence to command line arguments
Types ¶
type ChatHandler ¶
func (*ChatHandler) Handle ¶
func (c *ChatHandler) Handle(model string, baseUrl string, token string, msg string, opts ChatOptions) error
type ChatOptions ¶
type ChatOptions struct {
// contains filtered or unexported fields
}
type ClientUnion ¶
type Config ¶ added in v0.0.12
type Config struct {
Token string `json:"token,omitempty"`
MaxRound int `json:"max_round,omitempty"`
BaseURL string `json:"base_url,omitempty"`
Model string `json:"model,omitempty"`
SystemPrompt StringOrList `json:"system,omitempty"` // can be string or a list of strings
Tools []string `json:"tools,omitempty"`
ToolCustomFiles []string `json:"tool_custom_files,omitempty"`
ToolCustomJSONs []*tools.UnifiedTool `json:"tool_custom_jsons,omitempty"`
ToolDefaultCwd string `json:"tool_default_cwd,omitempty"`
MCPServers []string `json:"mcp_servers,omitempty"`
Examples []string `json:"examples,omitempty"` // a list of example questions this agent can assist with
}
Config represents the configuration structure that can be loaded from a file
type FullConfig ¶ added in v0.0.13
type FullConfig struct {
Config
RecordFile string `json:"record_file,omitempty"`
NoCache bool `json:"no_cache,omitempty"`
ShowUsage bool `json:"show_usage,omitempty"`
IgnoreDuplicateMsg bool `json:"ignore_duplicate_msg,omitempty"`
LogRequest bool `json:"log_request,omitempty"`
LogChat *bool `json:"log_chat,omitempty"`
Verbose bool `json:"verbose,omitempty"`
}
func LoadConfig ¶ added in v0.0.12
func LoadConfig(configFile string) (*FullConfig, error)
LoadConfig loads configuration from a JSON file
type Message ¶
type Message struct {
Type MsgType `json:"type"`
Time string `json:"time"`
Role Role `json:"role"`
Model string `json:"model"`
Content string `json:"content"`
ToolUseID string `json:"tool_use_id,omitempty"`
ToolName string `json:"tool_name,omitempty"`
TokenUsage *TokenUsage `json:"token_usage,omitempty"`
}
Message represents a message in the chat record
type MessageHistoryUnion ¶
type MessageHistoryUnion struct {
FullHistory Messages
SystemPrompts []string
OpenAI []openai.ChatCompletionMessageParamUnion
Anthropic []anthropic.MessageParam
Gemini []*genai.Content
}
type Messages ¶
type Messages []Message
Messages represents a slice of unified messages with conversion methods
func (Messages) ToAnthropic ¶
func (messages Messages) ToAnthropic() (msgs []anthropic.MessageParam, systemPrompts []string, err error)
ToAnthropic converts unified messages to Anthropic format
func (Messages) ToOpenAI ¶
func (messages Messages) ToOpenAI(keepSystemPrompts bool) (msgs []openai.ChatCompletionMessageParamUnion, systemPrompts []string, err error)
ToOpenAI converts unified messages to OpenAI format, optionally keeping system prompts in the message list
func (Messages) ToOpenAI2 ¶
func (messages Messages) ToOpenAI2() []openai.ChatCompletionMessageParamUnion
ToOpenAI2 converts unified messages to OpenAI format
type MessagesUnion ¶
type MessagesUnion struct {
OpenAI []openai.ChatCompletionMessageParamUnion
Anthropic []anthropic.MessageParam
Gemini []*genai.Content
}
type ResolvedOptions ¶ added in v0.0.8
func ResolveEnvOptions ¶ added in v0.0.8
type ResponseResultAnthropic ¶
type ResponseResultAnthropic struct {
ToolUseNum int
Messages []anthropic.ContentBlockParamUnion
ToolResults []anthropic.ContentBlockParamUnion
TokenUsage TokenUsage
}
type ResponseResultGemini ¶ added in v0.0.7
type ResponseResultOpenAI ¶
type ResponseResultOpenAI struct {
ToolUseNum int
Messages []openai.ChatCompletionMessageParamUnion
ToolResults []openai.ChatCompletionMessageParamUnion
TokenUsage TokenUsage
}
type StringOrList ¶ added in v0.0.12
type StringOrList = interface{}
type TokenCost ¶
type TokenCost struct {
// the three are available for all providers
InputUSD string
OutputUSD string
TotalUSD string
// Input breakdown
// anthropic has this detail
InputBreakdown TokenCostInputBreakdown
}
type TokenCostInputBreakdown ¶
type TokenCostInputBreakdown struct {
CacheWriteUSD string
CacheReadUSD string
NonCacheReadUSD string
}
func (TokenCostInputBreakdown) Add ¶
func (c TokenCostInputBreakdown) Add(b TokenCostInputBreakdown) TokenCostInputBreakdown
type TokenUsage ¶
type TokenUsage struct {
Input int64 `json:"input"`
Output int64 `json:"output"`
Total int64 `json:"total"`
InputBreakdown TokenUsageInputBreakdown `json:"input_breakdown"`
OutputBreakdown TokenUsageOutputBreakdown `json:"output_breakdown"`
}
Anthropic:
- how to: https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching
- when: https://www.anthropic.com/news/prompt-caching
- summary: Anthropic only caches sufficiently long prompts. The minimum cacheable prompt length is 1024 tokens for Claude Opus 4, Claude Sonnet 4, Claude Sonnet 3.7, Claude Sonnet 3.5, and Claude Opus 3. The cache is invalidated after 5 minutes.
func (TokenUsage) Add ¶
func (c TokenUsage) Add(b TokenUsage) TokenUsage
type TokenUsageCost ¶
type TokenUsageCost struct {
Usage TokenUsage
Cost TokenCost
}
type TokenUsageInputBreakdown ¶
type TokenUsageInputBreakdown struct {
CacheWrite int64 `json:"cache_write"` // anthropic specific
CacheRead int64 `json:"cache_read"`
NonCacheRead int64 `json:"non_cache_read"`
}
func (TokenUsageInputBreakdown) Add ¶
func (c TokenUsageInputBreakdown) Add(b TokenUsageInputBreakdown) TokenUsageInputBreakdown
type TokenUsageOutputBreakdown ¶
type TokenUsageOutputBreakdown struct {
CacheOutput int64 `json:"cache_output"`
}
type ToolInfo ¶ added in v0.0.8
type ToolInfoMapping ¶ added in v0.0.8
Click to show internal directories.
Click to hide internal directories.