types

package module
v0.0.10 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Dec 24, 2025 License: MIT Imports: 15 Imported by: 1

Documentation

Index

Constants

View Source
// Model name constants for supported LLM providers.
// Price annotations are per 1M input tokens and may be stale —
// verify against each provider's current pricing page.
const (
	// OpenAI
	ModelGPT4o           = "gpt-4o"      // NOTE(review): "$0.15 / 1M" looked copy-pasted from gpt-4o-mini — confirm actual price
	ModelGPT4oMini       = "gpt-4o-mini" // $0.15 / 1M
	ModelGPT4oNano       = "gpt-4o-nano" // NOTE(review): OpenAI lists "gpt-4.1-nano", not "gpt-4o-nano" — confirm this model ID
	ModelGPTo4Mini       = "o4-mini"     // ~10x gpt-4o-mini
	ModelGPTo3Mini       = "o3-mini"     // ~10x gpt-4o-mini
	ModelGPT4_1          = "gpt-4.1"     // $2
	ModelGPT4_1_Mini     = "gpt-4.1-mini"
	ModelGPTo3           = "o3"               // $2
	ModelGPT5_20250807   = "gpt-5-2025-08-07" // $1.25
	ModelGPT5_2_20251211 = "gpt-5.2-2025-12-11"

	// Anthropic
	ModelClaude3_7Sonnet          = "claude-3-7-sonnet" // $3
	ModelClaude3_7Sonnet_20250219 = "claude-3-7-sonnet@20250219"

	ModelClaudeSonnet4          = "claude-sonnet-4" // $3
	ModelClaudeSonnet4_20250514 = "claude-sonnet-4@20250514"

	ModelClaudeSonnet4_5 = "claude-sonnet-4-5" // $3

	// ModelClaudeSonnet4_5_20250929 is the dated snapshot of claude-sonnet-4-5.
	ModelClaudeSonnet4_5_20250929 = "claude-sonnet-4-5@20250929"

	// Deprecated: misnamed (the value is a sonnet-4-5 snapshot, not 4.x);
	// use ModelClaudeSonnet4_5_20250929 instead. Kept for backward compatibility.
	ModelClaudeSonnet4_20250929 = "claude-sonnet-4-5@20250929"

	// Gemini
	ModelGemini2_0_Flash      = "gemini-2.0-flash"
	ModelGemini2_0_Flash_001  = "gemini-2.0-flash-001"
	ModelGemini2_5_Pro        = "gemini-2.5-pro"
	ModelGemini2_5_Pro_0605   = "gemini-2.5-pro-preview-06-05"
	ModelGemini2_5_Flash      = "gemini-2.5-flash"
	ModelGemini2_5_Flash_0520 = "gemini-2.5-flash-preview-05-20"
	ModelGemini3ProPreview    = "gemini-3-pro-preview"

	// Moonshot (Kimi)
	ModelKimiK2              = "kimi-k2"
	ModelKimiK2_0711_Preview = "kimi-k2-0711-preview"
	ModelOpenRouterKimiK2    = "moonshotai/kimi-k2" // kimi-k2 routed through OpenRouter

	// Other providers
	ModelDeepSeekR1          = "DeepSeek-R1"
	ModelQwen25VL72BInstruct = "Qwen2.5-VL-72B-Instruct"
)

Model constants

View Source
// Provider constants re-exported from the models package so callers can use
// them without importing models directly.
const (
	ProviderAnthropic  = models.ProviderAnthropic
	ProviderGemini     = models.ProviderGemini
	ProviderOpenAI     = models.ProviderOpenAI
	ProviderMoonshot   = models.ProviderMoonshot
	ProviderDeepSeek   = models.ProviderDeepSeek
	ProviderQwen       = models.ProviderQwen
	ProviderOpenRouter = models.ProviderOpenRouter
)

Re-export Provider constants from models package

View Source
// APIShape constants re-exported from the models package so callers can use
// them without importing models directly.
const (
	APIShapeOpenAI    = models.APIShapeOpenAI
	APIShapeAnthropic = models.APIShapeAnthropic
	APIShapeGemini    = models.APIShapeGemini
)

Re-export APIShape constants from models package

View Source
const STREAM_ACK_TIMEOUT = 1 * time.Second

const STREAM_ACK_TIMEOUT = 100 * time.Second

Variables

View Source
// AllModelInfos contains all model information in a centralized map.
// Model definitions are organized in types/models/ by provider.
var AllModelInfos = initAllModelInfos()

AllModelInfos contains all model information in a centralized map Model definitions are organized in types/models/ by provider

View Source
// ErrStreamEnd is a sentinel error indicating the stream has ended and can
// no longer handle messages; compare with errors.Is.
var ErrStreamEnd = fmt.Errorf("stream end")
View Source
// UnifiedToolDef holds the raw unified tool definition content; it is
// populated elsewhere (not visible in this file chunk) — confirm its source.
var UnifiedToolDef string

Functions

func GetAllModels added in v0.0.10

func GetAllModels() []string

GetAllModels returns all supported model names

Types

type APIShape

// APIShape represents the API shape/format (re-exported from the models
// package).
type APIShape = models.APIShape

APIShape represents the API shape/format (re-exported from models package)

type CacheInfoMetadata

// CacheInfoMetadata represents metadata for cache_info events.
type CacheInfoMetadata struct {
	CacheEnabled bool   `json:"cache_enabled"`
	Model        string `json:"model,omitempty"`
}

CacheInfoMetadata represents metadata for cache_info events

type ChatOption

// ChatOption represents a functional option for chat configuration; each
// option mutates the underlying Request.
type ChatOption func(*Request)

ChatOption represents a functional option for chat configuration

func WithCache

func WithCache(enabled bool) ChatOption

WithCache controls whether caching is enabled (default: true)

func WithDefaultToolCwd

func WithDefaultToolCwd(cwd string) ChatOption

WithDefaultToolCwd sets the default working directory for tool execution

func WithEventCallback

func WithEventCallback(callback EventCallback) ChatOption

WithEventCallback sets a callback for receiving events during chat processing

func WithFollowUpCallback

func WithFollowUpCallback(callback FollowUpCallback) ChatOption

WithFollowUpCallback sets a callback for follow-up tool execution

func WithHistory

func WithHistory(messages []Message) ChatOption

WithHistory provides historical messages for conversation context

func WithMCPServers

func WithMCPServers(servers ...string) ChatOption

WithMCPServers specifies MCP servers to connect to

func WithMaxRounds

func WithMaxRounds(rounds int) ChatOption

WithMaxRounds sets the maximum number of conversation rounds

func WithStdStream

func WithStdStream(stdin io.Reader, stdout io.Writer) ChatOption

WithStdStream sets stdin and stdout for bidirectional tool callback communication

func WithSystemPrompt

func WithSystemPrompt(prompt string) ChatOption

WithSystemPrompt sets the system prompt for the conversation

func WithToolCallback

func WithToolCallback(callback ToolCallback) ChatOption

WithToolCallback sets a custom tool execution callback

func WithToolDefinitions

func WithToolDefinitions(tool ...*UnifiedTool) ChatOption

func WithToolFiles

func WithToolFiles(files ...string) ChatOption

WithToolFiles specifies custom tool definition files to load

func WithToolJSONs

func WithToolJSONs(jsons ...string) ChatOption

WithToolJSONs specifies custom tool definitions as JSON strings

func WithTools

func WithTools(tools ...string) ChatOption

WithTools specifies the builtin tools to make available

type Config

// Config represents the basic configuration structure for a chat/agent
// session; fields largely mirror the WithXxx chat options.
type Config struct {
	Token           string       `json:"token,omitempty"`
	MaxRound        int          `json:"max_round,omitempty"`
	BaseURL         string       `json:"base_url,omitempty"`
	Model           string       `json:"model,omitempty"`
	SystemPrompt    StringOrList `json:"system,omitempty"` // can be string or a list of strings
	Tools           []string     `json:"tools,omitempty"`
	ToolCustomFiles []string     `json:"tool_custom_files,omitempty"`
	ToolDefaultCwd  string       `json:"tool_default_cwd,omitempty"`
	MCPServers      []string     `json:"mcp_servers,omitempty"`
	Examples        []string     `json:"examples,omitempty"` // a list of example questions this agent can assist with
}

Config represents the basic configuration structure

type EventCallback

// EventCallback is called for each message emitted during chat processing.
type EventCallback func(msg Message)

EventCallback is called for each message during chat processing

type FollowUpCallback

// FollowUpCallback allows custom follow-up tool execution.
// Returns (result, error):
//   - result: the follow-up message; nil indicates end of conversation
//   - error: any execution error
type FollowUpCallback func(ctx context.Context) (*Message, error)

FollowUpCallback allows custom follow-up tool execution Returns: (result, error) - result: Follow-up message, nil to indicate end of conversation - error: Any execution error

type JsonSchema

// JsonSchema is re-exported from the jsonschema package.
type JsonSchema = jsonschema.JsonSchema

type LogLevel

// LogLevel represents the logging level for the client.
type LogLevel int

LogLevel represents the logging level for the client

// Log levels, in increasing order of detail: none, request, response, debug.
const (
	LogLevelNone LogLevel = iota
	LogLevelRequest
	LogLevelResponse
	LogLevelDebug
)

type LogType

// LogType classifies a log entry.
type LogType string
// Supported log types.
const (
	LogType_Info  LogType = "info"
	LogType_Error LogType = "error"
)

type Logger

// Logger receives formatted log output from the client.
type Logger interface {
	// Log records one log entry; format/args follow fmt.Printf conventions.
	Log(ctx context.Context, logType LogType, format string, args ...interface{})
}

type LoggerFunc

// LoggerFunc adapts a plain function to the Logger interface.
type LoggerFunc func(ctx context.Context, logType LogType, format string, args ...interface{})

func (LoggerFunc) Log

func (l LoggerFunc) Log(ctx context.Context, logType LogType, format string, args ...interface{})

type Message

// Message represents a message in the chat conversation.
type Message struct {
	Type MsgType `json:"type"`
	// Time is a formatted timestamp string; see Timestamp for the raw unix
	// value.
	Time  string `json:"time"`
	Role  Role   `json:"role"`
	Model string `json:"model"`

	// general content
	Content string `json:"content"`
	Error   string `json:"error,omitempty"` // meaningful when: Type == MsgType_StreamEnd, MsgType_Error, MsgType_ToolCall

	// for tool call
	ToolUseID string `json:"tool_use_id,omitempty"`
	ToolName  string `json:"tool_name,omitempty"`

	// StreamID identifies the stream this message belongs to
	StreamID string `json:"stream_id,omitempty"`

	// for message token usage record
	TokenUsage *TokenUsage `json:"token_usage,omitempty"`

	// for message token cost record
	TokenCost *TokenCost `json:"token_cost,omitempty"`

	// Extended structured metadata
	Metadata Metadata `json:"metadata,omitempty"`

	// unix timestamp, accurate
	Timestamp int64 `json:"timestamp,omitempty"`
}

Message represents a message in the chat conversation

func StreamRequest

func StreamRequest(ctx context.Context, writer io.Writer, reader StdinReader, requestMsg Message, expectMsgType MsgType) (Message, error)

if expectMsgType is empty, it will return the first message that is not a stream handle ack

func (Message) TimeFilled

func (c Message) TimeFilled() Message

type Messages

// Messages represents a slice of messages.
type Messages []Message

Messages represents a slice of messages

type Metadata

// Metadata is extended structured metadata for a Message; each field
// corresponds to a specific MsgType value and is nil otherwise.
type Metadata struct {
	CacheInfo          *CacheInfoMetadata          `json:"cache_info,omitempty"`
	RoundStart         *RoundStartMetadata         `json:"round_start,omitempty"`
	RoundEnd           *RoundEndMetadata           `json:"round_end,omitempty"`
	StreamRequestTool  *StreamRequestToolMetadata  `json:"stream_request_tool,omitempty"`
	StreamResponseTool *StreamResponseToolMetadata `json:"stream_response_tool,omitempty"`
}

type ModelCost

// ModelCost represents the cost structure for a model (re-exported from the
// models package).
type ModelCost = models.ModelCost

ModelCost represents the cost structure for a model (re-exported from models package)

type ModelInfo added in v0.0.10

// ModelInfo contains all information about a model (re-exported from the
// models package).
type ModelInfo = models.ModelInfo

ModelInfo contains all information about a model (re-exported from models package)

type MsgType

// MsgType represents the type of message.
type MsgType string

MsgType represents the type of message

// Message type constants, grouped by where they appear.
const (
	// for both logs and input
	MsgType_Msg        MsgType = "msg"
	MsgType_ToolCall   MsgType = "tool_call"
	MsgType_ToolResult MsgType = "tool_result"

	// for logs only
	MsgType_Info       MsgType = "info"
	MsgType_Error      MsgType = "error"
	MsgType_CacheInfo  MsgType = "cache_info"
	MsgType_StopReason MsgType = "stop_reason"
	MsgType_TokenUsage MsgType = "token_usage"

	// for stream
	MsgType_StreamRequestTool    MsgType = "stream_request_tool"
	MsgType_StreamResponseTool   MsgType = "stream_response_tool"
	MsgType_StreamRequestUserMsg MsgType = "stream_request_user_msg"
	MsgType_StreamHandleAck      MsgType = "stream_handle_ack"
	MsgType_StreamEnd            MsgType = "stream_end" // signals the stream cannot handle the message

	// for initial stream
	MsgType_StreamInitRequest        MsgType = "stream_init_request"
	MsgType_StreamInitEventsFinished MsgType = "stream_init_events_finished"
)

func (MsgType) HistorySendable

func (m MsgType) HistorySendable() bool

func (MsgType) IsFileRecordable added in v0.0.9

func (m MsgType) IsFileRecordable() bool

type ParamType

// ParamType is re-exported from the jsonschema package.
type ParamType = jsonschema.ParamType

type Provider

// Provider represents the AI provider (re-exported from the models package).
type Provider = models.Provider

Provider represents the AI provider (re-exported from models package)

type Request

// Request represents a chat request: everything needed for one conversation
// run. Instances are normally configured through ChatOption functions.
type Request struct {
	Model   string `json:"model"`
	Token   string `json:"token"`
	BaseURL string `json:"base_url"`

	SystemPrompt string    `json:"system_prompt"`
	Message      string    `json:"message"`
	History      []Message `json:"history"`

	MaxRounds       int            `json:"max_rounds"`
	Tools           []string       `json:"tools"`
	ToolFiles       []string       `json:"tool_files"`
	ToolJSONs       []string       `json:"tool_jsons"`
	ToolDefinitions []*UnifiedTool `json:"tool_definitions"`
	DefaultToolCwd  string         `json:"default_tool_cwd"`

	NoCache    bool     `json:"no_cache"`
	MCPServers []string `json:"mcp_servers"`

	Logger Logger `json:"-"`

	// functional options
	EventCallback    EventCallback    `json:"-"` // Cannot be serialized
	ToolCallback     ToolCallback     `json:"-"` // Cannot be serialized
	FollowUpCallback FollowUpCallback `json:"-"` // Cannot be serialized

	// Stream fields for bidirectional tool callback communication
	StreamPair *StreamPair `json:"-"` // Cannot be serialized
}

Request represents a chat request

type Response

// Response represents a chat response summary.
type Response struct {
	TokenUsage TokenUsage `json:"token_usage"` // Token consumption details
	Cost       *TokenCost `json:"cost"`        // Cost information if available
	StopReason string     `json:"stop_reason"` // Why the conversation stopped
	RoundsUsed int        `json:"rounds_used"` // Number of conversation rounds used

	NumToolCalls int `json:"num_tool_calls"` // Number of tool calls used

	// LastAssistantMsg is exactly the last assistant text message of the
	// chat, excluding tool calls (assistant responses include both plain
	// messages and tool calls).
	LastAssistantMsg string `json:"last_assistant_response"`
}

Response represents a chat response

type Role

// Role represents the role of a message sender.
type Role string

Role represents the role of a message sender

// Supported message roles.
const (
	Role_User      Role = "user"
	Role_Assistant Role = "assistant"
	Role_System    Role = "system"
)

type RoundEndMetadata

// RoundEndMetadata carries the round number for a round-end event.
type RoundEndMetadata struct {
	Round int `json:"round"`
}

type RoundStartMetadata

// RoundStartMetadata carries the configured maximum number of rounds for a
// round-start event.
type RoundStartMetadata struct {
	MaxRounds int `json:"max_rounds"`
}

type StdLogger added in v0.0.3

// StdLogger is a stateless, zero-value-usable Logger implementation;
// presumably it writes to the standard log output — confirm in its Log method.
type StdLogger struct{}

func (StdLogger) Log added in v0.0.3

func (l StdLogger) Log(ctx context.Context, logType LogType, format string, args ...interface{})

type StdinReader

// StdinReader reads stdin in the background and fans decoded messages out to
// subscribers keyed by id.
type StdinReader interface {
	// Subscribe returns a channel that receives messages for the given id.
	Subscribe(id string) chan Message
	// Unsubscribe stops delivery for the given id.
	Unsubscribe(id string)
	// Start begins background reading.
	Start()
}

StdinReader interface for background stdin reading

func NewStdinReader

func NewStdinReader(stdin io.Reader) StdinReader

NewStdinReader creates a new stdin reader instance

type StreamContext

// StreamContext is the write side of a bidirectional stream used during tool
// callback communication.
type StreamContext interface {
	// ACK acknowledges the message with the given stream id, signaling that
	// this side will handle it.
	ACK(id string) error

	// Write sends a message over the stream.
	Write(msg Message) error
}

func NewStreamContext

func NewStreamContext(out io.Writer) StreamContext

type StreamPair

// StreamPair holds the two halves of a bidirectional tool-callback stream.
type StreamPair struct {
	Input  io.Reader `json:"-"` // Cannot be serialized - for reading tool callback responses
	Output io.Writer `json:"-"` // Cannot be serialized - for writing tool callback requests
}

type StreamRequestToolMetadata

// StreamRequestToolMetadata is metadata for stream_request_tool events.
type StreamRequestToolMetadata struct {
	DefaultWorkingDir string `json:"default_working_dir"`
}

type StreamResponseToolMetadata

// StreamResponseToolMetadata is metadata for stream_response_tool events;
// OK presumably reports whether the tool request was handled — confirm at
// the producer.
type StreamResponseToolMetadata struct {
	OK bool `json:"ok"`
}

type StringOrList

// StringOrList represents a value that can be either a string or a list of
// strings (kept as interface{} so JSON decoding accepts both forms).
type StringOrList = interface{}

StringOrList represents a value that can be either a string or a list of strings

type TokenCost

// TokenCost represents cost information; amounts are USD values encoded as
// strings.
type TokenCost struct {
	InputUSD       string                  `json:"input_usd"`
	OutputUSD      string                  `json:"output_usd"`
	TotalUSD       string                  `json:"total_usd"`
	InputBreakdown TokenCostInputBreakdown `json:"input_breakdown"`
}

TokenCost represents cost information

func (TokenCost) Add

func (t TokenCost) Add(other TokenCost) TokenCost

Add adds two TokenCost together

type TokenCostInputBreakdown

// TokenCostInputBreakdown represents the input-token cost breakdown by cache
// behavior (write / read / non-cached read), as USD strings.
type TokenCostInputBreakdown struct {
	CacheWriteUSD   string `json:"cache_write_usd"`
	CacheReadUSD    string `json:"cache_read_usd"`
	NonCacheReadUSD string `json:"non_cache_read_usd"`
}

TokenCostInputBreakdown represents input token cost breakdown

func (TokenCostInputBreakdown) Add

Add adds two TokenCostInputBreakdown together

type TokenUsage

// TokenUsage represents token usage information.
// NOTE(review): Total is presumably Input+Output — confirm where it is
// computed.
type TokenUsage struct {
	Input           int64                     `json:"input"`
	Output          int64                     `json:"output"`
	Total           int64                     `json:"total"`
	InputBreakdown  TokenUsageInputBreakdown  `json:"input_breakdown"`
	OutputBreakdown TokenUsageOutputBreakdown `json:"output_breakdown"`
}

TokenUsage represents token usage information; the cache-related input breakdown fields reflect Anthropic-style usage accounting.

func (TokenUsage) Add

func (t TokenUsage) Add(other TokenUsage) TokenUsage

Add adds two TokenUsage together

type TokenUsageCost

// TokenUsageCost combines usage and cost information.
type TokenUsageCost struct {
	Usage TokenUsage `json:"usage"`
	Cost  TokenCost  `json:"cost"`
}

TokenUsageCost combines usage and cost information

type TokenUsageInputBreakdown

// TokenUsageInputBreakdown represents the input token breakdown by cache
// behavior.
type TokenUsageInputBreakdown struct {
	CacheWrite   int64 `json:"cache_write"`
	CacheRead    int64 `json:"cache_read"`
	NonCacheRead int64 `json:"non_cache_read"`
}

TokenUsageInputBreakdown represents input token breakdown

func (TokenUsageInputBreakdown) Add

Add adds two TokenUsageInputBreakdown together

type TokenUsageOutputBreakdown

// TokenUsageOutputBreakdown represents the output token breakdown.
type TokenUsageOutputBreakdown struct {
	CacheOutput int64 `json:"cache_output"`
}

TokenUsageOutputBreakdown represents output token breakdown

func (TokenUsageOutputBreakdown) Add

Add adds two TokenUsageOutputBreakdown together

type ToolCall

// ToolCall represents a single tool invocation requested by the model.
type ToolCall struct {
	ID         string                 `json:"id"`          // Unique identifier for this tool call
	Name       string                 `json:"name"`        // Tool name
	Arguments  map[string]interface{} `json:"arguments"`   // Parsed JSON arguments
	RawArgs    string                 `json:"raw_args"`    // Raw JSON string arguments
	WorkingDir string                 `json:"working_dir"` // Working directory for tool execution
}

ToolCall represents a tool call

type ToolCallback

// ToolCallback allows custom tool execution.
// Returns (result, handled, error):
//   - result: tool execution result
//   - handled: true if the tool was handled by the callback; false to fall
//     back to built-in tools
//   - error: any execution error
type ToolCallback func(ctx context.Context, stream StreamContext, call ToolCall) (ToolResult, bool, error)

ToolCallback allows custom tool execution Returns: (result, handled, error) - result: Tool execution result - handled: true if tool was handled by callback, false to fallback to built-in tools - error: Any execution error

type ToolResult

// ToolResult represents the result of a tool execution.
type ToolResult struct {
	Content interface{} `json:"content"`         // Result data (must be JSON serializable)
	Error   string      `json:"error,omitempty"` // Tool execution error (if any)
}

ToolResult represents the result of a tool execution

type UnifiedTool

// UnifiedTool represents a unified tool definition.
type UnifiedTool struct {
	Format      string                 `json:"format,omitempty"`
	Name        string                 `json:"name"`
	Description string                 `json:"description,omitempty"`
	Parameters  *jsonschema.JsonSchema `json:"parameters,omitempty"`

	// command to be executed
	Command []string `json:"command"`

	// Handle, when non-nil, executes the tool in-process; same contract as
	// ToolCallback: (result, handled, error).
	Handle func(ctx context.Context, stream StreamContext, call ToolCall) (ToolResult, bool, error) `json:"-"`
}

UnifiedTool represents a unified tool definition

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL