chat

package
v0.0.37 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Nov 21, 2025 License: MIT Imports: 30 Imported by: 0

Documentation

Index

Examples

Constants

View Source
const MAX_PRINT_LIMIT = 2048

Variables

This section is empty.

Functions

func AppendToHistory

func AppendToHistory(filename string, message types.Message) error

AppendToHistory appends a single message to a history file

func Chat added in v0.0.17

func Chat(ctx context.Context, req types.Request) (*types.Response, error)

func CreateMessage

func CreateMessage(msgType types.MsgType, role types.Role, model, content string) types.Message

CreateMessage creates a new message with timestamp

func CreateToolCallMessage

func CreateToolCallMessage(role types.Role, model, toolName, toolUseID, content string) types.Message

CreateToolCallMessage creates a tool call message

func CreateToolResultMessage

func CreateToolResultMessage(role types.Role, model, toolName, toolUseID, content string) types.Message

CreateToolResultMessage creates a tool result message

func ExecuteBuiltinTool

func ExecuteBuiltinTool(ctx context.Context, call types.ToolCall) (types.ToolResult, error)

ExecuteBuiltinTool executes a builtin tool with the given call

func FilterHistoryByType

func FilterHistoryByType(messages []types.Message, msgType types.MsgType) []types.Message

FilterHistoryByType filters messages by type

func GetLastUserMessage

func GetLastUserMessage(messages []types.Message) *types.Message

GetLastUserMessage returns the last user message from history

func GetSystemPrompts

func GetSystemPrompts(messages []types.Message) []string

GetSystemPrompts extracts all system prompts from message history

func LoadHistory

func LoadHistory(filename string) ([]types.Message, error)

LoadHistory loads historical messages from a file

func SaveHistory

func SaveHistory(filename string, messages []types.Message) error

SaveHistory saves messages to a file (overwrites existing file)

func WithCache

func WithCache(enabled bool) types.ChatOption

WithCache controls whether caching is enabled (default: true)

func WithDefaultToolCwd

func WithDefaultToolCwd(cwd string) types.ChatOption

WithDefaultToolCwd sets the default working directory for tool execution

func WithEventCallback

func WithEventCallback(callback types.EventCallback) types.ChatOption

WithEventCallback sets a callback for receiving events during chat processing

func WithHistory

func WithHistory(messages []types.Message) types.ChatOption

WithHistory provides historical messages for conversation context

func WithMCPServers

func WithMCPServers(servers ...string) types.ChatOption

WithMCPServers specifies MCP servers to connect to

func WithMaxRounds

func WithMaxRounds(rounds int) types.ChatOption

WithMaxRounds sets the maximum number of conversation rounds

func WithStdStream

func WithStdStream(stdin io.Reader, stdout io.Writer) types.ChatOption

WithStdStream sets stdin and stdout for bidirectional tool callback communication

func WithSystemPrompt

func WithSystemPrompt(prompt string) types.ChatOption

WithSystemPrompt sets the system prompt for the conversation

func WithToolCallback

func WithToolCallback(callback types.ToolCallback) types.ChatOption

WithToolCallback sets a custom tool execution callback

func WithToolFiles

func WithToolFiles(files ...string) types.ChatOption

WithToolFiles specifies custom tool definition files to load

func WithToolJSONs

func WithToolJSONs(jsons ...string) types.ChatOption

WithToolJSONs specifies custom tool definitions as JSON strings

func WithTools

func WithTools(tools ...string) types.ChatOption

WithTools specifies the builtin tools to make available

Types

type AnthropicResponseResult

type AnthropicResponseResult struct {
	Messages     []types.Message
	ToolCalls    []types.ToolCall
	TokenUsage   types.TokenUsage
	ToolUseNum   int
	Stopped      bool
	RespMessages []anthropic.ContentBlockParamUnion
	ToolResults  []anthropic.ContentBlockParamUnion
}

type CliHandler

type CliHandler struct {
	// contains filtered or unexported fields
}

CliHandler wraps the core client with CLI-specific functionality

Example

ExampleCliHandler demonstrates CLI usage

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/xhd2015/kode-ai/chat"
)

func main() {
	client, err := chat.NewClient(chat.Config{
		Model: "claude-3-7-sonnet",
		Token: os.Getenv("ANTHROPIC_API_KEY"),
	})
	if err != nil {
		fmt.Printf("Error creating client: %v\n", err)
		return
	}

	// CLI wrapper for command-line usage
	cliHandler := chat.NewCliHandler(client, chat.CliOptions{
		RecordFile: "session.json",
		LogChat:    true,
		Verbose:    false,
	})

	err = cliHandler.HandleCli(context.Background(), "Hello, how are you?",
		chat.WithTools("file_read"),
		chat.WithMaxRounds(2),
	)
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}

	fmt.Println("CLI chat completed")
}

func NewCliHandler

func NewCliHandler(client *Client, opts CliOptions) *CliHandler

NewCliHandler creates a new CLI handler

func (*CliHandler) HandleCli

func (h *CliHandler) HandleCli(ctx context.Context, message string, coreOpts ...types.ChatOption) error

HandleCli handles a chat request with CLI-specific behavior

func (*CliHandler) HandleCliWithServer added in v0.0.35

func (h *CliHandler) HandleCliWithServer(ctx context.Context, message string, server string, chatWithServer func(ctx context.Context, server string, req types.Request) (*types.Response, error), coreOpts ...types.ChatOption) error

type CliOptions

type CliOptions struct {
	RecordFile         string // File recording for session persistence
	IgnoreDuplicateMsg bool   // Interactive duplicate message handling
	LogRequest         bool   // Debug request logging
	LogChat            bool   // Chat progress logging
	Verbose            bool   // Verbose output
	JSONOutput         bool   // Output response as JSON

	StreamPair *types.StreamPair
}

CliOptions represents CLI-specific options that don't belong in the core library

type Client

type Client struct {
	// contains filtered or unexported fields
}

Client represents the chat client

Example

ExampleClient demonstrates basic usage of the chat library

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/xhd2015/kode-ai/chat"
)

func main() {
	// Create a client
	client, err := chat.NewClient(chat.Config{
		Model: "claude-3-7-sonnet",
		Token: os.Getenv("ANTHROPIC_API_KEY"),
	})
	if err != nil {
		fmt.Printf("Error creating client: %v\n", err)
		return
	}

	// Simple chat
	response, err := client.Chat(context.Background(), "What is Go programming language?")
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}

	fmt.Printf("Response: %s\n", response.LastAssistantMsg[:50]+"...")
	fmt.Printf("Token usage: %d\n", response.TokenUsage.Total)
}
Example (MultiRound)

ExampleClient_multiRound demonstrates multi-round conversation

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/xhd2015/kode-ai/chat"
	"github.com/xhd2015/kode-ai/types"
)

func main() {
	client, err := chat.NewClient(chat.Config{
		Model: "gpt-4o",
		Token: os.Getenv("OPENAI_API_KEY"),
	})
	if err != nil {
		fmt.Printf("Error creating client: %v\n", err)
		return
	}

	var history []types.Message

	// First message
	_, err = client.Chat(context.Background(), "My name is Alice",
		chat.WithHistory(history))
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}
	// Note: Response doesn't have Messages field, so we can't append to history in this simple way

	// Follow-up message
	response2, err := client.Chat(context.Background(), "What is my name?",
		chat.WithHistory(history))
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}

	fmt.Printf("Second response: %s\n", response2.LastAssistantMsg[:50]+"...")
}
Example (WithCustomToolCallback)

ExampleClient_withCustomToolCallback demonstrates custom tool handling

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/xhd2015/kode-ai/chat"
	"github.com/xhd2015/kode-ai/types"
)

func main() {
	client, err := chat.NewClient(chat.Config{
		Model: "claude-3-7-sonnet",
		Token: os.Getenv("ANTHROPIC_API_KEY"),
	})
	if err != nil {
		fmt.Printf("Error creating client: %v\n", err)
		return
	}

	// Custom tool handler
	toolHandler := func(ctx context.Context, stream types.StreamContext, call types.ToolCall) (types.ToolResult, bool, error) {
		switch call.Name {
		case "custom_database_query":
			sql := call.Arguments["sql"].(string)
			// Simulate database query
			result := map[string]interface{}{
				"rows":    []string{"user1", "user2"},
				"count":   2,
				"query":   sql,
				"message": "Query executed successfully",
			}
			return types.ToolResult{Content: result}, true, nil // handled=true
		default:
			// Don't handle this tool, fallback to built-in tools
			return types.ToolResult{}, false, nil // handled=false, no error
		}
	}

	response, err := client.Chat(context.Background(), "Query the database for users",
		chat.WithToolCallback(toolHandler))
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}

	fmt.Printf("Response received: %s\n", response.LastAssistantMsg)
}
Example (WithTools)

ExampleClient_withTools demonstrates chat with tools

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/xhd2015/kode-ai/chat"
	"github.com/xhd2015/kode-ai/types"
)

func main() {
	client, err := chat.NewClient(chat.Config{
		Model: "gpt-4o",
		Token: os.Getenv("OPENAI_API_KEY"),
	})
	if err != nil {
		fmt.Printf("Error creating client: %v\n", err)
		return
	}

	// Chat with tools and custom callback
	response, err := client.Chat(context.Background(), "List files in current directory",
		chat.WithTools("file_list"),
		chat.WithEventCallback(func(event types.Message) {
			switch event.Type {
			case types.MsgType_Msg:
				fmt.Print(event.Content)
			case types.MsgType_ToolCall:
				fmt.Printf("\n🔧 Calling tool: %s\n", event.ToolName)
			case types.MsgType_ToolResult:
				fmt.Printf("✅ Tool completed\n")
			}
		}),
	)
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}

	fmt.Printf("Response: %s\n", response.LastAssistantMsg)
}

func NewClient

func NewClient(config Config) (*Client, error)

NewClient creates a new chat client

func (*Client) Chat

func (c *Client) Chat(ctx context.Context, message string, opts ...types.ChatOption) (*types.Response, error)

Chat performs a chat conversation using functional options

func (*Client) ChatRequest

func (c *Client) ChatRequest(ctx context.Context, req types.Request) (*types.Response, error)

ChatRequest performs a chat conversation using a direct request

type ClientUnion

type ClientUnion struct {
	OpenAI    *openai.Client
	Anthropic *anthropic.Client
	Gemini    *genai.Client
}

ClientUnion holds the provider-specific clients (OpenAI, Anthropic, Gemini) for internal use

type Config

type Config struct {
	Model    string             // Required: Model name (e.g., "claude-3-7-sonnet")
	Token    string             // Required: API token
	BaseURL  string             // Optional: Custom API base URL
	Provider providers.Provider // Optional: Auto-detected from model if not specified
	LogLevel types.LogLevel     // Optional: None, Request, Response, Debug

	Logger types.Logger
}

Config represents the client configuration with provider-specific fields

type GeminiResponseResult

type GeminiResponseResult struct {
	Messages     []types.Message
	ToolCalls    []types.ToolCall
	TokenUsage   types.TokenUsage
	ToolUseNum   int
	Stopped      bool
	RespMessages []*genai.Content
	ToolResults  []*genai.Content
}

type JSONLogEntry

type JSONLogEntry struct {
	Type      string      `json:"type"`
	Content   string      `json:"content,omitempty"`
	Metadata  interface{} `json:"metadata,omitempty"`
	Timestamp string      `json:"timestamp,omitempty"`
}

JSONLogEntry represents a structured log entry for JSON output

type MessageHistoryUnion

type MessageHistoryUnion struct {
	FullHistory   Messages
	SystemPrompts []string

	OpenAI    []openai.ChatCompletionMessageParamUnion
	Anthropic []anthropic.MessageParam
	Gemini    []*genai.Content
}

type Messages

type Messages []types.Message

Messages is a local wrapper for conversion methods

func (Messages) ToAnthropic

func (messages Messages) ToAnthropic() (msgs []anthropic.MessageParam, systemPrompts []string, err error)

ToAnthropic converts unified messages to Anthropic format

func (Messages) ToGemini

func (messages Messages) ToGemini() (msgs []*genai.Content, systemPrompts []string, err error)

ToGemini converts unified messages to Gemini format

func (Messages) ToOpenAI

func (messages Messages) ToOpenAI(keepSystemPrompts bool) (msgs []openai.ChatCompletionMessageParamUnion, systemPrompts []string, err error)

ToOpenAI converts unified messages to OpenAI format

type MessagesUnion

type MessagesUnion struct {
	OpenAI    []openai.ChatCompletionMessageParamUnion
	Anthropic []anthropic.MessageParam
	Gemini    []*genai.Content
}

type ResponseResult

type ResponseResult struct {
	Messages     []types.Message
	ToolCalls    []types.ToolCall
	TokenUsage   types.TokenUsage
	ToolUseNum   int
	Stopped      bool
	RespMessages []openai.ChatCompletionMessageParamUnion // For OpenAI
	ToolResults  []openai.ChatCompletionMessageParamUnion // For OpenAI
}

ResponseResult represents the result of processing a model response (OpenAI variant)

type ToolInfo

type ToolInfo struct {
	Name           string
	Builtin        bool
	ToolDefinition *tools.UnifiedTool
	MCPServer      string
	MCPClient      *client.Client
}

ToolInfo represents information about a tool

func (*ToolInfo) String

func (c *ToolInfo) String() string

String returns a string representation of the tool info

type ToolInfoMapping

type ToolInfoMapping map[string]*ToolInfo

ToolInfoMapping maps tool names to their information

func (ToolInfoMapping) AddTool

func (c ToolInfoMapping) AddTool(toolName string, toolInfo *ToolInfo) error

AddTool adds a tool to the mapping

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL