Documentation ¶
Index ¶
- func CalculateOptimalMaxTokens(modelMaxTokens int) int
- func SetCurrentTheme(name string) error
- type ChatCommand
- type ChatInterface
- func NewChatInterface(sessionConfig SessionConfig) (*ChatInterface, error)
- func (c *ChatInterface) GetContext() *ConversationContext
- func (c *ChatInterface) ReloadLLMProvider() error
- func (c *ChatInterface) StartSession(ctx context.Context) error
- type ChatMetrics
- type CommandArg
- type CommandHandler
- type CommandResult
- type ConversationContext
- type GeneratedFile
- type InputHandler
- func NewInputHandler(commands map[string]*ChatCommand) *InputHandler
- func (h *InputHandler) ClearHistory()
- func (h *InputHandler) GetHistory() []string
- func (h *InputHandler) ReadLine(promptText string) (string, error)
- func (h *InputHandler) ReadSimple(promptText string) (string, error)
- type MarkdownRenderer
- func NewMarkdownRenderer() *MarkdownRenderer
- func (mr *MarkdownRenderer) Render(text string) string
- type Message
- type SavedMessage
- type SessionConfig
- func DefaultSessionConfig() SessionConfig
- type SessionHistory
- type SessionManager
- func NewSessionManager() (*SessionManager, error)
- func (sm *SessionManager) AddConversationMessage(role, content string, metadata map[string]interface{}) error
- func (sm *SessionManager) AddMessage(message string) error
- func (sm *SessionManager) CleanupOldSessions() error
- func (sm *SessionManager) CreateNewSession(projectPath, mode string) *SessionHistory
- func (sm *SessionManager) DeleteSession(sessionID string) error
- func (sm *SessionManager) GetCurrentSession() *SessionHistory
- func (sm *SessionManager) GetHistoryDir() string
- func (sm *SessionManager) GetMaxSessions() int
- func (sm *SessionManager) ListSessions() ([]*SessionHistory, error)
- func (sm *SessionManager) LoadSession(sessionID string) (*SessionHistory, error)
- func (sm *SessionManager) LoadSessionWithoutUpdate(sessionID string) (*SessionHistory, error)
- func (sm *SessionManager) SaveSession(session *SessionHistory) error
- func (sm *SessionManager) SetMaxSessions(max int)
- type StreamRenderer
- func NewStreamRenderer() *StreamRenderer
- func (sr *StreamRenderer) Flush() string
- func (sr *StreamRenderer) ProcessChunk(chunk string) string
- func (sr *StreamRenderer) SetTheme(theme *Theme)
- type Theme
- func GetCurrentTheme() *Theme
- type ToolCallHandler
- func NewToolCallHandler(commands map[string]*ChatCommand) *ToolCallHandler
- func (h *ToolCallHandler) ExecuteToolCall(ctx context.Context, toolCall llm.ToolCall, chatContext *ConversationContext) (*CommandResult, error)
- func (h *ToolCallHandler) FormatToolCallForConversation(toolCall llm.ToolCall, result *CommandResult) string
- func (h *ToolCallHandler) FormatToolCallResult(toolCall llm.ToolCall, result *CommandResult) string
- func (h *ToolCallHandler) GetAvailableTools() []llm.Tool
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func CalculateOptimalMaxTokens ¶
CalculateOptimalMaxTokens calculates an appropriate MaxTokens value based on the model's capabilities. It uses 15% of the context window for responses, with reasonable bounds.
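A minimal sketch of that heuristic, for illustration only; the clamping bounds below are assumptions, not the package's actual limits:

// Hypothetical re-implementation of the documented 15% heuristic.
// The floor and ceiling values are illustrative assumptions.
func optimalMaxTokensSketch(modelMaxTokens int) int {
    maxTokens := modelMaxTokens * 15 / 100
    if maxTokens < 1024 {
        maxTokens = 1024 // assumed floor
    }
    if maxTokens > 8192 {
        maxTokens = 8192 // assumed ceiling
    }
    return maxTokens
}

For a 32,768-token context window the 15% rule gives 4,915 tokens, which falls inside the assumed bounds.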
func SetCurrentTheme ¶
SetCurrentTheme sets the current active theme
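A usage sketch; the theme name "dark" and the package alias chat are placeholders, since the available theme names are not listed in this documentation:

// Switch themes at runtime; a non-nil error indicates the name is unknown.
if err := chat.SetCurrentTheme("dark"); err != nil {
    log.Printf("could not set theme: %v", err)
}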
Types ¶
type ChatCommand ¶
type ChatCommand struct {
    Name        string         `json:"name"`        // Command name (e.g., "search", "analyze")
    Description string         `json:"description"` // Command description
    Usage       string         `json:"usage"`       // Usage example
    Handler     CommandHandler `json:"-"`           // Command handler function
    Aliases     []string       `json:"aliases"`     // Command aliases
    Args        []CommandArg   `json:"args"`        // Command arguments
}
ChatCommand represents a command that can be executed during chat
type ChatInterface ¶
type ChatInterface struct {
    // contains filtered or unexported fields
}
ChatInterface implements the interactive chat experience
func NewChatInterface ¶
func NewChatInterface(sessionConfig SessionConfig) (*ChatInterface, error)
NewChatInterface creates a new chat interface
func (*ChatInterface) GetContext ¶
func (c *ChatInterface) GetContext() *ConversationContext
GetContext returns the current conversation context
func (*ChatInterface) ReloadLLMProvider ¶
func (c *ChatInterface) ReloadLLMProvider() error
ReloadLLMProvider reloads the LLM provider with current config
func (*ChatInterface) StartSession ¶
func (c *ChatInterface) StartSession(ctx context.Context) error
StartSession starts an interactive chat session
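A sketch of the typical lifecycle, assuming the package is imported as chat (the import path is not shown here) and that StartSession blocks until the user ends the session:

// Build a session configuration, create the interface, and run it.
cfg := chat.DefaultSessionConfig()
cfg.Mode = "dev"
cfg.ProjectPath = "."

ci, err := chat.NewChatInterface(cfg)
if err != nil {
    log.Fatalf("creating chat interface: %v", err)
}

// Run the interactive session (assumed to block until the user exits).
if err := ci.StartSession(context.Background()); err != nil {
    log.Fatalf("chat session ended with error: %v", err)
}

// The conversation context remains available afterwards.
fmt.Println("messages exchanged:", len(ci.GetContext().History))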
type ChatMetrics ¶
type ChatMetrics struct {
    SessionDuration time.Duration `json:"session_duration"` // Total session duration
    MessageCount    int           `json:"message_count"`    // Total messages exchanged
    CommandsUsed    []string      `json:"commands_used"`    // Commands used in session
    FilesGenerated  int           `json:"files_generated"`  // Number of files generated
    TokensUsed      int           `json:"tokens_used"`      // Total LLM tokens used
    Errors          int           `json:"errors"`           // Number of errors encountered
}
ChatMetrics tracks metrics for chat sessions
type CommandArg ¶
type CommandArg struct {
    Name        string `json:"name"`        // Argument name
    Type        string `json:"type"`        // Argument type (string, int, bool)
    Required    bool   `json:"required"`    // Whether argument is required
    Description string `json:"description"` // Argument description
    Default     string `json:"default"`     // Default value
}
CommandArg represents an argument for a chat command
type CommandHandler ¶
type CommandHandler func(ctx context.Context, args []string, context *ConversationContext) (*CommandResult, error)
CommandHandler is a function that handles a chat command
type CommandResult ¶
type CommandResult struct {
    Success  bool                   `json:"success"`   // Whether command succeeded
    Message  string                 `json:"message"`   // Result message
    Data     map[string]interface{} `json:"data"`      // Additional result data
    Files    []GeneratedFile        `json:"files"`     // Files generated by command
    NextStep string                 `json:"next_step"` // Suggested next step
}
CommandResult represents the result of a command execution
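These command types compose as in the sketch below, which assumes the package is imported as chat; the "echo" command, its argument, and the handler logic are purely illustrative:

// A hypothetical "echo" command wiring CommandArg, CommandHandler,
// CommandResult, and ChatCommand together.
echoHandler := func(ctx context.Context, args []string, cc *chat.ConversationContext) (*chat.CommandResult, error) {
    return &chat.CommandResult{
        Success:  true,
        Message:  strings.Join(args, " "),
        NextStep: "Ask a follow-up question or run /help.",
    }, nil
}

echo := &chat.ChatCommand{
    Name:        "echo",
    Description: "Repeat the provided text back to the user",
    Usage:       "/echo <text>",
    Aliases:     []string{"say"},
    Args: []chat.CommandArg{
        {Name: "text", Type: "string", Required: true, Description: "Text to repeat"},
    },
    Handler: echoHandler,
}

commands := map[string]*chat.ChatCommand{echo.Name: echo}

A map in this shape is what NewInputHandler and NewToolCallHandler accept.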
type ConversationContext ¶
type ConversationContext struct {
    ProjectPath  string                    `json:"project_path"`  // Path to the project being discussed
    ProjectInfo  *analysis.ProjectAnalysis `json:"project_info"`  // Analysis of the current project
    Mode         string                    `json:"mode"`          // "dev", "devops", or "general"
    History      []Message                 `json:"history"`       // Conversation history
    CurrentTopic string                    `json:"current_topic"` // Current conversation topic
    Resources    []string                  `json:"resources"`     // Available resources from parent
    Environment  string                    `json:"environment"`   // Current target environment
    UserIntent   string                    `json:"user_intent"`   // Inferred user intent
    SessionID    string                    `json:"session_id"`    // Unique session identifier
    CreatedAt    time.Time                 `json:"created_at"`    // Session creation time
    UpdatedAt    time.Time                 `json:"updated_at"`    // Last update time
    Metadata     map[string]interface{}    `json:"metadata"`      // Additional context data
}
ConversationContext maintains the state of an ongoing conversation
type GeneratedFile ¶
type GeneratedFile struct {
    Path        string `json:"path"`        // File path
    Content     string `json:"content"`     // File content
    Type        string `json:"type"`        // File type (yaml, dockerfile, etc.)
    Description string `json:"description"` // File description
    Generated   bool   `json:"generated"`   // Whether file was generated vs updated
}
GeneratedFile represents a file generated during chat
type InputHandler ¶
type InputHandler struct {
    // contains filtered or unexported fields
}
InputHandler handles enhanced input with autocomplete and history
func NewInputHandler ¶
func NewInputHandler(commands map[string]*ChatCommand) *InputHandler
NewInputHandler creates a new input handler
func (*InputHandler) ClearHistory ¶
func (h *InputHandler) ClearHistory()
ClearHistory clears the command history
func (*InputHandler) GetHistory ¶
func (h *InputHandler) GetHistory() []string
GetHistory returns the command history
func (*InputHandler) ReadLine ¶
func (h *InputHandler) ReadLine(promptText string) (string, error)
ReadLine reads a line with autocomplete and history support
func (*InputHandler) ReadSimple ¶
func (h *InputHandler) ReadSimple(promptText string) (string, error)
ReadSimple reads a simple line without autocomplete (for menus, prompts, etc.)
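A usage sketch, reusing a commands map like the one shown earlier; the prompt strings are arbitrary:

// Read interactive input with autocomplete and history.
in := chat.NewInputHandler(commands)

line, err := in.ReadLine("you> ")
if err != nil {
    log.Fatalf("reading input: %v", err)
}
fmt.Println("got:", line)

// ReadSimple skips autocomplete, e.g. for yes/no prompts.
answer, _ := in.ReadSimple("Save session? [y/N] ")
fmt.Println("answer:", answer)

fmt.Println("history so far:", in.GetHistory())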
type MarkdownRenderer ¶
type MarkdownRenderer struct {
    // contains filtered or unexported fields
}
MarkdownRenderer renders markdown text with colors for terminal
func NewMarkdownRenderer ¶
func NewMarkdownRenderer() *MarkdownRenderer
NewMarkdownRenderer creates a new markdown renderer
func (*MarkdownRenderer) Render ¶
func (mr *MarkdownRenderer) Render(text string) string
Render renders markdown text with colors
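A brief sketch, again assuming the chat import alias; the markdown input is arbitrary:

// Render a complete markdown string for terminal display.
mr := chat.NewMarkdownRenderer()
fmt.Println(mr.Render("# Plan\n\nUse **docker compose** for local development."))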
type SavedMessage ¶
type SavedMessage struct {
    Role      string                 `json:"role"`
    Content   string                 `json:"content"`
    Timestamp time.Time              `json:"timestamp"`
    Metadata  map[string]interface{} `json:"metadata,omitempty"`
}
SavedMessage represents a message in conversation history
type SessionConfig ¶
type SessionConfig struct {
    Mode           string            `json:"mode"`            // Chat mode (dev, devops, general)
    ProjectPath    string            `json:"project_path"`    // Project path
    LLMProvider    string            `json:"llm_provider"`    // LLM provider (openai, local, etc.)
    APIKey         string            `json:"api_key"`         // LLM provider API key
    MaxTokens      int               `json:"max_tokens"`      // Maximum tokens per response
    Temperature    float32           `json:"temperature"`     // LLM temperature setting
    EnableCommands bool              `json:"enable_commands"` // Enable chat commands
    LogLevel       string            `json:"log_level"`       // Logging level
    Metadata       map[string]string `json:"metadata"`        // Additional session metadata
}
SessionConfig configures a chat session
func DefaultSessionConfig ¶
func DefaultSessionConfig() SessionConfig
DefaultSessionConfig returns default session configuration
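A sketch of adjusting the defaults before opening a session; the field values are arbitrary examples and the 128,000-token context size is an assumption about the target model:

cfg := chat.DefaultSessionConfig()
cfg.Mode = "devops"
cfg.LLMProvider = "openai"
cfg.APIKey = os.Getenv("OPENAI_API_KEY")
cfg.Temperature = 0.2
// Derive a response budget from the model's context window.
cfg.MaxTokens = chat.CalculateOptimalMaxTokens(128000)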
type SessionHistory ¶
type SessionHistory struct {
    ID                  string         `json:"id"`
    StartedAt           time.Time      `json:"started_at"`
    LastUsedAt          time.Time      `json:"last_used_at"`
    CommandHistory      []string       `json:"command_history"`      // Command history for input
    ConversationHistory []SavedMessage `json:"conversation_history"` // Full conversation context
    ProjectPath         string         `json:"project_path"`
    Mode                string         `json:"mode"`
    Title               string         `json:"title"` // Optional user-defined title
}
SessionHistory represents a saved chat session
type SessionManager ¶
type SessionManager struct {
    // contains filtered or unexported fields
}
SessionManager manages session history
func NewSessionManager ¶
func NewSessionManager() (*SessionManager, error)
NewSessionManager creates a new session manager
func (*SessionManager) AddConversationMessage ¶
func (sm *SessionManager) AddConversationMessage(role, content string, metadata map[string]interface{}) error
AddConversationMessage adds a message to conversation history
func (*SessionManager) AddMessage ¶
func (sm *SessionManager) AddMessage(message string) error
AddMessage adds a message to command history
func (*SessionManager) CleanupOldSessions ¶
func (sm *SessionManager) CleanupOldSessions() error
CleanupOldSessions removes old sessions beyond maxSessions limit
func (*SessionManager) CreateNewSession ¶
func (sm *SessionManager) CreateNewSession(projectPath, mode string) *SessionHistory
CreateNewSession creates a new session
func (*SessionManager) DeleteSession ¶
func (sm *SessionManager) DeleteSession(sessionID string) error
DeleteSession deletes a session by ID
func (*SessionManager) GetCurrentSession ¶
func (sm *SessionManager) GetCurrentSession() *SessionHistory
GetCurrentSession returns the current session
func (*SessionManager) GetHistoryDir ¶
func (sm *SessionManager) GetHistoryDir() string
GetHistoryDir returns the history directory path
func (*SessionManager) GetMaxSessions ¶
func (sm *SessionManager) GetMaxSessions() int
GetMaxSessions returns the maximum number of sessions
func (*SessionManager) ListSessions ¶
func (sm *SessionManager) ListSessions() ([]*SessionHistory, error)
ListSessions lists all saved sessions sorted by last used time
func (*SessionManager) LoadSession ¶
func (sm *SessionManager) LoadSession(sessionID string) (*SessionHistory, error)
LoadSession loads a session by ID
func (*SessionManager) LoadSessionWithoutUpdate ¶
func (sm *SessionManager) LoadSessionWithoutUpdate(sessionID string) (*SessionHistory, error)
LoadSessionWithoutUpdate loads a session without updating LastUsedAt
func (*SessionManager) SaveSession ¶
func (sm *SessionManager) SaveSession(session *SessionHistory) error
SaveSession saves the current session to disk
func (*SessionManager) SetMaxSessions ¶
func (sm *SessionManager) SetMaxSessions(max int)
SetMaxSessions sets the maximum number of sessions to keep
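A sketch of managing session history end to end; the project path, mode, messages, and retention limit are illustrative values:

sm, err := chat.NewSessionManager()
if err != nil {
    log.Fatalf("creating session manager: %v", err)
}
sm.SetMaxSessions(20)

// Start a fresh session and record some history.
session := sm.CreateNewSession("/path/to/project", "dev")
_ = sm.AddMessage("/analyze")
_ = sm.AddConversationMessage("user", "Summarize the repo layout", nil)
if err := sm.SaveSession(session); err != nil {
    log.Printf("saving session: %v", err)
}

// Enumerate what is on disk and prune anything beyond the limit.
sessions, err := sm.ListSessions()
if err == nil {
    for _, s := range sessions {
        fmt.Println(s.ID, s.Title, s.LastUsedAt.Format(time.RFC3339))
    }
}
_ = sm.CleanupOldSessions()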
type StreamRenderer ¶
type StreamRenderer struct {
    // contains filtered or unexported fields
}
StreamRenderer handles real-time markdown rendering for streaming text
func NewStreamRenderer ¶
func NewStreamRenderer() *StreamRenderer
NewStreamRenderer creates a new stream renderer
func (*StreamRenderer) Flush ¶
func (sr *StreamRenderer) Flush() string
Flush returns any remaining buffered content
func (*StreamRenderer) ProcessChunk ¶
func (sr *StreamRenderer) ProcessChunk(chunk string) string
ProcessChunk processes a chunk of streaming text and returns colored output
func (*StreamRenderer) SetTheme ¶
func (sr *StreamRenderer) SetTheme(theme *Theme)
SetTheme updates the theme
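A sketch of incremental rendering during a streamed LLM response; the chunk slice stands in for whatever the provider delivers:

// Render markdown as chunks arrive, then flush any buffered tail.
sr := chat.NewStreamRenderer()
for _, chunk := range []string{"## Result\n", "The build ", "**succeeded**.\n"} {
    fmt.Print(sr.ProcessChunk(chunk))
}
fmt.Print(sr.Flush())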
type Theme ¶
type Theme struct {
    Name          string
    Description   string
    TextColor     *color.Color
    CodeColor     *color.Color
    HeaderColor   *color.Color
    EmphasisColor *color.Color
}
Theme represents a color theme for the chat interface
func GetCurrentTheme ¶
func GetCurrentTheme() *Theme
GetCurrentTheme returns the current active theme
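A brief sketch of reading the active theme; it assumes the *color.Color fields come from github.com/fatih/color, so each one can print styled text directly:

theme := chat.GetCurrentTheme()
fmt.Println("active theme:", theme.Name)
// Each field is assumed to be a ready-to-use fatih/color printer.
fmt.Println(theme.HeaderColor.Sprint("Deployment checklist"))
fmt.Println(theme.TextColor.Sprint("1. Build the image"))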
func (*Theme) ApplyEmphasis ¶
func (*Theme) ApplyHeader ¶
type ToolCallHandler ¶
type ToolCallHandler struct {
    // contains filtered or unexported fields
}
ToolCallHandler handles LLM tool calls by routing them to appropriate chat commands
func NewToolCallHandler ¶
func NewToolCallHandler(commands map[string]*ChatCommand) *ToolCallHandler
NewToolCallHandler creates a new tool call handler
func (*ToolCallHandler) ExecuteToolCall ¶
func (h *ToolCallHandler) ExecuteToolCall(ctx context.Context, toolCall llm.ToolCall, chatContext *ConversationContext) (*CommandResult, error)
ExecuteToolCall executes a tool call by routing it to the appropriate command
func (*ToolCallHandler) FormatToolCallForConversation ¶
func (h *ToolCallHandler) FormatToolCallForConversation(toolCall llm.ToolCall, result *CommandResult) string
FormatToolCallForConversation creates a formatted message about the tool call for the conversation
func (*ToolCallHandler) FormatToolCallResult ¶
func (h *ToolCallHandler) FormatToolCallResult(toolCall llm.ToolCall, result *CommandResult) string
FormatToolCallResult formats a tool call result for inclusion in conversation
func (*ToolCallHandler) GetAvailableTools ¶
func (h *ToolCallHandler) GetAvailableTools() []llm.Tool
GetAvailableTools returns the tool definitions for available chat commands
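A sketch of the round trip with an LLM provider; how the provider consumes llm.Tool definitions and produces llm.ToolCall values is outside this package, so ctx, chatCtx, toolCall, and the commands map are assumed to be in scope:

handler := chat.NewToolCallHandler(commands)

// Advertise the available tools to the LLM provider.
tools := handler.GetAvailableTools()
_ = tools // pass these along with the chat completion request

// When the provider responds with a tool call, route and record it.
// toolCall is an llm.ToolCall received from the provider (not shown).
result, err := handler.ExecuteToolCall(ctx, toolCall, chatCtx)
if err != nil {
    log.Printf("tool call failed: %v", err)
} else {
    fmt.Println(handler.FormatToolCallResult(toolCall, result))
    // Feed a formatted record of the call back into the conversation history.
    _ = handler.FormatToolCallForConversation(toolCall, result)
}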