Documentation ¶
Index ¶
- type Agent
- type AgentResponse
- type Cache
- type CompletionRequest
- type CompletionResponse
- type Config
- type Document
- type Embedding
- type EmbeddingProvider
- type LLMProvider
- type Manager
- func (m *Manager) Complete(ctx context.Context, messages []Message) (*CompletionResponse, error)
- func (m *Manager) GetAllTools() []Tool
- func (m *Manager) GetConfig() *Config
- func (m *Manager) GetDefaultProvider() (LLMProvider, error)
- func (m *Manager) GetProvider(name string) (LLMProvider, error)
- func (m *Manager) GetTool(name string) (Tool, bool)
- func (m *Manager) IndexDocument(ctx context.Context, collection string, doc Document) error
- func (m *Manager) IsConfigured() bool
- func (m *Manager) RegisterTool(tool Tool)
- func (m *Manager) SearchSimilar(ctx context.Context, collection, query string, k int) ([]SearchResult, error)
- func (m *Manager) UpdateConfig(config *Config)
- type Message
- type SearchResult
- type Tool
- type ToolCall
- type VectorDB
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type Agent ¶
type Agent interface {
// Name returns the agent name
Name() string
// Description returns the agent's capabilities
Description() string
// SetTools configures the tools available to the agent
SetTools(tools []Tool)
// Execute runs the agent with the given task
Execute(ctx context.Context, task string) (*AgentResponse, error)
// Chat handles a conversational interaction
Chat(ctx context.Context, messages []Message) (*AgentResponse, error)
}
Agent represents an AI agent that can use tools and complete tasks
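The sketch below shows one way a caller might drive an Agent. It is illustrative only (runTask is not part of the package), written as if inside the package, with standard-library imports (context, fmt) elided:

func runTask(ctx context.Context, agent Agent, tools []Tool) error {
	// Give the agent its tools before asking it to do anything.
	agent.SetTools(tools)

	resp, err := agent.Execute(ctx, "summarize open issues labeled 'bug'")
	if err != nil {
		return err
	}

	fmt.Printf("%s finished, %d tokens used\n", agent.Name(), resp.TokensUsed)
	for _, call := range resp.ToolCalls {
		fmt.Printf("  tool %q called with %v\n", call.Tool, call.Parameters)
	}
	return nil
}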
type AgentResponse ¶
type AgentResponse struct {
Content string `json:"content"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
Metadata map[string]interface{} `json:"metadata,omitempty"`
TokensUsed int `json:"tokens_used"`
}
AgentResponse represents an agent's response
type Cache ¶
type Cache interface {
// Get retrieves a cached value
Get(key string) (interface{}, bool)
// Set stores a value in the cache
Set(key string, value interface{}, ttl time.Duration)
// Delete removes a value from the cache
Delete(key string)
// Clear removes all values from the cache
Clear()
}
Cache interface for caching AI responses
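One way to satisfy this interface is a small mutex-guarded in-memory map. The memoryCache type below is an illustrative sketch, not part of the package (imports: sync, time):

type cacheItem struct {
	value   interface{}
	expires time.Time
}

type memoryCache struct {
	mu    sync.Mutex
	items map[string]cacheItem
}

func (c *memoryCache) Get(key string) (interface{}, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	item, ok := c.items[key]
	if !ok || time.Now().After(item.expires) {
		return nil, false // missing or expired
	}
	return item.value, true
}

func (c *memoryCache) Set(key string, value interface{}, ttl time.Duration) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.items == nil {
		c.items = make(map[string]cacheItem)
	}
	c.items[key] = cacheItem{value: value, expires: time.Now().Add(ttl)}
}

func (c *memoryCache) Delete(key string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	delete(c.items, key)
}

func (c *memoryCache) Clear() {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.items = make(map[string]cacheItem)
}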
type CompletionRequest ¶
type CompletionRequest struct {
Messages []Message `json:"messages"`
Model string `json:"model"`
Temperature float64 `json:"temperature"`
MaxTokens int `json:"max_tokens"`
Stream bool `json:"stream"`
}
CompletionRequest represents a request for LLM completion
type CompletionResponse ¶
type CompletionResponse struct {
Content string `json:"content"`
Model string `json:"model"`
TokensUsed int `json:"tokens_used"`
FinishReason string `json:"finish_reason"`
Created time.Time `json:"created"`
}
CompletionResponse represents the LLM's response
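For illustration, a single request/response round trip against any LLMProvider (defined below) might look like the sketch here; the model name is a placeholder, and standard-library imports (context, log) are elided:

func askOnce(ctx context.Context, provider LLMProvider) (string, error) {
	req := CompletionRequest{
		Messages: []Message{
			{Role: "system", Content: "You are a terse assistant."},
			{Role: "user", Content: "Explain vector embeddings in one sentence."},
		},
		Model:       "example-model", // placeholder; pick one returned by ListModels
		Temperature: 0.2,
		MaxTokens:   256,
	}

	resp, err := provider.Complete(ctx, req)
	if err != nil {
		return "", err
	}
	log.Printf("model=%s tokens=%d finish=%s", resp.Model, resp.TokensUsed, resp.FinishReason)
	return resp.Content, nil
}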
type Config ¶
type Config struct {
// LLM settings
DefaultProvider string
DefaultModel string
Temperature float64
MaxTokens int
// Vector DB settings
VectorDBProvider string
// Rate limiting
RequestsPerMinute int
RequestsPerDay int
// Cache settings
CacheTTL time.Duration
// Provider-specific settings
AnthropicAPIKey string
OpenAIAPIKey string
OllamaHost string
ChromaHost string
PineconeAPIKey string
PineconeEnvironment string
WeaviateHost string
WeaviateAPIKey string
}
Config represents the AI system configuration
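A hand-built configuration might look like the sketch below; the provider names and hosts are assumptions for illustration, and which fields are actually required depends on the providers you use:

var exampleConfig = &Config{
	DefaultProvider:   "anthropic", // assumed provider name; see GetProvider
	DefaultModel:      "example-model",
	Temperature:       0.7,
	MaxTokens:         1024,
	VectorDBProvider:  "chroma", // assumed vector DB name
	RequestsPerMinute: 60,
	RequestsPerDay:    5000,
	CacheTTL:          15 * time.Minute,
	AnthropicAPIKey:   os.Getenv("ANTHROPIC_API_KEY"),
	ChromaHost:        "http://localhost:8000", // assumed local Chroma instance
}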
type Document ¶
type Document struct {
ID string `json:"id"`
Content string `json:"content"`
Metadata map[string]interface{} `json:"metadata"`
Vector []float64 `json:"vector,omitempty"`
}
Document represents a document for vector storage
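An illustrative document; the Vector field can be left unset when the embedding is produced elsewhere (for example by an EmbeddingProvider, described below):

var readmeDoc = Document{
	ID:      "readme-1",
	Content: "This service exposes a REST API on port 8080.",
	Metadata: map[string]interface{}{
		"path":    "README.md",
		"section": "usage",
	},
	// Vector omitted; see EmbeddingProvider for generating one.
}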
type Embedding ¶
type Embedding struct {
Vector []float64 `json:"vector"`
Metadata map[string]interface{} `json:"metadata"`
}
Embedding represents a vector embedding
type EmbeddingProvider ¶
type EmbeddingProvider interface {
// Embed generates an embedding for the given text
Embed(ctx context.Context, text string) ([]float64, error)
// EmbedBatch generates embeddings for multiple texts
EmbedBatch(ctx context.Context, texts []string) ([][]float64, error)
// Dimension returns the embedding dimension
Dimension() int
// IsConfigured checks if the provider is properly configured
IsConfigured() bool
// Name returns the provider name
Name() string
}
EmbeddingProvider generates embeddings for text
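A rough usage sketch (the embedDocuments helper is illustrative, not part of the package):

func embedDocuments(ctx context.Context, ep EmbeddingProvider, texts []string) ([][]float64, error) {
	if !ep.IsConfigured() {
		return nil, fmt.Errorf("embedding provider %s is not configured", ep.Name())
	}
	vectors, err := ep.EmbedBatch(ctx, texts)
	if err != nil {
		return nil, err
	}
	// Each returned vector is expected to have ep.Dimension() elements.
	return vectors, nil
}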
type LLMProvider ¶
type LLMProvider interface {
// Complete generates a completion for the given request
Complete(ctx context.Context, req CompletionRequest) (*CompletionResponse, error)
// Stream generates a streaming completion
Stream(ctx context.Context, req CompletionRequest) (<-chan string, error)
// ListModels returns available models
ListModels(ctx context.Context) ([]string, error)
// IsConfigured checks if the provider is properly configured
IsConfigured() bool
// Name returns the provider name
Name() string
}
LLMProvider is the interface that all LLM providers must implement
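As a sketch, streaming a completion and printing chunks as they arrive might look like this (streamAnswer is illustrative only):

func streamAnswer(ctx context.Context, provider LLMProvider, prompt string) error {
	req := CompletionRequest{
		Messages: []Message{{Role: "user", Content: prompt}},
		Stream:   true,
	}
	chunks, err := provider.Stream(ctx, req)
	if err != nil {
		return err
	}
	for chunk := range chunks {
		fmt.Print(chunk) // print each chunk as it arrives
	}
	fmt.Println()
	return nil
}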
type Manager ¶
type Manager struct {
// contains filtered or unexported fields
}
Manager orchestrates AI providers, vector databases, and tools
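How a Manager is constructed is not shown in this documentation; given one, a simple chat helper might look like the following sketch (illustrative only):

func chat(ctx context.Context, m *Manager, question string) (string, error) {
	if !m.IsConfigured() {
		return "", errors.New("AI system is not configured")
	}
	resp, err := m.Complete(ctx, []Message{
		{Role: "system", Content: "Answer briefly."},
		{Role: "user", Content: question},
	})
	if err != nil {
		return "", err
	}
	return resp.Content, nil
}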
func (*Manager) Complete ¶
func (m *Manager) Complete(ctx context.Context, messages []Message) (*CompletionResponse, error)
func (*Manager) GetAllTools ¶
func (m *Manager) GetAllTools() []Tool
GetAllTools returns all registered tools
func (*Manager) GetConfig ¶
func (m *Manager) GetConfig() *Config
func (*Manager) GetDefaultProvider ¶
func (m *Manager) GetDefaultProvider() (LLMProvider, error)
GetDefaultProvider returns the default LLM provider
func (*Manager) GetProvider ¶
func (m *Manager) GetProvider(name string) (LLMProvider, error)
GetProvider returns the specified LLM provider
func (*Manager) GetTool ¶
func (m *Manager) GetTool(name string) (Tool, bool)
func (*Manager) IndexDocument ¶
func (m *Manager) IndexDocument(ctx context.Context, collection string, doc Document) error
IndexDocument adds a document to the vector database
func (*Manager) IsConfigured ¶
func (m *Manager) IsConfigured() bool
IsConfigured checks if the AI system is properly configured
func (*Manager) RegisterTool ¶
func (m *Manager) RegisterTool(tool Tool)
RegisterTool registers a tool for use by agents
func (*Manager) SearchSimilar ¶
func (m *Manager) SearchSimilar(ctx context.Context, collection, query string, k int) ([]SearchResult, error)
SearchSimilar performs vector similarity search
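For illustration, indexing a document and then querying it back might look like this sketch; the collection name and document contents are made up:

func indexAndSearch(ctx context.Context, m *Manager) ([]SearchResult, error) {
	doc := Document{
		ID:       "doc-1",
		Content:  "The deployment pipeline runs nightly at 02:00 UTC.",
		Metadata: map[string]interface{}{"source": "runbook"},
	}
	if err := m.IndexDocument(ctx, "ops-notes", doc); err != nil {
		return nil, err
	}
	// Return the 3 documents most similar to the query.
	return m.SearchSimilar(ctx, "ops-notes", "when does the pipeline run?", 3)
}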
func (*Manager) UpdateConfig ¶
func (m *Manager) UpdateConfig(config *Config)
UpdateConfig updates the AI configuration
type Message ¶
type Message struct {
Role string `json:"role"` // "system", "user", "assistant"
Content string `json:"content"` // The message content
}
Message represents a message in a conversation
type SearchResult ¶
SearchResult represents a vector search result
type Tool ¶
type Tool interface {
// Name returns the tool name
Name() string
// Description returns a description of what the tool does
Description() string
// Parameters returns the tool's parameter schema
Parameters() map[string]interface{}
// Execute runs the tool with the given parameters
Execute(ctx context.Context, params map[string]interface{}) (interface{}, error)
}
Tool represents an AI tool that can be used by agents
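An illustrative implementation; echoTool is not part of the package, and the JSON-Schema-style shape of the Parameters map is an assumption about how tools describe their inputs:

type echoTool struct{}

func (echoTool) Name() string        { return "echo" }
func (echoTool) Description() string { return "Returns the provided text unchanged" }

func (echoTool) Parameters() map[string]interface{} {
	return map[string]interface{}{
		"type": "object",
		"properties": map[string]interface{}{
			"text": map[string]interface{}{"type": "string"},
		},
		"required": []string{"text"},
	}
}

func (echoTool) Execute(ctx context.Context, params map[string]interface{}) (interface{}, error) {
	text, ok := params["text"].(string)
	if !ok {
		return nil, fmt.Errorf("echo: missing or non-string %q parameter", "text")
	}
	return text, nil
}

A tool implemented this way can be made available to agents with Manager.RegisterTool(echoTool{}).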
type ToolCall ¶
type ToolCall struct {
Tool string `json:"tool"`
Parameters map[string]interface{} `json:"parameters"`
Result interface{} `json:"result,omitempty"`
}
ToolCall represents a tool invocation by an agent
type VectorDB ¶
type VectorDB interface {
// CreateCollection creates a new collection
CreateCollection(ctx context.Context, name string, dimension int) error
// DeleteCollection deletes a collection
DeleteCollection(ctx context.Context, name string) error
// Insert adds documents to a collection
Insert(ctx context.Context, collection string, documents []Document) error
// Search performs similarity search
Search(ctx context.Context, collection string, query []float64, k int) ([]SearchResult, error)
// Delete removes documents by ID
Delete(ctx context.Context, collection string, ids []string) error
// IsConfigured checks if the vector DB is properly configured
IsConfigured() bool
// Name returns the provider name
Name() string
}
VectorDB is the interface for vector databases
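A sketch of pairing a VectorDB with an EmbeddingProvider to run a similarity query; error handling is simplified, and whether CreateCollection fails when the collection already exists is not specified here:

func searchCollection(ctx context.Context, db VectorDB, ep EmbeddingProvider, query string) ([]SearchResult, error) {
	// Ensure a collection exists, sized to the embedding dimension.
	if err := db.CreateCollection(ctx, "notes", ep.Dimension()); err != nil {
		return nil, err
	}
	// Embed the query text, then search by vector.
	vec, err := ep.Embed(ctx, query)
	if err != nil {
		return nil, err
	}
	return db.Search(ctx, "notes", vec, 5)
}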