ai

package
v0.20.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Mar 9, 2026 License: Apache-2.0 Imports: 8 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

View Source
// Sentinel errors returned by the ai package.
// Callers should compare against them with errors.Is.
var (
	// ErrStreamClosed is returned when attempting to read from a closed stream.
	ErrStreamClosed = errors.New("ai: stream is closed")
	// ErrNoContent is returned when the model returns no content.
	ErrNoContent = errors.New("ai: no content in response")
	// ErrMaxIterationsReached is returned when an agent exceeds its iteration limit.
	ErrMaxIterationsReached = errors.New("ai: maximum iterations reached")
	// ErrToolNotFound is returned when a requested tool is not available.
	ErrToolNotFound = errors.New("ai: tool not found")
	// ErrInvalidArguments is returned when tool arguments are invalid.
	ErrInvalidArguments = errors.New("ai: invalid tool arguments")
	// ErrProviderNotFound is returned when a model provider is not registered.
	ErrProviderNotFound = errors.New("ai: provider not found")
	// ErrModelNotSupported is returned when a model is not supported by the provider.
	ErrModelNotSupported = errors.New("ai: model not supported")
	// ErrAgentNotFound is returned when an agent type is not registered.
	ErrAgentNotFound = errors.New("ai: agent type not found")
)

Functions

func ListAgentFactories

func ListAgentFactories() []string

ListAgentFactories returns the names of all registered agent factories.

func ListModelProviders

func ListModelProviders() []string

ListModelProviders returns the names of all registered model providers.

func RegisterAgentFactory

func RegisterAgentFactory(f AgentFactory)

RegisterAgentFactory registers an agent factory. It panics if a factory with the same name is already registered.

func RegisterModelProvider

func RegisterModelProvider(p ModelProvider)

RegisterModelProvider registers a model provider. It panics if a provider with the same name is already registered.

Types

type Agent

// Agent represents an AI agent that can reason and use tools.
type Agent interface {
	// Run executes the agent synchronously with the given input.
	Run(ctx context.Context, input string, opts ...Option) (*Message, error)
	// Stream executes the agent and returns a streaming response.
	Stream(ctx context.Context, input string, opts ...Option) (MessageStream, error)
}

Agent represents an AI agent that can reason and use tools.

type AgentBuilder

// AgentBuilder provides a fluent interface for building agents.
// Each With* method returns the builder so calls can be chained; Build
// produces the final Agent.
type AgentBuilder interface {
	// WithModel sets the chat model for the agent.
	WithModel(model ToolableChatModel) AgentBuilder
	// WithTools adds tools to the agent.
	WithTools(tools ...Tool) AgentBuilder
	// WithSystemPrompt sets the system prompt.
	WithSystemPrompt(prompt string) AgentBuilder
	// WithMaxIterations sets the maximum number of iterations.
	WithMaxIterations(n int) AgentBuilder
	// Build creates the agent instance.
	Build(ctx context.Context) (Agent, error)
}

AgentBuilder provides a fluent interface for building agents.

func NewAgentBuilder

func NewAgentBuilder(agentType string) (AgentBuilder, error)

NewAgentBuilder creates a new agent builder using the registered factory.

type AgentConfig

// AgentConfig contains configuration for creating an agent.
type AgentConfig struct {
	// Model is the chat model to use for reasoning.
	Model ToolableChatModel
	// Tools are the tools available to the agent.
	Tools []Tool
	// SystemPrompt is the system prompt that guides the agent's behavior.
	SystemPrompt string
	// MaxIterations limits the maximum number of reasoning iterations.
	// Exceeding it yields ErrMaxIterationsReached.
	MaxIterations int
}

AgentConfig contains configuration for creating an agent.

type AgentFactory

// AgentFactory defines the interface for agent factories.
// Factories are registered via RegisterAgentFactory and looked up by Name.
type AgentFactory interface {
	// Name returns the agent type name.
	Name() string
	// CreateBuilder creates a new agent builder.
	CreateBuilder() AgentBuilder
}

AgentFactory defines the interface for agent factories.

type ChatModel

// ChatModel defines the interface for chat-based language models.
type ChatModel interface {
	// Generate produces a response from the given messages synchronously.
	Generate(ctx context.Context, messages []*Message, opts ...Option) (*Message, error)
	// Stream produces a streaming response from the given messages.
	Stream(ctx context.Context, messages []*Message, opts ...Option) (MessageStream, error)
}

ChatModel defines the interface for chat-based language models.

type Message

// Message represents a chat message in a conversation.
// Role determines which of the optional fields are meaningful.
type Message struct {
	// Role indicates who sent this message.
	Role Role
	// Content is the text content of the message.
	Content string
	// ToolCalls contains tool invocation requests (only for Assistant role).
	ToolCalls []ToolCall
	// ToolResult contains tool execution result (only for Tool role).
	ToolResult *ToolResult
	// Usage contains token usage statistics (only for response messages).
	Usage *TokenUsage
}

Message represents a chat message in a conversation.

func NewAssistantMessage

func NewAssistantMessage(content string) *Message

NewAssistantMessage creates a new assistant message.

func NewAssistantMessageWithToolCalls

func NewAssistantMessageWithToolCalls(content string, toolCalls []ToolCall) *Message

NewAssistantMessageWithToolCalls creates a new assistant message with tool calls.

func NewSystemMessage

func NewSystemMessage(content string) *Message

NewSystemMessage creates a new system message.

func NewToolMessage

func NewToolMessage(callID, content string) *Message

NewToolMessage creates a new tool result message.

func NewUserMessage

func NewUserMessage(content string) *Message

NewUserMessage creates a new user message.

func (*Message) HasToolCalls

func (m *Message) HasToolCalls() bool

HasToolCalls returns true if this message contains tool calls.

func (*Message) IsAssistant

func (m *Message) IsAssistant() bool

IsAssistant returns true if this is an assistant message.

func (*Message) IsSystem

func (m *Message) IsSystem() bool

IsSystem returns true if this is a system message.

func (*Message) IsTool

func (m *Message) IsTool() bool

IsTool returns true if this is a tool result message.

func (*Message) IsUser

func (m *Message) IsUser() bool

IsUser returns true if this is a user message.

type MessageChunk

// MessageChunk represents a chunk of a streaming message.
type MessageChunk struct {
	// Content is the incremental text content.
	Content string
	// ToolCalls contains tool calls (may be partial in streaming).
	ToolCalls []ToolCall
	// Done indicates whether the stream is complete.
	Done bool
}

MessageChunk represents a chunk of a streaming message.

type MessageStream

// MessageStream represents a stream of message chunks.
// Callers must Close the stream when finished with it.
type MessageStream interface {
	io.Closer

	// Recv receives the next message chunk from the stream.
	// Returns io.EOF when the stream is exhausted.
	Recv() (*MessageChunk, error)
	// Collect collects all chunks and merges them into a complete message.
	Collect() (*Message, error)
}

MessageStream represents a stream of message chunks.

type ModelConfig

// ModelConfig contains configuration for creating a model.
type ModelConfig struct {
	// Provider is the name of the model provider.
	Provider string
	// Model is the name of the model to use.
	Model string
	// APIKey is the API key for authentication.
	APIKey string
	// BaseURL is the base URL for the API endpoint.
	BaseURL string
	// Temperature controls randomness (0.0 to 1.0).
	Temperature float64
	// MaxTokens limits the maximum tokens in the response.
	MaxTokens int
	// Timeout is the request timeout duration.
	Timeout time.Duration
}

ModelConfig contains configuration for creating a model.

type ModelError

// ModelError represents an error from the model API.
type ModelError struct {
	// Provider is the name of the provider that produced the error.
	Provider string
	// StatusCode is the status code returned by the model API
	// (presumably an HTTP status — confirm with provider implementations).
	StatusCode int
	// Message is the error message reported by the API.
	Message string
}

ModelError represents an error from the model API.

func NewModelError

func NewModelError(provider string, statusCode int, message string) *ModelError

NewModelError creates a new ModelError.

func (*ModelError) Error

func (e *ModelError) Error() string

type ModelInfo

// ModelInfo contains information about a model.
type ModelInfo struct {
	// Provider is the name of the model provider.
	Provider string
	// Model is the name of the model.
	Model string
	// MaxTokens is the maximum context length.
	MaxTokens int
	// Temperature is the default temperature setting.
	Temperature float64
}

ModelInfo contains information about a model.

type ModelProvider

// ModelProvider defines the interface for model providers.
// Providers are registered via RegisterModelProvider and looked up by Name.
type ModelProvider interface {
	// Name returns the provider's unique identifier.
	Name() string
	// CreateModel creates a new chat model instance.
	CreateModel(ctx context.Context, cfg *ModelConfig) (ToolableChatModel, error)
}

ModelProvider defines the interface for model providers.

type Option

// Option is a functional option for configuring AI operations.
type Option func(*Options)

Option is a functional option for configuring AI operations.

func WithMaxTokens

func WithMaxTokens(n int) Option

WithMaxTokens sets the maximum tokens parameter.

func WithMeta

func WithMeta(key, value string) Option

WithMeta adds a meta key-value pair.

func WithStopSequences

func WithStopSequences(seqs ...string) Option

WithStopSequences sets the stop sequences.

func WithTemperature

func WithTemperature(t float64) Option

WithTemperature sets the temperature parameter.

type Options

// Options contains runtime configuration for AI operations.
// Pointer fields distinguish "unset" (nil) from an explicit zero value.
type Options struct {
	// Temperature controls randomness in the output.
	Temperature *float64
	// MaxTokens limits the maximum number of tokens to generate.
	MaxTokens *int
	// StopSequences specifies sequences that stop generation.
	StopSequences []string
	// Meta contains additional key-value pairs.
	Meta map[string]string
}

Options contains runtime configuration for AI operations.

func NewOptions

func NewOptions() *Options

NewOptions creates a new Options with defaults.

func (*Options) Apply

func (o *Options) Apply(opts ...Option) *Options

Apply applies the given options to this Options instance.

type ParameterSchema

// ParameterSchema defines the JSON Schema for tool parameters.
type ParameterSchema struct {
	// Type is the schema type, typically "object".
	Type string
	// Properties defines the parameter properties.
	Properties map[string]*PropertySchema
	// Required lists the required parameter names.
	Required []string
}

ParameterSchema defines the JSON Schema for tool parameters.

type PropertySchema

// PropertySchema defines a single parameter property.
type PropertySchema struct {
	// Type is the property type: string, number, integer, boolean, array, object.
	Type string
	// Description explains what this parameter is for.
	Description string
	// Enum lists allowed values if this is an enumeration.
	Enum []string
	// Items defines the schema for array items (only for array type).
	Items *PropertySchema
}

PropertySchema defines a single parameter property.

type Role

// Role defines the message role type.
type Role string

Role defines the message role type.

// The set of message roles understood by the package.
const (
	// RoleSystem represents a system message that sets context or instructions.
	RoleSystem Role = "system"
	// RoleUser represents a message from the user.
	RoleUser Role = "user"
	// RoleAssistant represents a message from the AI assistant.
	RoleAssistant Role = "assistant"
	// RoleTool represents a message containing tool execution results.
	RoleTool Role = "tool"
)

type StreamableTool

// StreamableTool is a tool that supports streaming output.
type StreamableTool interface {
	Tool

	// InvokeStream executes the tool and returns a streaming result.
	InvokeStream(ctx context.Context, arguments string) (StringStream, error)
}

StreamableTool is a tool that supports streaming output.

type StringStream

// StringStream represents a stream of string chunks.
// Callers must Close the stream when finished with it.
type StringStream interface {
	io.Closer

	// Recv receives the next string chunk from the stream.
	// Returns io.EOF when the stream is exhausted.
	Recv() (string, error)
	// Collect collects all chunks and concatenates them.
	Collect() (string, error)
}

StringStream represents a stream of string chunks.

type TokenUsage

// TokenUsage represents token consumption statistics.
type TokenUsage struct {
	// PromptTokens is the number of tokens in the prompt.
	PromptTokens int
	// CompletionTokens is the number of tokens in the completion.
	CompletionTokens int
	// TotalTokens is the total number of tokens used.
	TotalTokens int
}

TokenUsage represents token consumption statistics.

type Tool

// Tool represents a callable tool that can be used by AI models.
type Tool interface {
	// Info returns the tool's metadata.
	Info() *ToolInfo
	// Invoke executes the tool with the given JSON-encoded arguments.
	// Returns the result as a string.
	Invoke(ctx context.Context, arguments string) (string, error)
}

Tool represents a callable tool that can be used by AI models.

type ToolCall

// ToolCall represents a tool invocation request from the model.
type ToolCall struct {
	// ID is the unique identifier for this tool call.
	ID string
	// Name is the name of the tool to invoke.
	Name string
	// Arguments contains the JSON-encoded arguments for the tool.
	Arguments string
}

ToolCall represents a tool invocation request from the model.

type ToolError

// ToolError represents an error that occurred during tool execution.
// It wraps the underlying error, which is recoverable via errors.Unwrap.
type ToolError struct {
	// ToolName is the name of the tool that failed.
	ToolName string
	// Err is the underlying error returned by the tool.
	Err error
}

ToolError represents an error that occurred during tool execution.

func NewToolError

func NewToolError(toolName string, err error) *ToolError

NewToolError creates a new ToolError.

func (*ToolError) Error

func (e *ToolError) Error() string

func (*ToolError) Unwrap

func (e *ToolError) Unwrap() error

type ToolInfo

// ToolInfo contains metadata about a tool.
type ToolInfo struct {
	// Name is the unique identifier of the tool.
	Name string
	// Description explains what the tool does.
	Description string
	// Parameters defines the input schema for the tool.
	Parameters *ParameterSchema
}

ToolInfo contains metadata about a tool.

type ToolResult

// ToolResult represents the result of a tool execution.
type ToolResult struct {
	// CallID is the identifier of the corresponding ToolCall.
	CallID string
	// Content is the result content from the tool execution.
	Content string
}

ToolResult represents the result of a tool execution.

type ToolableChatModel

// ToolableChatModel is a chat model that supports tool calling.
type ToolableChatModel interface {
	ChatModel

	// WithTools returns a new model instance with the specified tools bound.
	// This follows the immutable pattern - it does not modify the current instance.
	WithTools(tools ...Tool) ToolableChatModel
}

ToolableChatModel is a chat model that supports tool calling.

func NewChatModel

func NewChatModel(ctx context.Context, cfg *ModelConfig) (ToolableChatModel, error)

NewChatModel creates a new chat model using the registered provider.

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL