llm

package
v0.1.5 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Dec 31, 2025 License: Apache-2.0 Imports: 28 Imported by: 0

Documentation

Index

Constants

View Source
const AgentTypeLLM ai.AgentType = "llm"

Variables

View Source
var (
	// ErrConnectionNotFound is returned when a named connection doesn't exist in the registry.
	ErrConnectionNotFound = errors.New("connection not found")

	// ErrMissingAPIKey is returned when a connection has no API key (password field).
	ErrMissingAPIKey = errors.New("connection missing API key")

	// ErrInvalidProvider is returned when the connection type doesn't map to a known provider.
	ErrInvalidProvider = errors.New("invalid or unknown provider type")

	// ErrMissingPrompt is returned when Execute() is called without a prompt.
	ErrMissingPrompt = errors.New("prompt is required")

	// ErrMissingConnection is returned when Execute() is called without a connection name.
	ErrMissingConnection = errors.New("connection name is required")

	// ErrTimeout is returned when a request exceeds the configured timeout.
	ErrTimeout = errors.New("request timeout exceeded")

	// ErrSchemaValidation is returned when structured output doesn't match the schema.
	ErrSchemaValidation = errors.New("response failed schema validation")

	// ErrInvalidMaxTokens is returned when max tokens is <= 0.
	ErrInvalidMaxTokens = errors.New("max tokens must be greater than 0")

	// ErrInvalidTimeout is returned when timeout is <= 0.
	ErrInvalidTimeout = errors.New("timeout must be greater than 0")

	// ErrCLINotFound is returned when the claude-code CLI executable is not found in PATH.
	ErrCLINotFound = errors.New("claude-code CLI not found in PATH")

	// ErrCLIExecutionFailed is returned when the CLI process exits with a non-zero status.
	ErrCLIExecutionFailed = errors.New("claude-code CLI execution failed")
)

Functions

func CleanupJSONResponse

func CleanupJSONResponse(response string) string

CleanupJSONResponse attempts to extract and clean JSON from LLM responses that may contain markdown formatting, explanatory text, or other noise.

It tries the following strategies in order: 1. Validate if already valid JSON 2. Extract JSON from markdown code blocks (```json or ```) 3. Extract the first JSON object {...} 4. Extract the first JSON array [...] 5. Return the trimmed original string

After extraction, it validates that the result is valid JSON.

func NewAnthropicProvider

func NewAnthropicProvider(apiKey, model, apiURL string) Provider

NewAnthropicProvider creates a new Anthropic provider with the specified configuration.

func NewClaudeCodeProvider

func NewClaudeCodeProvider(model string) Provider

NewClaudeCodeProvider creates a new Claude Code provider with the specified model.

func NewClientWithModel

func NewClientWithModel(model string, options ...middleware.Option) (Client, error)

NewClientWithModel creates a new LLM client for the specified model. The client automatically infers the provider from the model name and looks up the API key from environment variables:

  • OpenAI models: OPENAI_API_KEY
  • Anthropic models: ANTHROPIC_API_KEY
  • Gemini models: GEMINI_API_KEY or GOOGLE_API_KEY

Example usage:

client, err := llm.NewClientWithModel("gpt-4o")
resp, err := client.NewRequest().
    WithPrompt("Hello world").
    Execute(ctx)

func NewGeminiProvider

func NewGeminiProvider(apiKey, model, apiURL string) Provider

NewGeminiProvider creates a new Gemini provider with the specified configuration.

func NewOpenAIProvider

func NewOpenAIProvider(apiKey, model, apiURL string) Provider

NewOpenAIProvider creates a new OpenAI provider with the specified configuration.

func NewProvider

func NewProvider(conn *Connection) (Provider, error)

NewProvider creates a provider from a connection configuration.

func UnmarshalWithCleanup

func UnmarshalWithCleanup(data string, v interface{}) error

UnmarshalWithCleanup attempts to unmarshal JSON with automatic cleanup.

Types

type JSONSchema

type JSONSchema struct {
	Type                 string                `json:"type,omitempty"`
	Properties           map[string]JSONSchema `json:"properties,omitempty"`
	Required             []string              `json:"required,omitempty"`
	Items                *JSONSchema           `json:"items,omitempty"`
	Description          string                `json:"description,omitempty"`
	Enum                 []interface{}         `json:"enum,omitempty"`
	AdditionalProperties bool                  `json:"additionalProperties,omitempty"`
}

JSONSchema represents a JSON Schema definition

type LLMAgent

type LLMAgent struct {
	// contains filtered or unexported fields
}

LLMAgent implements the Agent interface using the commons-db LLM client. It supports all LLM backends (OpenAI, Anthropic, Gemini, Claude Code CLI).

func NewLLMAgent

func NewLLMAgent(config ai.AgentConfig) (*LLMAgent, error)

NewLLMAgent creates a new LLM agent with the specified configuration.

The agent can be configured to use any LLM backend by specifying the ai.Model name:

  • OpenAI: gpt-4o, gpt-4-turbo, gpt-3.5-turbo
  • Anthropic: claude-3-opus, claude-3.5-sonnet, claude-3.5-haiku
  • Gemini: gemini-2.5-pro, gemini-2.0-flash, gemini-1.5-pro
  • Claude Code CLI: claude-code-sonnet, claude-code-opus, claude-code-haiku

func (*LLMAgent) Close

func (la *LLMAgent) Close() error

Close cleans up agent resources.

func (*LLMAgent) ExecuteBatch

func (la *LLMAgent) ExecuteBatch(ctx context.Context, requests []ai.PromptRequest) (map[string]*ai.PromptResponse, error)

ExecuteBatch processes multiple prompts concurrently.

func (*LLMAgent) ExecutePrompt

func (la *LLMAgent) ExecutePrompt(ctx context.Context, request ai.PromptRequest) (*ai.PromptResponse, error)

ExecutePrompt processes a single prompt using the LLM client.

func (*LLMAgent) GetConfig

func (la *LLMAgent) GetConfig() ai.AgentConfig

GetConfig returns the agent configuration.

func (*LLMAgent) GetCosts

func (la *LLMAgent) GetCosts() ai.Costs

GetCosts returns accumulated costs for this session.

func (*LLMAgent) GetType

func (la *LLMAgent) GetType() ai.AgentType

GetType returns the agent type.

func (*LLMAgent) ListModels

func (la *LLMAgent) ListModels(ctx context.Context) ([]ai.Model, error)

ListModels returns available models for all LLM backends.

func (LLMAgent) Pretty

func (la LLMAgent) Pretty() api.Text

func (LLMAgent) String

func (la LLMAgent) String() string

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL