llm

package v0.2.5

Published: Jan 3, 2026 License: MIT Imports: 3 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type AnthropicClient

type AnthropicClient struct {
	// contains filtered or unexported fields
}

AnthropicClient implements Client for Anthropic

func NewAnthropicClient

func NewAnthropicClient(apiKey string) (*AnthropicClient, error)

NewAnthropicClient creates a new Anthropic client
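
A minimal construction sketch. The import path "example.com/yourmodule/llm" and the ANTHROPIC_API_KEY environment variable are assumptions for illustration, not part of this package:

package main

import (
	"log"
	"os"

	"example.com/yourmodule/llm" // import path is assumed; substitute the real module path
)

func main() {
	// The environment variable name is an assumption; supply the key however your application manages secrets.
	client, err := llm.NewAnthropicClient(os.Getenv("ANTHROPIC_API_KEY"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
}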

func (*AnthropicClient) Chat

func (c *AnthropicClient) Chat(ctx context.Context, messages []Message, opts ...Option) (*Response, error)
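
A sketch of a Chat call with message history, using the Role values documented on Message. The import path and the model string passed to WithModel are illustrative assumptions; use a model your provider actually accepts:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"example.com/yourmodule/llm" // assumed import path
)

func main() {
	client, err := llm.NewAnthropicClient(os.Getenv("ANTHROPIC_API_KEY"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	messages := []llm.Message{
		{Role: "system", Content: "You are a concise assistant."},
		{Role: "user", Content: "Summarize functional options in Go in one sentence."},
	}

	resp, err := client.Chat(context.Background(), messages,
		llm.WithModel("claude-3-haiku-20240307"), // model name is illustrative
		llm.WithMaxTokens(256),
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Content)
}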

func (*AnthropicClient) Close

func (c *AnthropicClient) Close() error

func (*AnthropicClient) Complete

func (c *AnthropicClient) Complete(ctx context.Context, prompt string, opts ...Option) (*Response, error)

func (*AnthropicClient) Embed

func (c *AnthropicClient) Embed(ctx context.Context, texts []string) ([][]float64, error)
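
A sketch of embedding a batch of texts; Embed returns one vector per input. Import path and credentials handling are assumptions as above:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"example.com/yourmodule/llm" // assumed import path
)

func main() {
	client, err := llm.NewAnthropicClient(os.Getenv("ANTHROPIC_API_KEY"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	vectors, err := client.Embed(context.Background(), []string{"hello", "world"})
	if err != nil {
		log.Fatal(err)
	}
	for i, v := range vectors {
		fmt.Printf("text %d: %d dimensions\n", i, len(v))
	}
}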

type Client

type Client interface {
	// Complete generates a completion for the given prompt
	Complete(ctx context.Context, prompt string, opts ...Option) (*Response, error)

	// Chat performs a chat completion with message history
	Chat(ctx context.Context, messages []Message, opts ...Option) (*Response, error)

	// Embed generates embeddings for the given texts
	Embed(ctx context.Context, texts []string) ([][]float64, error)

	// Close closes the client and releases resources
	Close() error
}

Client provides a unified interface for LLM providers

func NewClient

func NewClient(provider string, apiKey string) (Client, error)

NewClient creates a new LLM client for the given provider
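
A sketch of the provider-agnostic path through NewClient. The provider string "openai" and the environment variable name are assumptions; this documentation does not list the accepted provider values:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"example.com/yourmodule/llm" // assumed import path
)

func main() {
	// "openai" is an assumed provider string; check the package source for accepted values.
	client, err := llm.NewClient("openai", os.Getenv("OPENAI_API_KEY"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	resp, err := client.Complete(context.Background(), "Write a haiku about Go interfaces.",
		llm.WithTemperature(0.7),
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Content)
}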

type Message

type Message struct {
	Role    string // one of "system", "user", or "assistant"
	Content string
}

Message represents a chat message

type OpenAIClient

type OpenAIClient struct {
	// contains filtered or unexported fields
}

OpenAIClient implements Client for OpenAI

func NewOpenAIClient

func NewOpenAIClient(apiKey string) (*OpenAIClient, error)

NewOpenAIClient creates a new OpenAI client

func (*OpenAIClient) Chat

func (c *OpenAIClient) Chat(ctx context.Context, messages []Message, opts ...Option) (*Response, error)

func (*OpenAIClient) Close

func (c *OpenAIClient) Close() error

func (*OpenAIClient) Complete

func (c *OpenAIClient) Complete(ctx context.Context, prompt string, opts ...Option) (*Response, error)

func (*OpenAIClient) Embed

func (c *OpenAIClient) Embed(ctx context.Context, texts []string) ([][]float64, error)

type Option

type Option func(*Options)

Option is a functional option for LLM requests
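
The option constructors documented below can be combined in a single call. A sketch of composing them, assuming the same hypothetical import path; the model name and numeric values are illustrative:

package example

import (
	"context"

	"example.com/yourmodule/llm" // assumed import path
)

// ask shows how the functional options compose on a single request.
func ask(ctx context.Context, client llm.Client, prompt string) (*llm.Response, error) {
	return client.Complete(ctx, prompt,
		llm.WithModel("gpt-4o-mini"), // illustrative model name
		llm.WithMaxTokens(512),
		llm.WithTemperature(0.2),
		llm.WithTopP(0.9),
		llm.WithStop("\n\n"),
	)
}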

func WithMaxTokens

func WithMaxTokens(tokens int) Option

WithMaxTokens sets the maximum number of tokens to generate

func WithModel

func WithModel(model string) Option

WithModel sets the model to use

func WithStop

func WithStop(stop ...string) Option

WithStop sets the stop sequences that end generation

func WithTemperature

func WithTemperature(temp float64) Option

WithTemperature sets the sampling temperature

func WithTopP

func WithTopP(p float64) Option

WithTopP sets the top-p sampling parameter

type Options

type Options struct {
	Model       string
	MaxTokens   int
	Temperature float64
	TopP        float64
	Stop        []string
}

Options holds generation options

type Response

type Response struct {
	Content      string
	FinishReason string
	Usage        Usage
	Model        string
}

Response represents an LLM response

type Usage

type Usage struct {
	PromptTokens     int
	CompletionTokens int
	TotalTokens      int
}

Usage tracks token usage
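
A sketch of reading the Response and Usage fields after a call; the field names follow the structs above, everything else is illustrative:

package example

import (
	"context"
	"fmt"

	"example.com/yourmodule/llm" // assumed import path
)

// report prints the fields a Response carries, including per-request token usage.
func report(ctx context.Context, client llm.Client, prompt string) error {
	resp, err := client.Complete(ctx, prompt)
	if err != nil {
		return err
	}
	fmt.Printf("model=%s finish=%s\n", resp.Model, resp.FinishReason)
	fmt.Printf("tokens: prompt=%d completion=%d total=%d\n",
		resp.Usage.PromptTokens, resp.Usage.CompletionTokens, resp.Usage.TotalTokens)
	return nil
}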
