llms

package
v0.21.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Feb 4, 2026 License: MIT Imports: 9 Imported by: 1

Documentation

Index

Constants

View Source
const RerankPromptDefault = `` /* 584-byte string literal not displayed */

Variables

This section is empty.

Functions

func GenerateFromSinglePrompt

func GenerateFromSinglePrompt(ctx context.Context, llm Model, prompt string, options ...CallOption) (string, error)

func TextParts

func TextParts(role schema.ChatMessageType, parts ...string) schema.MessageContent

Types

type CallOption

type CallOption func(*CallOptions)

func WithMaxTokens added in v0.15.0

func WithMaxTokens(maxTokens int) CallOption

WithMaxTokens specifies the maximum number of tokens to generate.

func WithModel added in v0.15.0

func WithModel(model string) CallOption

WithModel specifies the model to use.

func WithSeed added in v0.15.0

func WithSeed(seed int) CallOption

WithSeed specifies the seed to use.

func WithStopWords added in v0.15.0

func WithStopWords(stopWords []string) CallOption

WithStopWords specifies the stop words to use.

func WithStreamingFunc

func WithStreamingFunc(streamingFunc func(ctx context.Context, chunk []byte) error) CallOption

WithStreamingFunc specifies the streaming function to use.

func WithTemperature added in v0.15.0

func WithTemperature(temperature float64) CallOption

WithTemperature specifies the temperature to use.

func WithTopK added in v0.15.0

func WithTopK(topK int) CallOption

WithTopK specifies the top-k value to use.

func WithTopP added in v0.15.0

func WithTopP(topP float64) CallOption

WithTopP specifies the top-p value to use.

type CallOptions

type CallOptions struct {
	Model         string                                        `json:"model"`
	Temperature   float64                                       `json:"temperature"`
	MaxTokens     int                                           `json:"max_tokens"`
	StopWords     []string                                      `json:"stop_words"`
	TopP          float64                                       `json:"top_p"`
	TopK          int                                           `json:"top_k"`
	Seed          int                                           `json:"seed"`
	Metadata      map[string]any                                `json:"metadata,omitempty"`
	StreamingFunc func(ctx context.Context, chunk []byte) error `json:"-"`
}

type LLMReranker added in v0.15.0

type LLMReranker struct {
	// contains filtered or unexported fields
}

func NewLLMReranker added in v0.15.0

func NewLLMReranker(model Model, opts ...LLMRerankerOption) *LLMReranker

func (*LLMReranker) Rerank added in v0.15.0

func (r *LLMReranker) Rerank(ctx context.Context, query string, docs []schema.Document) ([]schema.ScoredDocument, error)

type LLMRerankerOption added in v0.15.0

type LLMRerankerOption func(*LLMReranker)

func WithConcurrency added in v0.15.0

func WithConcurrency(c int) LLMRerankerOption

func WithPrompt added in v0.15.0

func WithPrompt(p string) LLMRerankerOption

type Model

type Model interface {
	GenerateContent(ctx context.Context, messages []schema.MessageContent, options ...CallOption) (*schema.ContentResponse, error)
	Call(ctx context.Context, prompt string, options ...CallOption) (string, error)
}

type Tokenizer

type Tokenizer interface {
	CountTokens(ctx context.Context, text string) (int, error)
}

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL