assistants

package
v0.3.13 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 8, 2025 License: Apache-2.0 Imports: 15 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func Call

func Call(
	ctx context.Context,
	assistant IAssistant,
	input string,
	promptInputs map[string]any,
	options ...Option,
) (*llms.ContentResponse, error)

Call executes a generic assistant without typed output.

func GetDescriptions

func GetDescriptions(list ...IAssistant) string

func MapAssistants

func MapAssistants(list ...IAssistant) map[string]IAssistant

func Run

func Run[O chatmodel.ContentProvider](
	ctx context.Context,
	assistant TypeableAssistant[O],
	input string,
	promptInputs map[string]any,
	optionalOutputType *O,
	options ...Option,
) (*llms.ContentResponse, error)

Run executes the assistant with the given input and prompt inputs.

Types

type Assistant

type Assistant[O chatmodel.ContentProvider] struct {
	LLM          llms.Model
	OutputParser chatmodel.OutputParser[O]
	// contains filtered or unexported fields
}

Assistant class for chat assistants. This class provides the core functionality for handling chat interactions, including managing memory, generating system prompts, and obtaining responses from a language model.

func NewAssistant

func NewAssistant[O chatmodel.ContentProvider](
	llmModel llms.Model,
	sysprompt prompts.FormatPrompter,
	options ...Option) *Assistant[O]

NewAssistant initializes the Assistant.

func (*Assistant[O]) Call

func (a *Assistant[O]) Call(ctx context.Context, input string, promptInputs map[string]any, options ...Option) (*llms.ContentResponse, error)

func (*Assistant[O]) CallMCP added in v0.3.11

func (a *Assistant[O]) CallMCP(ctx context.Context, input chatmodel.MCPInputRequest) (*mcp.PromptResponse, error)

func (*Assistant[O]) Description

func (a *Assistant[O]) Description() string

Description returns the description of the Agent, to be used in the prompt of other Agents or LLMs. Should not exceed LLM model limit.

func (*Assistant[O]) FormatPrompt

func (a *Assistant[O]) FormatPrompt(promptInputs map[string]any) (llms.PromptValue, error)

func (*Assistant[O]) GetCallback

func (a *Assistant[O]) GetCallback() Callback

func (*Assistant[O]) GetPromptInputVariables

func (a *Assistant[O]) GetPromptInputVariables() []string

func (*Assistant[O]) GetSystemPrompt added in v0.2.10

func (a *Assistant[O]) GetSystemPrompt(input string, promptInputs map[string]any) (string, error)

GetSystemPrompt generates the system prompt for the Assistant.

func (*Assistant[O]) Name

func (a *Assistant[O]) Name() string

Name returns the name of the Agent.

func (*Assistant[O]) RegisterMCP added in v0.3.11

func (a *Assistant[O]) RegisterMCP(registrator McpServerRegistrator) error

func (*Assistant[O]) Run

func (a *Assistant[O]) Run(ctx context.Context, input string, promptInputs map[string]any, optionalOutputType *O, options ...Option) (*llms.ContentResponse, error)

Run runs the chat agent with the given user input synchronously.

func (*Assistant[O]) RunMessages added in v0.2.10

func (a *Assistant[O]) RunMessages() []llms.MessageContent

func (*Assistant[O]) WithDescription

func (a *Assistant[O]) WithDescription(description string) *Assistant[O]

WithDescription sets the description of the Agent, to be used in the prompt of other Agents or LLMs.

func (*Assistant[O]) WithInputParser added in v0.3.12

func (a *Assistant[O]) WithInputParser(inputParser func(string) (string, error))

WithInputParser sets the input parser for the Assistant.

func (*Assistant[O]) WithName

func (a *Assistant[O]) WithName(name string) *Assistant[O]

WithName sets the name of the Agent, to be used in the prompt of other Agents or LLMs.

func (*Assistant[O]) WithOutputParser

func (a *Assistant[O]) WithOutputParser(outputParser chatmodel.OutputParser[O]) *Assistant[O]

WithOutputParser sets the output parser.

func (*Assistant[O]) WithPromptInputProvider added in v0.3.12

func (a *Assistant[O]) WithPromptInputProvider(cb ProvidePromptInputsFunc)

func (*Assistant[O]) WithTools

func (a *Assistant[O]) WithTools(list ...tools.ITool) *Assistant[O]

type Callback

type Callback interface {
	tools.Callback
	OnAssistantStart(ctx context.Context, agent IAssistant, input string)
	OnAssistantEnd(ctx context.Context, agent IAssistant, input string, resp *llms.ContentResponse)
	OnAssistantError(ctx context.Context, agent IAssistant, input string, err error)
	OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)
}

type Config

type Config struct {
	// Model is the model to use in an LLM call.
	Model string

	// MaxTokens is the maximum number of tokens to generate to use in an LLM call.
	MaxTokens int

	// Temperature is the temperature for sampling to use in an LLM call, between 0 and 1.
	Temperature float64

	// StopWords is a list of words to stop on to use in an LLM call.
	StopWords []string

	// TopK is the number of tokens to consider for top-k sampling in an LLM call.
	TopK int

	// TopP is the cumulative probability for top-p sampling in an LLM call.
	TopP float64

	// Seed is a seed for deterministic sampling in an LLM call.
	Seed int

	// MinLength is the minimum length of the generated text in an LLM call.
	MinLength int

	// MaxLength is the maximum length of the generated text in an LLM call.
	MaxLength int

	// RepetitionPenalty is the repetition penalty for sampling in an LLM call.
	RepetitionPenalty float64

	// CallbackHandler is the callback handler for Chain
	CallbackHandler Callback

	// Tools is a list of tools to use. Each tool can be a specific tool or a function.
	Tools []llms.Tool

	// ToolChoice is the choice of tool to use, it can either be "none", "auto" (the default behavior), or a specific tool as described in the ToolChoice type.
	ToolChoice any

	JSONMode bool

	// StreamingFunc is a function to be called for each chunk of a streaming response.
	// Return an error to stop streaming early.
	StreamingFunc func(ctx context.Context, chunk []byte) error

	Store       store.MessageStore
	PromptInput map[string]any
	Examples    chatmodel.FewShotExamples
	Mode        encoding.Mode
	// SkipMessageHistory is a flag to skip adding Assistant messages to History.
	SkipMessageHistory bool
	// IsGeneric is a flag to indicate that the assistant should add a generic message to the history,
	// instead of the human
	IsGeneric bool
	// contains filtered or unexported fields
}

func NewConfig

func NewConfig(opts ...Option) *Config

func (*Config) Apply added in v0.3.12

func (c *Config) Apply(opts ...Option) *Config

Apply applies the options to the Config.

func (*Config) GetCallOptions

func (c *Config) GetCallOptions(options ...Option) []llms.CallOption

type HasCallback

type HasCallback interface {
	GetCallback() Callback
}

type IAssistant

type IAssistant interface {
	// Name returns the name of the Assistant.
	Name() string
	// Description returns the description of the Assistant, to be used in the prompt of other Assistants or LLMs.
	// Should not exceed LLM model limit.
	Description() string
	// FormatPrompt returns the formatted prompt for the Assistant.
	FormatPrompt(values map[string]any) (llms.PromptValue, error)
	GetPromptInputVariables() []string

	Call(ctx context.Context, input string, promptInputs map[string]any, options ...Option) (*llms.ContentResponse, error)
}

type IAssistantTool added in v0.3.13

type IAssistantTool interface {
	// CallAssistant allows the tool to call the assistant with the given input and options.
	CallAssistant(ctx context.Context, input string, options ...Option) (string, error)
}

IAssistantTool provides an interface for tools that use the underlying Assistants.

type IMCPAssistant added in v0.3.11

type IMCPAssistant interface {
	IAssistant
	RegisterMCP(registrator McpServerRegistrator) error
	CallMCP(context.Context, chatmodel.MCPInputRequest) (*mcp.PromptResponse, error)
}

IMCPAssistant is an interface that extends IAssistant to include functionality for registering the assistant with an MCP server. The RegisterMCP method allows the assistant to be registered with a given MCP Server.

type McpServerRegistrator added in v0.3.11

type McpServerRegistrator interface {
	RegisterPrompt(name string, description string, handler any) error
}

type NoopCallback

type NoopCallback struct{}

NoopCallback does nothing.

func NewNoopCallback

func NewNoopCallback() *NoopCallback

func (*NoopCallback) OnAssistantEnd

func (l *NoopCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, resp *llms.ContentResponse)

func (*NoopCallback) OnAssistantError

func (l *NoopCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)

func (*NoopCallback) OnAssistantLLMCall added in v0.3.13

func (l *NoopCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)

func (*NoopCallback) OnAssistantStart

func (l *NoopCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)

func (*NoopCallback) OnToolEnd

func (l *NoopCallback) OnToolEnd(ctx context.Context, tool tools.ITool, input string, output string)

func (*NoopCallback) OnToolError

func (l *NoopCallback) OnToolError(ctx context.Context, tool tools.ITool, input string, err error)

func (*NoopCallback) OnToolLLMCall added in v0.3.13

func (l *NoopCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)

func (*NoopCallback) OnToolStart

func (l *NoopCallback) OnToolStart(ctx context.Context, tool tools.ITool, input string)

type Option

type Option func(*Config)

Option is a function that can be used to modify the behavior of the Agent Config.

func WithCallback

func WithCallback(callbackHandler Callback) Option

WithCallback allows setting a custom Callback Handler.

func WithExamples

func WithExamples(examples chatmodel.FewShotExamples) Option

WithExamples is an option that allows to specify the few-shot examples for the system prompt.

func WithGeneric added in v0.3.12

func WithGeneric(val bool) Option

WithGeneric is an option to indicate that the assistant should add a generic message to the history, instead of the human message.

func WithJSONMode

func WithJSONMode(jsonMode bool) Option

WithJSONMode is an option for LLM.Call that allows the user to specify whether to use JSON mode.

func WithMaxLength

func WithMaxLength(maxLength int) Option

WithMaxLength will add an option to set the maximum length of the generated text for LLM.Call.

func WithMaxTokens

func WithMaxTokens(maxTokens int) Option

WithMaxTokens is an option for LLM.Call.

func WithMessageStore added in v0.3.12

func WithMessageStore(store store.MessageStore) Option

WithMessageStore is an option that allows to specify the message store.

func WithMinLength

func WithMinLength(minLength int) Option

WithMinLength will add an option to set the minimum length of the generated text for LLM.Call.

func WithMode

func WithMode(mode encoding.Mode) Option

WithMode is an option that allows to specify the encoding mode.

func WithModel

func WithModel(model string) Option

WithModel is an option for LLM.Call.

func WithPromptInput

func WithPromptInput(input map[string]any) Option

WithPromptInput is an option that allows the user to specify the system prompt input.

func WithRepetitionPenalty

func WithRepetitionPenalty(repetitionPenalty float64) Option

WithRepetitionPenalty will add an option to set the repetition penalty for sampling.

func WithSeed

func WithSeed(seed int) Option

WithSeed will add an option to use deterministic sampling for LLM.Call.

func WithSkipMessageHistory added in v0.1.7

func WithSkipMessageHistory(skip bool) Option

WithSkipMessageHistory is an option that allows to skip adding Assistant messages to History.

func WithStopWords

func WithStopWords(stopWords []string) Option

WithStopWords is an option for setting the stop words for LLM.Call.

func WithStreamingFunc

func WithStreamingFunc(streamingFunc func(ctx context.Context, chunk []byte) error) Option

WithStreamingFunc is an option for LLM.Call that allows streaming responses.

func WithTemperature

func WithTemperature(temperature float64) Option

WithTemperature is an option for LLM.Call.

func WithTool

func WithTool(tool llms.Tool) Option

WithTool is an option for LLM.Call.

func WithToolChoice

func WithToolChoice(choice any) Option

WithToolChoice is an option for LLM.Call.

func WithTools

func WithTools(tools []llms.Tool) Option

WithTools is an option for LLM.Call.

func WithTopK

func WithTopK(topK int) Option

WithTopK will add an option to use top-k sampling for LLM.Call.

func WithTopP

func WithTopP(topP float64) Option

WithTopP will add an option to use top-p sampling for LLM.Call.

type PackageLoggerCallback

type PackageLoggerCallback struct {
	// contains filtered or unexported fields
}

PackageLoggerCallback is a callback handler that prints to the logger.

func NewPackageLoggerCallback

func NewPackageLoggerCallback(logger *xlog.PackageLogger) *PackageLoggerCallback

func (*PackageLoggerCallback) OnAssistantEnd

func (l *PackageLoggerCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, resp *llms.ContentResponse)

func (*PackageLoggerCallback) OnAssistantError

func (l *PackageLoggerCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)

func (*PackageLoggerCallback) OnAssistantLLMCall added in v0.3.13

func (l *PackageLoggerCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)

func (*PackageLoggerCallback) OnAssistantStart

func (l *PackageLoggerCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)

func (*PackageLoggerCallback) OnToolEnd

func (l *PackageLoggerCallback) OnToolEnd(ctx context.Context, tool tools.ITool, input string, output string)

func (*PackageLoggerCallback) OnToolError

func (l *PackageLoggerCallback) OnToolError(ctx context.Context, tool tools.ITool, input string, err error)

func (*PackageLoggerCallback) OnToolLLMCall added in v0.3.13

func (l *PackageLoggerCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)

func (*PackageLoggerCallback) OnToolStart

func (l *PackageLoggerCallback) OnToolStart(ctx context.Context, tool tools.ITool, input string)

type PrinterCallback

type PrinterCallback struct {
	Out io.Writer
}

PrinterCallback is a callback handler that prints to the Writer.

func NewPrinterCallback

func NewPrinterCallback(out io.Writer) *PrinterCallback

func (*PrinterCallback) OnAssistantEnd

func (l *PrinterCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, resp *llms.ContentResponse)

func (*PrinterCallback) OnAssistantError

func (l *PrinterCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)

func (*PrinterCallback) OnAssistantLLMCall added in v0.3.13

func (l *PrinterCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)

func (*PrinterCallback) OnAssistantStart

func (l *PrinterCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)

func (*PrinterCallback) OnToolEnd

func (l *PrinterCallback) OnToolEnd(ctx context.Context, tool tools.ITool, input string, output string)

func (*PrinterCallback) OnToolError

func (l *PrinterCallback) OnToolError(ctx context.Context, tool tools.ITool, input string, err error)

func (*PrinterCallback) OnToolLLMCall added in v0.3.13

func (l *PrinterCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)

func (*PrinterCallback) OnToolStart

func (l *PrinterCallback) OnToolStart(ctx context.Context, tool tools.ITool, input string)

type ProvidePromptInputsFunc added in v0.3.12

type ProvidePromptInputsFunc func(input string) (map[string]any, error)

type TypeableAssistant

type TypeableAssistant[O chatmodel.ContentProvider] interface {
	IAssistant
	HasCallback
	// Run executes the assistant with the given input and prompt inputs.
	// Do not use this method directly, use the Run function instead.
	Run(ctx context.Context, input string, promptInputs map[string]any, optionalOutputType *O, options ...Option) (*llms.ContentResponse, error)
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL