Documentation
¶
Index ¶
- func Call(ctx context.Context, assistant IAssistant, input string, ...) (*llms.ContentResponse, error)
- func GetDescriptions(list ...IAssistant) string
- func MapAssistants(list ...IAssistant) map[string]IAssistant
- func Run[O chatmodel.ContentProvider](ctx context.Context, assistant TypeableAssistant[O], input string, ...) (*llms.ContentResponse, error)
- type Assistant
- func (a *Assistant[O]) Call(ctx context.Context, input string, promptInputs map[string]any, ...) (*llms.ContentResponse, error)
- func (a *Assistant[O]) CallMCP(ctx context.Context, input chatmodel.MCPInputRequest) (*mcp.PromptResponse, error)
- func (a *Assistant[O]) Description() string
- func (a *Assistant[O]) FormatPrompt(promptInputs map[string]any) (llms.PromptValue, error)
- func (a *Assistant[O]) GetCallback() Callback
- func (a *Assistant[O]) GetPromptInputVariables() []string
- func (a *Assistant[O]) GetSystemPrompt(input string, promptInputs map[string]any) (string, error)
- func (a *Assistant[O]) Name() string
- func (a *Assistant[O]) RegisterMCP(registrator McpServerRegistrator) error
- func (a *Assistant[O]) Run(ctx context.Context, input string, promptInputs map[string]any, ...) (*llms.ContentResponse, error)
- func (a *Assistant[O]) RunMessages() []llms.MessageContent
- func (a *Assistant[O]) WithDescription(description string) *Assistant[O]
- func (a *Assistant[O]) WithInputParser(inputParser func(string) (string, error))
- func (a *Assistant[O]) WithName(name string) *Assistant[O]
- func (a *Assistant[O]) WithOutputParser(outputParser chatmodel.OutputParser[O]) *Assistant[O]
- func (a *Assistant[O]) WithPromptInputProvider(cb ProvidePromptInputsFunc)
- func (a *Assistant[O]) WithTools(list ...tools.ITool) *Assistant[O]
- type Callback
- type Config
- type HasCallback
- type IAssistant
- type IAssistantTool
- type IMCPAssistant
- type McpServerRegistrator
- type NoopCallback
- func (l *NoopCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, ...)
- func (l *NoopCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)
- func (l *NoopCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)
- func (l *NoopCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)
- func (l *NoopCallback) OnToolEnd(ctx context.Context, tool tools.ITool, input string, output string)
- func (l *NoopCallback) OnToolError(ctx context.Context, tool tools.ITool, input string, err error)
- func (l *NoopCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)
- func (l *NoopCallback) OnToolStart(ctx context.Context, tool tools.ITool, input string)
- type Option
- func WithCallback(callbackHandler Callback) Option
- func WithExamples(examples chatmodel.FewShotExamples) Option
- func WithGeneric(val bool) Option
- func WithJSONMode(jsonMode bool) Option
- func WithMaxLength(maxLength int) Option
- func WithMaxTokens(maxTokens int) Option
- func WithMessageStore(store store.MessageStore) Option
- func WithMinLength(minLength int) Option
- func WithMode(mode encoding.Mode) Option
- func WithModel(model string) Option
- func WithPromptInput(input map[string]any) Option
- func WithRepetitionPenalty(repetitionPenalty float64) Option
- func WithSeed(seed int) Option
- func WithSkipMessageHistory(skip bool) Option
- func WithStopWords(stopWords []string) Option
- func WithStreamingFunc(streamingFunc func(ctx context.Context, chunk []byte) error) Option
- func WithTemperature(temperature float64) Option
- func WithTool(tool llms.Tool) Option
- func WithToolChoice(choice any) Option
- func WithTools(tools []llms.Tool) Option
- func WithTopK(topK int) Option
- func WithTopP(topP float64) Option
- type PackageLoggerCallback
- func (l *PackageLoggerCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, ...)
- func (l *PackageLoggerCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)
- func (l *PackageLoggerCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)
- func (l *PackageLoggerCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)
- func (l *PackageLoggerCallback) OnToolEnd(ctx context.Context, tool tools.ITool, input string, output string)
- func (l *PackageLoggerCallback) OnToolError(ctx context.Context, tool tools.ITool, input string, err error)
- func (l *PackageLoggerCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)
- func (l *PackageLoggerCallback) OnToolStart(ctx context.Context, tool tools.ITool, input string)
- type PrinterCallback
- func (l *PrinterCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, ...)
- func (l *PrinterCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)
- func (l *PrinterCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)
- func (l *PrinterCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)
- func (l *PrinterCallback) OnToolEnd(ctx context.Context, tool tools.ITool, input string, output string)
- func (l *PrinterCallback) OnToolError(ctx context.Context, tool tools.ITool, input string, err error)
- func (l *PrinterCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)
- func (l *PrinterCallback) OnToolStart(ctx context.Context, tool tools.ITool, input string)
- type ProvidePromptInputsFunc
- type TypeableAssistant
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func Call ¶
func Call( ctx context.Context, assistant IAssistant, input string, promptInputs map[string]any, options ...Option, ) (*llms.ContentResponse, error)
Call executes a generic assistant without typed output.
func GetDescriptions ¶
func GetDescriptions(list ...IAssistant) string
func MapAssistants ¶
func MapAssistants(list ...IAssistant) map[string]IAssistant
func Run ¶
func Run[O chatmodel.ContentProvider]( ctx context.Context, assistant TypeableAssistant[O], input string, promptInputs map[string]any, optionalOutputType *O, options ...Option, ) (*llms.ContentResponse, error)
Run executes the assistant with the given input and prompt inputs.
Types ¶
type Assistant ¶
type Assistant[O chatmodel.ContentProvider] struct { LLM llms.Model OutputParser chatmodel.OutputParser[O] // contains filtered or unexported fields }
Assistant class for chat assistants. This class provides the core functionality for handling chat interactions, including managing memory, generating system prompts, and obtaining responses from a language model.
func NewAssistant ¶
func NewAssistant[O chatmodel.ContentProvider]( llmModel llms.Model, sysprompt prompts.FormatPrompter, options ...Option) *Assistant[O]
NewAssistant initializes the Assistant.
func (*Assistant[O]) CallMCP ¶ added in v0.3.11
func (a *Assistant[O]) CallMCP(ctx context.Context, input chatmodel.MCPInputRequest) (*mcp.PromptResponse, error)
func (*Assistant[O]) Description ¶
Description returns the description of the Agent, to be used in the prompt of other Agents or LLMs. Should not exceed LLM model limit.
func (*Assistant[O]) FormatPrompt ¶
func (*Assistant[O]) GetCallback ¶
func (*Assistant[O]) GetPromptInputVariables ¶
func (*Assistant[O]) GetSystemPrompt ¶ added in v0.2.10
GetSystemPrompt generates the system prompt for the Assistant.
func (*Assistant[O]) RegisterMCP ¶ added in v0.3.11
func (a *Assistant[O]) RegisterMCP(registrator McpServerRegistrator) error
func (*Assistant[O]) Run ¶
func (a *Assistant[O]) Run(ctx context.Context, input string, promptInputs map[string]any, optionalOutputType *O, options ...Option) (*llms.ContentResponse, error)
Run runs the chat agent with the given user input synchronously.
func (*Assistant[O]) RunMessages ¶ added in v0.2.10
func (a *Assistant[O]) RunMessages() []llms.MessageContent
func (*Assistant[O]) WithDescription ¶
WithDescription sets the description of the Agent, to be used in the prompt of other Agents or LLMs.
func (*Assistant[O]) WithInputParser ¶ added in v0.3.12
WithInputParser sets the input parser for the Assistant.
func (*Assistant[O]) WithName ¶
WithName sets the name of the Agent, when used in a prompt of other Agents or LLMs.
func (*Assistant[O]) WithOutputParser ¶
func (a *Assistant[O]) WithOutputParser(outputParser chatmodel.OutputParser[O]) *Assistant[O]
WithOutputParser sets the output parser for the Assistant.
func (*Assistant[O]) WithPromptInputProvider ¶ added in v0.3.12
func (a *Assistant[O]) WithPromptInputProvider(cb ProvidePromptInputsFunc)
type Callback ¶
type Callback interface {
tools.Callback
OnAssistantStart(ctx context.Context, agent IAssistant, input string)
OnAssistantEnd(ctx context.Context, agent IAssistant, input string, resp *llms.ContentResponse)
OnAssistantError(ctx context.Context, agent IAssistant, input string, err error)
OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)
}
type Config ¶
type Config struct {
// Model is the model to use in an LLM call.
Model string
// MaxTokens is the maximum number of tokens to generate to use in an LLM call.
MaxTokens int
// Temperature is the temperature for sampling to use in an LLM call, between 0 and 1.
Temperature float64
// StopWords is a list of words to stop on to use in an LLM call.
StopWords []string
// TopK is the number of tokens to consider for top-k sampling in an LLM call.
TopK int
// TopP is the cumulative probability for top-p sampling in an LLM call.
TopP float64
// Seed is a seed for deterministic sampling in an LLM call.
Seed int
// MinLength is the minimum length of the generated text in an LLM call.
MinLength int
// MaxLength is the maximum length of the generated text in an LLM call.
MaxLength int
// RepetitionPenalty is the repetition penalty for sampling in an LLM call.
RepetitionPenalty float64
// CallbackHandler is the callback handler for Chain
CallbackHandler Callback
// Tools is a list of tools to use. Each tool can be a specific tool or a function.
Tools []llms.Tool
// ToolChoice is the choice of tool to use, it can either be "none", "auto" (the default behavior), or a specific tool as described in the ToolChoice type.
ToolChoice any
JSONMode bool
// StreamingFunc is a function to be called for each chunk of a streaming response.
// Return an error to stop streaming early.
StreamingFunc func(ctx context.Context, chunk []byte) error
Store store.MessageStore
PromptInput map[string]any
Examples chatmodel.FewShotExamples
Mode encoding.Mode
// SkipMessageHistory is a flag to skip adding Assistant messages to History.
SkipMessageHistory bool
// IsGeneric is a flag to indicate that the assistant should add a generic message to the history,
// instead of a human message.
IsGeneric bool
// contains filtered or unexported fields
}
func (*Config) GetCallOptions ¶
func (c *Config) GetCallOptions(options ...Option) []llms.CallOption
type HasCallback ¶
type HasCallback interface {
GetCallback() Callback
}
type IAssistant ¶
type IAssistant interface {
// Name returns the name of the Assistant.
Name() string
// Description returns the description of the Assistant, to be used in the prompt of other Assistants or LLMs.
// Should not exceed LLM model limit.
Description() string
// FormatPrompter returns the format prompter for the Assistant.
FormatPrompt(values map[string]any) (llms.PromptValue, error)
GetPromptInputVariables() []string
Call(ctx context.Context, input string, promptInputs map[string]any, options ...Option) (*llms.ContentResponse, error)
}
type IAssistantTool ¶ added in v0.3.13
type IAssistantTool interface {
// CallAssistant allows the tool to call the assistant with the given input and options.
CallAssistant(ctx context.Context, input string, options ...Option) (string, error)
}
IAssistantTool provides an interface for tools that use the underlying Assistants.
type IMCPAssistant ¶ added in v0.3.11
type IMCPAssistant interface {
IAssistant
RegisterMCP(registrator McpServerRegistrator) error
CallMCP(context.Context, chatmodel.MCPInputRequest) (*mcp.PromptResponse, error)
}
IMCPAssistant is an interface that extends IAssistant to include functionality for registering the assistant with an MCP server. The RegisterMCP method allows the assistant to be registered with a given MCP Server.
type McpServerRegistrator ¶ added in v0.3.11
type NoopCallback ¶
type NoopCallback struct{}
NoopCallback does nothing.
func NewNoopCallback ¶
func NewNoopCallback() *NoopCallback
func (*NoopCallback) OnAssistantEnd ¶
func (l *NoopCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, resp *llms.ContentResponse)
func (*NoopCallback) OnAssistantError ¶
func (l *NoopCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)
func (*NoopCallback) OnAssistantLLMCall ¶ added in v0.3.13
func (l *NoopCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)
func (*NoopCallback) OnAssistantStart ¶
func (l *NoopCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)
func (*NoopCallback) OnToolError ¶
func (*NoopCallback) OnToolLLMCall ¶ added in v0.3.13
func (l *NoopCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)
func (*NoopCallback) OnToolStart ¶
type Option ¶
type Option func(*Config)
Option is a function that can be used to modify the behavior of the Agent Config.
func WithCallback ¶
WithCallback allows setting a custom Callback Handler.
func WithExamples ¶
func WithExamples(examples chatmodel.FewShotExamples) Option
WithExamples is an option that allows to specify the few-shot examples for the system prompt.
func WithGeneric ¶ added in v0.3.12
WithGeneric is an option to indicate that the assistant should add a generic message to the history, instead of a human message.
func WithJSONMode ¶
WithJSONMode is an option for LLM.Call that allows the user to specify whether to use JSON mode.
func WithMaxLength ¶
WithMaxLength will add an option to set the maximum length of the generated text for LLM.Call.
func WithMaxTokens ¶
WithMaxTokens is an option for LLM.Call.
func WithMessageStore ¶ added in v0.3.12
func WithMessageStore(store store.MessageStore) Option
WithMessageStore is an option that allows to specify the message store.
func WithMinLength ¶
WithMinLength will add an option to set the minimum length of the generated text for LLM.Call.
func WithPromptInput ¶
WithPromptInput is an option that allows the user to specify the system prompt input.
func WithRepetitionPenalty ¶
WithRepetitionPenalty will add an option to set the repetition penalty for sampling.
func WithSkipMessageHistory ¶ added in v0.1.7
WithSkipMessageHistory is an option that allows to skip adding Assistant messages to History.
func WithStopWords ¶
WithStopWords is an option for setting the stop words for LLM.Call.
func WithStreamingFunc ¶
WithStreamingFunc is an option for LLM.Call that allows streaming responses.
func WithTemperature ¶
WithTemperature is an option for LLM.Call.
func WithToolChoice ¶
WithToolChoice is an option for LLM.Call.
type PackageLoggerCallback ¶
type PackageLoggerCallback struct {
// contains filtered or unexported fields
}
PackageLoggerCallback is a callback handler that prints to the logger.
func NewPackageLoggerCallback ¶
func NewPackageLoggerCallback(logger *xlog.PackageLogger) *PackageLoggerCallback
func (*PackageLoggerCallback) OnAssistantEnd ¶
func (l *PackageLoggerCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, resp *llms.ContentResponse)
func (*PackageLoggerCallback) OnAssistantError ¶
func (l *PackageLoggerCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)
func (*PackageLoggerCallback) OnAssistantLLMCall ¶ added in v0.3.13
func (l *PackageLoggerCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)
func (*PackageLoggerCallback) OnAssistantStart ¶
func (l *PackageLoggerCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)
func (*PackageLoggerCallback) OnToolError ¶
func (*PackageLoggerCallback) OnToolLLMCall ¶ added in v0.3.13
func (l *PackageLoggerCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)
func (*PackageLoggerCallback) OnToolStart ¶
type PrinterCallback ¶
PrinterCallback is a callback handler that prints to the Writer.
func NewPrinterCallback ¶
func NewPrinterCallback(out io.Writer) *PrinterCallback
func (*PrinterCallback) OnAssistantEnd ¶
func (l *PrinterCallback) OnAssistantEnd(ctx context.Context, assistant IAssistant, input string, resp *llms.ContentResponse)
func (*PrinterCallback) OnAssistantError ¶
func (l *PrinterCallback) OnAssistantError(ctx context.Context, assistant IAssistant, input string, err error)
func (*PrinterCallback) OnAssistantLLMCall ¶ added in v0.3.13
func (l *PrinterCallback) OnAssistantLLMCall(ctx context.Context, agent IAssistant, payload []llms.MessageContent)
func (*PrinterCallback) OnAssistantStart ¶
func (l *PrinterCallback) OnAssistantStart(ctx context.Context, assistant IAssistant, input string)
func (*PrinterCallback) OnToolError ¶
func (*PrinterCallback) OnToolLLMCall ¶ added in v0.3.13
func (l *PrinterCallback) OnToolLLMCall(ctx context.Context, tool tools.ITool, payload []llms.MessageContent)
func (*PrinterCallback) OnToolStart ¶
type ProvidePromptInputsFunc ¶ added in v0.3.12
type TypeableAssistant ¶
type TypeableAssistant[O chatmodel.ContentProvider] interface { IAssistant HasCallback // Run executes the assistant with the given input and prompt inputs. // Do not use this method directly, use the Run function instead. Run(ctx context.Context, input string, promptInputs map[string]any, optionalOutputType *O, options ...Option) (*llms.ContentResponse, error) }