Documentation
¶
Overview ¶
Package llm provides a unified abstraction layer for Large Language Model interactions within the Mattermost AI plugin.
This package defines the core interfaces and data structures for working with various LLM providers (OpenAI, Anthropic, etc.) in a consistent manner. It handles:
- LanguageModel interface abstraction for different LLM providers
- Conversation management with structured posts, roles, and context
- Prompt template system with embedded templates and variable substitution
- Streaming text responses for real-time chat interactions
- Tool/function calling capabilities with JSON schema validation
- Request/response structures with token counting and truncation
- Context management including user info, channels, and bot configurations
The package is designed to be provider-agnostic, allowing the plugin to work with multiple LLM services through a common interface while preserving provider-specific capabilities like vision, JSON output, and tool calling.
Index ¶
- Constants
- func NewJSONSchemaFromStruct(schemaStruct interface{}) *jsonschema.Schema
- type BotConfig
- type ChannelAccessLevel
- type CompletionRequest
- type Context
- type ContextOption
- type EventType
- type File
- type LanguageModel
- type LanguageModelConfig
- type LanguageModelLogWrapper
- func (w *LanguageModelLogWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error)
- func (w *LanguageModelLogWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error)
- func (w *LanguageModelLogWrapper) CountTokens(text string) int
- func (w *LanguageModelLogWrapper) InputTokenLimit() int
- type LanguageModelOption
- type LanguageModelTestLogWrapper
- func (w *LanguageModelTestLogWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error)
- func (w *LanguageModelTestLogWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error)
- func (w *LanguageModelTestLogWrapper) CountTokens(text string) int
- func (w *LanguageModelTestLogWrapper) InputTokenLimit() int
- type LanguageModelWrapper
- type Post
- type PostRole
- type Prompts
- type ServiceConfig
- type TextStreamEvent
- type TextStreamResult
- type Tool
- type ToolArgumentGetter
- type ToolCall
- type ToolCallStatus
- type ToolResolver
- type ToolStore
- func (s *ToolStore) AddTools(tools []Tool)
- func (s *ToolStore) GetTools() []Tool
- func (s *ToolStore) ResolveTool(name string, argsGetter ToolArgumentGetter, context *Context) (string, error)
- func (s *ToolStore) TraceResolved(name string, argsGetter ToolArgumentGetter, result string)
- func (s *ToolStore) TraceUnknown(name string, argsGetter ToolArgumentGetter)
- type TraceLog
- type TruncationWrapper
- func (w *TruncationWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error)
- func (w *TruncationWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error)
- func (w *TruncationWrapper) CountTokens(text string) int
- func (w *TruncationWrapper) InputTokenLimit() int
- type UserAccessLevel
Constants ¶
const ( ServiceTypeOpenAI = "openai" ServiceTypeOpenAICompatible = "openaicompatible" ServiceTypeAzure = "azure" ServiceTypeASage = "asage" ServiceTypeAnthropic = "anthropic" ServiceTypeCohere = "cohere" )
const FunctionsTokenBudget = 200
const MinTokens = 100
const PromptExtension = "tmpl"
const TokenLimitBufferSize = 0.9
Variables ¶
This section is empty.
Functions ¶
func NewJSONSchemaFromStruct ¶
func NewJSONSchemaFromStruct(schemaStruct interface{}) *jsonschema.Schema
NewJSONSchemaFromStruct creates a JSONSchema from a Go struct using reflection. It's a helper function for tool providers that currently define schemas as structs.
Types ¶
type BotConfig ¶
type BotConfig struct {
ID string `json:"id"`
Name string `json:"name"`
DisplayName string `json:"displayName"`
CustomInstructions string `json:"customInstructions"`
Service ServiceConfig `json:"service"`
EnableVision bool `json:"enableVision"`
DisableTools bool `json:"disableTools"`
ChannelAccessLevel ChannelAccessLevel `json:"channelAccessLevel"`
ChannelIDs []string `json:"channelIDs"`
UserAccessLevel UserAccessLevel `json:"userAccessLevel"`
UserIDs []string `json:"userIDs"`
TeamIDs []string `json:"teamIDs"`
MaxFileSize int64 `json:"maxFileSize"`
}
type ChannelAccessLevel ¶
type ChannelAccessLevel int
const ( ChannelAccessLevelAll ChannelAccessLevel = iota ChannelAccessLevelAllow ChannelAccessLevelBlock ChannelAccessLevelNone )
type CompletionRequest ¶
func (CompletionRequest) ExtractSystemMessage ¶
func (b CompletionRequest) ExtractSystemMessage() string
ExtractSystemMessage extracts the system message from the conversation.
func (CompletionRequest) String ¶
func (b CompletionRequest) String() string
type Context ¶
type Context struct {
// Server
Time string
ServerName string
CompanyName string
// Location
Team *model.Team
Channel *model.Channel
Thread []Post // Normalized posts that have already been formatted. nil when not in a thread, or when this is a root post
// User that is making the request
RequestingUser *model.User
// Bot Specific
BotName string
BotUsername string
BotModel string
CustomInstructions string
Tools *ToolStore
Parameters map[string]interface{}
}
Context represents the data necessary to build the context of the LLM. Consumers must not assume that any of the fields are present.
func NewContext ¶
func NewContext(opts ...ContextOption) *Context
NewContext creates a new Context with the given options
type ContextOption ¶
type ContextOption func(*Context)
ContextOption defines a function that configures a Context
type LanguageModel ¶
type LanguageModel interface {
ChatCompletion(conversation CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error)
ChatCompletionNoStream(conversation CompletionRequest, opts ...LanguageModelOption) (string, error)
CountTokens(text string) int
InputTokenLimit() int
}
type LanguageModelConfig ¶
type LanguageModelLogWrapper ¶
type LanguageModelLogWrapper struct {
// contains filtered or unexported fields
}
func NewLanguageModelLogWrapper ¶
func NewLanguageModelLogWrapper(log pluginapi.LogService, wrapped LanguageModel) *LanguageModelLogWrapper
func (*LanguageModelLogWrapper) ChatCompletion ¶
func (w *LanguageModelLogWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error)
func (*LanguageModelLogWrapper) ChatCompletionNoStream ¶
func (w *LanguageModelLogWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error)
func (*LanguageModelLogWrapper) CountTokens ¶
func (w *LanguageModelLogWrapper) CountTokens(text string) int
func (*LanguageModelLogWrapper) InputTokenLimit ¶
func (w *LanguageModelLogWrapper) InputTokenLimit() int
type LanguageModelOption ¶
type LanguageModelOption func(*LanguageModelConfig)
func WithJSONOutput ¶
func WithJSONOutput(format any) LanguageModelOption
func WithMaxGeneratedTokens ¶
func WithMaxGeneratedTokens(maxGeneratedTokens int) LanguageModelOption
func WithModel ¶
func WithModel(model string) LanguageModelOption
type LanguageModelTestLogWrapper ¶
type LanguageModelTestLogWrapper struct {
// contains filtered or unexported fields
}
func NewLanguageModelTestLogWrapper ¶
func NewLanguageModelTestLogWrapper(t *testing.T, wrapped LanguageModel) *LanguageModelTestLogWrapper
func (*LanguageModelTestLogWrapper) ChatCompletion ¶
func (w *LanguageModelTestLogWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error)
func (*LanguageModelTestLogWrapper) ChatCompletionNoStream ¶
func (w *LanguageModelTestLogWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error)
func (*LanguageModelTestLogWrapper) CountTokens ¶
func (w *LanguageModelTestLogWrapper) CountTokens(text string) int
func (*LanguageModelTestLogWrapper) InputTokenLimit ¶
func (w *LanguageModelTestLogWrapper) InputTokenLimit() int
type LanguageModelWrapper ¶
type LanguageModelWrapper func(LanguageModel) LanguageModel
type Prompts ¶
type Prompts struct {
// contains filtered or unexported fields
}
type ServiceConfig ¶
type ServiceConfig struct {
Name string `json:"name"`
Type string `json:"type"`
APIKey string `json:"apiKey"`
OrgID string `json:"orgId"`
DefaultModel string `json:"defaultModel"`
APIURL string `json:"apiURL"`
// Renaming the JSON field to inputTokenLimit would require a migration, leaving as is for now.
InputTokenLimit int `json:"tokenLimit"`
StreamingTimeoutSeconds int `json:"streamingTimeoutSeconds"`
SendUserID bool `json:"sendUserID"`
// Otherwise known as maxTokens
OutputTokenLimit int `json:"outputTokenLimit"`
}
type TextStreamEvent ¶
TextStreamEvent represents an event in the text stream
type TextStreamResult ¶
type TextStreamResult struct {
Stream <-chan TextStreamEvent
}
TextStreamResult represents a stream of text events
func NewStreamFromString ¶
func NewStreamFromString(text string) *TextStreamResult
func (*TextStreamResult) ReadAll ¶
func (t *TextStreamResult) ReadAll() (string, error)
type Tool ¶
type Tool struct {
Name string
Description string
Schema *jsonschema.Schema
Resolver ToolResolver
}
Tool represents a function that can be called by the language model during a conversation.
Each tool has a name, description, and schema that defines its parameters. These are passed to the LLM for it to understand what capabilities it has. It is the Resolver function that implements the actual functionality.
The Schema field should contain a JSONSchema that defines the expected structure of the tool's arguments. The Resolver function receives the conversation context and a way to access the parsed arguments, and returns either a result that will be passed to the LLM or an error.
type ToolArgumentGetter ¶
type ToolCall ¶
type ToolCall struct {
ID string `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
Arguments json.RawMessage `json:"arguments"`
Result string `json:"result"`
Status ToolCallStatus `json:"status"`
}
ToolCall represents a tool call. An empty result indicates that the tool has not yet been resolved.
type ToolCallStatus ¶
type ToolCallStatus int
ToolCallStatus represents the current status of a tool call
const ( // ToolCallStatusPending indicates the tool is waiting for user approval/rejection ToolCallStatusPending ToolCallStatus = iota // ToolCallStatusAccepted indicates the user has accepted the tool call but it's not resolved yet ToolCallStatusAccepted // ToolCallStatusRejected indicates the user has rejected the tool call ToolCallStatusRejected // ToolCallStatusError indicates the tool call was accepted but errored during resolution ToolCallStatusError // ToolCallStatusSuccess indicates the tool call was accepted and resolved successfully ToolCallStatusSuccess )
type ToolResolver ¶
type ToolResolver func(context *Context, argsGetter ToolArgumentGetter) (string, error)
type ToolStore ¶
type ToolStore struct {
// contains filtered or unexported fields
}
func NewNoTools ¶
func NewNoTools() *ToolStore
func NewToolStore ¶
func (*ToolStore) ResolveTool ¶
func (*ToolStore) TraceResolved ¶
func (s *ToolStore) TraceResolved(name string, argsGetter ToolArgumentGetter, result string)
func (*ToolStore) TraceUnknown ¶
func (s *ToolStore) TraceUnknown(name string, argsGetter ToolArgumentGetter)
type TruncationWrapper ¶
type TruncationWrapper struct {
// contains filtered or unexported fields
}
func NewLLMTruncationWrapper ¶
func NewLLMTruncationWrapper(llm LanguageModel) *TruncationWrapper
func (*TruncationWrapper) ChatCompletion ¶
func (w *TruncationWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error)
func (*TruncationWrapper) ChatCompletionNoStream ¶
func (w *TruncationWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error)
func (*TruncationWrapper) CountTokens ¶
func (w *TruncationWrapper) CountTokens(text string) int
func (*TruncationWrapper) InputTokenLimit ¶
func (w *TruncationWrapper) InputTokenLimit() int
type UserAccessLevel ¶
type UserAccessLevel int
const ( UserAccessLevelAll UserAccessLevel = iota UserAccessLevelAllow UserAccessLevelBlock UserAccessLevelNone )