Documentation ¶
Index ¶
- func CallChatGPT(cfg Config, inputMessages []ChatMessage, stream bool) (<-chan string, error)
- func GetCommands(base bot.BaseCommand, config *config.Config) bot.Commands
- type ChatChoice
- type ChatMessage
- type ChatRequest
- type ChatResponse
- type Config
- type DalleRequest
- type DalleResponse
- type DalleResponseImage
- type HashtagOptions
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func CallChatGPT ¶
func CallChatGPT(cfg Config, inputMessages []ChatMessage, stream bool) (<-chan string, error)
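A minimal usage sketch for CallChatGPT, assuming ChatMessage exposes the usual Role and Content fields and that the returned channel delivers the response text (chunk by chunk when stream is true) until it is closed; fmt and log are assumed imported and all values are illustrative:

cfg := Config{
    APIKey: "sk-...",      // illustrative; normally loaded from the bot configuration
    Model:  "gpt-4o-mini", // any chat-capable model
}
messages := []ChatMessage{
    {Role: "system", Content: "You are a helpful Slack assistant."}, // assumed fields
    {Role: "user", Content: "Summarize the latest deployment"},
}

chunks, err := CallChatGPT(cfg, messages, true) // stream=true
if err != nil {
    log.Fatal(err)
}
for chunk := range chunks {
    fmt.Print(chunk) // print partial output as it arrives
}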
func GetCommands ¶
func GetCommands(base bot.BaseCommand, config *config.Config) bot.Commands
GetCommands registers the openai commands, if enabled.
Types ¶
type ChatChoice ¶
type ChatChoice struct {
    Index        int         `json:"index"`
    Message      ChatMessage `json:"message"`
    FinishReason string      `json:"finish_reason"`
    Delta        ChatMessage `json:"delta"`
}
type ChatMessage ¶
type ChatRequest ¶
type ChatRequest struct {
    Model            string        `json:"model"`
    Messages         []ChatMessage `json:"messages"`
    Temperature      float32       `json:"temperature,omitempty"`
    TopP             float32       `json:"top_p,omitempty"`
    N                int           `json:"n,omitempty"`
    Stop             []string      `json:"stop,omitempty"`
    Stream           bool          `json:"stream,omitempty"`
    MaxTokens        int           `json:"max_tokens,omitempty"`
    PresencePenalty  float32       `json:"presence_penalty,omitempty"`
    ReasoningEffort  string        `json:"reasoning_effort,omitempty"`
    FrequencyPenalty float32       `json:"frequency_penalty,omitempty"`
    User             string        `json:"user,omitempty"`
    Seed             string        `json:"seed,omitempty"`
}
ChatRequest API reference: https://platform.openai.com/docs/api-reference/chat
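Marshalling a ChatRequest with encoding/json yields the request body described in the API reference; a sketch with illustrative values (encoding/json and log assumed imported, ChatMessage JSON shape assumed):

req := ChatRequest{
    Model: "gpt-4o-mini",
    Messages: []ChatMessage{
        {Role: "user", Content: "Hello"}, // assumed ChatMessage fields
    },
    Temperature: 0.7,
    MaxTokens:   500,
    Stream:      true,
}
body, err := json.Marshal(req)
if err != nil {
    log.Fatal(err)
}
// body is roughly: {"model":"gpt-4o-mini","messages":[...],"temperature":0.7,"stream":true,"max_tokens":500}
_ = body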
type ChatResponse ¶
type ChatResponse struct {
    ID      string       `json:"id"`
    Object  string       `json:"object"`
    Created int64        `json:"created"`
    Choices []ChatChoice `json:"choices"`
    Error   struct {
        Message string `json:"message"`
        Type    string `json:"type"`
    } `json:"error"`
    Usage struct {
        PromptTokens     int `json:"prompt_tokens"`
        CompletionTokens int `json:"completion_tokens"`
        TotalTokens      int `json:"total_tokens"`
    } `json:"usage"`
}
func (ChatResponse) GetDelta ¶
func (r ChatResponse) GetDelta() ChatMessage
func (ChatResponse) GetError ¶
func (r ChatResponse) GetError() error
func (ChatResponse) GetMessage ¶
func (r ChatResponse) GetMessage() ChatMessage
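A sketch of how the accessors fit together when decoding an API response, assuming GetError returns a non-nil error whenever the Error field is populated and GetMessage returns the first choice's Message (GetDelta being its counterpart for streamed chunks); the helper name is hypothetical, encoding/json and io assumed imported:

func handleChatResponse(body io.Reader) (ChatMessage, error) {
    var resp ChatResponse
    if err := json.NewDecoder(body).Decode(&resp); err != nil {
        return ChatMessage{}, err
    }
    // the API answered with an error object instead of choices
    if err := resp.GetError(); err != nil {
        return ChatMessage{}, err
    }
    // non-streamed call: the complete message of the first choice;
    // for streamed chunks, resp.GetDelta() would return the partial message instead
    return resp.GetMessage(), nil
}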
type Config ¶
type Config struct {
    APIKey               string  `mapstructure:"api_key"`
    APIHost              string  `mapstructure:"api_host"`
    InitialSystemMessage string  `mapstructure:"initial_system_message"`
    Model                string  `mapstructure:"model"`
    Temperature          float32 `mapstructure:"temperature"`
    Seed                 string  `mapstructure:"seed"`
    MaxTokens            int     `mapstructure:"max_tokens"`
    ReasoningEffort      string  `mapstructure:"reasoning_effort"` // "minimal", "low", "medium", "high" or empty for the default

    // number of thread messages stored and used as context for further requests
    HistorySize int `mapstructure:"history_size"`

    // if no other command matches, evaluate the message with openai
    UseAsFallback bool `mapstructure:"use_as_fallback"`

    // maximum update frequency of slack messages when "stream" is active
    UpdateInterval time.Duration `mapstructure:"update_interval"`

    // timeout for API requests to OpenAI
    APITimeout time.Duration `mapstructure:"api_timeout"`

    // log all input+output text to the logger. This could include personal information, therefore it is disabled by default!
    LogTexts bool `mapstructure:"log_texts"`

    // Dall-E image generation
    DalleModel          string `mapstructure:"dalle_model"`
    DalleImageSize      string `mapstructure:"dalle_image_size"`
    DalleNumberOfImages int    `mapstructure:"dalle_number_of_images"`
    DalleQuality        string `mapstructure:"dalle_quality"`
}
Config holds the openai plugin configuration, e.g. the API key used for the API calls.
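The mapstructure tags suggest the struct is normally populated from the bot's configuration file; for illustration, a hand-written literal with plausible values (all illustrative, including the API host; time assumed imported):

cfg := Config{
    APIKey:               "sk-...",
    APIHost:              "https://api.openai.com", // assumption: the default OpenAI endpoint
    InitialSystemMessage: "You are a helpful Slack assistant.",
    Model:                "gpt-4o-mini",
    Temperature:          0.7,
    HistorySize:          25,
    UseAsFallback:        true,
    UpdateInterval:       time.Second,
    APITimeout:           2 * time.Minute,
    DalleModel:           "dall-e-3",
    DalleImageSize:       "1024x1024",
    DalleNumberOfImages:  1,
    DalleQuality:         "standard",
}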
type DalleRequest ¶
type DalleRequest struct {
    Model   string `json:"model"`
    Quality string `json:"quality,omitempty"`
    Prompt  string `json:"prompt"`
    N       int    `json:"n"`
    Size    string `json:"size"`
}

{
    "model": "dall-e-3",
    "prompt": "a white siamese cat",
    "n": 1,
    "size": "1024x1024"
}
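The request body shown above can be produced by marshalling the struct; a small sketch (encoding/json, fmt and log assumed imported):

req := DalleRequest{
    Model:  "dall-e-3",
    Prompt: "a white siamese cat",
    N:      1,
    Size:   "1024x1024",
}
payload, err := json.Marshal(req)
if err != nil {
    log.Fatal(err)
}
fmt.Println(string(payload))
// {"model":"dall-e-3","prompt":"a white siamese cat","n":1,"size":"1024x1024"}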
type DalleResponse ¶
type DalleResponse struct {
    Data  []DalleResponseImage `json:"data"`
    Error struct {
        Code    string `json:"code"`
        Message string `json:"message"`
    } `json:"error"`
}

{
    "created": 1700233554,
    "data": [
        {
            "url": "https://XXXX"
        }
    ]
}

or:

{
    "error": {
        "code": "invalid_size",
        "message": "The size is not supported by this model.",
        "param": null,
        "type": "invalid_request_error"
    }
}
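A minimal decoding sketch that covers both of the payloads above; the helper name is hypothetical, and encoding/json, fmt and io are assumed imported:

func decodeDalleResponse(body io.Reader) ([]DalleResponseImage, error) {
    var resp DalleResponse
    if err := json.NewDecoder(body).Decode(&resp); err != nil {
        return nil, err
    }
    // error payload: no images, only the error object is set
    if resp.Error.Message != "" {
        return nil, fmt.Errorf("dall-e error %q: %s", resp.Error.Code, resp.Error.Message)
    }
    // success payload: one entry per generated image
    return resp.Data, nil
}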
type DalleResponseImage ¶
type HashtagOptions ¶ added in v2.3.15
type HashtagOptions struct {
    ReasoningEffort string // "minimal", "medium", "high", or ""
    Model           string // override model, empty means use config default
    MessageHistory  int    // number of channel messages to include, 0 means disabled
    NoStreaming     bool   // disable streaming responses, get full response at once
    NoThread        bool   // disable thread replies, reply directly to the message instead
    Debug           bool   // show debug information at the end of the response
}
HashtagOptions contains parsed hashtag options from user input
func ParseHashtags ¶ added in v2.3.15
func ParseHashtags(text string) (cleanText string, options HashtagOptions)
ParseHashtags extracts hashtag options from the input text and returns the cleaned text (without hashtags) and the parsed options
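A usage sketch; the concrete hashtag spelling is not documented here, so the input below is purely hypothetical and only the HashtagOptions fields are taken from the type above:

// hypothetical user input containing option hashtags
text, opts := ParseHashtags("#debug #nostream explain the failing build")

if opts.NoStreaming {
    // post the full answer at once instead of live-updating the Slack message
}
if opts.Debug {
    // append debug information to the end of the response
}
_ = text // the prompt with the hashtags stripped, ready to be sent to the API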