Documentation
¶
Index ¶
- func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error
- func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error
- func EditEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error
- func EmbeddingsEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error
- func ImageEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error
- func ListModelsEndpoint(loader *model.ModelLoader, cm *config.ConfigLoader) func(ctx *fiber.Ctx) error
- func TranscriptEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error
- type APIError
- type Choice
- type ErrorResponse
- type Item
- type Message
- type OpenAIModel
- type OpenAIRequest
- type OpenAIResponse
- type OpenAIUsage
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func ChatEndpoint ¶
func CompletionEndpoint ¶
func EditEndpoint ¶
func EmbeddingsEndpoint ¶
func ImageEndpoint ¶
Example request:

    curl http://localhost:8080/v1/images/generations \
      -H "Content-Type: application/json" \
      -d '{
        "prompt": "A cute baby sea otter",
        "n": 1,
        "size": "512x512"
      }'
func ListModelsEndpoint ¶
func ListModelsEndpoint(loader *model.ModelLoader, cm *config.ConfigLoader) func(ctx *fiber.Ctx) error
func TranscriptEndpoint ¶
Types ¶
type APIError ¶
// APIError provides error information returned by the OpenAI API.
type APIError struct {
	// Code is the machine-readable error code; type varies, hence `any`.
	Code any `json:"code,omitempty"`
	// Message is the human-readable error description.
	Message string `json:"message"`
	// Param names the request parameter the error relates to, if any.
	Param *string `json:"param,omitempty"`
	// Type categorizes the error (e.g. invalid request vs. server error).
	Type string `json:"type"`
}
APIError provides error information returned by the OpenAI API.
type Choice ¶
type ErrorResponse ¶
// ErrorResponse is the top-level JSON body returned on failure,
// wrapping a single APIError under the "error" key.
type ErrorResponse struct {
	Error *APIError `json:"error,omitempty"`
}
type OpenAIModel ¶
type OpenAIRequest ¶
// OpenAIRequest is the shared request body decoded by the OpenAI-compatible
// endpoints (chat, completion, edit, embeddings, image, transcription).
// Each endpoint reads only the fields relevant to it.
type OpenAIRequest struct {
	config.PredictionOptions

	// File is the audio file to transcribe (whisper endpoint); required there.
	File string `json:"file" validate:"required"`
	// ResponseFormat selects the output format (whisper/image endpoints).
	ResponseFormat string `json:"response_format"`
	// Size is the requested image size (image endpoint), e.g. "512x512".
	Size string `json:"size"`
	// Prompt is read only by completion/image API calls.
	// Declared interface{} to accept more than one JSON shape —
	// presumably a string or a list; confirm against the decoder.
	Prompt interface{} `json:"prompt" yaml:"prompt"`

	// Edit endpoint
	Instruction string      `json:"instruction" yaml:"instruction"`
	Input       interface{} `json:"input" yaml:"input"`

	// Stop sequence(s) terminating generation; interface{} allows string or list forms.
	Stop interface{} `json:"stop" yaml:"stop"`

	// Messages is read only by chat/completion API calls.
	Messages []Message `json:"messages" yaml:"messages"`

	// Functions is a list of available functions to call.
	Functions []grammar.Function `json:"functions" yaml:"functions"`
	// FunctionCall selects how/which function is called; might be a string or an object.
	FunctionCall interface{} `json:"function_call" yaml:"function_call"`

	// Stream enables streaming responses when true.
	Stream bool `json:"stream"`

	// Image generation controls (not supported by OpenAI).
	Mode int `json:"mode"`
	Step int `json:"step"`

	// Grammar is a grammar to constrain the LLM output.
	Grammar string `json:"grammar" yaml:"grammar"`
	// JSONFunctionGrammarObject is a structured (JSON) form of a
	// function-call grammar constraint.
	JSONFunctionGrammarObject *grammar.JSONFunctionStructure `json:"grammar_json_functions" yaml:"grammar_json_functions"`
}
type OpenAIResponse ¶
type OpenAIUsage ¶
Click to show internal directories.
Click to hide internal directories.