model

package
v1.0.13
Published: Apr 28, 2025 License: Apache-2.0 Imports: 6 Imported by: 0

Documentation

Index

Constants

const (
	ChatMessageRoleSystem    = "system"
	ChatMessageRoleUser      = "user"
	ChatMessageRoleAssistant = "assistant"
	ChatMessageRoleTool      = "tool"
)
const (
	ToolChoiceStringTypeAuto     = "auto"
	ToolChoiceStringTypeNone     = "none"
	ToolChoiceStringTypeRequired = "required"
)
const (
	ClientRequestHeader = "X-Client-Request-Id"
	RetryAfterHeader    = "Retry-After"

	DefaultMandatoryRefreshTimeout = 10 * 60          // 10 min
	DefaultAdvisoryRefreshTimeout  = 30 * 60          // 30 min
	DefaultStsTimeout              = 7 * 24 * 60 * 60 // 7 days

	InitialRetryDelay = 0.5
	MaxRetryDelay     = 8.0

	ErrorRetryBaseDelay = 500 * time.Millisecond
	ErrorRetryMaxDelay  = 8 * time.Second
)

Variables

var (
	ErrTooManyEmptyStreamMessages       = errors.New("stream has sent too many empty messages")
	ErrChatCompletionInvalidModel       = errors.New("this model is not supported with this method, please use CreateCompletion client method instead") //nolint:lll
	ErrChatCompletionStreamNotSupported = errors.New("streaming is not supported with this method, please use CreateChatCompletionStream")              //nolint:lll
	ErrContentFieldsMisused             = errors.New("can't use both Content and MultiContent properties simultaneously")
	ErrBodyWithoutEndpoint              = errors.New("can't fetch endpoint sts token without endpoint")
	ErrBodyWithoutBot                   = errors.New("can't fetch bot sts token without bot id")
	ErrAKSKNotSupported                 = errors.New("ak&sk authentication is currently not supported for this method, please use api key instead")
)

Functions

This section is empty.

Types

type APIError

type APIError struct {
	Code           string  `json:"code,omitempty"`
	Message        string  `json:"message"`
	Param          *string `json:"param,omitempty"`
	Type           string  `json:"type"`
	HTTPStatusCode int     `json:"-"`
	RequestId      string  `json:"request_id"`
}

func (*APIError) Error

func (e *APIError) Error() string

type ChatCompletionChoice

type ChatCompletionChoice struct {
	Index   int                   `json:"index"`
	Message ChatCompletionMessage `json:"message"`
	// FinishReason
	// stop: API returned complete message,
	// or a message terminated by one of the stop sequences provided via the stop parameter
	// length: Incomplete model output due to max_tokens parameter or token limit
	// function_call: The model decided to call a function
	// content_filter: Omitted content due to a flag from our content filters
	// null: API response still in progress or incomplete
	FinishReason FinishReason `json:"finish_reason"`
	// ModerationHitType
	// The type of content moderation strategy that was hit.
	// The API returns this value only when the endpoint has a moderation strategy
	// selected that supports returning moderation hit types.
	ModerationHitType *ChatCompletionResponseChoicesElemModerationHitType `json:"moderation_hit_type,omitempty" yaml:"moderation_hit_type,omitempty" mapstructure:"moderation_hit_type,omitempty"`
	LogProbs          *LogProbs                                           `json:"logprobs,omitempty"`
}

type ChatCompletionMessage

type ChatCompletionMessage struct {
	Role             string                        `json:"role"`
	Content          *ChatCompletionMessageContent `json:"content"`
	ReasoningContent *string                       `json:"reasoning_content,omitempty"`
	Name             *string                       `json:"name"`
	FunctionCall     *FunctionCall                 `json:"function_call,omitempty"`
	ToolCalls        []*ToolCall                   `json:"tool_calls,omitempty"`
	ToolCallID       string                        `json:"tool_call_id,omitempty"`
}

type ChatCompletionMessageContent

type ChatCompletionMessageContent struct {
	StringValue *string
	ListValue   []*ChatCompletionMessageContentPart
}

func (ChatCompletionMessageContent) MarshalJSON

func (j ChatCompletionMessageContent) MarshalJSON() ([]byte, error)

MarshalJSON implements json.Marshaler.

func (*ChatCompletionMessageContent) UnmarshalJSON

func (j *ChatCompletionMessageContent) UnmarshalJSON(b []byte) error

type ChatCompletionMessageContentPart

type ChatCompletionMessageContentPart struct {
	Type     ChatCompletionMessageContentPartType `json:"type,omitempty"`
	Text     string                               `json:"text,omitempty"`
	ImageURL *ChatMessageImageURL                 `json:"image_url,omitempty"`
}

type ChatCompletionMessageContentPartType

type ChatCompletionMessageContentPartType string
const (
	ChatCompletionMessageContentPartTypeText     ChatCompletionMessageContentPartType = "text"
	ChatCompletionMessageContentPartTypeImageURL ChatCompletionMessageContentPartType = "image_url"
)
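
Example (a minimal sketch; the import path is an assumption, so replace it with wherever this package lives in your module, and later sketches assume the same import plus the standard library packages fmt, errors, and encoding/json). ChatCompletionMessageContent is a union: set StringValue for plain text, or ListValue for multi-part content such as text plus an image.

package main

import (
	model "path/to/this/package" // assumption: replace with this package's real import path
)

func buildMessages() []*model.ChatCompletionMessage {
	system := "You are a helpful assistant."
	return []*model.ChatCompletionMessage{
		{
			// Plain string content: set StringValue only.
			Role:    model.ChatMessageRoleSystem,
			Content: &model.ChatCompletionMessageContent{StringValue: &system},
		},
		{
			// Multi-part content: set ListValue only.
			Role: model.ChatMessageRoleUser,
			Content: &model.ChatCompletionMessageContent{
				ListValue: []*model.ChatCompletionMessageContentPart{
					{Type: model.ChatCompletionMessageContentPartTypeText, Text: "What is shown in this image?"},
					{
						Type: model.ChatCompletionMessageContentPartTypeImageURL,
						ImageURL: &model.ChatMessageImageURL{
							URL:    "https://example.com/cat.png",
							Detail: model.ImageURLDetailAuto,
						},
					},
				},
			},
		},
	}
}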

type ChatCompletionRequest deprecated

type ChatCompletionRequest struct {
	Model             string                   `json:"model"`
	Messages          []*ChatCompletionMessage `json:"messages"`
	MaxTokens         int                      `json:"max_tokens,omitempty"`
	Temperature       float32                  `json:"temperature,omitempty"`
	TopP              float32                  `json:"top_p,omitempty"`
	Stream            bool                     `json:"stream,omitempty"`
	Stop              []string                 `json:"stop,omitempty"`
	FrequencyPenalty  float32                  `json:"frequency_penalty,omitempty"`
	LogitBias         map[string]int           `json:"logit_bias,omitempty"`
	LogProbs          bool                     `json:"logprobs,omitempty"`
	TopLogProbs       int                      `json:"top_logprobs,omitempty"`
	User              string                   `json:"user,omitempty"`
	FunctionCall      interface{}              `json:"function_call,omitempty"`
	Tools             []*Tool                  `json:"tools,omitempty"`
	ToolChoice        interface{}              `json:"tool_choice,omitempty"`
	StreamOptions     *StreamOptions           `json:"stream_options,omitempty"`
	PresencePenalty   float32                  `json:"presence_penalty,omitempty"`
	RepetitionPenalty float32                  `json:"repetition_penalty,omitempty"`
	N                 int                      `json:"n,omitempty"`
	ResponseFormat    *ResponseFormat          `json:"response_format,omitempty"`
	ServiceTier       *string                  `json:"service_tier,omitempty"`
}

Deprecated: use `CreateChatCompletionRequest` instead. When making a request with ChatCompletionRequest, only non-zero fields take effect: a field set to 0, an empty string (""), false, or another zero value is not sent to the server, and the server falls back to its default for that field. If you need to send an explicit zero value, use CreateChatCompletionRequest.
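
A minimal sketch of that pitfall (buildMessages is the helper sketched earlier): a zero-valued field disappears from the serialized body.

func showZeroValueOmission() {
	req := model.ChatCompletionRequest{
		Model:       "<your endpoint or model id>",
		Messages:    buildMessages(),
		Temperature: 0, // zero value: dropped from the JSON body, so the server applies its default
	}
	body, err := req.MarshalJSON()
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // "temperature" is absent from the output
}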

func (ChatCompletionRequest) GetModel

func (r ChatCompletionRequest) GetModel() string

func (ChatCompletionRequest) IsStream

func (r ChatCompletionRequest) IsStream() bool

func (ChatCompletionRequest) MarshalJSON

func (r ChatCompletionRequest) MarshalJSON() ([]byte, error)

func (ChatCompletionRequest) WithStream

func (r ChatCompletionRequest) WithStream(stream bool) ChatRequest

type ChatCompletionResponse

type ChatCompletionResponse struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
	Model   string `json:"model"`
	// Marks whether the request was served as scale-tier or default; only present for scale-tier requests.
	ServiceTier string                  `json:"service_tier,omitempty"`
	Choices     []*ChatCompletionChoice `json:"choices"`
	Usage       Usage                   `json:"usage"`

	HttpHeader
}

ChatCompletionResponse represents a response structure for chat completion API.

type ChatCompletionResponseChoicesElemModerationHitType

type ChatCompletionResponseChoicesElemModerationHitType string
const (
	ChatCompletionResponseChoicesElemModerationHitTypeViolence        ChatCompletionResponseChoicesElemModerationHitType = "violence"
	ChatCompletionResponseChoicesElemModerationHitTypeSevereViolation ChatCompletionResponseChoicesElemModerationHitType = "severe_violation"
)

type ChatCompletionStreamChoice

type ChatCompletionStreamChoice struct {
	Index             int                                                 `json:"index"`
	Delta             ChatCompletionStreamChoiceDelta                     `json:"delta"`
	LogProbs          *LogProbs                                           `json:"logprobs,omitempty"`
	FinishReason      FinishReason                                        `json:"finish_reason"`
	ModerationHitType *ChatCompletionResponseChoicesElemModerationHitType `json:"moderation_hit_type,omitempty" yaml:"moderation_hit_type,omitempty" mapstructure:"moderation_hit_type,omitempty"`
}

type ChatCompletionStreamChoiceDelta

type ChatCompletionStreamChoiceDelta struct {
	Content          string        `json:"content,omitempty"`
	Role             string        `json:"role,omitempty"`
	ReasoningContent *string       `json:"reasoning_content,omitempty"`
	FunctionCall     *FunctionCall `json:"function_call,omitempty"`
	ToolCalls        []*ToolCall   `json:"tool_calls,omitempty"`
}

type ChatCompletionStreamResponse

type ChatCompletionStreamResponse struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
	Model   string `json:"model"`
	// Marks whether the request was served as scale-tier or default; only present for scale-tier requests.
	ServiceTier string                        `json:"service_tier,omitempty"`
	Choices     []*ChatCompletionStreamChoice `json:"choices"`
	// An optional field that will only be present when you set stream_options: {"include_usage": true} in your request.
	// When present, it contains a null value except for the last chunk, which contains the token usage statistics
	// for the entire request.
	Usage *Usage `json:"usage,omitempty"`
}

type ChatMessageImageURL

type ChatMessageImageURL struct {
	URL    string         `json:"url,omitempty"`
	Detail ImageURLDetail `json:"detail,omitempty"`
}

type ChatRequest

type ChatRequest interface {
	json.Marshaler
	WithStream(stream bool) ChatRequest
	IsStream() bool
	GetModel() string
}

type CompletionTokensDetails added in v1.0.12

type CompletionTokensDetails struct {
	ReasoningTokens int `json:"reasoning_tokens"`
}

type ContextChatCompletionRequest

type ContextChatCompletionRequest struct {
	ContextID        string                   `json:"context_id"`
	Mode             ContextMode              `json:"mode"`
	Model            string                   `json:"model"`
	Messages         []*ChatCompletionMessage `json:"messages"`
	MaxTokens        int                      `json:"max_tokens,omitempty"`
	Temperature      float32                  `json:"temperature,omitempty"`
	TopP             float32                  `json:"top_p,omitempty"`
	Stream           bool                     `json:"stream,omitempty"`
	Stop             []string                 `json:"stop,omitempty"`
	FrequencyPenalty float32                  `json:"frequency_penalty,omitempty"`
	LogitBias        map[string]int           `json:"logit_bias,omitempty"`
	LogProbs         bool                     `json:"logprobs,omitempty"`
	TopLogProbs      int                      `json:"top_logprobs,omitempty"`
	User             string                   `json:"user,omitempty"`
	FunctionCall     interface{}              `json:"function_call,omitempty"`
	Tools            []*Tool                  `json:"tools,omitempty"`
	ToolChoice       interface{}              `json:"tool_choice,omitempty"`
	StreamOptions    *StreamOptions           `json:"stream_options,omitempty"`
	Metadata         map[string]interface{}   `json:"metadata,omitempty"`
}

type ContextMode

type ContextMode string
const (
	ContextModeSession      ContextMode = "session"
	ContextModeCommonPrefix ContextMode = "common_prefix"
)

type CreateChatCompletionRequest

type CreateChatCompletionRequest struct {
	Model             string                   `json:"model"`
	Messages          []*ChatCompletionMessage `json:"messages"`
	MaxTokens         *int                     `json:"max_tokens,omitempty"`
	Temperature       *float32                 `json:"temperature,omitempty"`
	TopP              *float32                 `json:"top_p,omitempty"`
	Stream            *bool                    `json:"stream,omitempty"`
	Stop              []string                 `json:"stop,omitempty"`
	FrequencyPenalty  *float32                 `json:"frequency_penalty,omitempty"`
	LogitBias         map[string]int           `json:"logit_bias,omitempty"`
	LogProbs          *bool                    `json:"logprobs,omitempty"`
	TopLogProbs       *int                     `json:"top_logprobs,omitempty"`
	User              *string                  `json:"user,omitempty"`
	FunctionCall      interface{}              `json:"function_call,omitempty"`
	Tools             []*Tool                  `json:"tools,omitempty"`
	ToolChoice        interface{}              `json:"tool_choice,omitempty"`
	StreamOptions     *StreamOptions           `json:"stream_options,omitempty"`
	PresencePenalty   *float32                 `json:"presence_penalty,omitempty"`
	RepetitionPenalty *float32                 `json:"repetition_penalty,omitempty"`
	N                 *int                     `json:"n,omitempty"`
	ResponseFormat    *ResponseFormat          `json:"response_format,omitempty"`
	ParallelToolCalls *bool                    `json:"parallel_tool_calls,omitempty"`
	ServiceTier       *string                  `json:"service_tier,omitempty"`
}

CreateChatCompletionRequest - When making a request with this struct, fields you set explicitly are sent to the server even when their values are zero (0, an empty string "", false, and so on), and the server handles them as the specified values rather than falling back to defaults; fields left nil are omitted.
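
Because every optional field is a pointer, nil means "omit" and a non-nil pointer is always sent, so explicit zero values survive serialization. A minimal sketch with a local helper (ptr is illustrative and not part of this package; buildMessages is from the earlier sketch):

// ptr is a local convenience helper, not part of this package.
func ptr[T any](v T) *T { return &v }

func newRequest() model.CreateChatCompletionRequest {
	return model.CreateChatCompletionRequest{
		Model:       "<your endpoint or model id>",
		Messages:    buildMessages(),
		Temperature: ptr(float32(0)), // explicitly sent as 0, not dropped
		MaxTokens:   ptr(1024),
		Stream:      ptr(false),
		// Fields left nil are omitted and fall back to server defaults.
	}
}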

func (CreateChatCompletionRequest) GetModel

func (r CreateChatCompletionRequest) GetModel() string

func (CreateChatCompletionRequest) IsStream

func (r CreateChatCompletionRequest) IsStream() bool

func (CreateChatCompletionRequest) MarshalJSON

func (r CreateChatCompletionRequest) MarshalJSON() ([]byte, error)

func (CreateChatCompletionRequest) WithStream

func (r CreateChatCompletionRequest) WithStream(stream bool) ChatRequest

type CreateContextRequest

type CreateContextRequest struct {
	Model              string                   `json:"model"`
	Mode               ContextMode              `json:"mode"`
	Messages           []*ChatCompletionMessage `json:"messages"`
	TTL                *int                     `json:"ttl,omitempty"`
	TruncationStrategy *TruncationStrategy      `json:"truncation_strategy,omitempty"`
}
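
A sketch of building a context-cache creation request: a session context that keeps only the trailing history within a token budget. The TTL unit is assumed to be seconds, and the client method that sends this request lives outside this package.

func newContextRequest(systemMsg *model.ChatCompletionMessage) model.CreateContextRequest {
	ttl := 3600    // assumed to be seconds
	budget := 4096 // token budget for the retained history
	return model.CreateContextRequest{
		Model:    "<your endpoint or model id>",
		Mode:     model.ContextModeSession,
		Messages: []*model.ChatCompletionMessage{systemMsg},
		TTL:      &ttl,
		TruncationStrategy: &model.TruncationStrategy{
			Type:              model.TruncationStrategyTypeLastHistoryTokens,
			LastHistoryTokens: &budget,
		},
	}
}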

type CreateContextResponse

type CreateContextResponse struct {
	ID                 string              `json:"id"`
	Mode               ContextMode         `json:"mode"`
	Model              string              `json:"model"`
	TTL                *int                `json:"ttl,omitempty"`
	TruncationStrategy *TruncationStrategy `json:"truncation_strategy,omitempty"`

	Usage Usage `json:"usage"`

	HttpHeader
}

type ErrorResponse

type ErrorResponse struct {
	Error *APIError `json:"error,omitempty"`
}

type FinishReason

type FinishReason string
const (
	FinishReasonStop          FinishReason = "stop"
	FinishReasonLength        FinishReason = "length"
	FinishReasonFunctionCall  FinishReason = "function_call"
	FinishReasonToolCalls     FinishReason = "tool_calls"
	FinishReasonContentFilter FinishReason = "content_filter"
	FinishReasonNull          FinishReason = "null"
)

func (FinishReason) MarshalJSON

func (r FinishReason) MarshalJSON() ([]byte, error)

type FunctionCall

type FunctionCall struct {
	Name      string `json:"name,omitempty"`
	Arguments string `json:"arguments,omitempty"`
}

type FunctionDefine deprecated

type FunctionDefine = FunctionDefinition

Deprecated: use FunctionDefinition instead.

type FunctionDefinition

type FunctionDefinition struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
	// Parameters is an object describing the function.
	// You can pass json.RawMessage to describe the schema,
	// or you can pass in a struct which serializes to the proper JSON schema.
	// The jsonschema package is provided for convenience, but you should
	// consider another specialized library if you require more complex schemas.
	Parameters interface{} `json:"parameters"`
}
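
A sketch of defining one callable tool with a json.RawMessage schema for Parameters, plus a ToolChoice value that names it. The request's ToolChoice field is an interface{}, and the ToolChoiceStringType constants above suggest a plain string ("auto", "none", "required") is also accepted.

func weatherTooling() ([]*model.Tool, model.ToolChoice) {
	schema := json.RawMessage(`{
		"type": "object",
		"properties": {
			"location": {"type": "string", "description": "City name"}
		},
		"required": ["location"]
	}`)

	tools := []*model.Tool{{
		Type: model.ToolTypeFunction,
		Function: &model.FunctionDefinition{
			Name:        "get_weather",
			Description: "Look up the current weather for a location",
			Parameters:  schema,
		},
	}}

	// Force the model to call this specific function instead of choosing automatically.
	choice := model.ToolChoice{
		Type:     model.ToolTypeFunction,
		Function: model.ToolChoiceFunction{Name: "get_weather"},
	}
	return tools, choice
}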

type HttpHeader

type HttpHeader http.Header

func (*HttpHeader) GetHeader

func (h *HttpHeader) GetHeader() http.Header

func (*HttpHeader) Header

func (h *HttpHeader) Header() http.Header

func (*HttpHeader) SetHeader

func (h *HttpHeader) SetHeader(header http.Header)
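
Response types embed HttpHeader, so HTTP response headers travel with the decoded body. A sketch of reading them after a call (whether the server echoes X-Client-Request-Id back is an assumption):

func inspectHeaders(resp *model.ChatCompletionResponse) {
	retryAfter := resp.GetHeader().Get(model.RetryAfterHeader)
	requestID := resp.GetHeader().Get(model.ClientRequestHeader) // assumes the server echoes this header
	fmt.Println("retry-after:", retryAfter, "request id:", requestID)
}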

type ImageURLDetail

type ImageURLDetail string
const (
	ImageURLDetailHigh ImageURLDetail = "high"
	ImageURLDetailLow  ImageURLDetail = "low"
	ImageURLDetailAuto ImageURLDetail = "auto"
)

type LogProb

type LogProb struct {
	Token   string  `json:"token"`
	LogProb float64 `json:"logprob"`
	Bytes   []rune  `json:"bytes,omitempty"` // Omitted if the value is null
	// TopLogProbs is a list of the most likely tokens and their log probability, at this token position.
	// In rare cases, there may be fewer than the number of requested top_logprobs returned.
	TopLogProbs []*TopLogProbs `json:"top_logprobs"`
}

LogProb represents the probability information for a token.

type LogProbs

type LogProbs struct {
	// Content is a list of message content tokens with log probability information.
	Content []*LogProb `json:"content"`
}

LogProbs is the top-level structure containing the log probability information.
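
A sketch of walking the log-probability data on a choice when logprobs were enabled on the request:

func printLogProbs(choice *model.ChatCompletionChoice) {
	if choice.LogProbs == nil {
		return
	}
	for _, lp := range choice.LogProbs.Content {
		fmt.Printf("token %q logprob %.4f\n", lp.Token, lp.LogProb)
		for _, alt := range lp.TopLogProbs {
			fmt.Printf("  alternative %q logprob %.4f\n", alt.Token, alt.LogProb)
		}
	}
}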

type PromptTokensDetail

type PromptTokensDetail struct {
	CachedTokens int `json:"cached_tokens"`
}

type RawResponse

type RawResponse struct {
	io.ReadCloser

	HttpHeader
}

type RequestError

type RequestError struct {
	HTTPStatusCode int
	Err            error
	RequestId      string `json:"request_id"`
}

RequestError provides information about generic request errors.

func NewRequestError

func NewRequestError(httpStatusCode int, rawErr error, requestID string) *RequestError

func (*RequestError) Error

func (e *RequestError) Error() string

func (*RequestError) Unwrap

func (e *RequestError) Unwrap() error
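
A sketch of telling the two error types apart with errors.As; that the client wraps failures in exactly these types is an assumption based on their presence in this package.

func describeError(err error) {
	var apiErr *model.APIError
	var reqErr *model.RequestError
	switch {
	case errors.As(err, &apiErr):
		fmt.Println("API error:", apiErr.Code, apiErr.Message, "request:", apiErr.RequestId)
	case errors.As(err, &reqErr):
		fmt.Println("request failed with status", reqErr.HTTPStatusCode, "request:", reqErr.RequestId)
	default:
		fmt.Println("other error:", err)
	}
}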

type Response

type Response interface {
	SetHeader(http.Header)
	GetHeader() http.Header
}

type ResponseFormat

type ResponseFormat struct {
	Type   ResponseFormatType `json:"type"`
	Schema interface{}        `json:"schema,omitempty"`
}

type ResponseFormatType

type ResponseFormatType string
const (
	ResponseFormatJsonObject ResponseFormatType = "json_object"
	ResponseFormatText       ResponseFormatType = "text"
)
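
A sketch of requesting JSON output on a request built as in the earlier sketches; the optional Schema field's expected shape is not documented here, so only the type is set.

req.ResponseFormat = &model.ResponseFormat{
	Type: model.ResponseFormatJsonObject, // ask the model to emit a JSON object
}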

type StreamOptions

type StreamOptions struct {
	// If set, an additional chunk will be streamed before the data: [DONE] message.
	// The usage field on this chunk shows the token usage statistics for the entire request,
	// and the choices field will always be an empty array.
	// All other chunks will also include a usage field, but with a null value.
	IncludeUsage bool `json:"include_usage,omitempty"`
	// If set, each data chunk will include a `usage` field
	// representing the current cumulative token usage for the entire request.
	ChunkIncludeUsage bool `json:"chunk_include_usage,omitempty"`
}
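
A sketch tying this to the streaming response above: ask for usage on the final chunk, then pick it up while handling chunks. Obtaining the stream itself is the client's job and is outside this package; only how the chunk types are read is shown.

func streamingRequest() model.CreateChatCompletionRequest {
	stream := true
	return model.CreateChatCompletionRequest{
		Model:         "<your endpoint or model id>",
		Messages:      buildMessages(),
		Stream:        &stream,
		StreamOptions: &model.StreamOptions{IncludeUsage: true},
	}
}

// handleChunk would be called for each decoded stream chunk.
func handleChunk(chunk *model.ChatCompletionStreamResponse) {
	for _, c := range chunk.Choices {
		fmt.Print(c.Delta.Content)
	}
	if chunk.Usage != nil { // with IncludeUsage, only the final chunk carries non-null usage
		fmt.Printf("\nprompt=%d completion=%d total=%d (cached=%d, reasoning=%d)\n",
			chunk.Usage.PromptTokens, chunk.Usage.CompletionTokens, chunk.Usage.TotalTokens,
			chunk.Usage.PromptTokensDetails.CachedTokens,
			chunk.Usage.CompletionTokensDetails.ReasoningTokens)
	}
}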

type Tool

type Tool struct {
	Type     ToolType            `json:"type"`
	Function *FunctionDefinition `json:"function,omitempty"`
}

type ToolCall

type ToolCall struct {
	ID       string       `json:"id"`
	Type     ToolType     `json:"type"`
	Function FunctionCall `json:"function"`
	Index    *int         `json:"index,omitempty"`
}

type ToolChoice

type ToolChoice struct {
	Type     ToolType           `json:"type"`
	Function ToolChoiceFunction `json:"function,omitempty"`
}

type ToolChoiceFunction

type ToolChoiceFunction struct {
	Name string `json:"name"`
}

type ToolType

type ToolType string
const (
	ToolTypeFunction ToolType = "function"
)

type TopLogProbs

type TopLogProbs struct {
	Token   string  `json:"token"`
	LogProb float64 `json:"logprob"`
	Bytes   []rune  `json:"bytes,omitempty"`
}

type TruncationStrategy

type TruncationStrategy struct {
	Type              TruncationStrategyType `json:"type"`
	LastHistoryTokens *int                   `json:"last_history_tokens,omitempty"`
	RollingTokens     *bool                  `json:"rolling_tokens,omitempty"`
}

type TruncationStrategyType

type TruncationStrategyType string
const (
	TruncationStrategyTypeLastHistoryTokens TruncationStrategyType = "last_history_tokens"
	TruncationStrategyTypeRollingTokens     TruncationStrategyType = "rolling_tokens"
)

type Usage

type Usage struct {
	PromptTokens            int                     `json:"prompt_tokens"`
	CompletionTokens        int                     `json:"completion_tokens"`
	TotalTokens             int                     `json:"total_tokens"`
	PromptTokensDetails     PromptTokensDetail      `json:"prompt_tokens_details"`
	CompletionTokensDetails CompletionTokensDetails `json:"completion_tokens_details"`
}
