Documentation
¶
Index ¶
- Constants
- Variables
- func ConvertFinishReason(reason string) string
- func ConvertOpenAIUsage(geminiUsage *GeminiUsageMetadata) types.Usage
- func ConvertRole(roleName string) string
- func ConvertToChatOpenai(provider base.ProviderInterface, response *GeminiChatResponse, ...) (openaiResponse *types.ChatCompletionResponse, ...)
- func RequestErrorHandle(key string) requester.HttpErrorHandler
- type GeminiChatCandidate
- type GeminiChatContent
- type GeminiChatGenerationConfig
- type GeminiChatInterface
- type GeminiChatPromptFeedback
- type GeminiChatRequest
- type GeminiChatResponse
- type GeminiChatSafetyRating
- type GeminiChatSafetySettings
- type GeminiChatTools
- type GeminiCodeExecution
- type GeminiError
- type GeminiErrorResponse
- type GeminiErrorWithStatusCode
- type GeminiErrors
- type GeminiFileData
- type GeminiFunctionCall
- type GeminiFunctionCallingConfig
- type GeminiFunctionResponse
- type GeminiFunctionResponseContent
- type GeminiGroundingChunk
- type GeminiGroundingChunkWeb
- type GeminiGroundingMetadata
- type GeminiImageInstance
- type GeminiImageParameters
- type GeminiImagePrediction
- type GeminiImageRequest
- type GeminiImageResponse
- type GeminiInlineData
- type GeminiPart
- type GeminiPartCodeExecutionResult
- type GeminiPartExecutableCode
- type GeminiProvider
- func (p *GeminiProvider) CreateChatCompletion(request *types.ChatCompletionRequest) (*types.ChatCompletionResponse, *types.OpenAIErrorWithStatusCode)
- func (p *GeminiProvider) CreateChatCompletionStream(request *types.ChatCompletionRequest) (requester.StreamReaderInterface[string], *types.OpenAIErrorWithStatusCode)
- func (p *GeminiProvider) CreateGeminiChat(request *GeminiChatRequest) (*GeminiChatResponse, *types.OpenAIErrorWithStatusCode)
- func (p *GeminiProvider) CreateGeminiChatStream(request *GeminiChatRequest) (requester.StreamReaderInterface[string], *types.OpenAIErrorWithStatusCode)
- func (p *GeminiProvider) CreateImageGenerations(request *types.ImageRequest) (*types.ImageResponse, *types.OpenAIErrorWithStatusCode)
- func (p *GeminiProvider) GetFullRequestURL(requestURL string, modelName string) string
- func (p *GeminiProvider) GetModelList() ([]string, error)
- func (p *GeminiProvider) GetRequestHeaders() (headers map[string]string)
- type GeminiProviderFactory
- type GeminiRelayStreamHandler
- type GeminiStreamHandler
- type GeminiToolConfig
- type GeminiUsageMetadata
- type GeminiUsageMetadataDetails
- type ModelDetails
- type ModelListResponse
- type OpenAIStreamHandler
- type ThinkingConfig
Constants ¶
View Source
const ( ModalityTEXT = "TEXT" ModalityAUDIO = "AUDIO" ModalityIMAGE = "IMAGE" ModalityVIDEO = "VIDEO" )
View Source
const GeminiImageSymbol = "![one-hub-gemini-image]"
View Source
const (
GeminiVisionMaxImageNum = 16
)
Variables ¶
View Source
var ImageSymbolAcMachines = &goahocorasick.Machine{}
Functions ¶
func ConvertFinishReason ¶
func ConvertOpenAIUsage ¶
func ConvertOpenAIUsage(geminiUsage *GeminiUsageMetadata) types.Usage
func ConvertRole ¶
func ConvertToChatOpenai ¶
func ConvertToChatOpenai(provider base.ProviderInterface, response *GeminiChatResponse, request *types.ChatCompletionRequest) (openaiResponse *types.ChatCompletionResponse, errWithCode *types.OpenAIErrorWithStatusCode)
Types ¶
type GeminiChatCandidate ¶
type GeminiChatCandidate struct {
Content GeminiChatContent `json:"content"`
FinishReason *string `json:"finishReason,omitempty"`
Index int64 `json:"index"`
SafetyRatings []GeminiChatSafetyRating `json:"safetyRatings"`
CitationMetadata any `json:"citationMetadata,omitempty"`
TokenCount int `json:"tokenCount,omitempty"`
GroundingAttributions []any `json:"groundingAttributions,omitempty"`
GroundingMetadata *GeminiGroundingMetadata `json:"groundingMetadata,omitempty"`
AvgLogprobs any `json:"avgLogprobs,omitempty"`
}
func (*GeminiChatCandidate) ToOpenAIChoice ¶
func (candidate *GeminiChatCandidate) ToOpenAIChoice(request *types.ChatCompletionRequest) types.ChatCompletionChoice
func (*GeminiChatCandidate) ToOpenAIStreamChoice ¶
func (candidate *GeminiChatCandidate) ToOpenAIStreamChoice(request *types.ChatCompletionRequest) types.ChatCompletionStreamChoice
type GeminiChatContent ¶
type GeminiChatContent struct {
Role string `json:"role,omitempty"`
Parts []GeminiPart `json:"parts,omitempty"`
}
func OpenAIToGeminiChatContent ¶
func OpenAIToGeminiChatContent(openaiContents []types.ChatCompletionMessage) ([]GeminiChatContent, string, *types.OpenAIErrorWithStatusCode)
type GeminiChatGenerationConfig ¶
type GeminiChatGenerationConfig struct {
Temperature *float64 `json:"temperature,omitempty"`
TopP *float64 `json:"topP,omitempty"`
TopK *float64 `json:"topK,omitempty"`
MaxOutputTokens int `json:"maxOutputTokens,omitempty"`
CandidateCount int `json:"candidateCount,omitempty"`
StopSequences []string `json:"stopSequences,omitempty"`
ResponseMimeType string `json:"responseMimeType,omitempty"`
ResponseSchema any `json:"responseSchema,omitempty"`
ResponseModalities []string `json:"responseModalities,omitempty"`
ThinkingConfig *ThinkingConfig `json:"thinkingConfig,omitempty"`
}
type GeminiChatInterface ¶
type GeminiChatInterface interface {
base.ProviderInterface
CreateGeminiChat(request *GeminiChatRequest) (*GeminiChatResponse, *types.OpenAIErrorWithStatusCode)
CreateGeminiChatStream(request *GeminiChatRequest) (requester.StreamReaderInterface[string], *types.OpenAIErrorWithStatusCode)
}
type GeminiChatPromptFeedback ¶
type GeminiChatPromptFeedback struct {
BlockReason string `json:"blockReason"`
SafetyRatings []GeminiChatSafetyRating `json:"safetyRatings"`
}
type GeminiChatRequest ¶
type GeminiChatRequest struct {
Model string `json:"-"`
Stream bool `json:"-"`
Contents []GeminiChatContent `json:"contents"`
SafetySettings []GeminiChatSafetySettings `json:"safetySettings,omitempty"`
GenerationConfig GeminiChatGenerationConfig `json:"generationConfig,omitempty"`
Tools []GeminiChatTools `json:"tools,omitempty"`
ToolConfig *GeminiToolConfig `json:"toolConfig,omitempty"`
SystemInstruction any `json:"systemInstruction,omitempty"`
JsonRaw []byte `json:"-"`
}
func ConvertFromChatOpenai ¶
func ConvertFromChatOpenai(request *types.ChatCompletionRequest) (*GeminiChatRequest, *types.OpenAIErrorWithStatusCode)
type GeminiChatResponse ¶
type GeminiChatResponse struct {
Candidates []GeminiChatCandidate `json:"candidates"`
PromptFeedback *GeminiChatPromptFeedback `json:"promptFeedback,omitempty"`
UsageMetadata *GeminiUsageMetadata `json:"usageMetadata,omitempty"`
ModelVersion string `json:"modelVersion,omitempty"`
Model string `json:"model,omitempty"`
ResponseId string `json:"responseId,omitempty"`
GeminiErrorResponse
}
func (*GeminiChatResponse) GetResponseText ¶
func (g *GeminiChatResponse) GetResponseText() string
type GeminiChatSafetyRating ¶
type GeminiChatTools ¶
type GeminiChatTools struct {
FunctionDeclarations []types.ChatCompletionFunction `json:"functionDeclarations,omitempty"`
CodeExecution *GeminiCodeExecution `json:"codeExecution,omitempty"`
GoogleSearch any `json:"googleSearch,omitempty"`
UrlContext any `json:"urlContext,omitempty"`
GoogleSearchRetrieval any `json:"googleSearchRetrieval,omitempty"`
}
type GeminiCodeExecution ¶
type GeminiCodeExecution struct {
}
type GeminiError ¶
type GeminiError struct {
Code int `json:"code"`
Message string `json:"message"`
Status string `json:"status"`
}
func (*GeminiError) Error ¶
func (e *GeminiError) Error() string
type GeminiErrorResponse ¶
type GeminiErrorResponse struct {
ErrorInfo *GeminiError `json:"error,omitempty"`
}
func ErrorToGeminiErr ¶
func ErrorToGeminiErr(err error) *GeminiErrorResponse
func (*GeminiErrorResponse) Error ¶
func (e *GeminiErrorResponse) Error() string
type GeminiErrorWithStatusCode ¶
type GeminiErrorWithStatusCode struct {
GeminiErrorResponse
StatusCode int `json:"status_code"`
LocalError bool `json:"-"`
}
func OpenaiErrToGeminiErr ¶
func OpenaiErrToGeminiErr(err *types.OpenAIErrorWithStatusCode) *GeminiErrorWithStatusCode
func StringErrorWrapper ¶
func StringErrorWrapper(err string, code string, statusCode int, localError bool) *GeminiErrorWithStatusCode
func (*GeminiErrorWithStatusCode) ToOpenAiError ¶
func (e *GeminiErrorWithStatusCode) ToOpenAiError() *types.OpenAIErrorWithStatusCode
type GeminiErrors ¶
type GeminiErrors []*GeminiErrorResponse
func (*GeminiErrors) Error ¶
func (e *GeminiErrors) Error() *GeminiErrorResponse
type GeminiFileData ¶
type GeminiFunctionCall ¶
type GeminiFunctionCall struct {
Name string `json:"name,omitempty"`
Args map[string]interface{} `json:"args,omitempty"`
}
func (*GeminiFunctionCall) ToOpenAITool ¶
func (g *GeminiFunctionCall) ToOpenAITool() *types.ChatCompletionToolCalls
type GeminiFunctionResponse ¶
type GeminiFunctionResponse struct {
Name string `json:"name,omitempty"`
Response any `json:"response,omitempty"`
WillContinue json.RawMessage `json:"willContinue,omitempty"`
Scheduling json.RawMessage `json:"scheduling,omitempty"`
Parts json.RawMessage `json:"parts,omitempty"`
ID json.RawMessage `json:"id,omitempty"`
}
type GeminiGroundingChunk ¶
type GeminiGroundingChunk struct {
Web *GeminiGroundingChunkWeb `json:"web,omitempty"`
}
type GeminiGroundingChunkWeb ¶
type GeminiGroundingMetadata ¶
type GeminiGroundingMetadata struct {
GroundingChunks []GeminiGroundingChunk `json:"groundingChunks,omitempty"`
WebSearchQueries []string `json:"webSearchQueries,omitempty"`
}
type GeminiImageInstance ¶
type GeminiImageInstance struct {
Prompt string `json:"prompt"`
}
type GeminiImageParameters ¶
type GeminiImagePrediction ¶
type GeminiImageRequest ¶
type GeminiImageRequest struct {
Instances []GeminiImageInstance `json:"instances"`
Parameters GeminiImageParameters `json:"parameters"`
}
type GeminiImageResponse ¶
type GeminiImageResponse struct {
Predictions []GeminiImagePrediction `json:"predictions"`
}
type GeminiInlineData ¶
type GeminiPart ¶
type GeminiPart struct {
FunctionCall *GeminiFunctionCall `json:"functionCall,omitempty"`
FunctionResponse *GeminiFunctionResponse `json:"functionResponse,omitempty"`
Text string `json:"text,omitempty"`
InlineData *GeminiInlineData `json:"inlineData,omitempty"`
FileData *GeminiFileData `json:"fileData,omitempty"`
ExecutableCode *GeminiPartExecutableCode `json:"executableCode,omitempty"`
CodeExecutionResult *GeminiPartCodeExecutionResult `json:"codeExecutionResult,omitempty"`
Thought bool `json:"thought,omitempty"` // whether this part is model "thinking" (reasoning) content
ThoughtSignature json.RawMessage `json:"thoughtSignature,omitempty"`
MediaResolution json.RawMessage `json:"mediaResolution,omitempty"`
VideoMetadata json.RawMessage `json:"videoMetadata,omitempty"`
}
type GeminiProvider ¶
type GeminiProvider struct {
openai.OpenAIProvider
UseOpenaiAPI bool
UseCodeExecution bool
}
func (*GeminiProvider) CreateChatCompletion ¶
func (p *GeminiProvider) CreateChatCompletion(request *types.ChatCompletionRequest) (*types.ChatCompletionResponse, *types.OpenAIErrorWithStatusCode)
func (*GeminiProvider) CreateChatCompletionStream ¶
func (p *GeminiProvider) CreateChatCompletionStream(request *types.ChatCompletionRequest) (requester.StreamReaderInterface[string], *types.OpenAIErrorWithStatusCode)
func (*GeminiProvider) CreateGeminiChat ¶
func (p *GeminiProvider) CreateGeminiChat(request *GeminiChatRequest) (*GeminiChatResponse, *types.OpenAIErrorWithStatusCode)
func (*GeminiProvider) CreateGeminiChatStream ¶
func (p *GeminiProvider) CreateGeminiChatStream(request *GeminiChatRequest) (requester.StreamReaderInterface[string], *types.OpenAIErrorWithStatusCode)
func (*GeminiProvider) CreateImageGenerations ¶
func (p *GeminiProvider) CreateImageGenerations(request *types.ImageRequest) (*types.ImageResponse, *types.OpenAIErrorWithStatusCode)
func (*GeminiProvider) GetFullRequestURL ¶
func (p *GeminiProvider) GetFullRequestURL(requestURL string, modelName string) string
func (*GeminiProvider) GetModelList ¶
func (p *GeminiProvider) GetModelList() ([]string, error)
func (*GeminiProvider) GetRequestHeaders ¶
func (p *GeminiProvider) GetRequestHeaders() (headers map[string]string)
GetRequestHeaders returns the HTTP request headers.
type GeminiProviderFactory ¶
type GeminiProviderFactory struct{}
func (GeminiProviderFactory) Create ¶
func (f GeminiProviderFactory) Create(channel *model.Channel) base.ProviderInterface
Create constructs a GeminiProvider for the given channel.
type GeminiRelayStreamHandler ¶
type GeminiRelayStreamHandler struct {
Usage *types.Usage
Prefix string
ModelName string
// contains filtered or unexported fields
}
func (*GeminiRelayStreamHandler) HandlerStream ¶
func (h *GeminiRelayStreamHandler) HandlerStream(rawLine *[]byte, dataChan chan string, errChan chan error)
type GeminiStreamHandler ¶
type GeminiStreamHandler struct {
Usage *types.Usage
Request *types.ChatCompletionRequest
// contains filtered or unexported fields
}
func (*GeminiStreamHandler) HandlerStream ¶
func (h *GeminiStreamHandler) HandlerStream(rawLine *[]byte, dataChan chan string, errChan chan error)
HandlerStream converts a Gemini stream chunk into an OpenAI chat streaming response body.
type GeminiToolConfig ¶
type GeminiToolConfig struct {
FunctionCallingConfig *GeminiFunctionCallingConfig `json:"functionCallingConfig,omitempty"`
}
type GeminiUsageMetadata ¶
type GeminiUsageMetadata struct {
PromptTokenCount int `json:"promptTokenCount"`
CandidatesTokenCount int `json:"candidatesTokenCount"`
TotalTokenCount int `json:"totalTokenCount"`
CachedContentTokenCount int `json:"cachedContentTokenCount,omitempty"`
ThoughtsTokenCount int `json:"thoughtsTokenCount,omitempty"`
ToolUsePromptTokenCount int `json:"toolUsePromptTokenCount,omitempty"`
PromptTokensDetails []GeminiUsageMetadataDetails `json:"promptTokensDetails,omitempty"`
CandidatesTokensDetails []GeminiUsageMetadataDetails `json:"candidatesTokensDetails,omitempty"`
}
type ModelDetails ¶
type ModelListResponse ¶
type ModelListResponse struct {
Models []ModelDetails `json:"models"`
}
type OpenAIStreamHandler ¶
type ThinkingConfig ¶
Click to show internal directories.
Click to hide internal directories.