Documentation
¶
Index ¶
- Constants
- func BuildErrorEvent(message, errorType, code string) map[string]interface{}
- func ConvertStreamResultToResponse(result *ChatGPTBackendStreamResult, model string) (*responses.Response, error)
- func FilterSpecialFields(extras map[string]interface{}) map[string]interface{}
- func GenerateObfuscationString() string
- func HandleAnthropicToGoogleStreamResponse(c *gin.Context, ...) error
- func HandleAnthropicToOpenAIResponsesStream(hc *protocol.HandleContext, ...) (*protocol.TokenUsage, error)
- func HandleAnthropicToOpenAIStreamResponse(c *gin.Context, req *anthropic.MessageNewParams, ...) (int, int, error)
- func HandleAnthropicV1BetaStream(hc *protocol.HandleContext, req anthropic.BetaMessageNewParams, ...) (*protocol.TokenUsage, error)
- func HandleAnthropicV1Stream(hc *protocol.HandleContext, req anthropic.MessageNewParams, ...) (*protocol.TokenUsage, error)
- func HandleGoogleToAnthropicBetaStreamResponse(c *gin.Context, stream iter.Seq2[*genai.GenerateContentResponse, error], ...) (*protocol.TokenUsage, error)
- func HandleGoogleToAnthropicStreamResponse(c *gin.Context, stream iter.Seq2[*genai.GenerateContentResponse, error], ...) (*protocol.TokenUsage, error)
- func HandleGoogleToOpenAIStreamResponse(c *gin.Context, stream iter.Seq2[*genai.GenerateContentResponse, error], ...) error
- func HandleOpenAIChatStream(hc *protocol.HandleContext, ...) (*protocol.TokenUsage, error)
- func HandleOpenAIChatToResponsesStream(c *gin.Context, stream *openaistream.Stream[openai.ChatCompletionChunk], ...) (*protocol.TokenUsage, error)
- func HandleOpenAIResponsesStream(hc *protocol.HandleContext, ...) (*protocol.TokenUsage, error)
- func HandleOpenAIToAnthropicBetaStream(c *gin.Context, req *openai.ChatCompletionNewParams, ...) (*protocol.TokenUsage, error)
- func HandleOpenAIToAnthropicStreamResponse(c *gin.Context, req *openai.ChatCompletionNewParams, ...) (*protocol.TokenUsage, error)
- func HandleOpenAIToGoogleStreamResponse(c *gin.Context, stream *openaistream.Stream[openai.ChatCompletionChunk], ...) error
- func HandleResponsesToAnthropicBetaStream(c *gin.Context, ...) (*protocol.TokenUsage, error)
- func HandleResponsesToAnthropicV1Stream(c *gin.Context, ...) (*protocol.TokenUsage, error)
- func MarshalAndSendErrorEvent(c *gin.Context, message, errorType, code string)
- func NewExampleTool() openai.ChatCompletionToolUnionParam
- func SendFinishEvent(c *gin.Context)
- func SendForwardingError(c *gin.Context, err error)
- func SendInternalError(c *gin.Context, errMsg string)
- func SendInvalidRequestBodyError(c *gin.Context, err error)
- func SendSSErrorEvent(c *gin.Context, message, errorType string)
- func SendSSErrorEventJSON(c *gin.Context, errorJSON []byte)
- func SendStreamingError(c *gin.Context, err error)
- type AnthropicStreamAssembler
- func (a *AnthropicStreamAssembler) Finish(model string, inputTokens, outputTokens int) *anthropic.Message
- func (a *AnthropicStreamAssembler) RecordV1BetaEvent(event *anthropic.BetaRawMessageStreamEventUnion)
- func (a *AnthropicStreamAssembler) RecordV1Event(event *anthropic.MessageStreamEventUnion)
- func (a *AnthropicStreamAssembler) SetUsage(inputTokens, outputTokens int)
- type ChatGPTBackendStreamResult
- type StreamEventRecorder
Constants ¶
const (
// OpenAI extra field names that map to Anthropic content blocks
OpenaiFieldReasoningContent = "reasoning_content"
)
Variables ¶
This section is empty.
Functions ¶
func BuildErrorEvent ¶
BuildErrorEvent builds a standard error event map
func ConvertStreamResultToResponse ¶ added in v0.260224.0
func ConvertStreamResultToResponse(result *ChatGPTBackendStreamResult, model string) (*responses.Response, error)
ConvertStreamResultToResponse converts a ChatGPTBackendStreamResult to OpenAI Response format.
func FilterSpecialFields ¶
FilterSpecialFields removes special fields that have dedicated content blocks. For example, reasoning_content is handled as a thinking block, not merged into text_delta.
func GenerateObfuscationString ¶
func GenerateObfuscationString() string
GenerateObfuscationString generates a random string similar to "KOJz1A"
func HandleAnthropicToGoogleStreamResponse ¶
func HandleAnthropicToGoogleStreamResponse(c *gin.Context, stream *anthropicstream.Stream[anthropic.MessageStreamEventUnion], responseModel string) error
HandleAnthropicToGoogleStreamResponse processes Anthropic streaming events and converts them to Google format
func HandleAnthropicToOpenAIResponsesStream ¶
func HandleAnthropicToOpenAIResponsesStream( hc *protocol.HandleContext, stream *anthropicstream.Stream[anthropic.MessageStreamEventUnion], responseModel string, ) (*protocol.TokenUsage, error)
HandleAnthropicToOpenAIResponsesStream converts Anthropic streaming events to OpenAI Responses API format.
Returns (UsageStat, error) for usage tracking and error handling.
func HandleAnthropicToOpenAIStreamResponse ¶ added in v0.260124.900
func HandleAnthropicToOpenAIStreamResponse(c *gin.Context, req *anthropic.MessageNewParams, stream *anthropicstream.Stream[anthropic.MessageStreamEventUnion], responseModel string, disableStreamUsage bool) (int, int, error)
HandleAnthropicToOpenAIStreamResponse processes Anthropic streaming events and converts them to OpenAI format. Returns inputTokens, outputTokens, and error for usage tracking.
func HandleAnthropicV1BetaStream ¶
func HandleAnthropicV1BetaStream(hc *protocol.HandleContext, req anthropic.BetaMessageNewParams, streamResp *anthropicstream.Stream[anthropic.BetaRawMessageStreamEventUnion]) (*protocol.TokenUsage, error)
HandleAnthropicV1BetaStream handles Anthropic v1 beta streaming response. Returns (UsageStat, error)
func HandleAnthropicV1Stream ¶
func HandleAnthropicV1Stream(hc *protocol.HandleContext, req anthropic.MessageNewParams, streamResp *anthropicstream.Stream[anthropic.MessageStreamEventUnion]) (*protocol.TokenUsage, error)
HandleAnthropicV1Stream handles Anthropic v1 streaming response. Returns (UsageStat, error)
func HandleGoogleToAnthropicBetaStreamResponse ¶
func HandleGoogleToAnthropicBetaStreamResponse(c *gin.Context, stream iter.Seq2[*genai.GenerateContentResponse, error], responseModel string) (*protocol.TokenUsage, error)
HandleGoogleToAnthropicBetaStreamResponse processes Google streaming events and converts them to Anthropic beta format. Returns UsageStat containing token usage information for tracking.
func HandleGoogleToAnthropicStreamResponse ¶
func HandleGoogleToAnthropicStreamResponse(c *gin.Context, stream iter.Seq2[*genai.GenerateContentResponse, error], responseModel string) (*protocol.TokenUsage, error)
HandleGoogleToAnthropicStreamResponse processes Google streaming events and converts them to Anthropic format. Returns UsageStat containing token usage information for tracking.
func HandleGoogleToOpenAIStreamResponse ¶
func HandleGoogleToOpenAIStreamResponse(c *gin.Context, stream iter.Seq2[*genai.GenerateContentResponse, error], responseModel string) error
HandleGoogleToOpenAIStreamResponse processes Google streaming events and converts them to OpenAI format
func HandleOpenAIChatStream ¶
func HandleOpenAIChatStream(hc *protocol.HandleContext, stream *openaistream.Stream[openai.ChatCompletionChunk], req *openai.ChatCompletionNewParams) (*protocol.TokenUsage, error)
HandleOpenAIChatStream handles OpenAI chat streaming response. Returns (UsageStat, error)
func HandleOpenAIChatToResponsesStream ¶
func HandleOpenAIChatToResponsesStream(c *gin.Context, stream *openaistream.Stream[openai.ChatCompletionChunk], responseModel string) (*protocol.TokenUsage, error)
HandleOpenAIChatToResponsesStream converts OpenAI Chat Completions streaming to Responses API format. Returns UsageStat containing token usage information for tracking.
func HandleOpenAIResponsesStream ¶
func HandleOpenAIResponsesStream(hc *protocol.HandleContext, stream *openaistream.Stream[responses.ResponseStreamEventUnion], responseModel string) (*protocol.TokenUsage, error)
HandleOpenAIResponsesStream handles OpenAI Responses API streaming response. Returns (UsageStat, error)
func HandleOpenAIToAnthropicBetaStream ¶
func HandleOpenAIToAnthropicBetaStream(c *gin.Context, req *openai.ChatCompletionNewParams, stream *openaistream.Stream[openai.ChatCompletionChunk], responseModel string) (*protocol.TokenUsage, error)
HandleOpenAIToAnthropicBetaStream processes OpenAI streaming events and converts them to Anthropic beta format. Returns UsageStat containing token usage information for tracking.
func HandleOpenAIToAnthropicStreamResponse ¶
func HandleOpenAIToAnthropicStreamResponse(c *gin.Context, req *openai.ChatCompletionNewParams, stream *openaistream.Stream[openai.ChatCompletionChunk], responseModel string) (*protocol.TokenUsage, error)
HandleOpenAIToAnthropicStreamResponse processes OpenAI streaming events and converts them to Anthropic format. Returns UsageStat containing token usage information for tracking.
func HandleOpenAIToGoogleStreamResponse ¶
func HandleOpenAIToGoogleStreamResponse(c *gin.Context, stream *openaistream.Stream[openai.ChatCompletionChunk], responseModel string) error
HandleOpenAIToGoogleStreamResponse processes OpenAI streaming events and converts them to Google format. This handler writes Google-format streaming responses to the gin.Context.
func HandleResponsesToAnthropicBetaStream ¶
func HandleResponsesToAnthropicBetaStream(c *gin.Context, stream *openaistream.Stream[responses.ResponseStreamEventUnion], responseModel string) (*protocol.TokenUsage, error)
HandleResponsesToAnthropicBetaStream processes OpenAI Responses API streaming events and converts them to Anthropic beta format. This is a thin wrapper that uses the shared core logic with beta event senders. Returns UsageStat containing token usage information for tracking.
func HandleResponsesToAnthropicV1Stream ¶
func HandleResponsesToAnthropicV1Stream(c *gin.Context, stream *openaistream.Stream[responses.ResponseStreamEventUnion], responseModel string) (*protocol.TokenUsage, error)
HandleResponsesToAnthropicV1Stream processes OpenAI Responses API streaming events and converts them to Anthropic v1 format. This is a thin wrapper that uses the shared core logic with v1 event senders. Returns UsageStat containing token usage information for tracking.
func MarshalAndSendErrorEvent ¶
MarshalAndSendErrorEvent marshals and sends an error event
func NewExampleTool ¶
func NewExampleTool() openai.ChatCompletionToolUnionParam
func SendFinishEvent ¶
SendFinishEvent sends a message_stop event to indicate completion
func SendForwardingError ¶
SendForwardingError sends an error response for request forwarding failures
func SendInternalError ¶
SendInternalError sends an error response for internal errors
func SendInvalidRequestBodyError ¶
SendInvalidRequestBodyError sends an error response for invalid request body
func SendSSErrorEvent ¶
SendSSErrorEvent sends an error event through SSE
func SendSSErrorEventJSON ¶
SendSSErrorEventJSON sends a JSON error event through SSE
func SendStreamingError ¶
SendStreamingError sends an error response for streaming request failures
Types ¶
type AnthropicStreamAssembler ¶
type AnthropicStreamAssembler struct {
// contains filtered or unexported fields
}
AnthropicStreamAssembler assembles Anthropic streaming responses. It is a pure assembler that doesn't depend on recording logic.
func NewAnthropicStreamAssembler ¶
func NewAnthropicStreamAssembler() *AnthropicStreamAssembler
NewAnthropicStreamAssembler creates a new assembler for Anthropic streams
func (*AnthropicStreamAssembler) Finish ¶
func (a *AnthropicStreamAssembler) Finish(model string, inputTokens, outputTokens int) *anthropic.Message
Finish assembles the final response and returns it as anthropic.Message
func (*AnthropicStreamAssembler) RecordV1BetaEvent ¶
func (a *AnthropicStreamAssembler) RecordV1BetaEvent(event *anthropic.BetaRawMessageStreamEventUnion)
RecordV1BetaEvent processes a v1 beta stream event
func (*AnthropicStreamAssembler) RecordV1Event ¶
func (a *AnthropicStreamAssembler) RecordV1Event(event *anthropic.MessageStreamEventUnion)
RecordV1Event processes a v1 stream event
func (*AnthropicStreamAssembler) SetUsage ¶
func (a *AnthropicStreamAssembler) SetUsage(inputTokens, outputTokens int)
SetUsage sets the usage data
type ChatGPTBackendStreamResult ¶ added in v0.260224.0
type ChatGPTBackendStreamResult struct {
ResponseID string
Created int64
InputTokens int
OutputTokens int
Content string
}
ChatGPTBackendStreamResult represents the accumulated result from a ChatGPT backend stream.
func AccumulateChatGPTBackendStream ¶ added in v0.260224.0
func AccumulateChatGPTBackendStream(reader io.Reader) (*ChatGPTBackendStreamResult, error)
AccumulateChatGPTBackendStream reads SSE stream from ChatGPT backend API and accumulates into a result.
type StreamEventRecorder ¶
type StreamEventRecorder interface {
RecordRawMapEvent(eventType string, event map[string]interface{})
}
StreamEventRecorder is an interface for recording stream events during protocol conversion
Source Files
¶
- anthropic_assembler.go
- anthropic_beta_sse.go
- anthropic_constant.go
- anthropic_error.go
- anthropic_passthrough.go
- anthropic_sse.go
- anthropic_to_openai.go
- anthropic_to_openai_responses.go
- any_to_google.go
- chatgpt_backend.go
- google_to_any.go
- openai_chat_to_responses.go
- openai_constant.go
- openai_passthrough.go
- openai_to_anthropic.go
- openai_to_anthropic_beta.go
- openai_util.go
- stream_util.go
- tool_util.go