provider

package
v0.10.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jan 4, 2026 License: MIT Imports: 1 Imported by: 0

Documentation

Overview

Package provider defines the core interfaces that external LLM providers must implement. External provider packages should import this package to implement the Provider interface.

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type ChatCompletionChoice

// ChatCompletionChoice represents a single choice in the response.
type ChatCompletionChoice struct {
	Index        int      `json:"index"`                 // position of this choice in the Choices slice
	Message      Message  `json:"message"`               // complete message for non-streaming responses
	Delta        *Message `json:"delta,omitempty"`       // incremental message content; presumably set only in streaming chunks — confirm
	FinishReason *string  `json:"finish_reason"`         // nil while generation is still in progress
	Logprobs     any      `json:"logprobs,omitempty"`    // provider-specific log-probability payload, shape varies by provider
}

ChatCompletionChoice represents a single choice in the response

type ChatCompletionChunk

// ChatCompletionChunk represents a chunk in a streaming response.
type ChatCompletionChunk struct {
	ID                string                 `json:"id"`                            // identifier shared by all chunks of one completion
	Object            string                 `json:"object"`                        // object type discriminator
	Created           int64                  `json:"created"`                       // creation time; presumably a Unix timestamp in seconds — confirm
	Model             string                 `json:"model"`                         // model that produced this chunk
	SystemFingerprint *string                `json:"system_fingerprint,omitempty"`  // backend configuration fingerprint, if the provider reports one
	Choices           []ChatCompletionChoice `json:"choices"`                       // incremental choices; see ChatCompletionChoice.Delta
	Usage             *Usage                 `json:"usage,omitempty"`               // token usage; typically only on the final chunk — confirm per provider
	ProviderMetadata  map[string]any         `json:"provider_metadata,omitempty"`   // Provider-specific metadata
}

ChatCompletionChunk represents a chunk in streaming response

type ChatCompletionRequest

// ChatCompletionRequest represents a request for chat completion.
// Pointer fields distinguish "unset" (nil, use provider default) from
// an explicit zero value.
type ChatCompletionRequest struct {
	Model            string         `json:"model"`                       // target model identifier (required)
	Messages         []Message      `json:"messages"`                    // conversation history, oldest first
	MaxTokens        *int           `json:"max_tokens,omitempty"`        // cap on generated tokens; nil = provider default
	Temperature      *float64       `json:"temperature,omitempty"`       // sampling temperature; nil = provider default
	TopP             *float64       `json:"top_p,omitempty"`             // nucleus-sampling probability mass; nil = provider default
	Stream           *bool          `json:"stream,omitempty"`            // request a streaming response when true
	Stop             []string       `json:"stop,omitempty"`              // sequences at which generation stops
	PresencePenalty  *float64       `json:"presence_penalty,omitempty"`  // penalize tokens already present; nil = provider default
	FrequencyPenalty *float64       `json:"frequency_penalty,omitempty"` // penalize tokens by frequency; nil = provider default
	LogitBias        map[string]int `json:"logit_bias,omitempty"`        // per-token logit adjustments, keyed by token ID string
	User             *string        `json:"user,omitempty"`              // end-user identifier for provider-side tracking
	Tools            []Tool         `json:"tools,omitempty"`             // tools the model may call
	ToolChoice       any            `json:"tool_choice,omitempty"`       // tool-selection directive; string or object, provider-defined
}

ChatCompletionRequest represents a request for chat completion

type ChatCompletionResponse

// ChatCompletionResponse represents a response from chat completion.
type ChatCompletionResponse struct {
	ID                string                 `json:"id"`                           // unique identifier for this completion
	Object            string                 `json:"object"`                       // object type discriminator
	Created           int64                  `json:"created"`                      // creation time; presumably a Unix timestamp in seconds — confirm
	Model             string                 `json:"model"`                        // model that produced the response
	SystemFingerprint *string                `json:"system_fingerprint,omitempty"` // backend configuration fingerprint, if the provider reports one
	Choices           []ChatCompletionChoice `json:"choices"`                      // one or more generated choices
	Usage             Usage                  `json:"usage"`                        // token accounting; non-pointer, unlike the streaming chunk
	ProviderMetadata  map[string]any         `json:"provider_metadata,omitempty"`  // Provider-specific metadata
}

ChatCompletionResponse represents a response from chat completion

type ChatCompletionStream

// ChatCompletionStream represents a streaming chat completion response.
// Callers should invoke Recv until it returns an error and always call
// Close to release underlying resources.
type ChatCompletionStream interface {
	// Recv receives the next chunk from the stream.
	// NOTE(review): end-of-stream error convention (e.g. io.EOF) is not
	// visible here — confirm with implementations.
	Recv() (*ChatCompletionChunk, error)

	// Close closes the stream
	Close() error
}

ChatCompletionStream represents a streaming chat completion response

type Message

// Message represents a chat message.
type Message struct {
	Role       Role       `json:"role"`                   // who authored the message: system, user, assistant, or tool
	Content    string     `json:"content"`                // message text
	Name       *string    `json:"name,omitempty"`         // optional author name
	ToolCallID *string    `json:"tool_call_id,omitempty"` // presumably links a tool-role message to the ToolCall it answers — confirm
	ToolCalls  []ToolCall `json:"tool_calls,omitempty"`   // tool invocations requested by the assistant
}

Message represents a chat message

type Provider

// Provider defines the interface that all LLM providers must implement.
// External packages can implement this interface and inject it via
// omnillm.ClientConfig.CustomProvider.
type Provider interface {
	// CreateChatCompletion creates a new chat completion
	CreateChatCompletion(ctx context.Context, req *ChatCompletionRequest) (*ChatCompletionResponse, error)

	// CreateChatCompletionStream creates a streaming chat completion
	CreateChatCompletionStream(ctx context.Context, req *ChatCompletionRequest) (ChatCompletionStream, error)

	// Close closes the provider and cleans up resources
	Close() error

	// Name returns the provider name
	Name() string
}

Provider defines the interface that all LLM providers must implement. External packages can implement this interface and inject via omnillm.ClientConfig.CustomProvider.

Example usage in external package:

import "github.com/agentplexus/omnillm/provider"

// NewMyProvider is an example constructor showing how an external package
// returns its implementation as a provider.Provider.
// (myProvider is the external package's unexported implementation type,
// not defined here.)
func NewMyProvider(apiKey string) provider.Provider {
    return &myProvider{apiKey: apiKey}
}

type Role

type Role string

Role represents the role of a message sender

// Recognized message-sender roles.
const (
	RoleSystem    Role = "system"    // system / instruction messages
	RoleUser      Role = "user"      // end-user messages
	RoleAssistant Role = "assistant" // model-generated messages
	RoleTool      Role = "tool"      // tool results returned to the model
)

type Tool

// Tool represents a tool that can be called.
type Tool struct {
	Type     string   `json:"type"`     // tool kind; presumably "function" — confirm against providers
	Function ToolSpec `json:"function"` // specification of the callable function
}

Tool represents a tool that can be called

type ToolCall

// ToolCall represents a tool function call.
type ToolCall struct {
	ID       string       `json:"id"`       // unique identifier; presumably echoed back via Message.ToolCallID — confirm
	Type     string       `json:"type"`     // call kind; presumably "function" — confirm
	Function ToolFunction `json:"function"` // the function name and serialized arguments
}

ToolCall represents a tool function call

type ToolFunction

// ToolFunction represents the function being called.
type ToolFunction struct {
	Name      string `json:"name"`      // function name to invoke
	Arguments string `json:"arguments"` // arguments as a raw string; presumably JSON-encoded — confirm
}

ToolFunction represents the function being called

type ToolSpec

// ToolSpec defines a tool specification.
type ToolSpec struct {
	Name        string `json:"name"`        // function name exposed to the model
	Description string `json:"description"` // human-readable description shown to the model
	Parameters  any    `json:"parameters"`  // parameter schema; presumably a JSON Schema object — confirm
}

ToolSpec defines a tool specification

type Usage

// Usage represents token usage information.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`     // tokens consumed by the input messages
	CompletionTokens int `json:"completion_tokens"` // tokens generated in the response
	TotalTokens      int `json:"total_tokens"`      // prompt + completion total
}

Usage represents token usage information

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL