Documentation
¶
Index ¶
- Constants
- Variables
- func ExtractToolParts(msg *ChatMessage) ([]llms.ContentPart, []llms.ToolCall)
- type ChatMessage
- type LLM
- func (o *LLM) CreateEmbedding(ctx context.Context, inputTexts []string) ([][]float32, error)
- func (o *LLM) GenerateContent(ctx context.Context, messages []llms.Message, options ...llms.CallOption) (*llms.ContentResponse, error)
- func (o *LLM) GetName() string
- func (o *LLM) GetProviderType() llms.ProviderType
- type Option
- func WithAPIVersion(apiVersion string) Option
- func WithBaseURL(baseURL string) Option
- func WithEmbeddingModel(embeddingModel string) Option
- func WithHTTPClient(client openaiclient.Doer) Option
- func WithModel(model string) Option
- func WithOrganization(organization string) Option
- func WithProvider(apiType ProviderType) Option
- func WithResponseFormat(responseFormat *schema.ResponseFormat) Option
- func WithToken(token string) Option
- type ProviderType
Constants ¶
const ( RoleSystem = "system" RoleAssistant = "assistant" RoleUser = "user" RoleFunction = "function" RoleTool = "tool" )
const (
DefaultAPIVersion = "2023-05-15"
)
Variables ¶
var ( ErrEmptyResponse = errors.New("no response") ErrMissingToken = errors.New("missing the OpenAI API key, set it in the OPENAI_API_KEY environment variable") //nolint:lll ErrMissingAzureModel = errors.New("model needs to be provided when using Azure API") ErrMissingAzureEmbeddingModel = errors.New("embeddings model needs to be provided when using Azure API") ErrUnexpectedResponseLength = errors.New("unexpected length of response") )
Functions ¶
func ExtractToolParts ¶
func ExtractToolParts(msg *ChatMessage) ([]llms.ContentPart, []llms.ToolCall)
ExtractToolParts extracts the tool parts from a message.
Types ¶
type ChatMessage ¶
type ChatMessage = openaiclient.ChatMessage
type LLM ¶
type LLM struct {
// contains filtered or unexported fields
}
func (*LLM) CreateEmbedding ¶
CreateEmbedding creates embeddings for the given input texts.
func (*LLM) GenerateContent ¶
func (o *LLM) GenerateContent(ctx context.Context, messages []llms.Message, options ...llms.CallOption) (*llms.ContentResponse, error)
GenerateContent implements the Model interface.
func (*LLM) GetProviderType ¶ added in v0.10.54
func (o *LLM) GetProviderType() llms.ProviderType
GetProviderType implements the Model interface.
type Option ¶
type Option func(*options)
Option is a functional option for the OpenAI client.
func WithAPIVersion ¶
WithAPIVersion passes the api version to the client. If not set, the default value is DefaultAPIVersion.
func WithBaseURL ¶
WithBaseURL passes the OpenAI base url to the client. If not set, the base url is read from the OPENAI_BASE_URL environment variable. If still not set in ENV VAR OPENAI_BASE_URL, then the default value https://api.openai.com/v1 is used.
func WithEmbeddingModel ¶
WithEmbeddingModel passes the OpenAI embedding model to the client. Required when ApiType is Azure.
func WithHTTPClient ¶
func WithHTTPClient(client openaiclient.Doer) Option
WithHTTPClient allows setting a custom HTTP client. If not set, the default value is http.DefaultClient.
func WithModel ¶
WithModel passes the OpenAI model to the client. If not set, the model is read from the OPENAI_MODEL environment variable. Required when ApiType is Azure.
func WithOrganization ¶
WithOrganization passes the OpenAI organization to the client. If not set, the organization is read from the OPENAI_ORGANIZATION environment variable.
func WithProvider ¶ added in v0.14.88
func WithProvider(apiType ProviderType) Option
WithProvider passes the api type to the client. If not set, the default value is ProviderOpenAI.
func WithResponseFormat ¶
func WithResponseFormat(responseFormat *schema.ResponseFormat) Option
WithResponseFormat allows setting a custom response format.
type ProviderType ¶ added in v0.14.88
type ProviderType string
const ( ProviderOpenAI ProviderType = "OPENAI" ProviderAzure ProviderType = "AZURE" ProviderAzureAD ProviderType = "AZURE_AD" ProviderPerplexity ProviderType = "PERPLEXITY" )