providers

package
v0.7.2 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Aug 28, 2025 License: MPL-2.0 Imports: 23 Imported by: 0

Documentation

Overview

Package providers implements various AI model service provider connectors supported by MindTrial.

Index

Constants

This section is empty.

Variables

View Source
var (
	// ErrUnknownProviderName is returned when the provider name is not recognized.
	ErrUnknownProviderName = errors.New("unknown provider name")
	// ErrCreateClient is returned when provider client initialization fails.
	ErrCreateClient = errors.New("failed to create client")
	// ErrInvalidModelParams is returned when model parameters are invalid for a run.
	ErrInvalidModelParams = errors.New("invalid model parameters for run")
	// ErrCompileSchema is returned when response schema compilation fails.
	ErrCompileSchema = errors.New("failed to compile response schema")
	// ErrGenerateResponse is returned when response generation fails.
	ErrGenerateResponse = errors.New("failed to generate response")
	// ErrCreatePromptRequest is returned when prompt request creation fails.
	ErrCreatePromptRequest = errors.New("failed to create prompt request")
	// ErrFeatureNotSupported is returned when a requested feature is not supported by the provider.
	ErrFeatureNotSupported = errors.New("feature not supported by provider")
	// ErrFileNotSupported is returned when a task context file type is not supported by the
	// provider. It wraps ErrFeatureNotSupported, so errors.Is matches both sentinels.
	ErrFileNotSupported = fmt.Errorf("%w: file type", ErrFeatureNotSupported)
	// ErrFileUploadNotSupported is returned when file upload is not supported by the
	// provider. It wraps ErrFeatureNotSupported, so errors.Is matches both sentinels.
	ErrFileUploadNotSupported = fmt.Errorf("%w: file upload", ErrFeatureNotSupported)
	// ErrRetryable marks an operation failure that can safely be retried.
	ErrRetryable = errors.New("retryable error")
)
View Source
var DefaultResponseFormatInstruction = sync.OnceValue(func() string {
	// Serialize the lazily built Result schema exactly once. Marshaling a
	// reflected schema should never fail; if it does, it is a programming
	// error, so panic with the schema-compilation sentinel.
	raw, err := json.Marshal(ResultJSONSchema())
	if err != nil {
		panic(fmt.Errorf("%w: %v", ErrCompileSchema, err))
	}
	return "Structure the response according to this JSON schema: " + string(raw)
})

DefaultResponseFormatInstruction generates default response formatting instruction to be passed to AI models that require it.

View Source
var ResultJSONSchema = sync.OnceValue(func() *jsonschema.Schema {
	// Reflect the Result type exactly once. Definitions are inlined
	// (DoNotReference) and extra keys are forbidden so providers receive a
	// strict, self-contained schema.
	r := jsonschema.Reflector{
		DoNotReference:            true,
		AllowAdditionalProperties: false,
	}
	return r.Reflect(Result{})
})

ResultJSONSchema is a lazily initialized JSON schema for the Result type.

View Source
var ResultJSONSchemaRaw = sync.OnceValue(func() map[string]interface{} {
	// Round-trip the typed schema through JSON to obtain a generic map form,
	// computed once. Either marshal or unmarshal failing indicates a
	// programming error, so panic with the schema-compilation sentinel.
	var generic map[string]interface{}
	raw, err := json.Marshal(ResultJSONSchema())
	if err == nil {
		err = json.Unmarshal(raw, &generic)
	}
	if err != nil {
		panic(fmt.Errorf("%w: %v", ErrCompileSchema, err))
	}
	return generic
})

ResultJSONSchemaRaw is the lazily initialized JSON schema for the Result type, represented as a generic map[string]interface{} for providers whose SDKs require an untyped schema.

Functions

func DefaultAnswerFormatInstruction

func DefaultAnswerFormatInstruction(task config.Task) string

DefaultAnswerFormatInstruction generates default answer formatting instruction for a given task to be passed to the AI model.

func DefaultTaskFileNameInstruction

func DefaultTaskFileNameInstruction(file config.TaskFile) string

DefaultTaskFileNameInstruction generates default task file name instruction to be passed to AI models that require it.

func WrapErrGenerateResponse added in v0.4.0

func WrapErrGenerateResponse(err error) error

WrapErrGenerateResponse wraps an error as a generate response error, preserving the original error chain.

func WrapErrRetryable added in v0.4.0

func WrapErrRetryable(err error) error

WrapErrRetryable wraps an error as retryable, preserving the original error chain.

Types

type Anthropic

type Anthropic struct {
	// contains filtered or unexported fields
}

Anthropic implements the Provider interface for Anthropic generative models.

func NewAnthropic

func NewAnthropic(cfg config.AnthropicClientConfig) *Anthropic

NewAnthropic creates a new Anthropic provider instance with the given configuration.

func (*Anthropic) Close

func (o *Anthropic) Close(ctx context.Context) error

func (Anthropic) Name

func (o Anthropic) Name() string

func (*Anthropic) Run

func (o *Anthropic) Run(ctx context.Context, _ logging.Logger, cfg config.RunConfig, task config.Task) (result Result, err error)

type Deepseek

type Deepseek struct {
	// contains filtered or unexported fields
}

Deepseek implements the Provider interface for Deepseek generative models.

func NewDeepseek

func NewDeepseek(cfg config.DeepseekClientConfig) (*Deepseek, error)

NewDeepseek creates a new Deepseek provider instance with the given configuration.

func (*Deepseek) Close

func (o *Deepseek) Close(ctx context.Context) error

func (Deepseek) Name

func (o Deepseek) Name() string

func (*Deepseek) Run

func (o *Deepseek) Run(ctx context.Context, _ logging.Logger, cfg config.RunConfig, task config.Task) (result Result, err error)

type ErrAPIResponse added in v0.7.2

// ErrAPIResponse holds additional information about an API error returned by
// a provider, pairing the causal error with the raw HTTP response body when
// one was captured.
type ErrAPIResponse struct {
	// Cause is the underlying error that caused the API call to fail.
	Cause error
	// Body contains the raw HTTP response body returned by the provider API when available.
	Body []byte
}

ErrAPIResponse holds additional information about an API error returned by a provider, including the raw HTTP response body when available.

func NewErrAPIResponse added in v0.7.2

func NewErrAPIResponse(cause error, body []byte) *ErrAPIResponse

NewErrAPIResponse creates a new ErrAPIResponse instance.

func (*ErrAPIResponse) Error added in v0.7.2

func (e *ErrAPIResponse) Error() string

func (*ErrAPIResponse) Unwrap added in v0.7.2

func (e *ErrAPIResponse) Unwrap() error

type ErrUnmarshalResponse

// ErrUnmarshalResponse is returned when unmarshaling a model response fails,
// retaining the raw payload and the model's stop reason for diagnostics.
type ErrUnmarshalResponse struct {
	// Cause is the underlying error that caused the unmarshaling to fail.
	Cause error
	// RawMessage is the raw message that failed to be unmarshaled.
	RawMessage []byte
	// StopReason contains the reason why the AI model stopped generating the response.
	StopReason []byte
}

ErrUnmarshalResponse is returned when response unmarshaling fails.

func NewErrUnmarshalResponse

func NewErrUnmarshalResponse(cause error, rawMessage []byte, stopReason []byte) *ErrUnmarshalResponse

NewErrUnmarshalResponse creates a new ErrUnmarshalResponse instance.

func (*ErrUnmarshalResponse) Error

func (e *ErrUnmarshalResponse) Error() string

func (*ErrUnmarshalResponse) Unwrap added in v0.7.2

func (e *ErrUnmarshalResponse) Unwrap() error

type GoogleAI

type GoogleAI struct {
	// contains filtered or unexported fields
}

GoogleAI implements the Provider interface for Google AI generative models.

func NewGoogleAI

func NewGoogleAI(ctx context.Context, cfg config.GoogleAIClientConfig) (*GoogleAI, error)

NewGoogleAI creates a new GoogleAI provider instance with the given configuration. It returns an error if client initialization fails.

func (*GoogleAI) Close

func (o *GoogleAI) Close(ctx context.Context) error

func (GoogleAI) Name

func (o GoogleAI) Name() string

func (*GoogleAI) Run

func (o *GoogleAI) Run(ctx context.Context, _ logging.Logger, cfg config.RunConfig, task config.Task) (result Result, err error)

type MistralAI added in v0.4.0

type MistralAI struct {
	// contains filtered or unexported fields
}

MistralAI implements the Provider interface for Mistral AI generative models.

func NewMistralAI added in v0.4.0

func NewMistralAI(cfg config.MistralAIClientConfig) (*MistralAI, error)

NewMistralAI creates a new Mistral AI provider instance with the given configuration.

func (*MistralAI) Close added in v0.4.0

func (o *MistralAI) Close(ctx context.Context) error

func (MistralAI) Name added in v0.4.0

func (o MistralAI) Name() string

func (*MistralAI) Run added in v0.4.0

func (o *MistralAI) Run(ctx context.Context, _ logging.Logger, cfg config.RunConfig, task config.Task) (result Result, err error)

type OpenAI

type OpenAI struct {
	// contains filtered or unexported fields
}

OpenAI implements the Provider interface for OpenAI generative models.

func NewOpenAI

func NewOpenAI(cfg config.OpenAIClientConfig) *OpenAI

NewOpenAI creates a new OpenAI provider instance with the given configuration.

func (*OpenAI) Close

func (o *OpenAI) Close(ctx context.Context) error

func (OpenAI) Name

func (o OpenAI) Name() string

func (*OpenAI) Run

func (o *OpenAI) Run(ctx context.Context, _ logging.Logger, cfg config.RunConfig, task config.Task) (result Result, err error)

type Provider

// Provider is the common contract for interacting with an AI model service.
type Provider interface {
	// Name returns the provider's unique identifier.
	Name() string
	// Run executes a task using the specified configuration and returns the result.
	Run(ctx context.Context, logger logging.Logger, cfg config.RunConfig, task config.Task) (result Result, err error)
	// Close releases resources when the provider is no longer needed.
	Close(ctx context.Context) error
}

Provider interacts with AI model services.

func NewProvider

func NewProvider(ctx context.Context, cfg config.ProviderConfig) (Provider, error)

NewProvider creates a new AI model provider based on the given configuration. It returns an error if the provider name is unknown or initialization fails.

type Result

type Result struct {
	// Title is a brief summary of the response.
	Title string `json:"title" validate:"required"`
	// Explanation is a detailed explanation of the answer.
	Explanation string `json:"explanation" validate:"required"`
	// FinalAnswer is the final answer to the task's query.
	FinalAnswer string `json:"final_answer" validate:"required"`
	// contains filtered or unexported fields
}

Result represents the structured response received from an AI model.

func (Result) Explain

func (r Result) Explain() string

Explain returns a formatted explanation of the result as generated by the AI model.

func (Result) GetDuration

func (r Result) GetDuration() time.Duration

GetDuration returns the time duration it took to generate this result.

func (Result) GetPrompts

func (r Result) GetPrompts() []string

GetPrompts returns the prompts used to generate this result.

func (Result) GetUsage

func (r Result) GetUsage() Usage

GetUsage returns the token usage statistics for this result.

type Usage

// Usage represents the token usage statistics for a response. Fields are nil
// when the provider does not report the corresponding count; both are
// excluded from JSON serialization.
type Usage struct {
	InputTokens  *int64 `json:"-"` // Tokens used by the input if available.
	OutputTokens *int64 `json:"-"` // Tokens used by the output if available.
}

Usage represents the token usage statistics for a response.

type XAI added in v0.7.2

type XAI struct {
	// contains filtered or unexported fields
}

XAI implements the Provider interface for xAI.

func NewXAI added in v0.7.2

func NewXAI(cfg config.XAIClientConfig) (*XAI, error)

NewXAI creates a new xAI provider instance with the given configuration.

func (*XAI) Close added in v0.7.2

func (o *XAI) Close(ctx context.Context) error

func (XAI) Name added in v0.7.2

func (o XAI) Name() string

func (*XAI) Run added in v0.7.2

func (o *XAI) Run(ctx context.Context, _ logging.Logger, cfg config.RunConfig, task config.Task) (result Result, err error)

Directories

Path Synopsis
execution	Package execution provides unified provider execution patterns for the MindTrial application.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL