providers

package
v0.4.2 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jul 8, 2025 License: MPL-2.0 Imports: 22 Imported by: 0

Documentation

Overview

Package providers implements various AI model service provider connectors supported by MindTrial.

Index

Constants

This section is empty.

Variables

View Source
// Sentinel errors shared by all provider implementations. Several derived
// errors are created with fmt.Errorf("%w: ...") wrapping, so callers should
// match them with errors.Is rather than direct equality.
var (
	// ErrUnknownProviderName is returned when provider name is not recognized.
	ErrUnknownProviderName = errors.New("unknown provider name")
	// ErrCreateClient is returned when provider client initialization fails.
	ErrCreateClient = errors.New("failed to create client")
	// ErrInvalidModelParams is returned when model parameters are invalid.
	ErrInvalidModelParams = errors.New("invalid model parameters for run")
	// ErrCompileSchema is returned when response schema compilation fails.
	ErrCompileSchema = errors.New("failed to compile response schema")
	// ErrGenerateResponse is returned when response generation fails.
	ErrGenerateResponse = errors.New("failed to generate response")
	// ErrCreatePromptRequest is returned when request generation fails.
	ErrCreatePromptRequest = errors.New("failed to create prompt request")
	// ErrFeatureNotSupported is returned when a requested feature is not supported by the provider.
	ErrFeatureNotSupported = errors.New("feature not supported by provider")
	// ErrFileNotSupported is returned when a task context file is not supported by the provider.
	// Wraps ErrFeatureNotSupported, so errors.Is(err, ErrFeatureNotSupported) also matches.
	ErrFileNotSupported = fmt.Errorf("%w: file type", ErrFeatureNotSupported)
	// ErrFileUploadNotSupported is returned when file upload is not supported by the provider.
	// Wraps ErrFeatureNotSupported, so errors.Is(err, ErrFeatureNotSupported) also matches.
	ErrFileUploadNotSupported = fmt.Errorf("%w: file upload", ErrFeatureNotSupported)
	// ErrRetryable is returned when an operation can be retried.
	ErrRetryable = errors.New("retryable error")
)
View Source
// DefaultResponseFormatInstruction lazily builds (once, on first use) the
// default response formatting instruction, embedding the Result JSON schema,
// for AI models that need explicit output-format guidance. It panics wrapping
// ErrCompileSchema if the schema cannot be marshaled, since that indicates a
// programming error rather than a recoverable runtime condition.
var DefaultResponseFormatInstruction = sync.OnceValue(func() string {
	schemaJSON, marshalErr := json.Marshal(ResultJSONSchema())
	if marshalErr != nil {
		panic(fmt.Errorf("%w: %v", ErrCompileSchema, marshalErr))
	}
	return "Structure the response according to this JSON schema: " + string(schemaJSON)
})

DefaultResponseFormatInstruction generates default response formatting instruction to be passed to AI models that require it.

View Source
// ResultJSONSchema lazily reflects and caches the JSON schema for the Result
// type. The reflector inlines all definitions (DoNotReference) and forbids
// additional properties so the schema is strict and self-contained.
var ResultJSONSchema = sync.OnceValue(func() *jsonschema.Schema {
	r := jsonschema.Reflector{
		DoNotReference:            true,
		AllowAdditionalProperties: false,
	}
	return r.Reflect(Result{})
})

ResultJSONSchema is a lazily initialized JSON schema for the Result type.

View Source
// ResultJSONSchemaRaw lazily produces the Result JSON schema as a generic
// map[string]interface{}, for provider SDKs that accept schemas only in raw
// map form. It round-trips ResultJSONSchema() through JSON and panics
// wrapping ErrCompileSchema on any marshal/unmarshal failure (a programming
// error, not a runtime condition).
var ResultJSONSchemaRaw = sync.OnceValue(func() map[string]interface{} {
	encoded, err := json.Marshal(ResultJSONSchema())
	if err != nil {
		panic(fmt.Errorf("%w: %v", ErrCompileSchema, err))
	}

	raw := make(map[string]interface{})
	if err = json.Unmarshal(encoded, &raw); err != nil {
		panic(fmt.Errorf("%w: %v", ErrCompileSchema, err))
	}

	return raw
})

ResultJSONSchemaRaw is a lazily initialized raw map representation of the JSON schema for the Result type, for use with provider SDKs that require the schema as a generic map rather than a *jsonschema.Schema.

Functions

func DefaultAnswerFormatInstruction

func DefaultAnswerFormatInstruction(task config.Task) string

DefaultAnswerFormatInstruction generates default answer formatting instruction for a given task to be passed to the AI model.

func DefaultTaskFileNameInstruction

func DefaultTaskFileNameInstruction(file config.TaskFile) string

DefaultTaskFileNameInstruction generates default task file name instruction to be passed to AI models that require it.

func WrapErrGenerateResponse added in v0.4.0

func WrapErrGenerateResponse(err error) error

WrapErrGenerateResponse wraps an error as a generate response error, preserving the original error chain.

func WrapErrRetryable added in v0.4.0

func WrapErrRetryable(err error) error

WrapErrRetryable wraps an error as retryable, preserving the original error chain.

Types

type Anthropic

type Anthropic struct {
	// contains filtered or unexported fields
}

Anthropic implements the Provider interface for Anthropic generative models.

func NewAnthropic

func NewAnthropic(cfg config.AnthropicClientConfig) *Anthropic

NewAnthropic creates a new Anthropic provider instance with the given configuration.

func (*Anthropic) Close

func (o *Anthropic) Close(ctx context.Context) error

func (Anthropic) Name

func (o Anthropic) Name() string

func (*Anthropic) Run

func (o *Anthropic) Run(ctx context.Context, cfg config.RunConfig, task config.Task) (result Result, err error)

func (Anthropic) Validator

func (o Anthropic) Validator(expected utils.StringSet, validationRules config.ValidationRules) Validator

type Deepseek

type Deepseek struct {
	// contains filtered or unexported fields
}

Deepseek implements the Provider interface for Deepseek generative models.

func NewDeepseek

func NewDeepseek(cfg config.DeepseekClientConfig) (*Deepseek, error)

NewDeepseek creates a new Deepseek provider instance with the given configuration.

func (*Deepseek) Close

func (o *Deepseek) Close(ctx context.Context) error

func (Deepseek) Name

func (o Deepseek) Name() string

func (*Deepseek) Run

func (o *Deepseek) Run(ctx context.Context, cfg config.RunConfig, task config.Task) (result Result, err error)

func (Deepseek) Validator

func (o Deepseek) Validator(expected utils.StringSet, validationRules config.ValidationRules) Validator

type ErrUnmarshalResponse

// ErrUnmarshalResponse is a structured error returned when a model response
// cannot be unmarshaled; it preserves the raw payload and stop reason for
// debugging. Extract it from an error chain with errors.As.
type ErrUnmarshalResponse struct {
	// Cause is the underlying error that caused the unmarshaling to fail.
	Cause error
	// RawMessage is the raw message that failed to be unmarshaled.
	RawMessage []byte
	// StopReason contains the reason why the AI model stopped generating the response.
	StopReason []byte
}

ErrUnmarshalResponse is returned when response unmarshaling fails.

func NewErrUnmarshalResponse

func NewErrUnmarshalResponse(cause error, rawMessage []byte, stopReason []byte) *ErrUnmarshalResponse

NewErrUnmarshalResponse creates a new ErrUnmarshalResponse instance.

func (*ErrUnmarshalResponse) Details

func (e *ErrUnmarshalResponse) Details() string

Details returns a formatted string containing the stop reason and raw message from the ErrUnmarshalResponse error. This provides additional context for debugging and understanding the error.

func (*ErrUnmarshalResponse) Error

func (e *ErrUnmarshalResponse) Error() string

type GoogleAI

type GoogleAI struct {
	// contains filtered or unexported fields
}

GoogleAI implements the Provider interface for Google AI generative models.

func NewGoogleAI

func NewGoogleAI(ctx context.Context, cfg config.GoogleAIClientConfig) (*GoogleAI, error)

NewGoogleAI creates a new GoogleAI provider instance with the given configuration. It returns an error if client initialization fails.

func (*GoogleAI) Close

func (o *GoogleAI) Close(ctx context.Context) error

func (GoogleAI) Name

func (o GoogleAI) Name() string

func (*GoogleAI) Run

func (o *GoogleAI) Run(ctx context.Context, cfg config.RunConfig, task config.Task) (result Result, err error)

func (GoogleAI) Validator

func (o GoogleAI) Validator(expected utils.StringSet, validationRules config.ValidationRules) Validator

type MistralAI added in v0.4.0

type MistralAI struct {
	// contains filtered or unexported fields
}

MistralAI implements the Provider interface for Mistral AI generative models.

func NewMistralAI added in v0.4.0

func NewMistralAI(cfg config.MistralAIClientConfig) (*MistralAI, error)

NewMistralAI creates a new Mistral AI provider instance with the given configuration.

func (*MistralAI) Close added in v0.4.0

func (o *MistralAI) Close(ctx context.Context) error

func (MistralAI) Name added in v0.4.0

func (o MistralAI) Name() string

func (*MistralAI) Run added in v0.4.0

func (o *MistralAI) Run(ctx context.Context, cfg config.RunConfig, task config.Task) (result Result, err error)

func (MistralAI) Validator added in v0.4.0

func (o MistralAI) Validator(expected utils.StringSet, validationRules config.ValidationRules) Validator

type OpenAI

type OpenAI struct {
	// contains filtered or unexported fields
}

OpenAI implements the Provider interface for OpenAI generative models.

func NewOpenAI

func NewOpenAI(cfg config.OpenAIClientConfig) *OpenAI

NewOpenAI creates a new OpenAI provider instance with the given configuration.

func (*OpenAI) Close

func (o *OpenAI) Close(ctx context.Context) error

func (OpenAI) Name

func (o OpenAI) Name() string

func (*OpenAI) Run

func (o *OpenAI) Run(ctx context.Context, cfg config.RunConfig, task config.Task) (result Result, err error)

func (OpenAI) Validator

func (o OpenAI) Validator(expected utils.StringSet, validationRules config.ValidationRules) Validator

type Provider

// Provider is the common interface through which MindTrial interacts with an
// AI model service (Anthropic, Deepseek, GoogleAI, MistralAI, OpenAI, ...).
type Provider interface {
	// Name returns the provider's unique identifier.
	Name() string
	// Validator creates a validator for checking response correctness against expected answers.
	// The expected parameter contains the set of accepted valid answers.
	// The validationRules parameter controls how the validation should be performed.
	Validator(expected utils.StringSet, validationRules config.ValidationRules) Validator
	// Run executes a task using specified configuration and returns the result.
	Run(ctx context.Context, cfg config.RunConfig, task config.Task) (result Result, err error)
	// Close releases resources when the provider is no longer needed.
	Close(ctx context.Context) error
}

Provider interacts with AI model services.

func NewProvider

func NewProvider(ctx context.Context, cfg config.ProviderConfig) (Provider, error)

NewProvider creates a new AI model provider based on the given configuration. It returns an error if the provider name is unknown or initialization fails.

type Result

// Result represents the structured response received from an AI model.
// All three exported fields are required by the generated JSON schema
// (validate:"required"); additional run metadata (duration, prompts, usage)
// is kept in unexported fields and exposed via accessor methods.
type Result struct {
	// Title is a brief summary of the response.
	Title string `json:"title" validate:"required"`
	// Explanation is a detailed explanation of the answer.
	Explanation string `json:"explanation" validate:"required"`
	// FinalAnswer is the final answer to the task's query.
	FinalAnswer string `json:"final_answer" validate:"required"`
	// contains filtered or unexported fields
}

Result represents the structured response received from an AI model.

func (Result) Explain

func (r Result) Explain() string

Explain returns a formatted explanation of the result as generated by the AI model.

func (Result) GetDuration

func (r Result) GetDuration() time.Duration

GetDuration returns the time duration it took to generate this result.

func (Result) GetPrompts

func (r Result) GetPrompts() []string

GetPrompts returns the prompts used to generate this result.

func (Result) GetUsage

func (r Result) GetUsage() Usage

GetUsage returns the token usage statistics for this result.

type Usage

// Usage represents the token usage statistics reported for a response.
// Fields are pointers so "not reported by the provider" (nil) is
// distinguishable from zero; both are excluded from JSON output (json:"-").
type Usage struct {
	InputTokens  *int64 `json:"-"` // Tokens used by the input if available.
	OutputTokens *int64 `json:"-"` // Tokens used by the output if available.
}

Usage represents the token usage statistics for a response.

type Validator

// Validator verifies AI model responses against expected answers; instances
// are created per task via Provider.Validator or NewDefaultValidator.
type Validator interface {
	// IsCorrect checks if result matches expected value.
	IsCorrect(ctx context.Context, actual Result) bool
	// ToCanonical normalizes string for validation.
	ToCanonical(value string) string
}

Validator verifies AI model responses.

func NewDefaultValidator

func NewDefaultValidator(expected utils.StringSet, validationRules config.ValidationRules) Validator

NewDefaultValidator returns a new Validator to check results against the provided expected value(s).

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL