Documentation
¶
Index ¶
- type OllamaProvider
- func (o *OllamaProvider) Complete(ctx context.Context, req ai.CompletionRequest) (*ai.CompletionResponse, error)
- func (o *OllamaProvider) Generate(ctx context.Context, prompt, model string) (string, error)
- func (o *OllamaProvider) IsConfigured() bool
- func (o *OllamaProvider) ListModels(ctx context.Context) ([]string, error)
- func (o *OllamaProvider) Name() string
- func (o *OllamaProvider) PullModel(ctx context.Context, modelName string) error
- func (o *OllamaProvider) Stream(ctx context.Context, req ai.CompletionRequest) (<-chan string, error)
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type OllamaProvider ¶
type OllamaProvider struct {
// contains filtered or unexported fields
}
OllamaProvider implements the LLMProvider interface for Ollama.
func NewOllamaProvider ¶
func NewOllamaProvider(host string) *OllamaProvider
NewOllamaProvider creates a new Ollama provider.
func (*OllamaProvider) Complete ¶
func (o *OllamaProvider) Complete(ctx context.Context, req ai.CompletionRequest) (*ai.CompletionResponse, error)
Complete generates a completion.
func (*OllamaProvider) IsConfigured ¶
func (o *OllamaProvider) IsConfigured() bool
IsConfigured returns whether the provider is configured.
func (*OllamaProvider) ListModels ¶
func (o *OllamaProvider) ListModels(ctx context.Context) ([]string, error)
ListModels returns the available models.
func (*OllamaProvider) PullModel ¶
func (o *OllamaProvider) PullModel(ctx context.Context, modelName string) error
PullModel pulls a model from the Ollama registry.
func (*OllamaProvider) Stream ¶
func (o *OllamaProvider) Stream(ctx context.Context, req ai.CompletionRequest) (<-chan string, error)
Stream generates a streaming completion.
Click to show internal directories.
Click to hide internal directories.