Documentation
¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type Client ¶
type Client interface {
Prompt(ctx context.Context, opts PrompterOpts) (string, error)
}
func NewClient ¶
func NewClient(d Dependencies, opts ClientOptions) (Client, error)
type ClientOption ¶ added in v0.1.8
type ClientOption func(*ClientOptions)
func WithAPIKey ¶ added in v0.1.8
func WithAPIKey(key string) ClientOption
func WithModel ¶ added in v0.1.8
func WithModel(model string) ClientOption
func WithURL ¶ added in v0.1.8
func WithURL(url string) ClientOption
type ClientOptions ¶ added in v0.1.8
func ApplyClientOptions ¶ added in v0.1.8
func ApplyClientOptions(opts *ClientOptions, modifiers ...ClientOption) ClientOptions
type DefaultTool ¶ added in v0.1.11
func NewTool ¶
func NewTool(c Client, opts contract.ToolOptions[Client]) *DefaultTool
func (*DefaultTool) Prompt ¶ added in v0.1.11
func (d *DefaultTool) Prompt(ctx context.Context, opts PrompterOpts) (string, error)
Prompt sends a prompt to the LLM and retrieves the response.
type Dependencies ¶ added in v0.1.11
type Dependencies struct {
M contract.Marshaler
U contract.Unmarshaler
HR contract.HTTPRequester
HD contract.HTTPDoer
RD contract.Reader
}
type MessageRole ¶
type MessageRole = string
MessageRole represents the role of a message.
const (
	// MessageRoleSystem is the role for the system message.
	MessageRoleSystem MessageRole = "system"
	// MessageRoleUser is the role for the user message.
	MessageRoleUser MessageRole = "user"
	// MessageRoleAssistant is the role for the assistant message.
	MessageRoleAssistant MessageRole = "assistant"
	// GrokChatCompletionsURL is the URL for the Grok LLM API.
	GrokChatCompletionsURL = "https://api.x.ai/v1/chat/completions"
	// Grok2LatestModel is the latest model for the Grok LLM.
	Grok2LatestModel = "grok-2-latest"
)
type PrompterOpts ¶
type PrompterOpts struct {
SystemPrompt string // The system prompt for the LLM.
UserPrompt string // The user prompt for the LLM.
Context []Message // The context for the LLM.
Temperature float64 // The temperature for the LLM.
ResponseFormat *ResponseFormat // The response format for the LLM.
AdditionalOpts map[string]any // Additional options for the LLM to be implemented by prompters, optionally.
}
PrompterOpts represents the options for the Prompter.
type ResponseFormat ¶
type ResponseFormat struct {
Type string `json:"type"`
JSONSchema *schemaFormat `json:"json_schema,omitempty"`
}
ResponseFormat represents the response format customization.
Click to show internal directories.
Click to hide internal directories.