Documentation
Index
- func Main()
- func New() *cobra.Command
- type CacheOptions
- type Credential
- type Delete
- type DisplayOptions
- type Eval
- type Fmt
- type GPTScript
- func (r *GPTScript) Customize(cmd *cobra.Command)
- func (r *GPTScript) NewGPTScriptOpts() (gptscript.Options, error)
- func (r *GPTScript) PersistentPre(*cobra.Command, []string) error
- func (r *GPTScript) PrintOutput(toolInput, toolOutput string) (err error)
- func (r *GPTScript) Run(cmd *cobra.Command, args []string) (retErr error)
- type Getenv
- type OpenAIOptions
- type Parse
- type SDKServer
- type Show
Constants
This section is empty.
Variables
This section is empty.
Functions
func Main
func Main()
func New
func New() *cobra.Command
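A minimal sketch of driving the CLI in-process through New. The import path and the --list-tools flag name are assumptions inferred from this page (the field names and package layout), not confirmed by it:

package main

import (
	"log"

	"github.com/gptscript-ai/gptscript/pkg/cli" // assumed import path for this package
)

func main() {
	// New is documented above to return a *cobra.Command, so the usual
	// cobra entry points apply.
	cmd := cli.New()
	cmd.SetArgs([]string{"--list-tools"}) // assumed flag name derived from the ListTools field
	if err := cmd.Execute(); err != nil {
		log.Fatal(err)
	}
}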
Types
type CacheOptions added in v0.1.4
type Credential added in v0.5.0
type Credential struct {
AllContexts bool `usage:"List credentials for all contexts" local:"true"`
ShowEnvVars bool `usage:"Show names of environment variables in each credential" local:"true"`
// contains filtered or unexported fields
}
func (*Credential) Customize added in v0.5.0
func (c *Credential) Customize(cmd *cobra.Command)
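As an illustration only, the tags on the Credential struct correspond roughly to the following cobra/pflag flags. This hand-written binding is an assumption about what the tag-driven framework produces, not the actual Customize implementation:

package main

import "github.com/spf13/cobra"

func main() {
	// Roughly equivalent flags for the credential command (illustrative only).
	cmd := &cobra.Command{Use: "credential"}
	cmd.Flags().Bool("all-contexts", false, "List credentials for all contexts")
	cmd.Flags().Bool("show-env-vars", false, "Show names of environment variables in each credential")
	_ = cmd.Execute()
}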
type DisplayOptions
type Eval added in v0.4.0
type Eval struct {
Tools []string `usage:"Tools available to call"`
Chat bool `usage:"Enable chat"`
MaxTokens int `usage:"Maximum number of tokens to output"`
Model string `usage:"The model to use"`
JSON bool `usage:"Output JSON"`
Temperature string `usage:"Set the temperature, \"creativity\""`
InternalPrompt *bool `usage:"Set to false to disable the internal prompt"`
// contains filtered or unexported fields
}
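InternalPrompt is a *bool so that an unset flag can be distinguished from an explicit false. A generic sketch of that tri-state pointer pattern (not code from this package):

package main

import "fmt"

// describe reports whether a *bool option was left unset, enabled, or disabled.
func describe(p *bool) string {
	switch {
	case p == nil:
		return "default (flag not set)"
	case *p:
		return "explicitly enabled"
	default:
		return "explicitly disabled"
	}
}

func main() {
	var internalPrompt *bool // nil: keep the default behaviour
	fmt.Println(describe(internalPrompt))

	off := false // an explicit false from the user
	fmt.Println(describe(&off))
}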
type Fmt added in v0.6.0
type Fmt struct {
Write bool `usage:"Write output to file instead of stdout" short:"w"`
}
type GPTScript
type GPTScript struct {
CacheOptions
OpenAIOptions
DisplayOptions
SystemToolsDir string `usage:"Directory that contains system-managed tools for which GPTScript will not manage the runtime"`
Color *bool `usage:"Use color in output (default true)" default:"true"`
Confirm bool `usage:"Prompt before running potentially dangerous commands"`
Debug bool `usage:"Enable debug logging"`
NoTrunc bool `usage:"Do not truncate long log messages"`
Quiet *bool `usage:"No output logging (set --quiet=false to force on even when there is no TTY)" short:"q"`
Output string `usage:"Save output to a file, or - for stdout" short:"o"`
EventsStreamTo string `` /* 164-byte string literal not displayed */
// Input should not use the GPTSCRIPT_INPUT env var because that is the same value that is set in tool executions
Input string `usage:"Read input from a file (\"-\" for stdin)" short:"f" env:"GPTSCRIPT_INPUT_FILE"`
SubTool string `usage:"Use tool of this name, not the first tool in file" local:"true"`
ListModels bool `usage:"List the models available and exit" local:"true"`
ListTools bool `usage:"List built-in tools and exit" local:"true"`
ListenAddress string `usage:"Server listen address" default:"127.0.0.1:0" hidden:"true"`
Chdir string `usage:"Change current working directory" short:"C"`
Daemon bool `usage:"Run tool as a daemon" local:"true" hidden:"true"`
Ports string `usage:"The port range to use for ephemeral daemon ports (ex: 11000-12000)" hidden:"true"`
CredentialContext []string `usage:"Context name(s) in which to store credentials"`
CredentialOverride []string `usage:"Credentials to override (ex: --credential-override github.com/example/cred-tool:API_TOKEN=1234)"`
ChatState string `usage:"The chat state to continue, or null to start a new chat and return the state" local:"true"`
ForceChat bool `usage:"Force an interactive chat session even if the top-level tool is not a chat tool" local:"true"`
ForceSequential bool `usage:"Force parallel calls to run sequentially" local:"true"`
Workspace string `usage:"Directory to use for the workspace; if specified, it will not be deleted on exit"`
UI bool `usage:"Launch the UI" local:"true" name:"ui"`
DisableTUI bool `usage:"Don't use the chat TUI; print verbose output instead" local:"true" name:"disable-tui"`
SaveChatStateFile string `usage:"A file to save the chat state to so that a conversation can be resumed with --chat-state" local:"true"`
DefaultModelProvider string `usage:"Default LLM model provider to use; this will override OpenAI settings"`
GithubEnterpriseHostname string `usage:"The hostname of a GitHub Enterprise instance to enable for remote loading" local:"true"`
// contains filtered or unexported fields
}
func (*GPTScript) NewGPTScriptOpts added in v0.4.0
func (r *GPTScript) NewGPTScriptOpts() (gptscript.Options, error)
func (*GPTScript) PersistentPre added in v0.4.0
func (r *GPTScript) PersistentPre(*cobra.Command, []string) error
func (*GPTScript) PrintOutput added in v0.4.0
func (r *GPTScript) PrintOutput(toolInput, toolOutput string) (err error)
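A hedged sketch of converting the CLI-level options into engine options with NewGPTScriptOpts. The import path is assumed, and a zero-value GPTScript is used only to illustrate the call shape, not a fully configured run:

package main

import (
	"fmt"
	"log"

	"github.com/gptscript-ai/gptscript/pkg/cli" // assumed import path for this package
)

func main() {
	var r cli.GPTScript // zero value: defaults only, for illustration
	opts, err := r.NewGPTScriptOpts()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", opts) // gptscript.Options derived from the CLI fields
}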
type OpenAIOptions added in v0.1.4
type Parse added in v0.6.0
type Parse struct {
PrettyPrint bool `usage:"Indent the json output" short:"p"`
// contains filtered or unexported fields
}
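A generic standard-library illustration of what the PrettyPrint flag controls (indented versus compact JSON); this is not the package's own implementation:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	doc := map[string]any{"name": "example", "instructions": "say hello"}

	compact, _ := json.Marshal(doc)                // default output
	pretty, _ := json.MarshalIndent(doc, "", "  ") // indented output, as with -p
	fmt.Println(string(compact))
	fmt.Println(string(pretty))
}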