bellman

package module
v0.3.0 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Dec 4, 2024 License: MIT Imports: 5 Imported by: 1

README

Bellman

Unified LLM Interface for vertexai/gemini, openai and anthropic

Prerequisites

  • A valid API key for each of the supported models (OpenAI, Anthropic, VertexAI/Gemini, VoyageAI)

Installation

go get github.com/modfin/bellman

Usage

Prompting

Just normal conversation mode


llm := openai.New(apiKey).Generator()
res, err := llm.
    Model(openai.GenModel_gpt4o_mini).
    Prompt(
        prompt.AsUser("What is the distance to the moon?"),
    )
if err != nil {
    log.Fatalf("Prompt() error = %v", err)
}

answer, err := res.AsText()


fmt.Println(answer, err)
// The average distance from Earth to the Moon is approximately 384,400 kilometers 
// (about 238,855 miles). This distance can vary slightly because the Moon's orbit
// is elliptical, ranging from about 363,300 km (225,623 miles) at its closest 
// (perigee) to 405,500 km (251,966 miles) at its farthest (apogee). <nil>

System Prompting

Just normal conversation mode


llm := openai.New(apiKey).Generator()
res, err := llm.
    Model(openai.GenModel_gpt4o_mini).
    System("You are an expert movie quoter and like to finish people's sentences with a movie reference").
    Prompt(
        prompt.AsUser("Who are you going to call?"),
    )
if err != nil {
    log.Fatalf("Prompt() error = %v", err)
}

answer, err := res.AsText()

fmt.Println(answer, err)
// Ghostbusters! <nil>

General Configuration

Setting things like temperature, max tokens, top p, and stop sequences


llm := openai.New(apiKey).Generator()
res, err := llm.
    Model(openai.GenModel_gpt4o_mini).
	    Temperature(0.5).
	    MaxTokens(100).
	    TopP(0.9). // should really not be used with temperature
        StopAt(".", "!", "?").
    Prompt(
        prompt.AsUser("Write me a 2 paragraph text about gophers"),
    )
if err != nil {
    log.Fatalf("Prompt() error = %v", err)
}

answer, err := res.AsText()

fmt.Println(answer, err)
// Gophers are small, 
// burrowing rodents belonging to the family Geomyidae, 
// primarily found in North America

Structured Output

For many models, you can now specify a schema that you want the model to output.

A supporting lib for transforming your Go struct into a JSON schema is provided: github.com/modfin/bellman/schema


type Quote struct {
    Character string `json:"character"`
    Quote     string `json:"quote"`
}
type Responese struct {
    Quote []Quote `json:"quotes"`
}


llm := vertexai.New(googleConfig).Generator()
res, err := llm.
    Model(vertexai.GenModel_gemini_1_5_pro).
    Output(Responese{}).
    Prompt(
        prompt.AsUser("give me 3 quotes from different characters in Hamlet"),
    )
if err != nil {
    log.Fatalf("Prompt() error = %v", err)
}

answer, err := res.AsText() // will return the json of the struct
fmt.Println(answer, err)
//{
//  "quotes": [
//    {
//      "character": "Hamlet",
//      "quote": "To be or not to be, that is the question."
//    },
//    {
//      "character": "Polonius",
//      "quote": "This above all: to thine own self be true."
//    },
//    {
//      "character": "Queen Gertrude",
//      "quote": "The lady doth protest too much, methinks."
//    }
//  ]
//}  <nil>

var result Responese
err = res.Unmarshal(&result) // Just a shorthand to unmarshal it into your struct
fmt.Println(result, err)
// {[
//      {Hamlet To be or not to be, that is the question.} 
//      {Polonius This above all: to thine own self be true.} 
//      {Queen Gertrude The lady doth protest too much, methinks.}
// ]} <nil>

Tools

The Bellman library allows you to define and use tools in your prompts. Here is an example of how to define and use a tool:

  1. Define a tool:

    
     type Args struct {
          Name string `json:"name"`
     }
    
     getQuote := tools.NewTool("get_quote",
        tools.WithDescription(
             "a function to get a quote from a person or character in Hamlet",
        ),
        tools.WithArgSchema(Args{}),
        tools.WithCallback(func(jsondata string) error {
            var arg Args
            err := json.Unmarshal([]byte(jsondata), &arg)
            if err != nil {
                return err
            }
            return nil
        }),
    )
    
  2. Use the tool in a prompt:

       llm := anthropic.New(apiKey).Generator()
       res, err := llm.
           Model(anthropic.GenModel_3_5_haiku_latest).
           System("You are a Shakespeare quote generator").
           Tools(getQuote).
           // Configure a specific tool to be used, or the setting for it
           Tool(tools.RequiredTool). 
           Prompt(
               prompt.AsUser("Give me 3 quotes from different characters"),
           )
    
       if err != nil {
           log.Fatalf("Prompt() error = %v", err)
       }
    
       // Evaluate with callback function
       err = res.Eval()
       if err != nil {
           log.Fatalf("Eval() error = %v", err)
       }
    
    
       // or Evaluate yourself
    
       tools, err := res.Tools()
       if err != nil {
             log.Fatalf("Tools() error = %v", err)
       }
    
       for _, tool := range tools {
           log.Printf("Tool: %s", tool.Name)
           switch tool.Name {
              // ....
           }
       }
    
    

Binary Data

Images are supported by Gemini, OpenAI and Anthropic.
PDFs are only supported by Gemini and Anthropic.

Image

   image := "/9j/4AAQSkZJRgABAQEBLAEsAAD//g......gM4OToWbsBg5mGu0veCcRZO6f0EjK5Jv5X/AP/Z"
   data, err := base64.StdEncoding.DecodeString(image)
   if err != nil {
      t.Fatalf("could not decode image %v", err)
   }
   res, err := llm.
      Prompt(
          prompt.AsUserWithData(prompt.MimeImageJPEG, bytes.NewBuffer(data)),
          prompt.AsUser("Describe the image to me"),
      )
   
   if err != nil {
      t.Fatalf("Prompt() error = %v", err)
   }
   fmt.Println(res.AsText())
   // The image contains the word "Hot!" in red text. The text is centered on a white background. 
   // The exclamation point is after the word.  The image is a simple and straightforward 
   // depiction of the word "hot." <nil>

PDF

   pdf, err := os.Open("path/to/pdf")
   if err != nil {
      t.Fatalf("could not open pdf %v", err)
   }

   llm := anthropic.New(apiKey).Generator()
   
   res, err := llm.
      Prompt(
          prompt.AsUserWithData(prompt.MimeApplicationPDF, pdf),
          prompt.AsUser("Describe to me what is in the PDF"),
      )
   
   if err != nil {
      t.Fatalf("Prompt() error = %v", err)
   }
   fmt.Println(res.AsText())
   // The image contains the word "Hot!" in red text. The text is centered on a white background. 
   // The exclamation point is after the word.  The image is a simple and straightforward 
   // depiction of the word "hot." <nil>

RAG Example

A supporting lib for "automated" RAG (Retrieval-Augmented Generation) is provided; it works with Gemini, OpenAI and Anthropic.


type GetQuoteArg struct {
   StockId int `json:"stock_id" json-description:"the id of a stock for which to get a quote"`
}
type Search struct {
   Name string `json:"name" json-description:"the name of a stock being looked for"`
}

getQuote := tools.NewTool("get_quote",
   tools.WithDescription("a function to get a stock quote based on stock id"),
   tools.WithArgSchema(GetQuoteArg{}),
   tools.WithCallback(func(jsondata string) (string, error) {
       var arg GetQuoteArg
       err := json.Unmarshal([]byte(jsondata), &arg)
       if err != nil {
           return "", err
       }
       return `{"stock_id": ` + strconv.Itoa(arg.StockId) + `,"price": 123.45}`, nil
   }),
)

getStock := tools.NewTool("get_stock",
   tools.WithDescription("a function to get a stock based on name"),
   tools.WithArgSchema(Search{}),
   tools.WithCallback(func(jsondata string) (string, error) {
       var arg Search
       err := json.Unmarshal([]byte(jsondata), &arg)
       if err != nil {
           return "", err
       }
       return `{"stock_id": 98765}`, nil
   }),
)


type Result struct {
   StockId int     `json:"stock_id"`
   Price   float64 `json:"price"`
}

llm := anthropic.New(apiKey).Generator()
llm = llm.SetTools(getQuote, getStock)

res, err := rag.Run[Result](5, llm, prompt.AsUser("Get me the price of Volvo B"))
if err != nil {
   t.Fatalf("Prompt() error = %v", err)
}

fmt.Printf("==== Result after %d calls ====\n", res.Depth)
fmt.Printf("%+v\n", res.Result)
fmt.Printf("==== Conversation ====\n")

for _, p := range res.Promps {
   fmt.Printf("%s: %s\n", p.Role, p.Text)
}

// ==== Result after 2 calls ====
// {StockId:98765 Price:123.45}
// ==== Conversation ====
// user:       Get me the price of Volvo B
// assistant:  tool function call: get_stock with argument: {"name":"Volvo B"}
// user:       result: get_stock => {"stock_id": 98765}
// assistant:  tool function call: get_quote with argument: {"stock_id":98765}
// user:       result: get_quote => {"stock_id": 98765,"price": 123.45}
// assistant:  tool function call: __bellman__rag_result_callback with argument: {"price":123.45,"stock_id":98765}



License

This project is licensed under the MIT License. See the LICENSE file for details.

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type Config added in v0.2.0

type Config struct {
	Model        GenModel
	SystemPrompt string `json:"system_prompt"`

	StopSequences []string `json:"stop_sequences"`
	TopP          float64  `json:"top_p"`
	Temperature   float64  `json:"temperature"`
	MaxTokens     int      `json:"max_tokens"`

	OutputSchema *schema.JSON `json:"output_schema"`

	Tools      []tools.Tool `json:"tools"`
	ToolConfig *tools.Tool  `json:"tool"`

	Log *slog.Logger `json:"-"`
}

type EmbedModel

type EmbedModel struct {
	Name        string `json:"name"`
	Description string `json:"description"`

	InputMaxTokens   int `json:"input_max_tokens"`
	OutputDimensions int `json:"output_dimensions"`
}

type GenModel

type GenModel struct {
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`

	InputContentTypes []string `json:"input_content_types,omitempty"`

	InputMaxToken  int `json:"input_max_token,omitempty"`
	OutputMaxToken int `json:"output_max_token,omitempty"`

	SupportTools            bool `json:"support_tools,omitempty"`
	SupportStructuredOutput bool `json:"support_structured_output,omitempty"`
}

type Generator

type Generator struct {
	Prompter Prompter
	Config   Config
}

func (*Generator) AddTools added in v0.1.0

func (g *Generator) AddTools(tool ...tools.Tool) *Generator

func (*Generator) MaxTokens

func (b *Generator) MaxTokens(maxTokens int) *Generator

func (*Generator) Model

func (b *Generator) Model(model GenModel) *Generator

func (*Generator) Prompt

func (b *Generator) Prompt(prompts ...prompt.Prompt) (Response, error)

func (*Generator) SetConfig added in v0.2.0

func (b *Generator) SetConfig(config Config) *Generator

func (*Generator) SetLogger added in v0.2.0

func (b *Generator) SetLogger(log *slog.Logger) *Generator

func (*Generator) SetOutputSchema added in v0.2.0

func (b *Generator) SetOutputSchema(element any) *Generator

func (*Generator) SetToolConfig added in v0.1.0

func (b *Generator) SetToolConfig(tool tools.Tool) *Generator

func (*Generator) SetTools added in v0.1.0

func (b *Generator) SetTools(tool ...tools.Tool) *Generator

func (*Generator) StopAt

func (b *Generator) StopAt(stop ...string) *Generator

func (*Generator) System

func (b *Generator) System(prompt string) *Generator

func (*Generator) Temperature

func (b *Generator) Temperature(temperature float64) *Generator

func (*Generator) Tools

func (g *Generator) Tools() []tools.Tool

func (*Generator) TopP

func (b *Generator) TopP(topP float64) *Generator

type GeneratorOption

type GeneratorOption func(generator *Generator) *Generator

func WithConfig added in v0.2.0

func WithConfig(config Config) GeneratorOption

func WithMaxTokens

func WithMaxTokens(maxTokens int) GeneratorOption

func WithModel

func WithModel(model GenModel) GeneratorOption

func WithOutput

func WithOutput(element any) GeneratorOption

func WithStopAt

func WithStopAt(stop ...string) GeneratorOption

func WithSystem

func WithSystem(prompt string) GeneratorOption

func WithTemperature

func WithTemperature(temperature float64) GeneratorOption

func WithToolConfig added in v0.1.0

func WithToolConfig(tool tools.Tool) GeneratorOption

func WithTools

func WithTools(tools ...tools.Tool) GeneratorOption

func WithTopP

func WithTopP(topP float64) GeneratorOption

type LLM

type LLM interface {
	Generator(options ...GeneratorOption) Generator
}

type Prompter added in v0.2.0

type Prompter interface {
	SetConfig(config Config)
	Prompt(prompts ...prompt.Prompt) (Response, error)
}

type Response

type Response interface {
	IsText() bool
	IsTools() bool

	// AsText will return the response as a string and an error if no response exists.
	// If the response is json, it will be present in this string.
	AsText() (string, error)

	// AsTools will return the name of the tool to use, the argument to pass to the tool in json format from the specified schema, and an error if the response is not a tool
	AsTools() ([]tools.Call, error)

	// Eval will run the callback associated with a tool response, otherwise it will return an error
	Eval() (err error)

	// Unmarshal will unmarshal the response into the provided reference
	Unmarshal(ref any) error
}

Directories

Path Synopsis
bellmand module
models

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL