llm

package
v0.4.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Dec 20, 2025 License: MIT Imports: 6 Imported by: 0

Documentation

Overview

Package llm provides durable LLM integration for Romancy workflows. LLM calls are automatically wrapped as activities, enabling deterministic replay without re-invoking the LLM API.

Index

Constants

View Source
const (
	RoleSystem    = bucephalus.RoleSystem
	RoleUser      = bucephalus.RoleUser
	RoleAssistant = bucephalus.RoleAssistant
	RoleTool      = bucephalus.RoleTool
)

Role constants re-exported from bucephalus.

Variables

View Source
var (
	// SystemMessage creates a system message.
	SystemMessage = bucephalus.SystemMessage

	// UserMessage creates a user message.
	UserMessage = bucephalus.UserMessage

	// AssistantMessage creates an assistant message.
	AssistantMessage = bucephalus.AssistantMessage

	// ToolMessage creates a tool result message.
	ToolMessage = bucephalus.ToolMessage

	// AssistantMessageWithToolCalls creates an assistant message with tool calls.
	AssistantMessageWithToolCalls = bucephalus.AssistantMessageWithToolCalls
)

Message constructors re-exported from bucephalus.

View Source
var NewToolRegistry = bucephalus.NewToolRegistry

NewToolRegistry creates a new tool registry.

Functions

func CallMessagesParse

func CallMessagesParse[T any](ctx *romancy.WorkflowContext, messages []Message, opts ...Option) (*T, error)

CallMessagesParse makes an LLM call with messages and parses the response. The call is automatically wrapped as a durable activity.

func CallParse

func CallParse[T any](ctx *romancy.WorkflowContext, prompt string, opts ...Option) (*T, error)

CallParse makes an LLM call and parses the response into a struct. The call is automatically wrapped as a durable activity.

Example:

type BookRecommendation struct {
    Title  string `json:"title"`
    Author string `json:"author"`
    Reason string `json:"reason"`
}

book, err := llm.CallParse[BookRecommendation](ctx,
    "Recommend a science fiction book",
    llm.WithProvider("anthropic"),
    llm.WithModel("claude-sonnet-4-5-20250929"),
)

func ClearAppDefaults

func ClearAppDefaults(app *romancy.App)

ClearAppDefaults removes the default LLM options for an App. This is useful for cleanup in tests or when shutting down an App.

func MarshalAgentContext

func MarshalAgentContext[T any](ctx *AgentContext[T]) ([]byte, error)

MarshalAgentContext serializes an agent context to JSON.

func MustNewTool

func MustNewTool[In any, Out any](name, description string, fn func(ctx context.Context, in In) (Out, error)) *bucephalus.TypedTool[In, Out]

MustNewTool creates a new tool and panics on error.

func NewTool

func NewTool[In any, Out any](name, description string, fn func(ctx context.Context, in In) (Out, error)) (*bucephalus.TypedTool[In, Out], error)

NewTool creates a new typed tool with automatic JSON schema generation. The input type In should be a struct with json tags for schema generation.

Example:

type WeatherInput struct {
    City string `json:"city" jsonschema:"required,description=City name"`
}

weatherTool, err := llm.NewTool("get_weather", "Get weather for a city",
    func(ctx context.Context, in WeatherInput) (string, error) {
        return "Sunny, 72°F", nil
    },
)

func ParseStructured

func ParseStructured[T any](dr *DurableResponse) (T, error)

ParseStructured parses the structured data into the given type.

func SetAppDefaults

func SetAppDefaults(app *romancy.App, opts ...Option)

SetAppDefaults sets the default LLM options for an App. These defaults are applied to all LLM calls within workflows registered with that App, unless overridden by per-call options.

Example:

app := romancy.NewApp(romancy.WithDatabase("workflow.db"))
llm.SetAppDefaults(app,
    llm.WithProvider("anthropic"),
    llm.WithModel("claude-sonnet-4-5-20250929"),
    llm.WithMaxTokens(1024),
)

func ToolCallToBucephalus

func ToolCallToBucephalus(call ToolCall) bucephalus.ToolCall

ToolCallToBucephalus converts a ToolCall to bucephalus.ToolCall.

Types

type AgentContext

type AgentContext[T any] struct {
	// Deps holds user-defined dependencies (e.g., database connections, config).
	Deps T

	// Messages holds the conversation history.
	Messages []Message
}

AgentContext holds the state for a durable agent, including dependencies and conversation history.

func NewAgentContext

func NewAgentContext[T any](deps T) *AgentContext[T]

NewAgentContext creates a new agent context with the given dependencies.

func UnmarshalAgentContext

func UnmarshalAgentContext[T any](data []byte) (*AgentContext[T], error)

UnmarshalAgentContext deserializes an agent context from JSON.

func (*AgentContext[T]) AddAssistantMessage

func (c *AgentContext[T]) AddAssistantMessage(content string)

AddAssistantMessage appends an assistant message to the conversation history.

func (*AgentContext[T]) AddSystemMessage

func (c *AgentContext[T]) AddSystemMessage(content string)

AddSystemMessage prepends a system message to the conversation history.

func (*AgentContext[T]) AddUserMessage

func (c *AgentContext[T]) AddUserMessage(content string)

AddUserMessage appends a user message to the conversation history.

type DurableAgent

type DurableAgent[T any] struct {
	// contains filtered or unexported fields
}

DurableAgent provides stateful, multi-turn conversational AI capabilities with automatic persistence and replay support.

Example:

type MyDeps struct {
    UserID    string
    Documents []string
}

agent := llm.NewDurableAgent[MyDeps]("research_agent",
    llm.WithProvider("anthropic"),
    llm.WithModel("claude-sonnet-4-5-20250929"),
).WithBuildPrompt(func(ctx *llm.AgentContext[MyDeps], message string) []llm.Message {
    return []llm.Message{
        llm.SystemMessage("You are a research assistant with access to: " + strings.Join(ctx.Deps.Documents, ", ")),
        // Include conversation history
    }
})

// In workflow
agentCtx := llm.NewAgentContext(MyDeps{UserID: "user123", Documents: docs})
response, err := agent.Chat(wfCtx, agentCtx, "What is quantum computing?")

func NewDurableAgent

func NewDurableAgent[T any](name string, opts ...Option) *DurableAgent[T]

NewDurableAgent creates a new durable agent with the given name and options.

func (*DurableAgent[T]) Chat

func (a *DurableAgent[T]) Chat(wfCtx *romancy.WorkflowContext, agentCtx *AgentContext[T], message string) (*DurableResponse, error)

Chat executes a single conversation turn as a durable activity. The conversation history in agentCtx is automatically updated.

Example:

response, err := agent.Chat(wfCtx, agentCtx, "Hello!")
// agentCtx.Messages now includes the user message and assistant response

func (*DurableAgent[T]) ChatWithToolLoop

func (a *DurableAgent[T]) ChatWithToolLoop(
	wfCtx *romancy.WorkflowContext,
	agentCtx *AgentContext[T],
	message string,
	maxIterations int,
	toolExecutor func(ctx context.Context, call ToolCall) (string, error),
) (*DurableResponse, error)

ChatWithToolLoop executes a conversation turn with automatic tool execution. The agent will continue calling tools until the model stops requesting them or maxIterations is reached.

Example:

response, err := agent.ChatWithToolLoop(wfCtx, agentCtx, "Search for X",
    10, // max iterations
    func(ctx context.Context, call llm.ToolCall) (string, error) {
        // Execute the tool and return the result
        return "Tool result", nil
    },
)

func (*DurableAgent[T]) WithBuildPrompt

func (a *DurableAgent[T]) WithBuildPrompt(fn func(ctx *AgentContext[T], message string) []Message) *DurableAgent[T]

WithBuildPrompt sets the prompt builder function. The builder receives the agent context and the user message, and should return the full message list to send to the LLM.

func (*DurableAgent[T]) WithGetTools

func (a *DurableAgent[T]) WithGetTools(fn func() []Tool) *DurableAgent[T]

WithGetTools sets the tools provider function. The function should return the tools available to the agent.

type DurableCall

type DurableCall struct {
	// contains filtered or unexported fields
}

DurableCall represents a reusable LLM call configuration. It's similar to Edda's @durable_call decorator - you define the LLM settings once and can execute multiple calls with the same configuration.

Example:

var summarizer = llm.DefineDurableCall("summarize",
    llm.WithProvider("anthropic"),
    llm.WithModel("claude-sonnet-4-5-20250929"),
    llm.WithSystemMessage("Summarize the given text concisely"),
    llm.WithMaxTokens(500),
)

// In workflow
response, err := summarizer.Execute(ctx, "Long text to summarize...")

func DefineDurableCall

func DefineDurableCall(name string, opts ...Option) *DurableCall

DefineDurableCall creates a new reusable LLM call definition. The name is used to generate deterministic activity IDs for replay.

func (*DurableCall) Execute

func (d *DurableCall) Execute(ctx *romancy.WorkflowContext, prompt string, opts ...Option) (*DurableResponse, error)

Execute runs the LLM call with the given prompt. Additional options can be provided to override the defaults.

Example:

response, err := summarizer.Execute(ctx, "Text to summarize",
    llm.WithMaxTokens(200), // Override the default
)

func (*DurableCall) ExecuteMessages

func (d *DurableCall) ExecuteMessages(ctx *romancy.WorkflowContext, messages []Message, opts ...Option) (*DurableResponse, error)

ExecuteMessages runs the LLM call with a full message history.

Example:

messages := []llm.Message{
    llm.UserMessage("Hello!"),
    llm.AssistantMessage("Hi there!"),
    llm.UserMessage("Can you help me?"),
}
response, err := chatbot.ExecuteMessages(ctx, messages)

func (*DurableCall) ExecuteMessagesParse

func (d *DurableCall) ExecuteMessagesParse(ctx *romancy.WorkflowContext, messages []Message, target any, opts ...Option) error

ExecuteMessagesParse runs the LLM call with messages and parses the response.

func (*DurableCall) ExecuteParse

func (d *DurableCall) ExecuteParse(ctx *romancy.WorkflowContext, prompt string, target any, opts ...Option) error

ExecuteParse runs the LLM call and parses the response into a struct.

Example:

type Summary struct {
    MainPoints []string `json:"main_points"`
    Conclusion string   `json:"conclusion"`
}

var summary Summary
err := summarizer.ExecuteParse(ctx, "Long article text...", &summary)

func (*DurableCall) Name

func (d *DurableCall) Name() string

Name returns the name of the durable call.

type DurableResponse

type DurableResponse struct {
	// Text is the raw text content of the response.
	Text string `json:"text"`

	// Model is the model that generated the response.
	Model string `json:"model,omitempty"`

	// Provider is the LLM provider (e.g., "anthropic", "openai").
	Provider string `json:"provider,omitempty"`

	// Usage contains token usage information.
	Usage *Usage `json:"usage,omitempty"`

	// ToolCalls contains any tool calls requested by the model.
	ToolCalls []ToolCall `json:"tool_calls,omitempty"`

	// FinishReason indicates why the model stopped generating.
	FinishReason string `json:"finish_reason,omitempty"`

	// Structured holds parsed structured data as raw JSON.
	// This is populated when using CallParse or CallMessagesParse.
	Structured json.RawMessage `json:"structured,omitempty"`
}

DurableResponse is a JSON-serializable LLM response for activity caching. It captures the essential information from an LLM call for replay.

func Call

func Call(ctx *romancy.WorkflowContext, prompt string, opts ...Option) (*DurableResponse, error)

Call makes an LLM call and returns the response. The call is automatically wrapped as a durable activity, so replay will return the cached result without re-invoking the LLM API.

Example:

response, err := llm.Call(ctx, "What is the capital of France?",
    llm.WithProvider("anthropic"),
    llm.WithModel("claude-sonnet-4-5-20250929"),
)
if err != nil {
    return err
}
fmt.Println(response.Text)

func CallMessages

func CallMessages(ctx *romancy.WorkflowContext, messages []Message, opts ...Option) (*DurableResponse, error)

CallMessages makes an LLM call with a full message history. The call is automatically wrapped as a durable activity.

Example:

messages := []llm.Message{
    llm.SystemMessage("You are a helpful assistant"),
    llm.UserMessage("What is Go?"),
    llm.AssistantMessage("Go is a programming language..."),
    llm.UserMessage("Tell me more about its concurrency model"),
}

response, err := llm.CallMessages(ctx, messages,
    llm.WithProvider("anthropic"),
    llm.WithModel("claude-sonnet-4-5-20250929"),
)

func FromBucephalusResponse

func FromBucephalusResponse[T any](resp bucephalus.Response[T], provider, model string) *DurableResponse

FromBucephalusResponse converts a bucephalus Response to a DurableResponse.

func FromBucephalusResponseWithParsed

func FromBucephalusResponseWithParsed[T any](resp bucephalus.Response[T], provider, model string, parsed T) *DurableResponse

FromBucephalusResponseWithParsed converts a bucephalus Response to a DurableResponse and includes the parsed structured data.

func (*DurableResponse) HasToolCalls

func (r *DurableResponse) HasToolCalls() bool

HasToolCalls returns true if the response contains tool calls.

type Message

type Message = bucephalus.Message

Message types re-exported from bucephalus for convenience.

func ExecuteToolCalls

func ExecuteToolCalls(ctx context.Context, calls []ToolCall, registry *ToolRegistry) ([]Message, error)

ExecuteToolCalls executes the given tool calls using the registry and returns tool result messages.

This is useful for implementing tool calling loops:

if resp.HasToolCalls() {
    toolMessages, err := llm.ExecuteToolCalls(ctx, resp.ToolCalls, registry)
    // Continue conversation with tool results...
}

type Option

type Option func(*config)

Option configures an LLM call.

func GetLLMDefaults

func GetLLMDefaults(ctx *romancy.WorkflowContext) []Option

GetLLMDefaults retrieves LLM defaults from the workflow context. Returns nil if no defaults are set.

func WithMaxTokens

func WithMaxTokens(n int) Option

WithMaxTokens sets the maximum number of tokens to generate.

func WithModel

func WithModel(model string) Option

WithModel sets the model name (e.g., "claude-sonnet-4-5-20250929", "gpt-4o").

func WithProvider

func WithProvider(provider string) Option

WithProvider sets the LLM provider (e.g., "anthropic", "openai", "gemini").

func WithSeed

func WithSeed(seed int) Option

WithSeed sets the random seed for reproducible outputs. Note: Not supported by all providers (e.g., Anthropic).

func WithStopSequences

func WithStopSequences(seqs ...string) Option

WithStopSequences sets sequences that will stop generation.

func WithSystemMessage

func WithSystemMessage(msg string) Option

WithSystemMessage sets the system message for the LLM call.

func WithTemperature

func WithTemperature(t float64) Option

WithTemperature sets the sampling temperature (typically 0-1 or 0-2).

func WithTools

func WithTools(tools ...Tool) Option

WithTools sets the tools available for the LLM to call.

func WithTopK

func WithTopK(k int) Option

WithTopK sets top-k sampling parameter. Note: Not supported by all providers (e.g., OpenAI).

func WithTopP

func WithTopP(p float64) Option

WithTopP sets nucleus sampling parameter (0-1).

type Tool

type Tool = bucephalus.Tool

Tool is an interface that represents an LLM tool. Re-exported from bucephalus for convenience.

type ToolCall

type ToolCall struct {
	// ID is the unique identifier for this tool call.
	ID string `json:"id"`

	// Name is the name of the tool to call.
	Name string `json:"name"`

	// Arguments is the JSON-encoded arguments for the tool.
	Arguments json.RawMessage `json:"arguments"`
}

ToolCall represents a tool call requested by the model.

func ToolCallFromBucephalus

func ToolCallFromBucephalus(call bucephalus.ToolCall) ToolCall

ToolCallFromBucephalus converts a bucephalus.ToolCall to ToolCall.

type ToolRegistry

type ToolRegistry = bucephalus.ToolRegistry

ToolRegistry manages a collection of tools. Re-exported from bucephalus for convenience.

type Usage

type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

Usage contains token usage information.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL