Documentation ¶
Overview ¶
Package core provides the fundamental types and interfaces for langchain-go.
Index ¶
- func Batch[I, O any](ctx context.Context, inputs []I, opts []Option, ...) ([]O, error)
- func CloneMap(input map[string]any) map[string]any
- func GetBufferString(messages []Message, humanPrefix, aiPrefix string) string
- type AIMessage
- type AgentActionData
- type AgentFinishData
- type BaseCallbackHandler
- func (BaseCallbackHandler) OnAgentAction(_ context.Context, _ AgentActionData, _ string)
- func (BaseCallbackHandler) OnAgentFinish(_ context.Context, _ AgentFinishData, _ string)
- func (BaseCallbackHandler) OnChainEnd(_ context.Context, _ map[string]any, _ string)
- func (BaseCallbackHandler) OnChainError(_ context.Context, _ error, _ string)
- func (BaseCallbackHandler) OnChainStart(_ context.Context, _ map[string]any, _ string, _ string, _ map[string]any)
- func (BaseCallbackHandler) OnChatModelStart(_ context.Context, _ []Message, _ string, _ string, _ map[string]any)
- func (BaseCallbackHandler) OnLLMEnd(_ context.Context, _ *LLMResult, _ string)
- func (BaseCallbackHandler) OnLLMError(_ context.Context, _ error, _ string)
- func (BaseCallbackHandler) OnLLMNewToken(_ context.Context, _ string, _ string)
- func (BaseCallbackHandler) OnLLMStart(_ context.Context, _ []string, _ string, _ string, _ map[string]any)
- func (BaseCallbackHandler) OnRetrieverEnd(_ context.Context, _ []*Document, _ string)
- func (BaseCallbackHandler) OnRetrieverError(_ context.Context, _ error, _ string)
- func (BaseCallbackHandler) OnRetrieverStart(_ context.Context, _ string, _ string, _ string)
- func (BaseCallbackHandler) OnText(_ context.Context, _ string, _ string)
- func (BaseCallbackHandler) OnToolEnd(_ context.Context, _ string, _ string)
- func (BaseCallbackHandler) OnToolError(_ context.Context, _ error, _ string)
- func (BaseCallbackHandler) OnToolStart(_ context.Context, _ string, _ string, _ string, _ string)
- type BaseMessage
- type CallbackHandler
- type Document
- type FunctionMessage
- type GenericMessage
- type HumanMessage
- type LLMResult
- type Message
- type MessageType
- type Option
- func WithCallbacks(handlers ...CallbackHandler) Option
- func WithConfigurable(values map[string]any) Option
- func WithMaxConcurrency(n int) Option
- func WithMetadata(metadata map[string]any) Option
- func WithRecursionLimit(n int) Option
- func WithRunID(id string) Option
- func WithRunName(name string) Option
- func WithStop(stop ...string) Option
- func WithTags(tags ...string) Option
- type Runnable
- type RunnableConfig
- type StreamChunk
- type StreamIterator
- type SystemMessage
- type ToolCall
- type ToolMessage
- type UsageMetadata
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func Batch ¶
func Batch[I, O any](ctx context.Context, inputs []I, opts []Option, invoke func(context.Context, I, ...Option) (O, error)) ([]O, error)
Batch runs invoke for each input in parallel, honoring MaxConcurrency.
func GetBufferString ¶
GetBufferString formats messages into a string representation.
Types ¶
type AIMessage ¶
type AIMessage struct {
BaseMessage
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
UsageMetadata *UsageMetadata `json:"usage_metadata,omitempty"`
}
AIMessage represents a message from the AI assistant.
func NewAIMessage ¶
NewAIMessage creates a new AIMessage with the given content.
func NewAIMessageWithToolCalls ¶
NewAIMessageWithToolCalls creates an AIMessage that includes tool calls.
func (*AIMessage) GetType ¶
func (m *AIMessage) GetType() MessageType
GetType returns MessageTypeAI.
type AgentActionData ¶
type AgentActionData struct {
Tool string `json:"tool"`
ToolInput string `json:"tool_input"`
Log string `json:"log"`
}
AgentActionData holds data for agent action callbacks.
type AgentFinishData ¶
AgentFinishData holds data for agent finish callbacks.
type BaseCallbackHandler ¶
type BaseCallbackHandler struct{}
BaseCallbackHandler provides no-op implementations of all CallbackHandler methods. Embed this in your handler to only override the methods you care about.
func (BaseCallbackHandler) OnAgentAction ¶
func (BaseCallbackHandler) OnAgentAction(_ context.Context, _ AgentActionData, _ string)
func (BaseCallbackHandler) OnAgentFinish ¶
func (BaseCallbackHandler) OnAgentFinish(_ context.Context, _ AgentFinishData, _ string)
func (BaseCallbackHandler) OnChainEnd ¶
func (BaseCallbackHandler) OnChainError ¶
func (BaseCallbackHandler) OnChainError(_ context.Context, _ error, _ string)
func (BaseCallbackHandler) OnChainStart ¶
func (BaseCallbackHandler) OnChatModelStart ¶
func (BaseCallbackHandler) OnLLMEnd ¶
func (BaseCallbackHandler) OnLLMEnd(_ context.Context, _ *LLMResult, _ string)
func (BaseCallbackHandler) OnLLMError ¶
func (BaseCallbackHandler) OnLLMError(_ context.Context, _ error, _ string)
func (BaseCallbackHandler) OnLLMNewToken ¶
func (BaseCallbackHandler) OnLLMNewToken(_ context.Context, _ string, _ string)
func (BaseCallbackHandler) OnLLMStart ¶
func (BaseCallbackHandler) OnRetrieverEnd ¶
func (BaseCallbackHandler) OnRetrieverEnd(_ context.Context, _ []*Document, _ string)
func (BaseCallbackHandler) OnRetrieverError ¶
func (BaseCallbackHandler) OnRetrieverError(_ context.Context, _ error, _ string)
func (BaseCallbackHandler) OnRetrieverStart ¶
func (BaseCallbackHandler) OnText ¶
func (BaseCallbackHandler) OnText(_ context.Context, _ string, _ string)
func (BaseCallbackHandler) OnToolEnd ¶
func (BaseCallbackHandler) OnToolEnd(_ context.Context, _ string, _ string)
func (BaseCallbackHandler) OnToolError ¶
func (BaseCallbackHandler) OnToolError(_ context.Context, _ error, _ string)
func (BaseCallbackHandler) OnToolStart ¶
type BaseMessage ¶
type BaseMessage struct {
Content string `json:"content"`
Name string `json:"name,omitempty"`
ID string `json:"id,omitempty"`
AdditionalKwargs map[string]any `json:"additional_kwargs,omitempty"`
ResponseMetadata map[string]any `json:"response_metadata,omitempty"`
}
BaseMessage contains fields shared by all message types.
func (*BaseMessage) GetAdditionalKwargs ¶
func (m *BaseMessage) GetAdditionalKwargs() map[string]any
GetAdditionalKwargs returns additional kwargs.
func (*BaseMessage) GetContent ¶
func (m *BaseMessage) GetContent() string
GetContent returns the text content.
type CallbackHandler ¶
type CallbackHandler interface {
// LLM callbacks
OnLLMStart(ctx context.Context, prompts []string, runID string, parentRunID string, extras map[string]any)
OnChatModelStart(ctx context.Context, messages []Message, runID string, parentRunID string, extras map[string]any)
OnLLMNewToken(ctx context.Context, token string, runID string)
OnLLMEnd(ctx context.Context, output *LLMResult, runID string)
OnLLMError(ctx context.Context, err error, runID string)
// Chain callbacks
OnChainStart(ctx context.Context, inputs map[string]any, runID string, parentRunID string, extras map[string]any)
OnChainEnd(ctx context.Context, outputs map[string]any, runID string)
OnChainError(ctx context.Context, err error, runID string)
// Tool callbacks
OnToolStart(ctx context.Context, toolName string, input string, runID string, parentRunID string)
OnToolEnd(ctx context.Context, output string, runID string)
OnToolError(ctx context.Context, err error, runID string)
// Agent callbacks
OnAgentAction(ctx context.Context, action AgentActionData, runID string)
OnAgentFinish(ctx context.Context, finish AgentFinishData, runID string)
// Retriever callbacks
OnRetrieverStart(ctx context.Context, query string, runID string, parentRunID string)
OnRetrieverEnd(ctx context.Context, documents []*Document, runID string)
OnRetrieverError(ctx context.Context, err error, runID string)
// Text callbacks
OnText(ctx context.Context, text string, runID string)
}
CallbackHandler is the interface for receiving events during LangChain execution. Implementations can override any subset of methods. The default no-op implementations are provided by BaseCallbackHandler.
type Document ¶
type Document struct {
// PageContent is the text content of the document.
PageContent string `json:"page_content"`
// Metadata contains arbitrary key-value pairs associated with the document.
Metadata map[string]any `json:"metadata,omitempty"`
// ID is an optional unique identifier for the document.
ID string `json:"id,omitempty"`
}
Document represents a piece of text with associated metadata. Documents are the fundamental unit of data in LangChain for retrieval, indexing, and processing.
type FunctionMessage ¶
type FunctionMessage struct {
BaseMessage
}
FunctionMessage represents the result of a function call (legacy).
func NewFunctionMessage ¶
func NewFunctionMessage(name, content string) *FunctionMessage
NewFunctionMessage creates a new FunctionMessage.
func (*FunctionMessage) GetType ¶
func (m *FunctionMessage) GetType() MessageType
GetType returns MessageTypeFunction.
type GenericMessage ¶
type GenericMessage struct {
BaseMessage
Role string `json:"role"`
}
GenericMessage represents a generic chat message with a custom role.
func NewGenericMessage ¶
func NewGenericMessage(role, content string) *GenericMessage
NewGenericMessage creates a new GenericMessage with a custom role.
func (*GenericMessage) GetType ¶
func (m *GenericMessage) GetType() MessageType
GetType returns MessageTypeGeneric.
type HumanMessage ¶
type HumanMessage struct {
BaseMessage
}
HumanMessage represents a message from the user.
func NewHumanMessage ¶
func NewHumanMessage(content string) *HumanMessage
NewHumanMessage creates a new HumanMessage with the given content.
func (*HumanMessage) GetType ¶
func (m *HumanMessage) GetType() MessageType
GetType returns MessageTypeHuman.
type LLMResult ¶
type LLMResult struct {
Generations []string `json:"generations"`
LLMOutput map[string]any `json:"llm_output,omitempty"`
}
LLMResult holds the result of an LLM call for callbacks.
type Message ¶
type Message interface {
// GetType returns the message type (human, ai, system, tool, function).
GetType() MessageType
// GetContent returns the text content of the message.
GetContent() string
// GetName returns the optional name associated with the message.
GetName() string
// GetAdditionalKwargs returns additional provider-specific data.
GetAdditionalKwargs() map[string]any
}
Message is the interface all message types implement.
type MessageType ¶
type MessageType string
MessageType identifies the role/type of a message.
const (
	MessageTypeHuman    MessageType = "human"
	MessageTypeAI       MessageType = "ai"
	MessageTypeSystem   MessageType = "system"
	MessageTypeTool     MessageType = "tool"
	MessageTypeFunction MessageType = "function"
	MessageTypeGeneric  MessageType = "generic"
)
type Option ¶
type Option func(*RunnableConfig)
Option is a function that modifies a RunnableConfig.
func WithCallbacks ¶
func WithCallbacks(handlers ...CallbackHandler) Option
WithCallbacks sets the callback handlers.
func WithConfigurable ¶
WithConfigurable sets configurable runtime values.
func WithMaxConcurrency ¶
WithMaxConcurrency sets the maximum number of parallel calls in Batch.
func WithMetadata ¶
WithMetadata adds metadata to the config.
func WithRecursionLimit ¶
WithRecursionLimit sets the recursion limit.
type Runnable ¶
type Runnable[I, O any] interface {
	// Invoke transforms a single input into an output.
	Invoke(ctx context.Context, input I, opts ...Option) (O, error)

	// Stream transforms an input and streams output chunks as they are produced.
	// The caller must consume the returned StreamIterator until it is exhausted
	// or call Close() to release resources.
	Stream(ctx context.Context, input I, opts ...Option) (*StreamIterator[O], error)

	// Batch transforms multiple inputs in parallel.
	// Implementations honor WithMaxConcurrency when they delegate to the shared
	// batch helper used throughout this library.
	Batch(ctx context.Context, inputs []I, opts ...Option) ([]O, error)

	// GetName returns the name of this runnable for tracing and debugging.
	GetName() string
}
Runnable is the core interface that all LangChain components implement. It provides a uniform interface for invoking, streaming, and batching operations across prompts, models, parsers, retrievers, and tools.
In Go, we don't split into sync/async variants. Use goroutines and context.Context for concurrency control.
type RunnableConfig ¶
type RunnableConfig struct {
// Tags for this call and any sub-calls (used for filtering and tracing).
Tags []string
// Metadata for this call and any sub-calls.
Metadata map[string]any
// Callbacks are the callback handlers for this call.
Callbacks []CallbackHandler
// RunName overrides the default name for tracing.
RunName string
// MaxConcurrency limits parallel calls in Batch operations.
// 0 means no limit.
MaxConcurrency int
// RecursionLimit is the maximum recursion depth. Default is 25.
RecursionLimit int
// Configurable holds runtime values for configurable fields.
Configurable map[string]any
// RunID is a unique identifier for this run. Auto-generated if empty.
RunID string
// Stop sequences to pass to the model.
Stop []string
}
RunnableConfig holds configuration for a Runnable invocation. All fields are optional and can be set via Option functions.
func ApplyOptions ¶
func ApplyOptions(opts ...Option) *RunnableConfig
ApplyOptions applies a set of options to a config, starting from defaults.
func DefaultConfig ¶
func DefaultConfig() *RunnableConfig
DefaultConfig returns a RunnableConfig with sensible defaults.
func MergeOptions ¶
func MergeOptions(base *RunnableConfig, opts ...Option) *RunnableConfig
MergeOptions merges a base config with additional options.
type StreamChunk ¶
StreamChunk wraps a streaming value with an optional error.
type StreamIterator ¶
type StreamIterator[T any] struct {
	// contains filtered or unexported fields
}
StreamIterator provides a pull-based iterator for streaming results. It wraps a channel internally but exposes a simpler API.
func NewStreamIterator ¶
func NewStreamIterator[T any](ch <-chan StreamChunk[T]) *StreamIterator[T]
NewStreamIterator creates a new StreamIterator from a channel. The producer should close the channel when done.
func (*StreamIterator[T]) Close ¶
func (s *StreamIterator[T]) Close()
Close signals the stream is no longer needed.
func (*StreamIterator[T]) Collect ¶
func (s *StreamIterator[T]) Collect() ([]T, error)
Collect reads all remaining chunks and returns them as a slice.
func (*StreamIterator[T]) Done ¶
func (s *StreamIterator[T]) Done() <-chan struct{}
Done returns a channel that is closed when the iterator is closed.
func (*StreamIterator[T]) Next ¶
func (s *StreamIterator[T]) Next() (T, bool, error)
Next returns the next chunk from the stream. Returns false when the stream is exhausted.
type SystemMessage ¶
type SystemMessage struct {
BaseMessage
}
SystemMessage represents a system instruction message.
func NewSystemMessage ¶
func NewSystemMessage(content string) *SystemMessage
NewSystemMessage creates a new SystemMessage with the given content.
func (*SystemMessage) GetType ¶
func (m *SystemMessage) GetType() MessageType
GetType returns MessageTypeSystem.
type ToolCall ¶
type ToolCall struct {
ID string `json:"id"`
Name string `json:"name"`
Args json.RawMessage `json:"args"`
Type string `json:"type,omitempty"`
}
ToolCall represents a request from the AI to invoke a tool.
type ToolMessage ¶
type ToolMessage struct {
BaseMessage
ToolCallID string `json:"tool_call_id"`
}
ToolMessage represents the result of a tool execution.
func NewToolMessage ¶
func NewToolMessage(content, toolCallID string) *ToolMessage
NewToolMessage creates a new ToolMessage with the given content and tool call ID.
func (*ToolMessage) GetType ¶
func (m *ToolMessage) GetType() MessageType
GetType returns MessageTypeTool.
type UsageMetadata ¶
type UsageMetadata struct {
InputTokens int `json:"input_tokens"`
OutputTokens int `json:"output_tokens"`
TotalTokens int `json:"total_tokens"`
}
UsageMetadata contains token usage information from the provider.