Documentation
¶
Index ¶
- Constants
- Variables
- func GetNextNodeID(edgesMap EdgesMap, sourceID idwrap.IDWrap, handle EdgeHandle) []idwrap.IDWrap
- func StringNodeState(a NodeState) string
- func StringNodeStateWithIcons(a NodeState) string
- type AIMetrics
- type AIProviderOutput
- type AIToolCall
- type AITotalMetrics
- type AiMemoryType
- type AiModel
- type Edge
- type EdgeHandle
- type EdgesMap
- type ErrorHandling
- type Flow
- type FlowTag
- type FlowVariable
- type FlowVariableUpdate
- type Node
- type NodeAI
- type NodeAiProvider
- type NodeExecution
- type NodeFor
- type NodeForEach
- type NodeIf
- type NodeJS
- type NodeKind
- type NodeManualStart
- type NodeMemory
- type NodePosition
- type NodeRequest
- type NodeState
Constants ¶
const (
ModelStringGpt52 = "gpt-5.2"
)
Model string constants
Variables ¶
var ErrEdgeNotFound = errors.New("edge not found")
Functions ¶
func GetNextNodeID ¶
func StringNodeState ¶
Types ¶
type AIMetrics ¶
type AIMetrics struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
Model string `json:"model"`
Provider string `json:"provider"`
FinishReason string `json:"finish_reason,omitempty"`
}
AIMetrics contains metrics for a single LLM call
type AIProviderOutput ¶
type AIProviderOutput struct {
Text string `json:"text,omitempty"`
ToolCalls []AIToolCall `json:"tool_calls,omitempty"`
Metrics AIMetrics `json:"metrics"`
}
AIProviderOutput represents the output of a single LLM call from NodeAiProvider
type AIToolCall ¶
type AIToolCall struct {
ID string `json:"id"`
Type string `json:"type"` // Usually "function"
Name string `json:"name"`
Arguments string `json:"arguments"`
}
AIToolCall represents a tool call request from the LLM
type AITotalMetrics ¶
type AITotalMetrics struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
Model string `json:"model"`
Provider string `json:"provider"`
LLMCalls int32 `json:"llm_calls"`
ToolCalls int32 `json:"tool_calls"`
}
AITotalMetrics contains aggregated metrics for the entire AI orchestration
type AiMemoryType ¶
type AiMemoryType int8
--- Memory Node --- AiMemoryType represents the type of conversation memory
const (
AiMemoryTypeWindowBuffer AiMemoryType = 0 // Keeps last N messages
)
type AiModel ¶
type AiModel int8
const (
	// Unspecified - must be 0 to match proto enum
	AiModelUnspecified AiModel = iota
	// OpenAI - GPT-5.2 family
	AiModelGpt52
	AiModelGpt52Pro
	AiModelGpt52Codex
	// OpenAI - Reasoning models
	AiModelO3
	AiModelO4Mini
	// Anthropic - Claude 4.5 family
	AiModelClaudeOpus45
	AiModelClaudeSonnet45
	AiModelClaudeHaiku45
	// Google - Gemini 3 family
	AiModelGemini3Pro
	AiModelGemini3Flash
	// Custom
	AiModelCustom
)
func AiModelFromString ¶
AiModelFromString parses a model string and returns the corresponding AiModel. Returns AiModelCustom if the string doesn't match any known model.
func (AiModel) ModelString ¶
ModelString returns the API model string for the LLM provider
type Edge ¶
type EdgeHandle ¶
type EdgeHandle = int32
const (
	HandleUnspecified EdgeHandle = iota
	HandleThen
	HandleElse
	HandleLoop
	HandleAiProvider
	HandleAiMemory
	HandleAiTools
	HandleLength
)
type ErrorHandling ¶
type ErrorHandling int8
const (
	ErrorHandling_ERROR_HANDLING_UNSPECIFIED ErrorHandling = 0
	ErrorHandling_ERROR_HANDLING_IGNORE      ErrorHandling = 1
	ErrorHandling_ERROR_HANDLING_BREAK       ErrorHandling = 2
)
type FlowVariable ¶
type FlowVariable struct {
ID idwrap.IDWrap `json:"id"`
FlowID idwrap.IDWrap `json:"flow_id"`
Name string `json:"key"`
Value string `json:"value"`
Enabled bool `json:"enabled"`
Description string `json:"description"`
Order float64 `json:"order"`
}
FlowVariable represents a variable associated with a flow
func (FlowVariable) IsEnabled ¶
func (fv FlowVariable) IsEnabled() bool
type FlowVariableUpdate ¶
type NodeAiProvider ¶
type NodeAiProvider struct {
FlowNodeID idwrap.IDWrap
CredentialID *idwrap.IDWrap // nil means no credential set yet
Model AiModel
Temperature *float32 // nil means use provider default
MaxTokens *int32 // nil means use provider default
}
--- AI Provider Node --- NodeAiProvider is an active LLM executor node that makes LLM calls and tracks metrics. It connects via HandleAiProvider edge and is orchestrated by the NodeAI node. Each LLM call through this node gets its own node_execution record with metrics.
type NodeExecution ¶
type NodeExecution struct {
ID idwrap.IDWrap `json:"id"`
NodeID idwrap.IDWrap `json:"node_id"`
Name string `json:"name"`
State int8 `json:"state"`
Error *string `json:"error,omitempty"`
InputData []byte `json:"input_data,omitempty"`
InputDataCompressType int8 `json:"input_data_compress_type"`
OutputData []byte `json:"output_data,omitempty"`
OutputDataCompressType int8 `json:"output_data_compress_type"`
ResponseID *idwrap.IDWrap `json:"response_id,omitempty"`
CompletedAt *int64 `json:"completed_at,omitempty"`
}
func (*NodeExecution) GetInputJSON ¶
func (ne *NodeExecution) GetInputJSON() (json.RawMessage, error)
Helper methods for JSON handling with compression
func (*NodeExecution) GetOutputJSON ¶
func (ne *NodeExecution) GetOutputJSON() (json.RawMessage, error)
Similar methods for output data
func (*NodeExecution) SetInputJSON ¶
func (ne *NodeExecution) SetInputJSON(data json.RawMessage) error
func (*NodeExecution) SetOutputJSON ¶
func (ne *NodeExecution) SetOutputJSON(data json.RawMessage) error
type NodeFor ¶
type NodeFor struct {
FlowNodeID idwrap.IDWrap
IterCount int64
Condition mcondition.Condition
ErrorHandling ErrorHandling
}
type NodeForEach ¶
type NodeForEach struct {
FlowNodeID idwrap.IDWrap
IterExpression string
Condition mcondition.Condition
ErrorHandling ErrorHandling
}
type NodeIf ¶
type NodeIf struct {
FlowNodeID idwrap.IDWrap
Condition mcondition.Condition
}
--- If/Condition Node ---
type NodeJS ¶
type NodeJS struct {
FlowNodeID idwrap.IDWrap
Code []byte
CodeCompressType compress.CompressType
}
type NodeKind ¶
type NodeKind = int32
const (
	NODE_KIND_UNSPECIFIED  NodeKind = 0
	NODE_KIND_MANUAL_START NodeKind = 1
	NODE_KIND_REQUEST      NodeKind = 2
	NODE_KIND_CONDITION    NodeKind = 3
	NODE_KIND_FOR          NodeKind = 4
	NODE_KIND_FOR_EACH     NodeKind = 5
	NODE_KIND_JS           NodeKind = 6
	NODE_KIND_AI           NodeKind = 7
	NODE_KIND_AI_PROVIDER  NodeKind = 8
	NODE_KIND_AI_MEMORY    NodeKind = 9
)
type NodeManualStart ¶
type NodeMemory ¶
type NodeMemory struct {
FlowNodeID idwrap.IDWrap
MemoryType AiMemoryType
WindowSize int32
}
NodeMemory is a passive configuration node that provides conversation memory to connected AI Agent nodes. It connects via HandleAiMemory edge and manages conversation history.
type NodePosition ¶
type NodePosition int
NodePosition represents the relative position of nodes
const (
	NodeBefore NodePosition = iota
	NodeAfter
)
func IsNodeCheckTarget ¶
func IsNodeCheckTarget(edgesMap EdgesMap, sourceNode, targetNode idwrap.IDWrap) NodePosition
IsNodeCheckTarget determines if sourceNode is before targetNode in the flow graph