Documentation
¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type DiagCode ¶
type DiagCode string
DiagCode identifies the kind of parse diagnostic.
const (
	// Structural errors
	DiagUnexpectedToken DiagCode = "E001" // unexpected token
	DiagExpectedToken   DiagCode = "E002" // expected specific token
	DiagBadIndentation  DiagCode = "E003" // indentation mismatch
	DiagUnterminatedStr DiagCode = "E004" // unterminated string literal

	// Declaration errors
	DiagDuplicateDecl   DiagCode = "E010" // duplicate declaration name
	DiagReservedName    DiagCode = "E011" // use of reserved name (done/fail) as declaration
	DiagUnknownProperty DiagCode = "E012" // unknown property in a node block
	DiagMissingProperty DiagCode = "E013" // required property missing

	// Value errors
	DiagInvalidValue DiagCode = "E020" // invalid value (e.g. bad session mode)
	DiagInvalidType  DiagCode = "E021" // invalid type expression
)
type Diagnostic ¶
type Diagnostic struct {
Code DiagCode
Severity Severity
Message string
File string
Line int // 1-based
Column int // 1-based
}
Diagnostic represents a positioned parse error or warning.
func (Diagnostic) Error ¶
func (d Diagnostic) Error() string
type Lexer ¶
type Lexer struct {
// contains filtered or unexported fields
}
Lexer tokenizes an iterion DSL source file with indent-sensitive INDENT/DEDENT tokens.
type ParseResult ¶
type ParseResult struct {
File *ast.File
Diagnostics []Diagnostic
}
ParseResult is the output of Parse.
func Parse ¶
func Parse(filename, src string) *ParseResult
Parse parses an iterion DSL source file and returns the AST and any diagnostics.
type Token ¶
type Token struct {
Type TokenType
Value string // raw text of the token
Line int // 1-based
Column int // 1-based
}
Token is a single lexical token produced by the lexer.
type TokenType ¶
type TokenType int
TokenType identifies the kind of a lexical token.
const (
	// Special
	TokenEOF TokenType = iota
	TokenError  // lexer error
	TokenIndent // virtual: indentation increase
	TokenDedent // virtual: indentation decrease

	// Literals
	TokenIdent      // identifier
	TokenString     // "..." string literal
	TokenInt        // integer literal
	TokenFloat      // float literal
	TokenPromptLine // raw prompt body line

	// Punctuation
	TokenColon   // :
	TokenArrow   // ->
	TokenEquals  // =
	TokenComma   // ,
	TokenLBrack  // [
	TokenRBrack  // ]
	TokenLBrace  // {
	TokenRBrace  // }
	TokenLParen  // (
	TokenRParen  // )
	TokenDot     // .
	TokenStar    // *
	TokenNewline // logical newline (non-blank)

	// Comment
	TokenComment // ## ...

	// Keywords (contextual — also valid as identifiers in some positions)
	TokenVars
	TokenMCPServer
	TokenPrompt
	TokenSchema
	TokenAgent
	TokenJudge
	TokenRouter
	TokenJoin
	TokenHuman
	TokenTool
	TokenWorkflow
	TokenCompute
	TokenEntry
	TokenMCP
	TokenBudget
	TokenTransport
	TokenServers
	TokenDisable
	TokenAutoloadProject
	TokenModel
	TokenInput
	TokenOutput
	TokenPublish
	TokenSystem
	TokenUser
	TokenSession
	TokenTools
	TokenToolPolicy
	TokenToolMaxSteps
	TokenReasoningEffort
	TokenMode
	TokenStrategy
	TokenRequire
	TokenInstructions
	TokenCommand
	TokenArgs
	TokenURL
	TokenAuth
	TokenReadonly
	TokenBackend
	TokenDefaultBackend
	TokenInteraction
	TokenInteractionPrompt
	TokenInteractionModel
	TokenAwait
	TokenWhen
	TokenNot
	TokenAs
	TokenWith
	TokenEnum

	// Session modes
	TokenFresh
	TokenInherit
	TokenArtifactsOnly
	TokenFork

	// Router modes
	TokenFanOutAll
	TokenCondition
	TokenRoundRobin
	TokenLLM

	// Router properties
	TokenMulti

	// Join strategies
	TokenWaitAll
	TokenBestEffort

	// Booleans
	TokenTrue
	TokenFalse

	// Type keywords
	TokenTypeString
	TokenTypeBool
	TokenTypeInt
	TokenTypeFloat
	TokenTypeJSON
	TokenTypeStringArray

	// Budget properties
	TokenMaxParallelBranches
	TokenMaxDuration
	TokenMaxCostUSD
	TokenMaxTokens
	TokenMaxIterations

	// Compaction block + properties
	TokenCompaction
	TokenThreshold
	TokenPreserveRecent

	// Worktree
	TokenWorktree

	// Terminal node names (reserved identifiers)
	TokenDone
	TokenFail
)
Click to show internal directories.
Click to hide internal directories.