parser

package
v0.3.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 4, 2026 License: MIT Imports: 5 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type DiagCode

type DiagCode string

DiagCode identifies the kind of parse diagnostic.

const (
	// Structural errors
	DiagUnexpectedToken DiagCode = "E001" // unexpected token
	DiagExpectedToken   DiagCode = "E002" // expected specific token
	DiagBadIndentation  DiagCode = "E003" // indentation mismatch
	DiagUnterminatedStr DiagCode = "E004" // unterminated string literal

	// Declaration errors
	DiagDuplicateDecl   DiagCode = "E010" // duplicate declaration name
	DiagReservedName    DiagCode = "E011" // use of reserved name (done/fail) as declaration
	DiagUnknownProperty DiagCode = "E012" // unknown property in a node block
	DiagMissingProperty DiagCode = "E013" // required property missing

	// Value errors
	DiagInvalidValue DiagCode = "E020" // invalid value (e.g. bad session mode)
	DiagInvalidType  DiagCode = "E021" // invalid type expression
)

type Diagnostic

type Diagnostic struct {
	Code     DiagCode
	Severity Severity
	Message  string
	File     string
	Line     int // 1-based
	Column   int // 1-based
}

Diagnostic represents a positioned parse error or warning.

func (Diagnostic) Error

func (d Diagnostic) Error() string

type Lexer

type Lexer struct {
	// contains filtered or unexported fields
}

Lexer tokenizes an iterion DSL source file with indent-sensitive INDENT/DEDENT tokens.

func NewLexer

func NewLexer(filename, src string) *Lexer

NewLexer creates a new Lexer for the given source.

func (*Lexer) All

func (l *Lexer) All() []Token

All returns every token produced by the lexer (for debugging).

func (*Lexer) Backup

func (l *Lexer) Backup()

Backup unreads the last consumed token.

func (*Lexer) Next

func (l *Lexer) Next() Token

Next returns the next token.

func (*Lexer) Peek

func (l *Lexer) Peek() Token

Peek returns the next token without consuming it.

func (*Lexer) PeekAt

func (l *Lexer) PeekAt(offset int) Token

PeekAt returns the token at offset positions ahead without consuming.

type ParseResult

type ParseResult struct {
	File        *ast.File
	Diagnostics []Diagnostic
}

ParseResult is the output of Parse.

func Parse

func Parse(filename, src string) *ParseResult

Parse parses an iterion DSL source file and returns the AST and any diagnostics.

type Severity

type Severity int

Severity indicates the severity of a diagnostic.

const (
	SeverityError Severity = iota
	SeverityWarning
)

func (Severity) String

func (s Severity) String() string

type Token

type Token struct {
	Type   TokenType
	Value  string // raw text of the token
	Line   int    // 1-based
	Column int    // 1-based
}

Token is a single lexical token produced by the lexer.

func (Token) String

func (t Token) String() string

type TokenType

type TokenType int

TokenType identifies the kind of a lexical token.

const (
	// Special
	TokenEOF    TokenType = iota
	TokenError            // lexer error
	TokenIndent           // virtual: indentation increase
	TokenDedent           // virtual: indentation decrease

	// Literals
	TokenIdent      // identifier
	TokenString     // "..." string literal
	TokenInt        // integer literal
	TokenFloat      // float literal
	TokenPromptLine // raw prompt body line

	// Punctuation
	TokenColon   // :
	TokenArrow   // ->
	TokenEquals  // =
	TokenComma   // ,
	TokenLBrack  // [
	TokenRBrack  // ]
	TokenLBrace  // {
	TokenRBrace  // }
	TokenLParen  // (
	TokenRParen  // )
	TokenDot     // .
	TokenStar    // *
	TokenNewline // logical newline (non-blank)

	// Comment
	TokenComment // ## ...

	// Keywords (contextual — also valid as identifiers in some positions)
	TokenVars
	TokenMCPServer
	TokenPrompt
	TokenSchema
	TokenAgent
	TokenJudge
	TokenRouter
	TokenJoin
	TokenHuman
	TokenTool
	TokenWorkflow
	TokenCompute
	TokenEntry
	TokenMCP
	TokenBudget
	TokenTransport
	TokenServers
	TokenDisable
	TokenAutoloadProject
	TokenModel
	TokenInput
	TokenOutput
	TokenPublish
	TokenSystem
	TokenUser
	TokenSession
	TokenTools
	TokenToolPolicy
	TokenToolMaxSteps
	TokenReasoningEffort
	TokenMode
	TokenStrategy
	TokenRequire
	TokenInstructions
	TokenCommand
	TokenArgs
	TokenURL
	TokenAuth
	TokenReadonly
	TokenBackend
	TokenDefaultBackend
	TokenInteraction
	TokenInteractionPrompt
	TokenInteractionModel
	TokenAwait
	TokenWhen
	TokenNot
	TokenAs
	TokenWith
	TokenEnum
	// Session modes
	TokenFresh
	TokenInherit
	TokenArtifactsOnly
	TokenFork
	// Router modes
	TokenFanOutAll
	TokenCondition
	TokenRoundRobin
	TokenLLM
	// Router properties
	TokenMulti
	// Join strategies
	TokenWaitAll
	TokenBestEffort
	// Booleans
	TokenTrue
	TokenFalse
	// Type keywords
	TokenTypeString
	TokenTypeBool
	TokenTypeInt
	TokenTypeFloat
	TokenTypeJSON
	TokenTypeStringArray
	// Budget properties
	TokenMaxParallelBranches
	TokenMaxDuration
	TokenMaxCostUSD
	TokenMaxTokens
	TokenMaxIterations
	// Compaction block + properties
	TokenCompaction
	TokenThreshold
	TokenPreserveRecent
	// Worktree
	TokenWorktree
	// Terminal node names (reserved identifiers)
	TokenDone
	TokenFail
)

func (TokenType) String

func (t TokenType) String() string

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL