lexer

package
v1.49.2 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Feb 27, 2026 License: Apache-2.0 Imports: 9 Imported by: 1

Documentation

Index

Constants

View Source
// Schema feature-flag names. Each value is the literal token that enables
// the corresponding feature — presumably via a schema `use` declaration
// (see AllUseFlags); confirm against the parser.
const (
	// FlagExpiration indicates that `expiration` is supported as a first-class
	// feature in the schema.
	FlagExpiration = "expiration"

	// FlagTypeChecking indicates that `typechecking` is supported as a first-class
	// feature in the schema.
	FlagTypeChecking = "typechecking"

	// FlagSelf indicates that `self` is supported as a first-class
	// feature in the schema.
	FlagSelf = "self"

	// FlagPartials indicates that partials are supported in the schema.
	// NOTE(review): the constant name is plural but the value is the
	// singular token "partial".
	FlagPartials = "partial"

	// FlagImports indicates that imports are supported in the schema.
	// NOTE(review): plural name, singular value "import" (matches FlagPartials).
	FlagImports = "import"
)
View Source
// EOFRUNE is the sentinel rune value (-1) used to signal that the end of
// the input has been reached. NOTE(review): Go convention would name this
// EOFRune; renaming would break the exported API, so it is left as-is.
const EOFRUNE = -1

Variables

View Source
// AllUseFlags lists the names of all supported `use` flags.
// NOTE(review): declared with no initializer here — presumably populated
// in an init function elsewhere in the package; confirm before relying on
// it being non-empty at import time.
var AllUseFlags []string
View Source
// keywordTransformer returns a transformer that promotes an identifier
// lexeme to a keyword when its value matches any of the given words.
// Non-identifier lexemes and non-matching identifiers are returned
// unchanged with a false second result.
func keywordTransformer(words ...string) transformer {
	return func(lexeme Lexeme) (Lexeme, bool) {
		if lexeme.Kind == TokenTypeIdentifier {
			for _, word := range words {
				if lexeme.Value == word {
					lexeme.Kind = TokenTypeKeyword
					return lexeme, true
				}
			}
		}
		return lexeme, false
	}
}

// Flags is a map of flag names to their corresponding transformers.
// Each transformer upgrades the feature's identifier token(s) to keywords
// when the flag is enabled.
var Flags = map[string]transformer{
	// `expiration` also reserves `and` (used in expiration expressions).
	FlagExpiration:   keywordTransformer("expiration", "and"),
	FlagTypeChecking: keywordTransformer("typechecking"),
	FlagSelf:         keywordTransformer("self"),
	FlagPartials:     keywordTransformer("partial"),
	FlagImports:      keywordTransformer("import"),
}

Flags is a map of flag names to their corresponding transformers.

Functions

func IsKeyword added in v1.7.0

func IsKeyword(candidate string) bool

IsKeyword returns whether the specified input string is a reserved keyword.

Types

type FlaggableLexer added in v1.41.0

// FlaggableLexer wraps a lexer, automatically translating tokens based on
// flags, if any.
type FlaggableLexer struct {
	// contains filtered or unexported fields
}

FlaggableLexer wraps a lexer, automatically translating tokens based on flags, if any.

func NewFlaggableLexer added in v1.41.0

func NewFlaggableLexer(lex *Lexer) *FlaggableLexer

NewFlaggableLexer returns a new FlaggableLexer for the given lexer.

func (*FlaggableLexer) Close added in v1.41.0

func (l *FlaggableLexer) Close()

Close stops the lexer from running.

func (*FlaggableLexer) NextToken added in v1.41.0

func (l *FlaggableLexer) NextToken() Lexeme

NextToken returns the next token found in the lexer.

type Lexeme

// Lexeme represents a token returned from scanning the contents of a file.
type Lexeme struct {
	Kind     TokenType          // The type of this lexeme.
	Position input.BytePosition // The starting position of this token in the input string.
	Value    string             // The textual value of this token.
	Error    string             // The error associated with the lexeme, if any.
}

Lexeme represents a token returned from scanning the contents of a file.

type Lexer

// Lexer holds the state of the scanner.
// The embedded RWMutex presumably guards the unexported lexer state —
// confirm against the method implementations. Because a mutex must not be
// copied, Lexer values should always be passed as *Lexer.
type Lexer struct {
	sync.RWMutex
	// contains filtered or unexported fields
}

Lexer holds the state of the scanner.

func Lex

func Lex(source input.Source, input string) *Lexer

Lex creates a new scanner for the input string.

func (*Lexer) Close added in v1.1.0

func (l *Lexer) Close()

Close stops the lexer from running.

type TokenType

// TokenType identifies the type of lexer lexemes.
type TokenType int

TokenType identifies the type of lexer lexemes.

// The set of token kinds produced by the lexer. TokenTypeError must remain
// first (iota zero value) so a zero-valued Lexeme reads as an error.
const (
	TokenTypeError TokenType = iota // error occurred; value is text of error

	// Synthetic semicolon
	TokenTypeSyntheticSemicolon

	TokenTypeEOF
	TokenTypeWhitespace
	TokenTypeSinglelineComment
	TokenTypeMultilineComment
	TokenTypeNewline

	TokenTypeKeyword    // interface
	TokenTypeIdentifier // helloworld
	TokenTypeNumber     // 123

	TokenTypeLeftBrace  // {
	TokenTypeRightBrace // }
	TokenTypeLeftParen  // (
	TokenTypeRightParen // )

	TokenTypePipe  // |
	TokenTypePlus  // +
	TokenTypeMinus // -
	TokenTypeAnd   // &
	TokenTypeDiv   // /

	TokenTypeEquals     // =
	TokenTypeColon      // :
	TokenTypeSemicolon  // ;
	TokenTypeRightArrow // ->
	TokenTypeHash       // #
	TokenTypeEllipsis   // ...
	TokenTypeStar       // *

	// Additional tokens for CEL: https://github.com/google/cel-spec/blob/master/doc/langdef.md#syntax
	TokenTypeQuestionMark       // ?
	TokenTypeConditionalOr      // ||
	TokenTypeConditionalAnd     // &&
	TokenTypeExclamationPoint   // !
	TokenTypeLeftBracket        // [
	TokenTypeRightBracket       // ]
	TokenTypePeriod             // .
	TokenTypeComma              // ,
	TokenTypePercent            // %
	TokenTypeLessThan           // <
	TokenTypeGreaterThan        // >
	TokenTypeLessThanOrEqual    // <=
	TokenTypeGreaterThanOrEqual // >=
	TokenTypeEqualEqual         // ==
	TokenTypeNotEqual           // !=
	TokenTypeString             // "...", '...', """...""", '''...'''
)

func (TokenType) String

func (i TokenType) String() string

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL