Documentation
¶
Index ¶
- Variables
- func ComparatorKindToString(kind ComparatorKind) string
- func FieldTypeToString(fType FieldType) string
- func LiteralKindToString(lit LiteralKind) string
- func ModifierKindToString(kind ModifierKind) string
- func PredicateKindToString(kind PredicateKind) string
- type ASTAnd
- type ASTComparator
- type ASTLiteral
- type ASTModifier
- type ASTNode
- type ASTNot
- type ASTOr
- type ASTPredicate
- type ASTRange
- type BooleOp
- type ComparatorKind
- type Diagnostic
- type Disassembler
- type DisassemblerOpts
- type FieldSpec
- type FieldType
- type IRAnd
- type IRAny
- type IRFalse
- type IRGlob
- type IRIPCmp
- type IRIPRange
- type IRNode
- type IRNot
- type IRNumberCmp
- type IRNumberRange
- type IROr
- type IRPhrase
- type IRPrefix
- type IRRegex
- type IRStringEQ
- type IRStringNEQ
- type IRTimeCmp
- type IRTimeRange
- type IRTrue
- type Instr
- type LabelID
- type LexedToken
- type Lexer
- type Literal
- type LiteralKind
- type ModifierKind
- type NNF
- type OpCode
- type Parser
- type Position
- type PredicateKind
- type Program
- func (p *Program) AddFieldConstant(val string) int
- func (p *Program) AddIPConstant(val netip.Addr) int
- func (p *Program) AddNumberConstant(val float64) int
- func (p *Program) AddRegexConstant(val *regexp.Regexp) int
- func (p *Program) AddStringConstant(val string) int
- func (p *Program) AddTimeConstant(val int64) int
- func (p *Program) AppendIsn(opCode OpCode, args ...any)
- func (p *Program) AppendJump(opCode OpCode, target LabelID)
- func (p *Program) BindLabel(id LabelID)
- func (p *Program) Emit(irNode IRNode)
- func (p *Program) NewLabel() LabelID
- func (p *Program) Peephole()
- type Schema
- type Simplifier
- type Span
- type Token
- type Typer
Constants ¶
This section is empty.
Variables ¶
var ( // Returned when the typer detects an invalid datetime. ErrBadDateTime = errors.Base("bad datetime value") // Returned should an attempt be made to `unread` after a rune has // already been put back into the reader. ErrDoubleUnread = errors.Base("double unread") // Returned if the lexer is invoked without a valid reader. ErrInvalidReader = errors.Base("reader is not valid") // Returned when the code generator detects a label without a target. ErrJumpMissingArg = errors.Base("jump missing target arg") // Returned when the code generator detects a label with an invalid // target. ErrJumpNotLabelID = errors.Base("jump target arg not LabelID") // Returned when the code generator detects a label with a bad ID. ErrLabelBadIDType = errors.Base("LABEL has bad id type") // Returned when the code generator detects a label that lacks an ID. ErrLabelMissingID = errors.Base("LABEL missing id") // Returned when the lexer detects an embedded newline in a field // name. ErrNewlineInField = errors.Base("embedded newline in field") // Returned when the lexer detects an embedded newline in a phrase. ErrNewlineInPhrase = errors.Base("embedded newline in phrase") // Returned when the lexer detects a newline in a regular expression. ErrNewlineInRegex = errors.Base("embedded newline in regular expression") // Returned if no tokens were provided. ErrNoTokens = errors.Base("no tokens") // Returned when the lexer detects unsupported flags in a regular // expression. ErrRegexFlags = errors.Base("regex flags not supported") // Returned when the code generator detects a label that has not been // bound to a target. ErrUnboundLabel = errors.Base("unbound label") // Returned when the lexer detects an unexpected bareword in the // source code. ErrUnexpectedBareword = errors.Base("unexpected bareword (missing quotes or field?)") // Returned when the lexer detects an unexpected character. 
ErrUnexpectedRune = errors.Base("unexpected rune") // Returned when the lexer detects an unexpected token in the source // code. ErrUnexpectedToken = errors.Base("unexpected token") // Returned when the typer detects an unknown literal. ErrUnknownLiteral = errors.Base("unknown literal") // Returned when the lexer detects that a quoted field name is // unterminated. ErrUnterminatedField = errors.Base("unterminated quoted field") // Returned when the lexer detects an unterminated regular expression. ErrUnterminatedRegex = errors.Base("unterminated regular expression") // Returned when the lexer detects an unterminated quoted string. ErrUnterminatedString = errors.Base("unterminated string") )
var ( // A span with zero values. ZeroSpan = &Span{} )
Functions ¶
func ComparatorKindToString ¶
func ComparatorKindToString(kind ComparatorKind) string
Return the string representation for a comparator kind.
func FieldTypeToString ¶
func LiteralKindToString ¶
func LiteralKindToString(lit LiteralKind) string
Return the string representation of a literal type.
func ModifierKindToString ¶
func ModifierKindToString(kind ModifierKind) string
Return the string representation of a modifier.
func PredicateKindToString ¶
func PredicateKindToString(kind PredicateKind) string
Return the string representation for a predicate kind.
Types ¶
type ASTAnd ¶
type ASTAnd struct {
Kids []ASTNode // Child nodes.
// contains filtered or unexported fields
}
An AST node for the `AND' logical operator.
type ASTComparator ¶
type ASTComparator struct {
Atom ASTLiteral // Atom on which to operate.
Op ComparatorKind // Comparator operator.
}
Comparator structure.
type ASTLiteral ¶
type ASTLiteral struct {
String string // String value.
Kind LiteralKind // Kind of the literal.
Number float64 // Numeric value.
// contains filtered or unexported fields
}
An AST node for a `literal' of some kind.
type ASTModifier ¶
type ASTModifier struct {
Kid ASTNode // Node to which the modifier applies.
Kind ModifierKind // Modifier kind.
// contains filtered or unexported fields
}
An AST node for a `modifier' to an operation.
type ASTNode ¶
type ASTNode interface {
// Return the span for this node.
//
// Spans can be used in diagnostics to show where in the source file
// an issue exists.
Span() *Span
// Print debugging information for the given node.
Debug(...any) *debug.Debug
}
Abstract Syntax Tree.
type ASTNot ¶
type ASTNot struct {
Kid ASTNode // Child node.
// contains filtered or unexported fields
}
An AST node for the `NOT' logical operator.
type ASTOr ¶
type ASTOr struct {
Kids []ASTNode // Child nodes.
// contains filtered or unexported fields
}
An AST node for the `OR' logical operator.
type ASTPredicate ¶
type ASTPredicate struct {
Range *ASTRange // Target range value.
Comparator *ASTComparator // Comparator to use.
Fuzz *float64 // Levenshtein Distance.
Boost *float64 // Boost value.
Field string // Target field.
String string // Target string value.
Regex string // Target regex pattern.
Kind PredicateKind // Predicate kind.
Number float64 // Target numeric value.
Proximity int // String proximity.
// contains filtered or unexported fields
}
An AST node for predicates.
type ASTRange ¶
type ASTRange struct {
Lo *ASTLiteral // Start of range.
Hi *ASTLiteral // End of range.
IncL bool // Start is inclusive?
IncH bool // End is inclusive?
}
Range structure.
type ComparatorKind ¶
type ComparatorKind int
Comparator kind type.
const ( ComparatorLT ComparatorKind = iota // Comparator is `LT'. ComparatorLTE // Comparator is `LTE'. ComparatorGT // Comparator is `GT'. ComparatorGTE // Comparator is `GTE'. ComparatorEQ // Comparator is `EQ'. ComparatorNEQ // Comparator is `NEQ'. )
func InvertComparator ¶
func InvertComparator(kind ComparatorKind) ComparatorKind
type Diagnostic ¶
type Diagnostic struct {
Msg string // Diagnostic message.
At *Span // Location within source code.
Hint string // Hint message, if applicable.
}
func NewDiagnostic ¶
func NewDiagnostic(msg string, at *Span) Diagnostic
func NewDiagnosticHint ¶
func NewDiagnosticHint(msg, hint string, at *Span) Diagnostic
func (Diagnostic) String ¶
func (d Diagnostic) String() string
Return the string representation of a diagnostic.
type Disassembler ¶
type Disassembler struct {
// contains filtered or unexported fields
}
func NewDefaultDisassembler ¶
func NewDefaultDisassembler() *Disassembler
func NewDisassembler ¶
func NewDisassembler(opts DisassemblerOpts) *Disassembler
func (*Disassembler) Dissassemble ¶
func (d *Disassembler) Dissassemble(writer io.Writer)
func (*Disassembler) SetProgram ¶
func (d *Disassembler) SetProgram(program *Program)
type DisassemblerOpts ¶
type DisassemblerOpts struct {
WithComments bool // Include decoded comments?
AddrWidth int // Width of an address. 0 = auto.
OpcodeWidth int // Pad opcode column. 0 auto.
OperandWidth int // Pad operand column. 0 = auto.
}
func NewDefaultDisassemblerOpts ¶
func NewDefaultDisassemblerOpts() DisassemblerOpts
type IRAnd ¶
type IRAnd struct {
Kids []IRNode
}
type IRAny ¶
type IRAny struct {
Field string
}
type IRGlob ¶
type IRIPCmp ¶
type IRIPCmp struct {
Field string
Op ComparatorKind
Value netip.Addr
}
type IRIPRange ¶
type IRNode ¶
type IRNode interface {
// Return a unique key for the node.
//
// This is used during code generation.
Key() string
// Print debugging information.
Debug(...any) *debug.Debug
// Emit code.
Emit(*Program, LabelID, LabelID)
}
An intermediate representation node of a syntactic element.
type IRNot ¶
type IRNot struct {
Kid IRNode
}
type IRNumberCmp ¶
type IRNumberCmp struct {
Field string
Op ComparatorKind
Value float64
}
func (IRNumberCmp) Debug ¶
func (n IRNumberCmp) Debug(params ...any) *debug.Debug
Display debugging information.
func (IRNumberCmp) Emit ¶
func (n IRNumberCmp) Emit(program *Program, trueLabel, falseLabel LabelID)
Generate opcode.
type IRNumberRange ¶
func (IRNumberRange) Debug ¶
func (n IRNumberRange) Debug(params ...any) *debug.Debug
Display debugging information.
func (IRNumberRange) Emit ¶
func (n IRNumberRange) Emit(program *Program, trueLabel, falseLabel LabelID)
Generate opcode.
type IRPhrase ¶
func (IRPhrase) HasWildcard ¶
type IRPrefix ¶
type IRRegex ¶
type IRStringEQ ¶
func (IRStringEQ) Debug ¶
func (n IRStringEQ) Debug(params ...any) *debug.Debug
Display debugging information.
func (IRStringEQ) Emit ¶
func (n IRStringEQ) Emit(program *Program, trueLabel, falseLabel LabelID)
Generate opcode.
type IRStringNEQ ¶
func (IRStringNEQ) Debug ¶
func (n IRStringNEQ) Debug(params ...any) *debug.Debug
Display debugging information.
func (IRStringNEQ) Emit ¶
func (n IRStringNEQ) Emit(program *Program, trueLabel, falseLabel LabelID)
Generate opcode.
type IRTimeCmp ¶
type IRTimeCmp struct {
Field string
Op ComparatorKind
Value int64
}
type IRTimeRange ¶
func (IRTimeRange) Debug ¶
func (n IRTimeRange) Debug(params ...any) *debug.Debug
Display debugging information.
func (IRTimeRange) Emit ¶
func (n IRTimeRange) Emit(program *Program, trueLabel, falseLabel LabelID)
Generate opcode.
type LexedToken ¶
type LexedToken struct {
Literal Literal // Literal value for the token.
Lexeme string // Lexeme for the token.
Start Position // Start position within source code.
End Position // End position within source code.
Token Token // The token.
}
A lexed token.
func NewLexedToken ¶
func NewLexedToken(token Token, lexeme string, start, end Position) LexedToken
Return a new lexed token with the given lexeme.
func NewLexedTokenWithError ¶
func NewLexedTokenWithError(token Token, lexeme string, err error, start, end Position) LexedToken
Return a new lexed token with the given lexeme and error message.
func NewLexedTokenWithLiteral ¶
func NewLexedTokenWithLiteral(token Token, lexeme string, lit any, start, end Position) LexedToken
Return a new lexed token with the given lexeme and literal.
func (*LexedToken) Debug ¶
func (lt *LexedToken) Debug(params ...any) *debug.Debug
Display debugging information.
func (*LexedToken) String ¶
func (lt *LexedToken) String() string
type Lexer ¶
type Lexer interface {
Reset()
Tokens() []LexedToken
Lex(io.Reader) ([]LexedToken, error)
}
type Literal ¶
type Literal struct {
Value any // The literal value.
Err error // The error to pass as a literal.
}
Literal value structure.
type LiteralKind ¶
type LiteralKind int
Literal kind type.
const ( LString LiteralKind = iota // Literal is a string. LNumber // Literal is a number. LUnbounded // Literal has no value bound to it. )
type ModifierKind ¶
type ModifierKind int
Modifier kind type.
const ( // This modifier will mark the predicate to which it is attached as // having a required value. If the value is not present, then the // predicate will fail. ModRequire ModifierKind = iota // This modifier will mark the predicate to which it is attached as // having a prohibited value. If the value is present, then the // predicate will fail. ModProhibit )
type OpCode ¶
type OpCode int
Opcode.
const ( // No operation. OpNoOp OpCode = iota // Return the contents of the accumulator. // // `RET: <- ACC` OpReturn // Special instruction used by the code generator to mark a // program location as a label for use by the jump instructions. // // This instruction is not part of the bytecode. It is removed // when the label resolver processes code for jump addresses. // // Should it be included in a bytecode instruction stream, it will // equate to a NOP. OpLabel // Jump to label. // // `JMP lbl: (jump)` OpJump // Jump to label if accumulator is zero. // // `JZ lbl: (jump if ACC == 0)` OpJumpZ // Jump to label if accumulator is non-zero. // // `JNZ lbl: (jump if ACC > 0)` OpJumpNZ // Negate the value in the accumulator. // // `NOT: ACC <- !ACC` OpNot // Load an immediate into accumulator. // // `LDA imm: ACC <- imm` OpLoadA // Load a field ID into the field register. // // `LDFLD fid: FIELD <- fid` OpLoadField // Load a value into the boost register. // // `LDBST imm: BOOST <- imm` OpLoadBoost // Load a value into the fuzzy register. // // `LDFZY imm: FUZZY <- imm` OpLoadFuzzy // Compare the current field to the given string constant for // equality. // // Stores the result in the accumulator. // // `EQ.S sIdx: ACC <- field[FIELD] == string[sIdx]` OpStringEQ // Compare the current field to the given string constant for // inequality. // // Stores the result in the accumulator. // // `NEQ.S sIdx: ACC <- field[FIELD] != string[sIdx]` OpStringNEQ // Test whether the current field has the given string constant as // a prefix. // // Stores the result in the accumulator. // // `PFX.S sIdx: ACC <- HasPrefix(field[FIELD], string[sIdx])` OpPrefix // Test whether the current field matches the given glob pattern. // // Stores the result in the accumulator. // // `GLB.S sIdx: ACC <- MatchesGlob(field[FIELD], string[sIdx])` OpGlob // Perform a regular expression match of the current field against // the given regular expression constant. 
// // Stores the result in the accumulator. // // `REX.S rIdx: ACC <- MatchesRegex(field[FIELD], regex[rIdx])` OpRegex // Test whether the current field contains the given string constant // as a phrase. // // If non-zero, the `prox` argument specifies maximum Levenshtein // distance (proximity) allowed for a match. // // Stores the result in the accumulator. // // `PHR.S sIdx: ACC <- MatchesPhrase(field[FIELD], string[sIdx])` OpPhrase // Test whether the current field has any value at all. // // Stores the result in the accumulator. // // `ANY: ACC <- HasAnyValue(field[FIELD])` OpAny // Test whether the current field has equality with the given // number constant. // // Stores the result in the accumulator. // // `EQ.N nIdx: ACC <- (field[FIELD] == number[nIdx])` OpNumberEQ // Test whether the current field has inequality with the given // number constant. // // Stores the result in the accumulator. // // `NEQ.N nIdx: ACC <- (field[FIELD] != number[nIdx])` OpNumberNEQ // Test whether the current field has a value that is lesser than // the given number constant. // // Stores the result in the accumulator. // // `LT.N nIdx: ACC <- (field[FIELD] < number[nIdx])` OpNumberLT // Test whether the current field has a value that is lesser than or // equal to the given number constant. // // Stores the result in the accumulator. // // `LTE.N nIdx: ACC <- (field[FIELD] <= number[nIdx])` OpNumberLTE // Test whether the current field has a value that is greater than // the given number constant. // // Stores the result in the accumulator. // // `GT.N nIdx: ACC <- (field[FIELD] > number[nIdx])` OpNumberGT // Test whether the current field has a value that is greater than or // equal to the given number constant. // // Stores the result in the accumulator. // // `GTE.N nIdx: ACC <- (field[FIELD] >= number[nIdx])` OpNumberGTE // Test whether the current field has a value that falls within the // given range. // // `loIdx` is the starting number in the range. 
// `hiIdx` is the ending number in the range. // `incL` is non-zero if the range is to be inclusive at the lowest. // `incH` is non-zero if the range is to be inclusive at the highest. // // Stores the results in the accumulator. // // `RNG.N loIdx hiIdx incL incH: ACC <- inRange(field[field]...)` OpNumberRange // Test whether the current field has equality with the given // date/time constant. // // Stores the result in the accumulator. // // `EQ.T tIdx: ACC <- (field[FIELD] == time[tIdx])` OpTimeEQ // Test whether the current field has inequality with the given // date/time constant. // // Stores the result in the accumulator. // // `NEQ.T tIdx: ACC <- (field[FIELD] != time[tIdx])` OpTimeNEQ // Test whether the current field has a value that is lesser than // the given date/time constant. // // Stores the result in the accumulator. // // `LT.T tIdx: ACC <- (field[FIELD] < time[tIdx])` OpTimeLT // Test whether the current field has a value that is lesser than or // equal to the given date/time constant. // // Stores the result in the accumulator. // // `LTE.T tIdx: ACC <- (field[FIELD] <= time[tIdx])` OpTimeLTE // Test whether the current field has a value that is greater than // the given date/time constant. // // Stores the result in the accumulator. // // `GT.T tIdx: ACC <- (field[FIELD] > time[tIdx])` OpTimeGT // Test whether the current field has a value that is greater than or // equal to the given date/time constant. // // Stores the result in the accumulator. // // `GTE.T tIdx: ACC <- (field[FIELD] >= time[tIdx])` OpTimeGTE // Test whether the current field has a value that falls within the // given range. // // `loIdx` is the starting date/time in the range. // `hiIdx` is the ending date/time in the range. // `incL` is non-zero if the range is to be inclusive at the lowest. // `incH` is non-zero if the range is to be inclusive at the highest. // // Stores the results in the accumulator. 
// // `RNG.T loIdx hiIdx incL incH: ACC <- inRange(field[field]...)` OpTimeRange // Test whether the current field has equality with the given // IP address constant. // // Stores the result in the accumulator. // // `EQ.IP ipIdx: ACC <- (field[FIELD] == address[ipIdx])` OpIPEQ // Test whether the current field has inequality with the given // IP address constant. // // Stores the result in the accumulator. // // `NEQ.IP ipIdx: ACC <- (field[FIELD] != address[ipIdx])` OpIPNEQ // Test whether the current field has a value that is lesser than // the given IP address constant. // // Stores the result in the accumulator. // // `LT.IP ipIdx: ACC <- (field[FIELD] < address[ipIdx])` OpIPLT // Test whether the current field has a value that is lesser than or // equal to the given IP address constant. // // Stores the result in the accumulator. // // `LTE.IP ipIdx: ACC <- (field[FIELD] <= address[ipIdx])` OpIPLTE // Test whether the current field has a value that is greater than // the given IP address constant. // // Stores the result in the accumulator. // // `GT.IP ipIdx: ACC <- (field[FIELD] > address[ipIdx])` OpIPGT // Test whether the current field has a value that is greater than or // equal to the given IP address constant. // // Stores the result in the accumulator. // // `GTE.IP ipIdx: ACC <- (field[FIELD] >= address[ipIdx])` OpIPGTE // Test whether the current field has a value that falls within the // given range. // // `loIdx` is the starting IP address in the range. // `hiIdx` is the ending IP address in the range. // `incL` is non-zero if the range is to be inclusive at the lowest. // `incH` is non-zero if the range is to be inclusive at the highest. // // Stores the results in the accumulator. // // `RNG.IP loIdx hiIdx incL incH: ACC <- inRange(field[field]...)` OpIPRange // Test whether the current field is within a CIDR range. // // `IN.CIDR ipIdx, prefix: ACC <- (field[FIELD] = cidr[ipIdx,prefix])` OpInCIDR // Maximum number of opcodes currently supported. 
OpMaximum )
func GetIPComparator ¶
func GetIPComparator(cmp ComparatorKind, def OpCode) OpCode
Return the relevant IP address opcode for the given comparator.
If no opcode is found, then `def` will be used instead.
func GetNumberComparator ¶
func GetNumberComparator(cmp ComparatorKind, def OpCode) OpCode
Return the relevant numeric opcode for the given comparator.
If no opcode is found, then `def` will be used instead.
func GetTimeComparator ¶
func GetTimeComparator(cmp ComparatorKind, def OpCode) OpCode
Return the relevant date/time opcode for the given comparator.
If no opcode is found, then `def` will be used instead.
type Parser ¶
type Parser interface {
// Reset the parser state.
Reset()
// Parse the list of lexed tokens and generate an AST.
Parse([]LexedToken) (ASTNode, []Diagnostic)
// Return a list of diagnostic messages.
Diagnostics() []Diagnostic
}
type Position ¶
Position within source code.
func NewPosition ¶
Create a new position with the given line and column numbers.
type PredicateKind ¶
type PredicateKind int
Predicate kind type.
const ( PredicateCMP PredicateKind = iota // Predicate is a comparator. PredicateEQS // Predicate is `EQ.S'. PredicateANY // Predicate is `ANY'. PredicateGLOB // Predicate is `GLOB'. PredicateNEQS // Predicate is `NEQ.S'. PredicatePHRASE // Predicate is `PHRASE'. PredicatePREFIX // Predicate is `PREFIX'. PredicateRANGE // Predicate is `RANGE'. PredicateREGEX // Predicate is `REGEX'. )
type Program ¶
type Program struct {
Fields []string // Field constants.
Strings []string // String constants.
Numbers []float64 // Number constants.
Times []int64 // Date/time constants.
IPs []netip.Addr // IP address constants.
Patterns []*regexp.Regexp // Regular expression constants.
Code []Instr // Bytecode.
// contains filtered or unexported fields
}
func (*Program) AddFieldConstant ¶
Add a field name constant.
If the given constant value exists, then an index to its array position is returned.
func (*Program) AddIPConstant ¶
Add an IP address constant.
If the given constant value exists, then an index to its array position is returned.
func (*Program) AddNumberConstant ¶
Add a numeric constant.
If the given constant value exists, then an index to its array position is returned.
func (*Program) AddRegexConstant ¶
Add a regular expression constant.
If the given constant value exists, then an index to its array position is returned.
func (*Program) AddStringConstant ¶
Add a string constant.
If the given constant value exists, then an index to its array position is returned.
func (*Program) AddTimeConstant ¶
Add a date/time constant.
If the given constant value exists, then an index to its array position is returned.
func (*Program) AppendIsn ¶
Append an instruction to the bytecode.
Any provided arguments will be added as the instruction's operands.
func (*Program) AppendJump ¶
Append a jump instruction to the bytecode.
The instruction's operand will be the target label ID.
func (*Program) BindLabel ¶
Append a `LABEL` instruction bound to the given label identifier.
The instruction will be removed by the label resolver.
type Simplifier ¶
func NewSimplifier ¶
func NewSimplifier() Simplifier
type Span ¶
type Span struct {
// contains filtered or unexported fields
}
Span within source code.
type Token ¶
type Token int
Lucette token type.
const ( TokenEOF Token = iota // End of file. TokenNumber // Numeric value. TokenPhrase // String phrase value. TokenField // Field name. TokenRegex // Regular expression. TokenPlus // '+' TokenMinus // '-' TokenStar // '*' TokenQuestion // '?' TokenLParen // '(' TokenLBracket // '[' TokenLCurly // '{' TokenRParen // ')' TokenRBracket // ']' TokenRCurly // '}' TokenColon // ':' TokenTilde // '~' TokenCaret // '^' TokenTo // 'TO'. TokenAnd // 'AND'/'&&'. TokenOr // 'OR'/'||'. TokenNot // 'NOT'/'!'. TokenLT // '<'. TokenLTE // '<='. TokenGT // '>' TokenGTE // '>=' TokenIllegal // Illegal token. TokenUnknown // Unknown token. )
type Typer ¶
type Typer interface {
// Generate typed IR from the given AST root node.
Type(ASTNode) (IRNode, []Diagnostic)
// Return the diagnostic messages generated during IR generation.
Diagnostics() []Diagnostic
}
Source Files
¶
- ast.go
- astand.go
- astcomparator.go
- astliteral.go
- astmodifier.go
- astnot.go
- astor.go
- astpredicate.go
- astrange.go
- constants.go
- diagnostic.go
- disassembler.go
- errors.go
- fieldtype.go
- instruction.go
- ir.go
- irand.go
- irany.go
- irfalse.go
- irglob.go
- iripcmp.go
- iriprange.go
- irnot.go
- irnumbercmp.go
- irnumberrange.go
- iror.go
- irphrase.go
- irprefix.go
- irregex.go
- irstringeq.go
- irstringneq.go
- irtimecmp.go
- irtimerange.go
- irtrue.go
- lexedtoken.go
- lexer.go
- lexer_interface.go
- lexer_lexer.go
- lexer_reader.go
- lexer_token.go
- literal.go
- nnf.go
- parser.go
- parser_interface.go
- parser_parse.go
- parser_primary.go
- parser_reader.go
- parser_reduce.go
- parser_stack.go
- position.go
- program.go
- program_peephole.go
- simplify.go
- span.go
- tokentype.go
- typer.go