Package lex

import "cmd/asm/internal/lex"

Overview

Package lex implements lexical analysis for the assembler.

Index

func IsRegisterShift(r ScanToken) bool
type Input
    func NewInput(name string) *Input
    func (in *Input) Close()
    func (in *Input) Error(args ...interface{})
    func (in *Input) Next() ScanToken
    func (in *Input) Push(r TokenReader)
    func (in *Input) Text() string
type Macro
type ScanToken
    func (t ScanToken) String() string
type Slice
    func NewSlice(base *src.PosBase, line int, tokens []Token) *Slice
    func (s *Slice) Base() *src.PosBase
    func (s *Slice) Close()
    func (s *Slice) Col() int
    func (s *Slice) File() string
    func (s *Slice) Line() int
    func (s *Slice) Next() ScanToken
    func (s *Slice) SetBase(base *src.PosBase)
    func (s *Slice) Text() string
type Stack
    func (s *Stack) Base() *src.PosBase
    func (s *Stack) Close()
    func (s *Stack) Col() int
    func (s *Stack) File() string
    func (s *Stack) Line() int
    func (s *Stack) Next() ScanToken
    func (s *Stack) Push(tr TokenReader)
    func (s *Stack) SetBase(base *src.PosBase)
    func (s *Stack) Text() string
type Token
    func Make(token ScanToken, text string) Token
    func Tokenize(str string) []Token
    func (l Token) String() string
type TokenReader
    func NewLexer(name string) TokenReader
type Tokenizer
    func NewTokenizer(name string, r io.Reader, file *os.File) *Tokenizer
    func (t *Tokenizer) Base() *src.PosBase
    func (t *Tokenizer) Close()
    func (t *Tokenizer) Col() int
    func (t *Tokenizer) File() string
    func (t *Tokenizer) Line() int
    func (t *Tokenizer) Next() ScanToken
    func (t *Tokenizer) SetBase(base *src.PosBase)
    func (t *Tokenizer) Text() string

Package files

input.go lex.go slice.go stack.go tokenizer.go

func IsRegisterShift

func IsRegisterShift(r ScanToken) bool

IsRegisterShift reports whether the token is one of the ARM register shift operators.

type Input

Input is the main input: a stack of readers and some macro definitions. It also handles #include processing (by pushing onto the input stack) and parses and instantiates macro definitions.

type Input struct {
    Stack
    // contains filtered or unexported fields
}

func NewInput

func NewInput(name string) *Input

NewInput returns an Input from the given path.
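
NewInput does not open the file itself; the caller pushes a TokenReader for the source onto it. A minimal sketch of the usual pattern (roughly what NewLexer does, with error handling kept short), assuming the file name "prog.s" is a placeholder and the usual imports (fmt, log, os, text/scanner); as an internal package, this only builds inside the Go tool source tree:

in := lex.NewInput("prog.s")
f, err := os.Open("prog.s")
if err != nil {
    log.Fatal(err)
}
in.Push(lex.NewTokenizer("prog.s", f, f))
for tok := in.Next(); tok != scanner.EOF; tok = in.Next() {
    // The position methods report where the token just returned was found.
    fmt.Printf("%s:%d\t%s\n", in.File(), in.Line(), in.Text())
}
in.Close()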

func (*Input) Close

func (in *Input) Close()

func (*Input) Error

func (in *Input) Error(args ...interface{})

func (*Input) Next

func (in *Input) Next() ScanToken

func (*Input) Push

func (in *Input) Push(r TokenReader)

func (*Input) Text

func (in *Input) Text() string

type Macro

A Macro represents the definition of a #defined macro.

type Macro struct {
    // contains filtered or unexported fields
}

type ScanToken

A ScanToken represents an input item. It is a simple wrapping of rune, as returned by text/scanner.Scanner, plus a few extra values defined below.

type ScanToken rune
const (
    // Asm defines some two-character lexemes. We make up
    // a rune/ScanToken value for them - ugly but simple.
    LSH          ScanToken = -1000 - iota // << Left shift.
    RSH                                   // >> Logical right shift.
    ARR                                   // -> Used on ARM for shift type 3, arithmetic right shift.
    ROT                                   // @> Used on ARM for shift type 4, rotate right.
    Include                               // included file started here
    BuildComment                          // //go:build or +build comment
)
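
Most tokens are ordinary runes as delivered by text/scanner; only these multi-character lexemes get made-up negative values. A hedged sketch of how a consumer might switch on a token (tok is a hypothetical ScanToken just returned by Next):

switch tok {
case scanner.Ident:
    // an identifier such as a register or symbol name
case lex.LSH, lex.RSH, lex.ARR, lex.ROT:
    // an ARM register shift operator; IsRegisterShift(tok) is true for exactly these
case scanner.EOF:
    // end of input
default:
    // a single-character token such as '(', ')' or ','
}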

func (ScanToken) String

func (t ScanToken) String() string

type Slice

A Slice reads from a slice of Tokens.

type Slice struct {
    // contains filtered or unexported fields
}

func NewSlice

func NewSlice(base *src.PosBase, line int, tokens []Token) *Slice
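
A hedged sketch of replaying tokens from a Slice, assuming cmd/internal/src provides the position base (again, only buildable inside the tool source tree):

base := src.NewFileBase("prog.s", "prog.s")
s := lex.NewSlice(base, 10, lex.Tokenize("MOVQ $1, AX"))
for tok := s.Next(); tok != scanner.EOF; tok = s.Next() {
    fmt.Println(tok, s.Text())
}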

func (*Slice) Base

func (s *Slice) Base() *src.PosBase

func (*Slice) Close

func (s *Slice) Close()

func (*Slice) Col

func (s *Slice) Col() int

func (*Slice) File

func (s *Slice) File() string

func (*Slice) Line

func (s *Slice) Line() int

func (*Slice) Next

func (s *Slice) Next() ScanToken

func (*Slice) SetBase

func (s *Slice) SetBase(base *src.PosBase)

func (*Slice) Text

func (s *Slice) Text() string

type Stack

A Stack is a stack of TokenReaders. As the top TokenReader hits EOF, it resumes reading the next one down.

type Stack struct {
    // contains filtered or unexported fields
}
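
The zero value is usable once a reader has been pushed. A hedged sketch of the fall-through behavior, reusing the hypothetical base from the Slice example above:

var st lex.Stack
st.Push(lex.NewSlice(base, 1, lex.Tokenize("RET"))) // read second
st.Push(lex.NewSlice(base, 2, lex.Tokenize("NOP"))) // read first: top of stack
for tok := st.Next(); tok != scanner.EOF; tok = st.Next() {
    fmt.Println(st.Text()) // prints NOP, then RET
}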

func (*Stack) Base

func (s *Stack) Base() *src.PosBase

func (*Stack) Close

func (s *Stack) Close()

func (*Stack) Col

func (s *Stack) Col() int

func (*Stack) File

func (s *Stack) File() string

func (*Stack) Line

func (s *Stack) Line() int

func (*Stack) Next

func (s *Stack) Next() ScanToken

func (*Stack) Push

func (s *Stack) Push(tr TokenReader)

Push adds tr to the top (end) of the input stack. (Popping happens automatically.)

func (*Stack) SetBase

func (s *Stack) SetBase(base *src.PosBase)

func (*Stack) Text

func (s *Stack) Text() string

type Token

A Token is a scan token plus its string value. A macro is stored as a sequence of Tokens with spaces stripped.

type Token struct {
    ScanToken
    // contains filtered or unexported fields
}

func Make

func Make(token ScanToken, text string) Token

Make returns a Token with the given rune (ScanToken) and text representation.

func Tokenize

func Tokenize(str string) []Token

Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests.
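
A hedged sketch of tokenizing a -D style definition and inspecting the result; Make is shown only to illustrate building a single Token by hand:

toks := lex.Tokenize("MACRO=1")
for _, t := range toks {
    fmt.Printf("%s %q\n", t.ScanToken, t.String())
}
one := lex.Make(scanner.Int, "1") // a synthesized integer token
_ = one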

func (Token) String

func (l Token) String() string

type TokenReader

A TokenReader is like a reader, but returns lex tokens of type Token. It also can tell you what the text of the most recently returned token is, and where it was found. The underlying scanner elides all spaces except newline, so the input looks like a stream of Tokens; original spacing is lost but we don't need it.

type TokenReader interface {
    // Next returns the next token.
    Next() ScanToken
    // The following methods all refer to the most recent token returned by Next.
    // Text returns the original string representation of the token.
    Text() string
    // File reports the source file name of the token.
    File() string
    // Base reports the position base of the token.
    Base() *src.PosBase
    // SetBase sets the position base.
    SetBase(*src.PosBase)
    // Line reports the source line number of the token.
    Line() int
    // Col reports the source column number of the token.
    Col() int
    // Close does any teardown required.
    Close()
}
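
Tokenizer, Slice, Stack and Input all satisfy this interface, so a consumer can be written once against it. A hedged sketch of such a consumer (dump is a hypothetical helper):

func dump(tr lex.TokenReader) {
    defer tr.Close()
    for tok := tr.Next(); tok != scanner.EOF; tok = tr.Next() {
        fmt.Printf("%s:%d:%d\t%s\t%q\n", tr.File(), tr.Line(), tr.Col(), tok, tr.Text())
    }
}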

func NewLexer

func NewLexer(name string) TokenReader

NewLexer returns a lexer for the named file.

type Tokenizer

A Tokenizer is a simple wrapping of text/scanner.Scanner, configured for our purposes and made a TokenReader. It forms the lowest level, turning text from readers into tokens.

type Tokenizer struct {
    // contains filtered or unexported fields
}

func NewTokenizer

func NewTokenizer(name string, r io.Reader, file *os.File) *Tokenizer
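
There is no doc comment here; judging from the signature and from Tokenize, name labels positions, r supplies the text, and the *os.File (which may be nil) is what Close will close. A hedged sketch scanning an in-memory string, assuming the strings and text/scanner imports:

t := lex.NewTokenizer("buf.s", strings.NewReader("MOVW R1, R2\n"), nil)
for tok := t.Next(); tok != scanner.EOF; tok = t.Next() {
    fmt.Printf("%d: %s %q\n", t.Line(), tok, t.Text())
}
t.Close()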

func (*Tokenizer) Base

func (t *Tokenizer) Base() *src.PosBase

func (*Tokenizer) Close

func (t *Tokenizer) Close()

func (*Tokenizer) Col

func (t *Tokenizer) Col() int

func (*Tokenizer) File

func (t *Tokenizer) File() string

func (*Tokenizer) Line

func (t *Tokenizer) Line() int

func (*Tokenizer) Next

func (t *Tokenizer) Next() ScanToken

func (*Tokenizer) SetBase

func (t *Tokenizer) SetBase(base *src.PosBase)

func (*Tokenizer) Text

func (t *Tokenizer) Text() string