class Tartrazine::Tokenizer

Included Modules

Defined in:

lexer.cr

Constructors

Instance Method Summary

Constructor Detail

def self.new(lexer : BaseLexer, text : String, secondary = false) #

[View source]

Instance Method Detail

def lexer : BaseLexer #

[View source]
def lexer=(lexer : BaseLexer) #

[View source]
def next : Iterator::Stop | Token #
Description copied from module Iterator({type: String, value: String})

Returns the next element in this iterator, or Iterator::Stop::INSTANCE if there are no more elements.


[View source]
def pos : Int32 #

[View source]
def pos=(pos : Int32) #

[View source]
def split_tokens(tokens : Array(Token)) : Array(Token) #

If a token contains a newline, it is split into two tokens at the newline boundary.


[View source]
def state_stack : Array(String) #

[View source]
def state_stack=(state_stack : Array(String)) #

[View source]
def text : Bytes #

[View source]
def text=(text : Bytes) #

[View source]