suppress | Lexer(const Lexer&)
public | Lexer(const String<uchar>& content_, const String<char>& fileName_, int fileIndex_) |
public | Lexer(const uchar* start_, const uchar* end_, const String<char>& fileName_, int fileIndex_) |
public virtual | ~Lexer() |
public void | AddError(const Span& span, const String<uchar>& name) |
public nothrow const Set<int, Less<int>>& | BlockCommentStates() const |
public void | ConvertExternal(Span& span) |
public const uchar* | End() const |
public String<uchar> | ErrorLines(const Span& span) const |
public String<uchar> | ErrorLines(const Token& token) const |
public nothrow List<UniquePtr<Exception>> | Errors() |
public nothrow const List<int>& | FarthestRuleContext() const |
public const String<char>& | FileName() const |
public inline nothrow LexerFlags | Flags() const |
public char | GetChar(long pos) const |
public void | GetColumns(const Span& span, int& startCol, int& endCol) const |
protected virtual nothrow int | GetCommentTokenId() const |
public inline nothrow Span | GetCurrentSpan() const |
public double | GetDouble(long pos) const |
public String<char> | GetFarthestError() const |
public inline nothrow bool | GetFlag(LexerFlags flag) const |
public int | GetInt(long pos) const |
public KeywordMap* | GetKeywordMap() |
public int | GetKeywordToken(const Lexeme& lexeme) const |
public String<uchar> | GetMatch(const Span& span) const |
public nothrow String<char> | GetParserStateStr() const |
public long | GetPos() const |
public Span | GetSpan() const |
public Token | GetToken(long pos) const |
public uchar | GetUChar(long pos) const |
public wchar | GetWChar(long pos) const |
public ParsingLog* | Log() const |
public virtual int | NextState(int state, uchar c) |
public void | NextToken() |
public void | PopRule() |
public Span | PopSpan() |
public const uchar* | Pos() const |
public void | PushRule(int ruleId) |
public void | PushSpan() |
public inline nothrow void | ResetFlag(LexerFlags flag) |
public String<uchar> | RestOfLine(int maxLineLength) |
public void | Retract() |
public nothrow const List<int>& | RuleContext() const |
public void | SetBlockCommentStates(const Set<int, Less<int>>& blockCommentStates_) const |
public nothrow void | SetCommentTokenId(int commentTokenId_) |
public void | SetCountLines(bool countLines_) |
public inline nothrow void | SetFlag(LexerFlags flag) |
public void | SetKeywordMap(KeywordMap* keywordMap_) |
public void | SetLine(int line_) |
public void | SetLog(ParsingLog* log_) |
public void | SetPos(long pos) |
public nothrow void | SetRuleNameListPtr(List<String<char>>* ruleNameListPtr_) |
public void | SetSeparatorChar(uchar separatorChar_) |
public void | SetSpan(long pos) |
public nothrow void | SetSyncTokens(const List<int>& syncTokens_) |
public void | SetTokens(const List<Token>& tokens_) |
public const uchar* | Start() const |
public nothrow bool | Synchronize() |
public void | ThrowExpectationFailure(const Span& span, const String<uchar>& name) |
public void | ThrowFarthestError() |
public TokenLine | TokenizeLine(const String<uchar>& line, int lineNumber, int startState) |
public int | operator*() const |
public void | operator++() |
suppress void | operator=(const Lexer&) |
private Set<int, Less<int>> | blockCommentStates |
private int | commentTokenId |
private String<uchar> | content |
private bool | countLines |
private RandomAccessIter<Token, Token&, Token*> | current |
private Span | currentSpan |
private const uchar* | end |
private List<UniquePtr<Exception>> | errors |
private long | farthestPos |
private List<int> | farthestRuleContext |
private int | fileIndex |
private String<char> | fileName |
private LexerFlags | flags |
private KeywordMap* | keywordMap |
protected Lexeme | lexeme |
protected int | line |
private ParsingLog* | log |
private const uchar* | pos |
private List<int> | ruleContext |
private List<String<char>>* | ruleNameListPtr |
private uchar | separatorChar |
private Stack<Span> | spanStack |
private const uchar* | start |
private List<int> | syncTokens |
public Token | token |
private List<Token> | tokens |