suppress | Lexer(const Lexer&) |
public | Lexer(const uchar* start_, const uchar* end_, const String<char>& fileName_, const String<char>& classMapName_) |
public const Set<int, Less<int>>& | BlockCommentStates() const |
public inline const String<char>& | ClassMapName() const |
private void | ComputeLineStarts() |
public inline const uchar* | End() const |
public String<uchar> | ErrorLines(long pos) |
public const List<long>& | FarthestRuleContext() const |
public inline int | FileIndex() const |
public const String<char>& | FileName() const |
public char | GetChar(long pos) |
public int | GetClass(uchar c) const |
protected virtual int | GetCommentTokenId() const |
public Result<double> | GetDouble(long pos) |
public ErrorId | GetFarthestError() |
public Result<int> | GetInt(long pos) |
public inline KeywordMap* | GetKeywordMap() |
public long | GetKeywordToken(const Lexeme& lexeme) const |
public List<int> | GetLineStartIndices() const |
public String<uchar> | GetMatch(long pos) |
public String<char> | GetParserStateStr() const |
public inline long | GetPos() const |
public Span | GetSpan(long pos) const |
public Token | GetToken(long pos) |
public uchar | GetUChar(long pos) |
public wchar | GetWChar(long pos) |
public ParsingLog* | Log() const |
public virtual int | NextState(int state, uchar c) |
public Result<bool> | NextToken() |
public void | PopRule() |
public inline const uchar* | Pos() const |
public void | PushRule(long ruleId) |
public String<uchar> | RestOfLine(int maxLineLength) |
public void | Retract() |
public const List<long>& | RuleContext() const |
public void | SetBlockCommentStates(const Set<int, Less<int>>& blockCommentStates_) |
public void | SetClassMap(int* classMap_) |
public void | SetCommentTokenId(int commentTokenId_) |
public void | SetCountLines(bool countLines_) |
public void | SetFileIndex(int fileIndex_) |
public inline void | SetKeywordMap(KeywordMap* keywordMap_) |
public void | SetLine(int line_) |
public void | SetLog(ParsingLog* log_) |
public inline void | SetPos(long pos) |
public void | SetRuleNameMapPtr(Map<long, String<char>, Less<long>>* ruleNameMapPtr_) |
public void | SetSeparatorChar(uchar separatorChar_) |
public void | SetTokens(const List<Token>& tokens_) |
public inline const uchar* | Start() const |
public TokenLine | TokenizeLine(const String<uchar>& line, int lineNumber, int startState) |
public inline long | operator*() const |
public Result<bool> | operator++() |
suppress void | operator=(const Lexer&) |
private Set<int, Less<int>> | blockCommentStates |
private int* | classMap |
private String<char> | classMapName |
private int | commentTokenId |
private String<uchar> | content |
private bool | countLines |
private RandomAccessIter<Token, Token&, Token*> | current |
private const uchar* | end |
private long | farthestPos |
private List<long> | farthestRuleContext |
private int | fileIndex |
private String<char> | fileName |
private KeywordMap* | keywordMap |
protected Lexeme | lexeme |
protected int | line |
private List<const uchar*> | lineStarts |
private ParsingLog* | log |
private const uchar* | pos |
private List<long> | ruleContext |
private Map<long, String<char>, Less<long>>* | ruleNameMapPtr |
private uchar | separatorChar |
private const uchar* | start |
public Token | token |
private List<Token> | tokens |