using System;
using System.Collections;

namespace System.Lex
{
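    // Sentinel token identifiers: END_TOKEN (0) denotes the end of input, INVALID_TOKEN
    // an unrecognized token, and CONTINUE_TOKEN a request to continue lexing.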
    public const long CONTINUE_TOKEN = -2;
    public const long INVALID_TOKEN = -1;
    public const long END_TOKEN = 0;

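    // A single lexical token: its identifier, the matched lexeme (a begin/end range over
    // the lexer's input), and the line number of the match.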
    public class Token
    {
        public Token() : id(INVALID_TOKEN), match(), line(1)
        {
        }
        public Token(long id_) : id(id_), match(), line(1)
        {
        }
        public Token(long id_, const Lexeme& match_, int line_) : id(id_), match(match_), line(line_)
        {
        }
        // Returns the matched text as a ustring.
        public inline ustring ToString() const
        {
            return match.ToString();
        }
        // Returns the first character of the matched lexeme.
        public inline uchar Chr() const
        {
            return *match.begin;
        }
        public long id;
        public Lexeme match;
        public int line;
    }

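    // Returns true if the first token's lexeme ends exactly where the second token's lexeme
    // begins, i.e. there are no characters between the two tokens.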
    public bool NoWhiteSpaceBetweenTokens(const Token& first, const Token& second)
    {
        return first.match.end == second.match.begin;
    }

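    // Returns a human-readable name for the end-of-input token (END_TOKEN).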
    public ustring GetEndTokenInfo()
    {
        return u"end of file";
    }

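    // The tokens of a single source line, together with the states the lexer is in at the
    // start and at the end of that line.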
    public class TokenLine
    {
        public TokenLine() : tokens(), startState(0), endState(0)
        {
        }
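        // Returns the index of the token that covers the given 1-based column number,
        // or -1 if no token on this line covers that column.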
        public int TokenIndex(short columnNumber)
        {
            short col = 1;
            int index = 0;
            for (const Token& token : tokens)
            {
                short len = cast<short>(token.match.end - token.match.begin);
                if (columnNumber >= col && columnNumber < col + len)
                {
                    return index;
                }
                col = col + len;
                ++index;
            }
            return -1;
        }
        public List<Token> tokens;
        public int startState;
        public int endState;
    }
}