tokens.h (from a fork of larsjuhljensen/tagger)
#ifndef __REFLECT_TOKENS_HEADER__
#define __REFLECT_TOKENS_HEADER__

#include <list>
#include <vector>

using namespace std; // list and vector are used unqualified throughout this header

// GetMatchesParams is declared elsewhere in the tagger codebase; Tokens::add
// only reads its tokenize_characters flag, so the header that defines it must
// be included before this one.
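
// A Token records one token of the document as an inclusive byte range
// [start, stop]; length is always stop - start + 1.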
class Token {
	public:
		int start;
		int stop;
		int length;
	public:
		Token();
		Token(const Token& other);
		Token(int start, int stop);
};
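
// Tokens holds one vector<Token> per processed document: each call to add()
// tokenizes a single NUL-terminated document, starting at the given byte
// offset, and appends the resulting token vector to the list.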
class Tokens : public list< vector<Token> >
{
	public:
		void add(char* document, int offset, const GetMatchesParams& params);
};
////////////////////////////////////////////////////////////////////////////////
Token::Token()
{
	this->start = 0;
	this->stop = 0;
	this->length = 0;
}

Token::Token(const Token& other)
{
	this->start = other.start;
	this->stop = other.stop;
	this->length = other.length;
}
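
// Construct a token covering the inclusive byte range [start, stop].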
Token::Token(int start, int stop)
{
	this->start = start;
	this->stop = stop;
	this->length = stop-start+1;
}
////////////////////////////////////////////////////////////////////////////////
/* Lookup table mapping each of the 256 byte values to a tokenization class:
   0 = NUL terminator (end of document), 1 = character that is part of a token,
   2 = separator (whitespace and most symbols), 3 = '*' or '-', which form
   tokens of their own */
const unsigned char tokenize_type[256] = {
0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 3, 2, 2, 3, 2, 2,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
};
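
// Tokenize one document and append its token vector to this list. With
// params.tokenize_characters set, every byte from the offset onwards becomes
// its own single-character token; otherwise maximal runs of token characters
// (class 1) form tokens, runs of '*'/'-' (class 3) form tokens of their own,
// separators (class 2) are skipped, and scanning stops at the NUL terminator
// (class 0).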
void Tokens::add(char* document, int offset, const GetMatchesParams& params)
{
	vector<Token> tokens;
	if (params.tokenize_characters) {
		// Character mode: every byte from the offset onwards is its own token.
		for (int i = offset; document[i]; ++i) {
			tokens.push_back(Token(i, i));
		}
	}
	else {
		// Word mode: scan the document using the tokenize_type table.
		int start = -1;
		int i = offset;
		while (true) {
			unsigned char type = tokenize_type[(unsigned char)document[i]];
			if (type == 1) {
				// Token character: open a new token if none is pending.
				if (start == -1) {
					start = i;
				}
			}
			else {
				// Any non-token character closes the pending token.
				if (start != -1) {
					Token cur(start, i-1);
					tokens.push_back(cur);
					start = -1;
				}
				if (type == 3) {
					// A run of '*'/'-' characters becomes a token of its own.
					start = i;
					while (tokenize_type[(unsigned char)document[i+1]] == 3) ++i;
					Token cur(start, i);
					tokens.push_back(cur);
					start = -1;
				}
				else if (type == 0) {
					// NUL terminator: end of document.
					break;
				}
			}
			++i;
		}
	}
	this->push_back(tokens);
}
#endif
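
Below is a minimal usage sketch, not part of the original header: it stubs out
a GetMatchesParams struct with only the tokenize_characters flag that
Tokens::add reads (the real declaration lives elsewhere in the tagger
codebase) and prints the byte ranges of the tokens found in a short sample
string. The stub, the file name, and the sample text are illustrative
assumptions, not part of the tagger API.

// usage_sketch.cc -- hypothetical stand-alone driver for tokens.h
#include <cstdio>
#include <list>
#include <vector>

using namespace std;

// Assumed stub: the real GetMatchesParams is declared elsewhere in the
// tagger codebase; Tokens::add only reads this flag.
struct GetMatchesParams {
	bool tokenize_characters;
};

#include "tokens.h"

int main()
{
	char document[] = "p53 - a well-known tumor suppressor";
	GetMatchesParams params;
	params.tokenize_characters = false; // word mode: group runs of token characters
	Tokens tokens;
	tokens.add(document, 0, params);
	// Print each token as its inclusive byte range and the text it covers.
	const vector<Token>& result = tokens.front();
	for (vector<Token>::const_iterator it = result.begin(); it != result.end(); ++it) {
		printf("[%2d,%2d] %.*s\n", it->start, it->stop, it->length, document + it->start);
	}
	return 0;
}

With this input the tokens come out as "p53", "-", "a", "well", "-", "known",
"tumor" and "suppressor", since '-' is in tokenization class 3 and therefore
forms a token of its own.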