-
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathtokenizer.lua
More file actions
81 lines (72 loc) · 1.77 KB
/
tokenizer.lua
File metadata and controls
81 lines (72 loc) · 1.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
-- Tokenizer for C header files: extends the project's base Tokenizer with
-- C operator symbols, reserved words, C-style comments, and C number suffixes.
local string = require 'ext.string'   -- ext.string: adds patescape etc. on top of stdlib string
local assert = require 'ext.assert'   -- ext.assert: assert.le and friends (shadows global assert)
local Tokenizer = require 'parser.base.tokenizer'
local C_H_Tokenizer = Tokenizer:subclass()
--- Register the operator/punctuation symbols and the reserved words that this
-- C-header tokenizer recognizes.
-- Fix: the original symbol list contained '-' twice (inserting a duplicate
-- entry into self.symbols) and the keyword list contained 'extern' twice
-- (harmless since keywords is a set, but redundant); both duplicates removed.
function C_H_Tokenizer:initSymbolsAndKeywords()
	for w in ([[
... ( ) { } [ ] ; : , =
&& || < > <= >= != == | ^ & << >> + - * / % ! ~
]]):gmatch('%S+') do
		self.symbols:insert(w)
	end
	-- self.keywords lets the tokenizer flag if this is a reserved word or not, that's all
	for w in ([[
const enum extern
struct union
typedef
static
inline
__inline
__inline__
__attribute
__attribute__
volatile
restrict
]]):gmatch'%S+' do
		self.keywords[w] = true
	end
end
C_H_Tokenizer.singleLineComment = string.patescape'//'
--- Consume a C block comment ('/* ... */') if one starts at the reader's
-- current position.  Returns true when a comment was consumed, nil otherwise.
-- Raises (via error) if the comment is unterminated.
function C_H_Tokenizer:parseBlockComment()
	local r = self.r
	if not r:canbe'/%*' then return end
	-- NOTE(review): assumes r.index now points just past the consumed '/*' —
	-- confirm against parser.base.tokenizer's reader semantics.
	local start = r.index
	if not r:seekpast'%*/' then
		error{msg="expected closing block comment"}
	end
	-- Store the comment body: substring from after '/*' up to (but excluding)
	-- the closing '*/'.  #r.lasttoken here is presumably the length of the
	-- '*/' match that seekpast set — TODO confirm.
	r.lasttoken = r.data:sub(start, r.index - #r.lasttoken - 1)
	return true --r.lasttoken
end
--- Consume an optional C integer suffix (U, L, Z, UL, LL, ULL — case
-- insensitive) following a number literal and return `token` with the
-- consumed suffix appended; returns `token` unchanged when no suffix follows.
-- Fix: the patterns must be tried longest-first.  In the original
-- shortest-first ordering, `[Uu]` / `[Ll]` matched (and short-circuited the
-- `or` chain) before the multi-character patterns could run, so the
-- `UL`/`LL`/`ULL` branches were unreachable and only the first character of
-- such suffixes was consumed.
function C_H_Tokenizer:checkNumberSuffix(token)
	local r = self.r
	if r:canbe'[Uu][Ll][Ll]'
	or r:canbe'[Uu][Ll]'
	or r:canbe'[Ll][Ll]'
	or r:canbe'[Uu]'
	or r:canbe'[Ll]'
	or r:canbe'[Zz]'
	then
		return token .. r.lasttoken
	end
	return token
end
--- Emit a hexadecimal number token into the tokenizer coroutine.
-- The caller has already consumed the leading '0x'; it is re-attached here
-- before yielding.  Raises (via mustbe) when no hex digits follow.
function C_H_Tokenizer:parseHexNumber()
	local reader = self.r
	local digits = reader:mustbe('[%da-fA-F]+', 'malformed number')
	coroutine.yield('0x' .. self:checkNumberSuffix(digits), 'number')
end
--- Emit a decimal (possibly floating-point) number token into the tokenizer
-- coroutine.  Accepts digits with at most one decimal point, an optional
-- exponent, or an optional integer suffix.
-- Fixes: (1) C allows both 'e' and 'E' as the exponent marker; the original
-- matched only lowercase 'e'.  (2) The exponent sign is optional in C
-- ("1e5" is valid); the original's '[%+%-]%d+' required an explicit sign.
function C_H_Tokenizer:parseDecNumber()
	local r = self.r
	local token = r:canbe'[%.%d]+'
	-- at most one '.' may appear in the literal
	assert.le(#token:gsub('[^%.]',''), 1, 'malformed number')
	if r:canbe'[eE]' then
		token = token .. r.lasttoken
		token = token .. r:mustbe('[%+%-]?%d+', 'malformed number')
	else
		-- no exponent: an integer suffix (U/L/Z/UL/LL/ULL) may follow
		token = self:checkNumberSuffix(token)
	end
	coroutine.yield(token, 'number')
end
return C_H_Tokenizer