Grooming, reading source info, parsing Class WIP.

parent a93c718e1f
commit 0d9130ec89

src/parse.zig · 3166
--- a/src/parse.zig
+++ b/src/parse.zig
@@ -12,6 +12,7 @@ pub const Token = struct {
     pub const keywords = std.ComptimeStringMap(Id, .{
         .{ "property", .Keyword_property },
+        .{ "define", .Keyword_define },
         .{ "require", .Keyword_require },
         .{ "false", .Keyword_false },
         .{ "null", .Keyword_null },
@@ -100,6 +101,7 @@ pub const Token = struct {
         ShebangLine,
 
         Keyword_property,
+        Keyword_define,
         Keyword_require,
         Keyword_false,
         Keyword_null,
@@ -184,6 +186,7 @@ pub const Token = struct {
             .Tilde => "~",
 
             .Keyword_property => "property",
+            .Keyword_define => "define",
             .Keyword_require => "require",
             .Keyword_false => "false",
             .Keyword_null => "null",
@@ -565,7 +568,9 @@ pub const Tokenizer = struct {
                 },
 
                 .identifier => switch (c) {
-                    'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
+                    // Include "-" as an identifier character; this is the
+                    // only difference from the Zig tokenizer.
+                    'a'...'z', 'A'...'Z', '_', '-', '0'...'9' => {},
                    else => {
                        if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |id| {
                            result.id = id;
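
Taken together, the hunks register "define" as a keyword and accept "-" inside identifier tokens, so a hyphenated name like foo-bar scans as a single identifier. Below is a minimal, self-contained sketch of that behavior, assuming a pre-0.12 Zig toolchain where std.ComptimeStringMap still exists (later releases renamed it std.StaticStringMap); the reduced Id enum and the helpers isIdentChar/classify are illustrative stand-ins, not declarations from src/parse.zig.

const std = @import("std");

// Reduced stand-in for Token.Id: only the variants this commit touches.
const Id = enum {
    Keyword_property,
    Keyword_define,
    Keyword_require,
    Identifier,
};

// Same pattern as the keywords map in the diff above.
const keywords = std.ComptimeStringMap(Id, .{
    .{ "property", .Keyword_property },
    .{ "define", .Keyword_define },
    .{ "require", .Keyword_require },
});

// The new identifier character class from the last hunk: '-' is accepted.
fn isIdentChar(c: u8) bool {
    return switch (c) {
        'a'...'z', 'A'...'Z', '_', '-', '0'...'9' => true,
        else => false,
    };
}

// Mirrors the Token.getKeyword fallback at the end of the identifier state:
// a keyword match wins, otherwise the slice is an ordinary identifier.
fn classify(ident: []const u8) Id {
    return keywords.get(ident) orelse .Identifier;
}

pub fn main() void {
    std.debug.assert(classify("define") == .Keyword_define);
    // '-' no longer ends the identifier state, so "foo-bar" is one token.
    std.debug.assert(isIdentChar('-'));
    std.debug.assert(classify("foo-bar") == .Identifier);
}

Note the ordering this implies: the keyword map is consulted first when the identifier state ends, so after this commit "define" can no longer be used as an ordinary identifier.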