Better display.
parent c25151fde7
commit 29f763dde1
@@ -19,15 +19,15 @@ fn nextArg(args: [][]const u8, idx: *usize) ?[]const u8 {
 
 
 // Get all tokens from the input.
-fn getAllTokens(allocator: *mem.Allocator, source: []const u8) !std.ArrayList(lexer.Token.Id) {
+fn getAllTokens(allocator: *mem.Allocator, source: []const u8) !std.ArrayList(lexer.Token) {
 
     // Getting the tokenizer, initialized with the source code we want to check.
     var tokenizer = lexer.Tokenizer.init(source);
-    var list = std.ArrayList(lexer.Token.Id).init(allocator);
+    var list = std.ArrayList(lexer.Token).init(allocator);
 
     while(true) {
         const token = tokenizer.next();
-        try list.append(token.id);
+        try list.append(token);
         if(token.id == .Eof) break;
     }
 
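The type change is the heart of the commit: getAllTokens now collects whole lexer.Token values instead of bare lexer.Token.Id tags, so each token's source location survives lexing. Below is a minimal, self-contained sketch of the same pattern; the Token shape, the toy space-splitting Tokenizer, and the Word tag are stand-ins for the project's lexer module (only .Eof and the loop structure come from the patch itself), and it targets the managed std.ArrayList API of Zig 0.9–0.14.

const std = @import("std");

// Hypothetical token shape: the tag (`id`) plus the byte range (`loc`)
// the token covers in the source. The real lexer.Token is assumed similar.
const Token = struct {
    id: Id,
    loc: Loc,

    pub const Id = enum { Word, Eof };
    pub const Loc = struct { start: usize, end: usize };
};

// Toy tokenizer with the same next()-until-Eof shape as lexer.Tokenizer:
// it simply splits the source on spaces.
const Tokenizer = struct {
    source: []const u8,
    index: usize = 0,

    fn next(self: *Tokenizer) Token {
        while (self.index < self.source.len and self.source[self.index] == ' ') : (self.index += 1) {}
        const start = self.index;
        if (start >= self.source.len) return .{ .id = .Eof, .loc = .{ .start = start, .end = start } };
        while (self.index < self.source.len and self.source[self.index] != ' ') : (self.index += 1) {}
        return .{ .id = .Word, .loc = .{ .start = start, .end = self.index } };
    }
};

// Same loop as the patched getAllTokens: append the whole token, not just
// token.id, so callers can still slice the source via token.loc afterwards.
fn getAllTokens(allocator: std.mem.Allocator, source: []const u8) !std.ArrayList(Token) {
    var tokenizer = Tokenizer{ .source = source };
    var list = std.ArrayList(Token).init(allocator);
    while (true) {
        const token = tokenizer.next();
        try list.append(token);
        if (token.id == .Eof) break;
    }
    return list;
}

test "tokens keep their source locations" {
    var list = try getAllTokens(std.testing.allocator, "two words");
    defer list.deinit();
    try std.testing.expectEqual(@as(usize, 3), list.items.len); // Word, Word, Eof
}

Storing the full token costs a little more memory per entry, but it spares the caller from re-lexing whenever it needs a token's text later.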
@@ -76,8 +76,8 @@ pub fn lexer_analyze() !void {
 
     // Get the file size and allocate memory.
     const tokens = try getAllTokens(allocator, content);
-    for(tokens.items) |tokenid| {
-        print("token: {}\n", .{@tagName(tokenid)});
+    for(tokens.items) |token| {
+        print("{s:20} => {}\n", .{@tagName(token.id), buffer[token.loc.start..token.loc.end]});
     }
 }
 
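With locations preserved, the display loop can show each token's tag next to the exact source text it covers; the patch slices buffer, presumably the file contents read earlier in lexer_analyze. Here is a runnable sketch of that output format, with hand-built tokens in place of real lexer output; the tag names and the sample source are invented for illustration, only .Eof comes from the patch.

const std = @import("std");

// Hypothetical token shape matching what the patch implies: a tag plus the
// byte range the token covers in the source buffer.
const Token = struct {
    id: enum { Identifier, Equal, IntegerLiteral, Eof },
    loc: struct { start: usize, end: usize },
};

pub fn main() void {
    const buffer = "answer = 42";
    // Hand-built tokens standing in for the lexer's output over `buffer`.
    const tokens = [_]Token{
        .{ .id = .Identifier, .loc = .{ .start = 0, .end = 6 } },
        .{ .id = .Equal, .loc = .{ .start = 7, .end = 8 } },
        .{ .id = .IntegerLiteral, .loc = .{ .start = 9, .end = 11 } },
        .{ .id = .Eof, .loc = .{ .start = 11, .end = 11 } },
    };
    // Tag name padded to 20 columns, then the exact source text, e.g.:
    //   Identifier           => answer
    for (tokens) |token| {
        std.debug.print("{s:<20} => {s}\n", .{ @tagName(token.id), buffer[token.loc.start..token.loc.end] });
    }
}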