const std = @import("std");
const mem = std.mem;
const stdout = std.io.getStdOut().writer();
const process = std.process;
const fs = std.fs;
const warn = std.debug.warn;
const print = std.debug.print;

const lexer = @import("./tokenizer.zig");
fn nextArg(args: [][]const u8, idx: *usize) ?[]const u8 {
    if (idx.* >= args.len) return null;
    defer idx.* += 1;
    return args[idx.*];
}
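
// A minimal sketch of how `nextArg` behaves, as a test block: it returns the
// argument at the current index and only advances the index afterwards (via
// the `defer`). The argument values here are made up for illustration.
test "nextArg walks the argument list and returns null at the end" {
    var argv = [_][]const u8{ "binary", "file.gui" };
    var idx: usize = 0;
    std.debug.assert(mem.eql(u8, nextArg(&argv, &idx).?, "binary"));
    std.debug.assert(mem.eql(u8, nextArg(&argv, &idx).?, "file.gui"));
    std.debug.assert(nextArg(&argv, &idx) == null);
}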

// Get all tokens from the input.
fn getAllTokens(allocator: *mem.Allocator, source: []const u8) !std.ArrayList(lexer.Token) {
    // Initialize the tokenizer with the source code we want to analyze.
    var tokenizer = lexer.Tokenizer.init(source);
    var list = std.ArrayList(lexer.Token).init(allocator);

    while (true) {
        const token = tokenizer.next();
        try list.append(token);
        if (token.id == .Eof) break;
    }

    return list;
}
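
// A minimal sketch of `getAllTokens` on an in-memory string, as a test block.
// It assumes the tokenizer in tokenizer.zig terminates every stream with an
// `.Eof` token, even for empty input (the loop above relies on the same).
test "getAllTokens terminates the list with an Eof token" {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const tokens = try getAllTokens(&arena.allocator, "");
    std.debug.assert(tokens.items.len >= 1);
    std.debug.assert(tokens.items[tokens.items.len - 1].id == .Eof);
}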

pub fn get_file_size(path: []const u8) !u64 {
    var file = try fs.cwd().openFile(path, .{});
    defer file.close();

    // Stat the file to find its size in bytes.
    var file_stat = try file.stat();
    return file_stat.size;
}
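
// A minimal sketch of `get_file_size`, as a test block. It assumes the test
// runner may create and delete a small scratch file (hypothetical name) in
// the current working directory.
test "get_file_size returns the byte count of a file" {
    const scratch = "get_file_size_test.tmp"; // Hypothetical scratch file name.
    try fs.cwd().writeFile(scratch, "hello");
    defer fs.cwd().deleteFile(scratch) catch {};
    std.debug.assert((try get_file_size(scratch)) == 5);
}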

pub fn lexer_analyze() !void {
    // 1. Get an allocator.
    // 2. Get the file path.
    // 3. Get the file size, and allocate file_size+1 bytes.
    // 4. Get the content of the file.
    // 5. Perform the analysis, and print each token.

    // Create an allocator for the arguments and the file.
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();

    const allocator = &arena.allocator;
    var args = try process.argsAlloc(allocator);
    defer process.argsFree(allocator, args);

    // Get the file path.
    var arg_idx: usize = 1; // Skip the executable binary name.
    const gui_file_path = nextArg(args, &arg_idx) orelse {
        warn("Expected first argument to be path to gui file\n", .{});
        return error.InvalidArgs;
    };

    // Get the file size.
    const file_size = try get_file_size(gui_file_path);

    // Allocate memory for the content.
    const buffer = try allocator.alloc(u8, file_size + 1); // Last byte will be a null-byte.
    buffer[file_size] = 0;

    // Read the content of the file.
    const content = try fs.cwd().readFile(gui_file_path, buffer);
    // print("file content is: {}", .{content}); // Working.

    // Tokenize the content and print each token with its matching source slice.
    const tokens = try getAllTokens(allocator, content);
    for (tokens.items) |token| {
        print("{s:20} => {}\n", .{ @tagName(token.id), buffer[token.loc.start..token.loc.end] });
    }
}

pub fn main() !void {
    try lexer_analyze();
}