Prototype for a parser. Very experimental, doesn't do anything yet.
parent 29f763dde1
commit 1ee88ce284
(Two other file diffs in this commit are suppressed because they are too large.)
@@ -0,0 +1,127 @@
// test "recovery: invalid parameter" {
|
||||
// try testError(
|
||||
// \\fn main() void {
|
||||
// \\ a(comptime T: type)
|
||||
// \\}
|
||||
// , &[_]Error{
|
||||
// .ExpectedToken,
|
||||
// });
|
||||
// }
|
||||
|
||||
const std = @import("std");
|
||||
const mem = std.mem;
|
||||
const warn = std.debug.warn;
|
||||
const io = std.io;
|
||||
const maxInt = std.math.maxInt;
|
||||
const process = std.process;
|
||||
const fs = std.fs;
|
||||
|
||||
const ast = @import("ast.zig");
|
||||
|
||||
const own_parser = @import("./parse.zig");
|
||||
|
||||
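/// Parse `source` and print every parse error to stderr, echoing the
/// offending line with a `~` underline beneath the bad token.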
fn testParse(source: []const u8, allocator: *mem.Allocator) !void {
    const stderr = io.getStdErr().outStream();

    const tree = try own_parser.parse(allocator, source);
    defer tree.deinit();

    for (tree.errors) |*parse_error| {
        const token = tree.token_locs[parse_error.loc()];
        const loc = tree.tokenLocation(0, parse_error.loc());
        try stderr.print("(memory buffer):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
        try tree.renderError(parse_error, stderr);
        try stderr.print("\n{}\n", .{source[loc.line_start..loc.line_end]});
        {
            // Indent up to the column of the offending token.
            var i: usize = 0;
            while (i < loc.column) : (i += 1) {
                try stderr.writeAll(" ");
            }
        }
        {
            // Underline the token with one '~' per byte.
            const caret_count = token.end - token.start;
            var i: usize = 0;
            while (i < caret_count) : (i += 1) {
                try stderr.writeAll("~");
            }
        }
        try stderr.writeAll("\n");
    }
    if (tree.errors.len != 0) {
        return error.ParseError;
    }
}

const Error = @TagType(ast.Error);

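/// Parse `source` and check that the parser reports exactly the
/// expected sequence of errors.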
fn testError(source: []const u8, expected_errors: []const Error) !void {
    const tree = try own_parser.parse(std.testing.allocator, source);
    defer tree.deinit();

    std.testing.expect(tree.errors.len == expected_errors.len);
    for (expected_errors) |expected, i| {
        std.testing.expect(expected == tree.errors[i]);
    }
}


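/// Return the argument at `idx` and advance the index, or null once the
/// arguments are exhausted.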
fn nextArg(args: [][]const u8, idx: *usize) ?[]const u8 {
    if (idx.* >= args.len) return null;
    defer idx.* += 1;
    return args[idx.*];
}

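/// Stat `path` and return its size in bytes.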
pub fn get_file_size(path: []const u8) !u64 {
    var file = try fs.cwd().openFile(path, .{});
    defer file.close();

    // Stat the file to find its size; the caller allocates a buffer of this size.
    var file_stat = try file.stat();
    return file_stat.size;
}

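/// Read the file named on the command line and run it through the parser.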
pub fn parser_analyze() !void {
    // 1. get an allocator.
    // 2. get the file path.
    // 3. get the file size, and allocate file_size+1 bytes.
    // 4. get the content of the file.
    // 5. perform the analysis, and print each element.

    // Create an allocator for the arguments and the file.
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();

    const allocator = &arena.allocator;
    var args = try process.argsAlloc(allocator);
    defer process.argsFree(allocator, args);

    // Get the file path.
    var arg_idx: usize = 1; // Skip the executable binary name.
    const gui_file_path = nextArg(args, &arg_idx) orelse {
        warn("Expected first argument to be path to gui file\n", .{});
        return error.InvalidArgs;
    };

    // Get the file size.
    const file_size = try get_file_size(gui_file_path);

    // Allocate file_size + 1 bytes for the content.
    const buffer = try allocator.alloc(u8, file_size + 1); // Last byte will be a null byte.
    buffer[file_size] = 0;
    const content = try fs.cwd().readFile(gui_file_path, buffer);
    // print("file content is: {}", .{content}); // Working.

    // Tokenize the content and print each token (disabled debugging code).
    // const tokens = try getAllTokens(allocator, content);
    // for (tokens.items) |token| {
    //     print("{s:20} => {}\n", .{ @tagName(token.id), buffer[token.loc.start..token.loc.end] });
    // }

    try testParse(content, allocator);
}


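/// Entry point: expects the path of the file to parse as the first
/// command-line argument (e.g. `./parser app.gui`; binary and file names
/// here are illustrative).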
pub fn main() !void {
    try parser_analyze();
}

@@ -182,7 +182,6 @@ pub const Token = struct {
            .Tilde => "~",

            .Keyword_property => "property",
            .Keyword_and => "and",
            .Keyword_false => "false",
            .Keyword_null => "null",
            .Keyword_true => "true",
@@ -1629,12 +1628,12 @@ test "tokenizer - comments with literal tab" {
    });
}

//test "tokenizer - pipe and then invalid" {
//    testTokenize("||=", &[_]Token.Id{
//        .PipePipe,
//        .Equal,
//    });
//}
test "tokenizer - pipe and then invalid" {
    testTokenize("||=", &[_]Token.Id{
        .PipePipe,
        .Equal,
    });
}

//test "tokenizer - line comment and doc comment" {
//    testTokenize("//", &[_]Token.Id{.LineComment});