Compilable again.

mess
Karchnu 2020-12-23 02:16:10 +01:00
parent 45b0f212a8
commit c3c418c80c
3 changed files with 34 additions and 44 deletions

@@ -97,7 +97,7 @@ fn print_node(node: Node) void {
     print_properties(node.properties);
 }
-fn print_tree(tree: Tree) void {
+pub fn print_tree(tree: Tree) void {
     say("\ntree.definitions:\n");
     var it = tree.definitions.iterator();
     while(it.next()) |kv| {
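
The only change in this first file is the added `pub`, which makes print_tree callable from other files. A minimal sketch of the kind of call site this enables; the file name "./tree.zig" and the dumpTree helper are hypothetical, only the `pub fn print_tree(tree: Tree) void` signature comes from the hunk above:

    // Hypothetical call site: "./tree.zig" and dumpTree are made-up names,
    // only the now-public print_tree(tree: Tree) signature is taken from the diff.
    const cs = @import("./tree.zig");

    fn dumpTree(tree: cs.Tree) void {
        // Before this commit, print_tree was file-private and this
        // cross-file call would not compile.
        cs.print_tree(tree);
    }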

@@ -83,7 +83,7 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!cs.Tree {
     // Create a parsing Tree, with the nodes parsed early on.
     // toOwnedSlice: free the memory and return the list. Arrays are empty,
     // allocator can be free, arrays are owned by a different allocator.
-    const tree = try parser.parseTree();
+    var tree = try parser.parseTree();
     return tree;
 }
@@ -106,23 +106,25 @@ const Parser = struct {
     // parseTree: create a cs.Tree with all its content.
     fn parseTree(p: *Parser) Allocator.Error!cs.Tree {
+        // Create a tree.
+        var tree = try cs.Tree.create(&p.arena.allocator);
         // Parse the content.
-        const decls = try parseTopLevel(p, true);
-        defer p.gpa.free(decls);
+        try parseTopLevel(p, &tree);
         // parseTopLevel will try to skip as much
         // invalid tokens as it can so this can only be the EOF
         // eatToken returns next token or null (if current token id isn't parameter).
        // If current token is .Eof, next token is actually the first.
         const eof_token = p.eatToken(.Eof).?;
-        const tree = try cs.Tree.create(&p.arena.allocator, decls_len, eof_token);
         return tree;
     }

     // parseTopLevel: actual parsing code starts here.
-    fn parseTopLevel(p: *Parser, top_level: bool) ![]*Node {
-        // std.debug.print("parseTopLevel: is top? {}\n", .{top_level});
+    fn parseTopLevel(p: *Parser, tree: *cs.Tree) !void {
         // list: all nodes in the ast.
         var list = std.ArrayList(*Node).init(p.gpa);
         defer list.deinit();
@@ -178,8 +180,6 @@ const Parser = struct {
                },
            }
        }
-        return list.toOwnedSlice();
    }

    fn say(p: *Parser, comptime fmt: []const u8, args: anytype) void {
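
Reading only the `+` side of these hunks, the parser now creates the Tree first and fills it in place through a pointer, instead of returning a gpa-owned slice of declarations that parse() had to repackage. A rough sketch of how parseTree reads after this commit; only the lines visible in the hunks above are certain, everything around them (the Parser fields, Tree.create, eatToken) is taken on trust from the diff:

    // Rough reconstruction of the new-side code shown in the hunks above.
    fn parseTree(p: *Parser) Allocator.Error!cs.Tree {
        // Create the tree up front, owned by the parser's arena.
        var tree = try cs.Tree.create(&p.arena.allocator);

        // Parse the content directly into the tree instead of returning
        // a gpa-owned slice of declarations.
        try parseTopLevel(p, &tree);

        // parseTopLevel skips invalid tokens, so the next token can only be EOF;
        // it is still consumed here even though Tree.create no longer takes it.
        const eof_token = p.eatToken(.Eof).?;

        return tree;
    }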

@@ -1,13 +1,3 @@
-// test "recovery: invalid parameter" {
-//     try testError(
-//         \\fn main() void {
-//         \\    a(comptime T: type)
-//         \\}
-//     , &[_]Error{
-//         .ExpectedToken,
-//     });
-// }
-
 const std = @import("std");
 const mem = std.mem;
 const warn = std.debug.warn;
@@ -23,34 +13,34 @@ const own_parser = @import("./parse.zig");
 fn testParse(source: []const u8, allocator: *mem.Allocator) !void {
     const stderr = io.getStdErr().outStream();
-    const tree = try own_parser.parse(allocator, source);
+    var tree = try own_parser.parse(allocator, source);
     defer tree.deinit();
-    for (tree.errors) |*parse_error| {
-        const token = tree.token_locs[parse_error.loc()];
-        const loc = tree.tokenLocation(0, parse_error.loc());
-        try stderr.print("(memory buffer):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
-        try tree.renderError(parse_error, stderr);
-        try stderr.print("\n{}\n", .{source[loc.line_start..loc.line_end]});
-        {
-            var i: usize = 0;
-            while (i < loc.column) : (i += 1) {
-                try stderr.writeAll(" ");
-            }
-        }
-        {
-            const caret_count = token.end - token.start;
-            var i: usize = 0;
-            while (i < caret_count) : (i += 1) {
-                try stderr.writeAll("~");
-            }
-        }
-        try stderr.writeAll("\n");
-    }
-    if (tree.errors.len != 0) {
-        return error.ParseError;
-    }
+    // for (tree.errors) |*parse_error| {
+    //     const token = tree.token_locs[parse_error.loc()];
+    //     const loc = tree.tokenLocation(0, parse_error.loc());
+    //     try stderr.print("(memory buffer):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
+    //     try tree.renderError(parse_error, stderr);
+    //     try stderr.print("\n{}\n", .{source[loc.line_start..loc.line_end]});
+    //     {
+    //         var i: usize = 0;
+    //         while (i < loc.column) : (i += 1) {
+    //             try stderr.writeAll(" ");
+    //         }
+    //     }
+    //     {
+    //         const caret_count = token.end - token.start;
+    //         var i: usize = 0;
+    //         while (i < caret_count) : (i += 1) {
+    //             try stderr.writeAll("~");
+    //         }
+    //     }
+    //     try stderr.writeAll("\n");
+    // }
+    // if (tree.errors.len != 0) {
+    //     return error.ParseError;
+    // }
 }

 const Error = @TagType(ast.Error);
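
With the error-reporting loop commented out, testParse now only checks that parsing succeeds and that the returned tree deinitializes cleanly. A minimal usage sketch, assuming it sits in the same file as testParse above; the test name and the empty source string are placeholders, only the testParse signature comes from the diff:

    // Placeholder smoke test; any source accepted by own_parser.parse would do.
    test "parse: smoke test" {
        try testParse("", std.testing.allocator);
    }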