Messing around.

parent: bbd2e67cb8
commit: 774fff73a4
@@ -1,3 +1,5 @@
# This is a comment.
Object {
    property string thing: "i has the thing"
@@ -1,12 +1,12 @@
 Rectangle {
-    color: "red"
-    width: 300
-    height: 200
+    property String color: "red"
+    property Int width: 300
+    property Int height: 200
 
     Rectangle {
-        color: "blue"
-        width: 100
-        height: 100
+        property String color: "blue"
+        property Int width: 100
+        property Int height: 100
 
         anchors {
             top: 50
@@ -1,7 +1,7 @@
Rectangle {
    id: clickable
    color: "blue"
    width: 300
    height: 300
    onClick: emit ["hello, there", "i has events"]
}
src/parse.zig (331 changed lines)
@@ -141,10 +141,33 @@ const Parser = struct {
            err,
        } = .none;

        /// Start => Requires Thing => End
        /// Requires => require StringLiteral Requires | nil
        /// Thing => Definition Thing | ClassHdr Thing | nil
        ///
        /// Definition => define Identifier ClassHdrSimple
        ///
        /// ClassHdr => ClassHdrFull | ClassHdrSimple
        /// ClassHdrSimple => Identifier LBrace ClassCon RBrace
        /// ClassHdrFull => Identifier LParen Identifier RParen LBrace ClassCon RBrace
        /// ClassCon => ClassHdr | statement ClassCon | nil
        ///
        /// statement => Keyword_property Identifier Identifier Colon value
        /// value => StringLiteral | NullLiteral | IntegerLiteral | FloatLiteral

        // True start of parsing.
        while (true) {
            const token = p.nextToken();
            switch (p.token_ids[token]) {
                .Keyword_require => {
                    p.putBackToken(token);
                    // TODO: read require
                    p.parseRequire();
                },
                .Identifier => {
                    p.putBackToken(token);
                    p.parseClass();
                },
                .Eof => {
                    p.putBackToken(token);
                    break;
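Read together, these productions describe a document of optional require lines followed by nested class bodies. As an illustration (the class and property names below are made up, not taken from the commit), the following input is derivable from Start: the first line matches Requires, `Button (Rectangle)` matches ClassHdrFull, and each property line matches statement:

    require "base"

    Rectangle {
        property Int width: 300

        Button (Rectangle) {
            property String label: "ok"
        }
    }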
@@ -156,110 +179,121 @@ const Parser = struct {
            }
        }

        // // Documentation comments. Ignored.
        // if (try p.parseContainerDocComments()) |node| {
        //     std.debug.print("found: Doc Comments: {}\n", .{node});
        //     try list.append(node);
        //     continue;
        // }
        //
        // const doc_comments = try p.parseDocComment();
        //
        // if (p.parseContainerField() catch |err| switch (err) {
        //     error.OutOfMemory => return error.OutOfMemory,
        //     error.ParseError => {
        //         // attempt to recover
        //         p.findNextContainerMember();
        //         continue;
        //     },
        // }) |node| {
        //     std.debug.print("found: ContainerField: {}\n", .{node});
        //     switch (field_state) {
        //         .none => field_state = .seen,
        //         .err, .seen => {},
        //         .end => |tok| {
        //             try p.errors.append(p.gpa, .{
        //                 .DeclBetweenFields = .{ .token = tok },
        //             });
        //             // continue parsing, error will be reported later
        //             field_state = .err;
        //         },
        //     }
        //
        //     const field = node.cast(Node.ContainerField).?;
        //     field.doc_comments = doc_comments;
        //     try list.append(node);
        //     const comma = p.eatToken(.Comma) orelse {
        //         // try to continue parsing
        //         const index = p.tok_i;
        //         p.findNextContainerMember();
        //         const next = p.token_ids[p.tok_i];
        //         switch (next) {
        //             .Eof => {
        //                 // no invalid tokens were found
        //                 if (index == p.tok_i) break;
        //
        //                 // Invalid tokens, add error and exit
        //                 try p.errors.append(p.gpa, .{
        //                     .ExpectedToken = .{ .token = index, .expected_id = .Comma },
        //                 });
        //                 break;
        //             },
        //             else => {
        //                 if (next == .RBrace) {
        //                     if (!top_level) break;
        //                     _ = p.nextToken();
        //                 }
        //
        //                 // add error and continue
        //                 try p.errors.append(p.gpa, .{
        //                     .ExpectedToken = .{ .token = index, .expected_id = .Comma },
        //                 });
        //                 continue;
        //             },
        //         }
        //     };
        //     if (try p.parseAppendedDocComment(comma)) |appended_comment|
        //         field.doc_comments = appended_comment;
        //     continue;
        // }
        //
        // // Dangling doc comment
        // if (doc_comments != null) {
        //     try p.errors.append(p.gpa, .{
        //         .UnattachedDocComment = .{ .token = doc_comments.?.firstToken() },
        //     });
        // }
        //
        // const next = p.token_ids[p.tok_i];
        // switch (next) {
        //     .Eof => break,
        //     .Keyword_comptime => {
        //         _ = p.nextToken();
        //         try p.errors.append(p.gpa, .{
        //             .ExpectedBlockOrField = .{ .token = p.tok_i },
        //         });
        //     },
        //     else => {
        //         const index = p.tok_i;
        //         if (next == .RBrace) {
        //             if (!top_level) break;
        //             _ = p.nextToken();
        //         }
        //
        //         // this was likely not supposed to end yet,
        //         // try to find the next declaration
        //         p.findNextContainerMember();
        //         try p.errors.append(p.gpa, .{
        //             .ExpectedContainerMembers = .{ .token = index },
        //         });
        //     },
        // }
        // }

        return list.toOwnedSlice();
    }

    // TODO: require "file"
    // file should be read, parsed and a loop detection should take place.
    fn parseRequire(p: *Parser) !void {
        const require_token = p.eatToken(.Keyword_require);
        const file_to_read = p.eatToken(.StringLiteral);
        std.debug.print("TODO: file required: {}\n", .{file_to_read});
    }

    // TODO: class
    // file should be read, parsed and a loop detection should take place.
    // NOTE: body is still a copy of the parseRequire stub above.
    fn parseClass(p: *Parser) !void {
        const require_token = p.eatToken(.Keyword_require);
        const file_to_read = p.eatToken(.StringLiteral);
        std.debug.print("TODO: file required: {}\n", .{file_to_read});
    }
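Both stubs only consume their leading tokens for now; the TODO asks for the required file to be read, parsed, and checked for require loops. A minimal sketch of the loop check, assuming a hypothetical required_files string set on the parser and a hypothetical loadSource helper (neither exists in this commit):

    fn requireFile(p: *Parser, path: []const u8) !void {
        // Seeing the same path twice means `require` cycles back on itself.
        const entry = try p.required_files.getOrPut(path);
        if (entry.found_existing) return error.RequireLoop;

        // Hypothetical helper: read the file into an allocated buffer.
        const source = try loadSource(p.gpa, path);
        defer p.gpa.free(source);

        // The required source would then be tokenized and parsed with the
        // same machinery, merging its declarations into the current tree.
    }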

    /// Statement
    /// <- KEYWORD_comptime? VarDecl
    ///  / KEYWORD_comptime BlockExprStatement
    ///  / KEYWORD_nosuspend BlockExprStatement
    ///  / KEYWORD_suspend (SEMICOLON / BlockExprStatement)
    ///  / KEYWORD_defer BlockExprStatement
    ///  / KEYWORD_errdefer Payload? BlockExprStatement
    ///  / IfStatement
    ///  / LabeledStatement
    ///  / SwitchExpr
    ///  / AssignExpr SEMICOLON
    fn parseStatement(p: *Parser) Error!?*Node {
        const comptime_token = p.eatToken(.Keyword_comptime);

        if (try p.parseVarDecl(.{
            .comptime_token = comptime_token,
        })) |node| {
            return node;
        }

        if (comptime_token) |token| {
            const block_expr = try p.expectNode(parseBlockExprStatement, .{
                .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
            });

            const node = try p.arena.allocator.create(Node.Comptime);
            node.* = .{
                .doc_comments = null,
                .comptime_token = token,
                .expr = block_expr,
            };
            return &node.base;
        }

        if (p.eatToken(.Keyword_nosuspend)) |nosuspend_token| {
            const block_expr = try p.expectNode(parseBlockExprStatement, .{
                .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
            });

            const node = try p.arena.allocator.create(Node.Nosuspend);
            node.* = .{
                .nosuspend_token = nosuspend_token,
                .expr = block_expr,
            };
            return &node.base;
        }

        if (p.eatToken(.Keyword_suspend)) |suspend_token| {
            const semicolon = p.eatToken(.Semicolon);

            const body_node = if (semicolon == null) blk: {
                break :blk try p.expectNode(parseBlockExprStatement, .{
                    .ExpectedBlockOrExpression = .{ .token = p.tok_i },
                });
            } else null;

            const node = try p.arena.allocator.create(Node.Suspend);
            node.* = .{
                .suspend_token = suspend_token,
                .body = body_node,
            };
            return &node.base;
        }

        const defer_token = p.eatToken(.Keyword_defer) orelse p.eatToken(.Keyword_errdefer);
        if (defer_token) |token| {
            const payload = if (p.token_ids[token] == .Keyword_errdefer)
                try p.parsePayload()
            else
                null;
            const expr_node = try p.expectNode(parseBlockExprStatement, .{
                .ExpectedBlockOrExpression = .{ .token = p.tok_i },
            });
            const node = try p.arena.allocator.create(Node.Defer);
            node.* = .{
                .defer_token = token,
                .expr = expr_node,
                .payload = payload,
            };
            return &node.base;
        }

        if (try p.parseIfStatement()) |node| return node;
        if (try p.parseLabeledStatement()) |node| return node;
        if (try p.parseSwitchExpr()) |node| return node;
        if (try p.parseAssignExpr()) |node| {
            _ = try p.expectTokenRecoverable(.Semicolon);
            return node;
        }

        return null;
    }
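Every branch above follows the same speculative pattern: eatToken consumes the next token only when it has the expected id, and putBackToken rewinds when a lookahead turns out to be wrong. Since the tokens live in a flat array indexed by tok_i, both helpers can be tiny; a simplified sketch matching the fields used above (the real versions likely also skip comment tokens):

    fn eatToken(p: *Parser, id: Token.Id) ?usize {
        // Consume the next token only if it matches; otherwise leave it.
        return if (p.token_ids[p.tok_i] == id) p.nextToken() else null;
    }

    fn nextToken(p: *Parser) usize {
        const result = p.tok_i;
        p.tok_i += 1;
        return result;
    }

    fn putBackToken(p: *Parser, tok: usize) void {
        // "Putting back" is just moving the cursor back to that index.
        p.tok_i = tok;
    }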

    // /// Attempts to find next container member by searching for certain tokens
    // fn findNextContainerMember(p: *Parser) void {
    //     var level: u32 = 0;
@@ -562,100 +596,7 @@ const Parser = struct {
    // };
    // return &node.base;
    // }
    //
    // /// Statement
    // /// <- KEYWORD_comptime? VarDecl
    // ///  / KEYWORD_comptime BlockExprStatement
    // ///  / KEYWORD_nosuspend BlockExprStatement
    // ///  / KEYWORD_suspend (SEMICOLON / BlockExprStatement)
    // ///  / KEYWORD_defer BlockExprStatement
    // ///  / KEYWORD_errdefer Payload? BlockExprStatement
    // ///  / IfStatement
    // ///  / LabeledStatement
    // ///  / SwitchExpr
    // ///  / AssignExpr SEMICOLON
    // fn parseStatement(p: *Parser) Error!?*Node {
    //     const comptime_token = p.eatToken(.Keyword_comptime);
    //
    //     if (try p.parseVarDecl(.{
    //         .comptime_token = comptime_token,
    //     })) |node| {
    //         return node;
    //     }
    //
    //     if (comptime_token) |token| {
    //         const block_expr = try p.expectNode(parseBlockExprStatement, .{
    //             .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
    //         });
    //
    //         const node = try p.arena.allocator.create(Node.Comptime);
    //         node.* = .{
    //             .doc_comments = null,
    //             .comptime_token = token,
    //             .expr = block_expr,
    //         };
    //         return &node.base;
    //     }
    //
    //     if (p.eatToken(.Keyword_nosuspend)) |nosuspend_token| {
    //         const block_expr = try p.expectNode(parseBlockExprStatement, .{
    //             .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
    //         });
    //
    //         const node = try p.arena.allocator.create(Node.Nosuspend);
    //         node.* = .{
    //             .nosuspend_token = nosuspend_token,
    //             .expr = block_expr,
    //         };
    //         return &node.base;
    //     }
    //
    //     if (p.eatToken(.Keyword_suspend)) |suspend_token| {
    //         const semicolon = p.eatToken(.Semicolon);
    //
    //         const body_node = if (semicolon == null) blk: {
    //             break :blk try p.expectNode(parseBlockExprStatement, .{
    //                 .ExpectedBlockOrExpression = .{ .token = p.tok_i },
    //             });
    //         } else null;
    //
    //         const node = try p.arena.allocator.create(Node.Suspend);
    //         node.* = .{
    //             .suspend_token = suspend_token,
    //             .body = body_node,
    //         };
    //         return &node.base;
    //     }
    //
    //     const defer_token = p.eatToken(.Keyword_defer) orelse p.eatToken(.Keyword_errdefer);
    //     if (defer_token) |token| {
    //         const payload = if (p.token_ids[token] == .Keyword_errdefer)
    //             try p.parsePayload()
    //         else
    //             null;
    //         const expr_node = try p.expectNode(parseBlockExprStatement, .{
    //             .ExpectedBlockOrExpression = .{ .token = p.tok_i },
    //         });
    //         const node = try p.arena.allocator.create(Node.Defer);
    //         node.* = .{
    //             .defer_token = token,
    //             .expr = expr_node,
    //             .payload = payload,
    //         };
    //         return &node.base;
    //     }
    //
    //     if (try p.parseIfStatement()) |node| return node;
    //     if (try p.parseLabeledStatement()) |node| return node;
    //     if (try p.parseSwitchExpr()) |node| return node;
    //     if (try p.parseAssignExpr()) |node| {
    //         _ = try p.expectTokenRecoverable(.Semicolon);
    //         return node;
    //     }
    //
    //     return null;
    // }
    //

    // /// IfStatement
    // /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
    // ///  / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
@@ -47,6 +47,7 @@ fn testParse(source: []const u8, allocator: *mem.Allocator) !void {
        }
        try stderr.writeAll("\n");
    }

    if (tree.errors.len != 0) {
        return error.ParseError;
    }
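Since testParse returns error.ParseError whenever the tree carries errors, a case exercising the new property syntax could look like this (hypothetical test, not part of the diff):

    test "property declaration" {
        try testParse(
            \\Object {
            \\    property String name: "value"
            \\}
        , std.testing.allocator);
    }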
@@ -12,6 +12,7 @@ pub const Token = struct {
    pub const keywords = std.ComptimeStringMap(Id, .{
        .{ "property", .Keyword_property },
        .{ "require", .Keyword_require },
        .{ "false", .Keyword_false },
        .{ "null", .Keyword_null },
        .{ "true", .Keyword_true },
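With the two new entries in place, the tokenizer can classify an identifier with a single map lookup, roughly like this (the surrounding state, identifier_bytes and result, is assumed rather than shown in the hunk):

    // On reaching the end of an identifier, prefer a keyword id if the
    // spelling matches one of the entries above.
    if (Token.keywords.get(identifier_bytes)) |keyword_id| {
        result.id = keyword_id;
    }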
@@ -99,6 +100,7 @@ pub const Token = struct {
        ShebangLine,

        Keyword_property,
        Keyword_require,
        Keyword_false,
        Keyword_null,
        Keyword_true,
@@ -182,6 +184,7 @@ pub const Token = struct {
            .Tilde => "~",

            .Keyword_property => "property",
            .Keyword_require => "require",
            .Keyword_false => "false",
            .Keyword_null => "null",
            .Keyword_true => "true",