Compare commits

...

29 Commits
master ... mess

Author SHA1 Message Date
Karchnu c3c418c80c Compilable again. 2020-12-23 02:16:10 +01:00
Karchnu 45b0f212a8 Parser: WIP (not to lose everything, again). 2020-12-23 01:54:20 +01:00
Karchnu 80d46f16e7 Common structures: grooming. 2020-12-22 20:53:58 +01:00
Karchnu 6d354fd7c2 Print the tree once. 2020-12-22 18:18:34 +01:00
Karchnu 5adcd203f9 Fixing memory leaks. 2020-12-22 18:11:47 +01:00
Karchnu c460858a69 Memory leaks kinda fixed. 2020-12-22 05:53:48 +01:00
Karchnu 0e1dcf7342 Working on memory leaks. 2020-12-22 04:51:41 +01:00
Karchnu 80a94a5fdd Better debug prints. 2020-12-21 16:55:21 +01:00
Karchnu 3d604c3238 Print nodes. 2020-12-21 05:03:10 +01:00
Karchnu eac5b58017 Test: create structures. 2020-12-21 03:53:41 +01:00
Karchnu 78080c43a0 Common structures: create and deinit functions. 2020-12-21 03:29:39 +01:00
Karchnu 00eac1ff5f Adding '+' and '?' as legit characters in identifiers. 2020-12-19 14:09:18 +01:00
Karchnu 0ec3b80d6c Ignoring literals. 2020-12-19 03:32:43 +01:00
Karchnu 8a0abd070e Naming: class => node. 2020-12-19 02:58:44 +01:00
Karchnu bf8283b7cf Reading long references (anchor.something.blah). 2020-12-19 02:44:01 +01:00
Karchnu 70d0454695 Almost done reading source files. 2020-12-19 00:38:24 +01:00
Karchnu 1bea958fed Reading property values. 2020-12-18 04:12:10 +01:00
Karchnu bf6aa3ecf6 Removing unused tokens. 2020-12-18 01:32:25 +01:00
Karchnu 4f6891c4ab Parsing values, very incomplete for now (identifiers WIP). 2020-12-18 01:31:39 +01:00
Karchnu c887e2d786 Tokenizer: no more keywords text, family, etc. 2020-12-17 03:43:47 +01:00
Karchnu 46df247608 Better debug output. 2020-12-17 03:43:22 +01:00
Karchnu 6e9f8cf142 SansSerif => from identifier to StringLiteral. 2020-12-17 03:42:40 +01:00
Karchnu 3bfff518b2 Parsing the `define` instruction. 2020-12-17 03:27:37 +01:00
Karchnu dc61c8ee6f Reading classes and inner classes, properties, assignments. 2020-12-17 03:19:53 +01:00
Karchnu 69122b1e7b Parsing properties, imbricated classes WIP. 2020-12-17 03:00:56 +01:00
Karchnu 78afcf554e Parsing assignments. 2020-12-16 23:13:36 +01:00
Karchnu 0d9130ec89 Grooming, reading source info, parsing Class WIP. 2020-12-16 18:38:11 +01:00
Karchnu a93c718e1f Adding class with fully extended header. 2020-12-16 06:50:00 +01:00
Karchnu 774fff73a4 Messing around. 2020-12-16 06:49:05 +01:00
8 changed files with 596 additions and 3280 deletions

View File

@@ -1,7 +1,9 @@
# This is a comment.
Object {
property string thing: "i has the thing"
Object {
Object (my-children-id) {
property string thing: "i has the other thing"
}
}

View File

@@ -0,0 +1,9 @@
Text (my-identifier) {
text: "Hello, there!"
font {
family: "SansSerif"
pixelSize: 15
}
width: 200
height: 200
}

View File

@@ -1,12 +1,12 @@
Rectangle {
color: "red"
width: 300
height: 200
property String color: "red"
property Int width: 300
property Int height: 200
Rectangle {
color: "blue"
width: 100
height: 100
property String color: "blue"
property Int width: 100
property Int height: 100
anchors {
top: 50

View File

@@ -1,7 +1,7 @@
Rectangle {
id: clickable
color: "blue"
width: 300
height: 300
onClick: emit ["hello, there", "i has events"]
id: clickable
color: "blue"
width: 300
height: 300
onClick: emit ["hello, there", "i has events"]
}

View File

@@ -1,51 +1,185 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const hashString = std.hash_map.hashString;
const eqlString = std.hash_map.eqlString;
const HashMap = std.HashMap;
const AutoHashMap = std.AutoHashMap;
const ArrayList = std.ArrayList;
/// This file introduces the main structures used by guid once running.
/// Tests show how to use them.
/// Type.create does not allocate memory for the structure itself, only for its attributes.
/// Type.deinit works in cascade (deinit its children).
const PropertyHashMap = AutoHashMap([] const u8, PropertyValue);
const NodeList = ArrayList(Node);
const Definitions = AutoHashMap([] const u8, Node);
const PropertyHashMap = AutoHashMap([] const u8, PropertyValue);
const NodeList = ArrayList(Node);
const DefinitionHashMap = AutoHashMap([] const u8, Node);
pub const Node = struct {
id: ?[] const u8,
type_name: [] const u8,
properties: PropertyHashMap,
children: NodeList,
gpa: *Allocator,
pub fn deinit(self: *Node) void {
self.properties.deinit();
for (self.children.items) |*value| {
value.deinit();
}
self.children.deinit();
}
pub fn create(allocator: *Allocator, type_name: []const u8, id: ?[]const u8) !Node {
return Node{
.id = id,
.type_name = type_name,
.properties = PropertyHashMap.init(allocator),
.children = NodeList.init(allocator),
.gpa = allocator,
};
}
};
pub const PropertyValue = union {
string: [] const u8, // String.
integer: u64, // Num (integer).
float: f64, // Num (float).
reference: *u8, // Reference to another property (property binding).
const PropertyValueTags = enum {
string,
integer,
float,
reference,
};
pub const Root = struct {
definitions: Definitions,
pub const PropertyValue = union(PropertyValueTags) {
// nil: null,
string: [] const u8, // String.
integer: u64, // Num (integer).
float: f64, // Num (float).
reference: [] const u8, // Reference to another property (property binding).
};
pub const Tree = struct {
definitions: DefinitionHashMap,
children: NodeList,
gpa: *Allocator,
pub fn deinit(self: *Tree) void {
self.definitions.deinit();
for (self.children.items) |*value| {
value.deinit();
}
self.children.deinit();
}
pub fn create(allocator: *Allocator) !Tree {
return Tree{
.definitions = DefinitionHashMap.init(allocator),
.children = NodeList.init(allocator),
.gpa = allocator,
};
}
};
test "simple test about structures" {
const allocator = std.heap.page_allocator;
var value = PropertyValue { .integer = 10 };
// TESTS and private util functions.
var properties = PropertyHashMap.init(allocator);
defer properties.deinit();
fn say(tosay: []const u8) void {
std.debug.print("{}", .{tosay});
}
try properties.put("hello", value);
std.debug.print("\n", .{});
fn print_properties(properties: PropertyHashMap) void {
var it = properties.iterator();
while(it.next()) |kv| {
std.debug.print("key: {} => value: {}\n", .{kv.key, properties.get(kv.key)});
std.debug.print("\t{} => {}\n", .{kv.key, properties.get(kv.key)});
}
}
fn print_node(node: Node) void {
std.debug.print("Node type {} (id: {})\n", .{node.type_name, node.id});
print_properties(node.properties);
}
pub fn print_tree(tree: Tree) void {
say("\ntree.definitions:\n");
var it = tree.definitions.iterator();
while(it.next()) |kv| {
std.debug.print("{} => ", .{kv.key});
const node = tree.definitions.get(kv.key);
if(node) |n| { print_node(n); }
}
say("tree.children:\n");
for(tree.children.items) |v, k| {
std.debug.print("{} => ", .{k});
print_node(v);
}
}
test "simple test about structures" {
var gpa = std.heap.GeneralPurposeAllocator(.{.safety = true}){};
const allocator = &gpa.allocator;
var value = PropertyValue { .integer = 10 };
var properties = PropertyHashMap.init(allocator);
try properties.put("hello", value);
// Displaying the content.
// say("\n");
// print_properties(properties);
// Freeing the properties.
properties.deinit();
// Testing memory leaks at the end of the test.
const leaks = gpa.deinit();
if (leaks) {
say("\nthere were leaks, oh no\n");
}
else {
say("\nno leaks, yay!\n");
}
std.testing.expect(! leaks);
}
fn init_stuff(allocator: *Allocator) !Tree {
var tree = try Tree.create(allocator);
// Creating a definition and a few children.
try tree.definitions.put("MyObject", try Node.create(allocator, "my-type-name", "my-id"));
var new_node = try Node.create(allocator, "Object", "some-id-for-this-object");
var value = PropertyValue { .integer = 10 };
try new_node.properties.put("integer-val", value);
value = PropertyValue { .string = "some value" };
try new_node.properties.put("string-val", value);
try tree.children.append(new_node);
try tree.children.append(try Node.create(allocator, "Object", "my-id"));
try tree.children.append(try Node.create(allocator, "OtherObject", null));
try tree.children.append(try Node.create(allocator, "Text", "my-id-for-text-object"));
return tree;
}
test "init a Tree structure" {
// Allocator with safety on: checking for memory leaks.
var gpa = std.heap.GeneralPurposeAllocator(.{.safety = true}){};
const allocator = &gpa.allocator;
// Creating a tree.
var tree = try init_stuff(allocator);
// Display the content of the tree.
// print_tree(tree);
// Freeing the tree.
tree.deinit();
// Testing memory leaks at the end of the test.
const leaks = gpa.deinit();
if (leaks) {
say("\nthere were leaks, oh no\n");
}
else {
say("\nno leaks, yay!\n");
}
std.testing.expect(! leaks);
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,13 +1,3 @@
// test "recovery: invalid parameter" {
// try testError(
// \\fn main() void {
// \\ a(comptime T: type)
// \\}
// , &[_]Error{
// .ExpectedToken,
// });
// }
const std = @import("std");
const mem = std.mem;
const warn = std.debug.warn;
@@ -23,33 +13,34 @@ const own_parser = @import("./parse.zig");
fn testParse(source: []const u8, allocator: *mem.Allocator) !void {
const stderr = io.getStdErr().outStream();
const tree = try own_parser.parse(allocator, source);
var tree = try own_parser.parse(allocator, source);
defer tree.deinit();
for (tree.errors) |*parse_error| {
const token = tree.token_locs[parse_error.loc()];
const loc = tree.tokenLocation(0, parse_error.loc());
try stderr.print("(memory buffer):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
try tree.renderError(parse_error, stderr);
try stderr.print("\n{}\n", .{source[loc.line_start..loc.line_end]});
{
var i: usize = 0;
while (i < loc.column) : (i += 1) {
try stderr.writeAll(" ");
}
}
{
const caret_count = token.end - token.start;
var i: usize = 0;
while (i < caret_count) : (i += 1) {
try stderr.writeAll("~");
}
}
try stderr.writeAll("\n");
}
if (tree.errors.len != 0) {
return error.ParseError;
}
// for (tree.errors) |*parse_error| {
// const token = tree.token_locs[parse_error.loc()];
// const loc = tree.tokenLocation(0, parse_error.loc());
// try stderr.print("(memory buffer):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
// try tree.renderError(parse_error, stderr);
// try stderr.print("\n{}\n", .{source[loc.line_start..loc.line_end]});
// {
// var i: usize = 0;
// while (i < loc.column) : (i += 1) {
// try stderr.writeAll(" ");
// }
// }
// {
// const caret_count = token.end - token.start;
// var i: usize = 0;
// while (i < caret_count) : (i += 1) {
// try stderr.writeAll("~");
// }
// }
// try stderr.writeAll("\n");
// }
// if (tree.errors.len != 0) {
// return error.ParseError;
// }
}
const Error = @TagType(ast.Error);

View File

@@ -12,16 +12,13 @@ pub const Token = struct {
pub const keywords = std.ComptimeStringMap(Id, .{
.{ "property", .Keyword_property },
.{ "define", .Keyword_define },
.{ "require", .Keyword_require },
.{ "false", .Keyword_false },
.{ "null", .Keyword_null },
.{ "true", .Keyword_true },
.{ "undefined", .Keyword_undefined },
.{ "text", .Keyword_text },
.{ "pixel-size", .Keyword_pixel_size },
.{ "family", .Keyword_family },
.{ "height", .Keyword_height },
});
pub fn getKeyword(bytes: []const u8) ?Id {
@@ -99,16 +96,13 @@ pub const Token = struct {
ShebangLine,
Keyword_property,
Keyword_define,
Keyword_require,
Keyword_false,
Keyword_null,
Keyword_true,
Keyword_undefined,
Keyword_text,
Keyword_pixel_size,
Keyword_family,
Keyword_height,
pub fn symbol(id: Id) []const u8 {
return switch (id) {
.Invalid => "Invalid",
@@ -182,16 +176,13 @@ pub const Token = struct {
.Tilde => "~",
.Keyword_property => "property",
.Keyword_define => "define",
.Keyword_require => "require",
.Keyword_false => "false",
.Keyword_null => "null",
.Keyword_true => "true",
.Keyword_undefined => "undefined",
.Keyword_text => "text",
.Keyword_pixel_size => "pixel-size",
.Keyword_family => "family",
.Keyword_height => "height",
};
}
};
@@ -562,7 +553,9 @@ pub const Tokenizer = struct {
},
.identifier => switch (c) {
'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
// Include "-", "+" and "?" as identifier tokens; this is the only difference
// from the Zig tokenizer.
'a'...'z', 'A'...'Z', '_', '-', '+', '?', '0'...'9' => {},
else => {
if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |id| {
result.id = id;