diff --git a/lib/std/c.zig b/lib/std/c.zig
index 684758286b..b0d0e50079 100644
--- a/lib/std/c.zig
+++ b/lib/std/c.zig
@@ -2,6 +2,11 @@ const builtin = @import("builtin");
 const std = @import("std");
 const page_size = std.mem.page_size;
 
+const tokenizer = @import("c/tokenizer.zig");
+pub const Token = tokenizer.Token;
+pub const Tokenizer = tokenizer.Tokenizer;
+pub const ast = @import("c/ast.zig");
+
 pub usingnamespace @import("os/bits.zig");
 
 pub usingnamespace switch (builtin.os) {
diff --git a/lib/std/c/ast.zig b/lib/std/c/ast.zig
new file mode 100644
index 0000000000..bc992bc549
--- /dev/null
+++ b/lib/std/c/ast.zig
@@ -0,0 +1,66 @@
+const std = @import("std.zig");
+const SegmentedList = std.SegmentedList;
+const Token = std.c.Token;
+const Source = std.c.tokenizer.Source;
+
+pub const TokenIndex = usize;
+
+pub const Tree = struct {
+    tokens: TokenList,
+    sources: SourceList,
+    root_node: *Node.Root,
+    arena_allocator: std.heap.ArenaAllocator,
+    errors: ErrorList,
+
+    pub const SourceList = SegmentedList(Source, 4);
+    pub const TokenList = Source.TokenList;
+    pub const ErrorList = SegmentedList(Error, 0);
+
+    pub fn deinit(self: *Tree) void {
+        // Here we copy the arena allocator into stack memory, because
+        // otherwise it would destroy itself while it was still working.
+        var arena_allocator = self.arena_allocator;
+        arena_allocator.deinit();
+        // self is destroyed
+    }
+};
+
+pub const Error = union(enum) {
+    InvalidToken: InvalidToken,
+
+    pub fn render(self: *const Error, tokens: *Tree.TokenList, stream: var) !void {
+        switch (self.*) {
+            .InvalidToken => |*x| return x.render(tokens, stream),
+        }
+    }
+
+    pub fn loc(self: *const Error) TokenIndex {
+        switch (self.*) {
+            .InvalidToken => |x| return x.token,
+        }
+    }
+
+    pub const InvalidToken = SingleTokenError("Invalid token '{}'");
+
+    fn SingleTokenError(comptime msg: []const u8) type {
+        return struct {
+            token: TokenIndex,
+
+            pub fn render(self: *const @This(), tokens: *Tree.TokenList, stream: var) !void {
+                const actual_token = tokens.at(self.token);
+                return stream.print(msg, .{actual_token.id.symbol()});
+            }
+        };
+    }
+};
+
+pub const Root = struct {
+    decls: DeclList,
+    eof_token: TokenIndex,
+
+    pub const DeclList = SegmentedList(*Decl, 4);
+};
+
+pub const Decl = struct {
+
+};
\ No newline at end of file
diff --git a/lib/std/c/tokenizer.zig b/lib/std/c/tokenizer.zig
index 1d06c6a523..b8e515bec9 100644
--- a/lib/std/c/tokenizer.zig
+++ b/lib/std/c/tokenizer.zig
@@ -4,6 +4,9 @@ const mem = std.mem;
 pub const Source = struct {
     buffer: []const u8,
     file_name: []const u8,
+    tokens: TokenList,
+
+    pub const TokenList = SegmentedList(Token, 64);
 };
 
 pub const Token = struct {
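
Usage note, not part of the patch: a minimal sketch of how the new error list could be reported once a parser fills in Tree.errors. The renderErrors helper and the idea of walking tree.errors are assumptions made for illustration; only Tree.tokens, Tree.errors, Error.render, the SegmentedList len field and at method, and the stream: var convention come from the diff above or from std.SegmentedList.

    const std = @import("std");
    const ast = std.c.ast;

    // Render every recorded parse error against the tree's token list,
    // using the same (tokens, stream) shape as Error.render in the patch.
    fn renderErrors(tree: *ast.Tree, stream: var) !void {
        var i: usize = 0;
        while (i < tree.errors.len) : (i += 1) {
            const parse_error = tree.errors.at(i);
            try parse_error.render(&tree.tokens, stream);
            try stream.print("\n", .{});
        }
    }

Because Error.render only needs the token list and a writer-like stream, error reporting stays independent of how the Tree was produced.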