From c0e8b0702580a88e9cc0eb4ec6480640192c7598 Mon Sep 17 00:00:00 2001 From: MrBounty Date: Sun, 12 Jan 2025 00:11:03 +0100 Subject: [PATCH] Moved thread stuff into a directory --- src/dataStructure/UUIDFileIndex.zig | 9 +- src/entityWriter.zig | 278 ++++++++++++++-------------- src/fileEngine.zig | 10 +- src/main.zig | 10 +- src/schemaEngine.zig | 10 +- src/thread/context.zig | 43 +++++ src/thread/engine.zig | 38 ++++ src/threadEngine.zig | 81 -------- src/ziqlParser.zig | 1 + 9 files changed, 236 insertions(+), 244 deletions(-) create mode 100644 src/thread/context.zig create mode 100644 src/thread/engine.zig delete mode 100644 src/threadEngine.zig diff --git a/src/dataStructure/UUIDFileIndex.zig b/src/dataStructure/UUIDFileIndex.zig index 985b0d5..c8ceaa8 100644 --- a/src/dataStructure/UUIDFileIndex.zig +++ b/src/dataStructure/UUIDFileIndex.zig @@ -28,14 +28,7 @@ pub fn deinit(self: *UUIDIndexMap) void { } pub fn put(self: *UUIDIndexMap, uuid: UUID, file_index: usize) !void { - const allocator = self.arena.allocator(); - const new_uuid = try allocator.create(UUID); - new_uuid.* = uuid; - - const new_file_index = try allocator.create(usize); - new_file_index.* = file_index; - - try self.map.*.put(new_uuid.*, new_file_index.*); + try self.map.*.put(uuid, file_index); } pub fn contains(self: UUIDIndexMap, uuid: UUID) bool { diff --git a/src/entityWriter.zig b/src/entityWriter.zig index 7c7fb04..4906b2f 100644 --- a/src/entityWriter.zig +++ b/src/entityWriter.zig @@ -10,160 +10,158 @@ const UUID = dtype.UUID; const ZipponError = @import("errors.zig").ZipponError; -pub const EntityWriter = struct { - pub fn writeEntityTable( - writer: anytype, - row: []zid.Data, - additional_data: AdditionalData, - data_types: []const DataType, - ) !void { - try writer.writeAll("| "); - for (additional_data.childrens.items) |member| { - try writeValue(writer, row[member.index], data_types[member.index]); - try writer.writeAll(" \t| "); - } - try writer.writeByte('\n'); +pub fn writeEntityTable( + writer: anytype, + row: []zid.Data, + additional_data: AdditionalData, + data_types: []const DataType, +) !void { + try writer.writeAll("| "); + for (additional_data.childrens.items) |member| { + try writeValue(writer, row[member.index], data_types[member.index]); + try writer.writeAll(" \t| "); } + try writer.writeByte('\n'); +} - pub fn writeHeaderCsv( - writer: anytype, - members: [][]const u8, - delimiter: u8, - ) !void { - for (members, 0..) |member, i| { - try writer.writeAll(member); - if (i < members.len - 1) try writer.writeByte(delimiter); - } - try writer.writeByte('\n'); +pub fn writeHeaderCsv( + writer: anytype, + members: [][]const u8, + delimiter: u8, +) !void { + for (members, 0..) |member, i| { + try writer.writeAll(member); + if (i < members.len - 1) try writer.writeByte(delimiter); } + try writer.writeByte('\n'); +} - pub fn writeEntityCsv( // FIXME: I think if one value str have a \n this will broke. I need to use like """ - writer: anytype, - row: []zid.Data, - data_types: []const DataType, - delimiter: u8, - ) !void { - for (0..row.len) |i| { - try writeValue(writer, row[i], data_types[i]); - if (i < row.len - 1) try writer.writeByte(delimiter); - } - try writer.writeByte('\n'); +pub fn writeEntityCsv( // FIXME: I think if one value str have a \n this will broke. 
I need to use like """ + writer: anytype, + row: []zid.Data, + data_types: []const DataType, + delimiter: u8, +) !void { + for (0..row.len) |i| { + try writeValue(writer, row[i], data_types[i]); + if (i < row.len - 1) try writer.writeByte(delimiter); } + try writer.writeByte('\n'); +} - pub fn writeEntityJSON( - writer: anytype, - row: []zid.Data, - additional_data: AdditionalData, - data_types: []const DataType, - ) !void { - try writer.writeByte('{'); - for (additional_data.childrens.items) |member| { - try writer.print("{s}: ", .{member.name}); - try writeValue(writer, row[member.index], data_types[member.index]); - try writer.writeAll(", "); - } - try writer.writeAll("}, "); +pub fn writeEntityJSON( + writer: anytype, + row: []zid.Data, + additional_data: AdditionalData, + data_types: []const DataType, +) !void { + try writer.writeByte('{'); + for (additional_data.childrens.items) |member| { + try writer.print("{s}: ", .{member.name}); + try writeValue(writer, row[member.index], data_types[member.index]); + try writer.writeAll(", "); } + try writer.writeAll("}, "); +} - fn writeValue(writer: anytype, value: zid.Data, data_type: DataType) !void { - switch (value) { - .Float => |v| try writer.print("{d}", .{v}), - .Int => |v| try writer.print("{d}", .{v}), - .Str => |v| try writer.print("\"{s}\"", .{v}), - .UUID => |v| { - if (data_type == .self) { - try writer.print("\"{s}\"", .{UUID.format_bytes(v)}); - return; - } - const uuid = try UUID.parse("00000000-0000-0000-0000-000000000000"); // Maybe pass that comptime to prevent parsing it everytime - if (!std.meta.eql(v, uuid.bytes)) { - try writer.print("{{|<{s}>|}}", .{v}); - } else { - try writer.print("{{}}", .{}); - } - }, - .Bool => |v| try writer.print("{any}", .{v}), - .Unix => |v| { - const datetime = DateTime.initUnix(v); - try writer.writeByte('"'); - switch (data_type) { - .date => try datetime.format("YYYY/MM/DD", writer), - .time => try datetime.format("HH:mm:ss.SSSS", writer), - .datetime => try datetime.format("YYYY/MM/DD-HH:mm:ss.SSSS", writer), - else => unreachable, - } - try writer.writeByte('"'); - }, - .IntArray, .FloatArray, .StrArray, .UUIDArray, .BoolArray, .UnixArray => try writeArray(writer, value, data_type), - } - } - - fn writeArray(writer: anytype, data: zid.Data, data_type: DataType) ZipponError!void { - writer.writeByte('[') catch return ZipponError.WriteError; - var iter = zid.ArrayIterator.init(data) catch return ZipponError.ZipponDataError; - switch (data) { - .IntArray => while (iter.next()) |v| writer.print("{d}, ", .{v.Int}) catch return ZipponError.WriteError, - .FloatArray => while (iter.next()) |v| writer.print("{d}", .{v.Float}) catch return ZipponError.WriteError, - .StrArray => while (iter.next()) |v| writer.print("\"{s}\"", .{v.Str}) catch return ZipponError.WriteError, - .UUIDArray => while (iter.next()) |v| writer.print("{{|<{s}>|}},", .{v.UUID}) catch return ZipponError.WriteError, - .BoolArray => while (iter.next()) |v| writer.print("{any}", .{v.Bool}) catch return ZipponError.WriteError, - .UnixArray => while (iter.next()) |v| { - const datetime = DateTime.initUnix(v.Unix); - writer.writeByte('"') catch return ZipponError.WriteError; - switch (data_type) { - .date => datetime.format("YYYY/MM/DD", writer) catch return ZipponError.WriteError, - .time => datetime.format("HH:mm:ss.SSSS", writer) catch return ZipponError.WriteError, - .datetime => datetime.format("YYYY/MM/DD-HH:mm:ss.SSSS", writer) catch return ZipponError.WriteError, - else => unreachable, - } - writer.writeAll("\", ") catch 
return ZipponError.WriteError; - }, - else => unreachable, - } - writer.writeByte(']') catch return ZipponError.WriteError; - } - - /// Take a string in the JSON format and look for {|<[16]u8>|}, then will look into the map and check if it can find this UUID - /// If it find it, it ill replace the {|<[16]u8>|} will the value - pub fn updateWithRelation(writer: anytype, input: []const u8, map: std.AutoHashMap([16]u8, JsonString)) ZipponError!void { - var uuid_bytes: [16]u8 = undefined; - var start: usize = 0; - while (std.mem.indexOf(u8, input[start..], "{|<")) |pos| { - const pattern_start = start + pos + 3; - const pattern_end = pattern_start + 16; - - // Write the text before the pattern - writer.writeAll(input[start .. pattern_start - 3]) catch return ZipponError.WriteError; - - if (input[pattern_start - 4] == '[') { - start = try updateArray(writer, input, map, pattern_start - 3); - continue; +fn writeValue(writer: anytype, value: zid.Data, data_type: DataType) !void { + switch (value) { + .Float => |v| try writer.print("{d}", .{v}), + .Int => |v| try writer.print("{d}", .{v}), + .Str => |v| try writer.print("\"{s}\"", .{v}), + .UUID => |v| { + if (data_type == .self) { + try writer.print("\"{s}\"", .{UUID.format_bytes(v)}); + return; } - - @memcpy(uuid_bytes[0..], input[pattern_start..pattern_end]); - if (map.get(uuid_bytes)) |json_string| { - writer.writeAll(json_string.slice) catch return ZipponError.WriteError; + const uuid = try UUID.parse("00000000-0000-0000-0000-000000000000"); // Maybe pass that comptime to prevent parsing it everytime + if (!std.meta.eql(v, uuid.bytes)) { + try writer.print("{{|<{s}>|}}", .{v}); } else { - writer.writeAll(input[pattern_start - 3 .. pattern_end + 3]) catch return ZipponError.WriteError; + try writer.print("{{}}", .{}); } - start = pattern_end + 5; + }, + .Bool => |v| try writer.print("{any}", .{v}), + .Unix => |v| { + const datetime = DateTime.initUnix(v); + try writer.writeByte('"'); + switch (data_type) { + .date => try datetime.format("YYYY/MM/DD", writer), + .time => try datetime.format("HH:mm:ss.SSSS", writer), + .datetime => try datetime.format("YYYY/MM/DD-HH:mm:ss.SSSS", writer), + else => unreachable, + } + try writer.writeByte('"'); + }, + .IntArray, .FloatArray, .StrArray, .UUIDArray, .BoolArray, .UnixArray => try writeArray(writer, value, data_type), + } +} + +fn writeArray(writer: anytype, data: zid.Data, data_type: DataType) ZipponError!void { + writer.writeByte('[') catch return ZipponError.WriteError; + var iter = zid.ArrayIterator.init(data) catch return ZipponError.ZipponDataError; + switch (data) { + .IntArray => while (iter.next()) |v| writer.print("{d}, ", .{v.Int}) catch return ZipponError.WriteError, + .FloatArray => while (iter.next()) |v| writer.print("{d}", .{v.Float}) catch return ZipponError.WriteError, + .StrArray => while (iter.next()) |v| writer.print("\"{s}\"", .{v.Str}) catch return ZipponError.WriteError, + .UUIDArray => while (iter.next()) |v| writer.print("{{|<{s}>|}},", .{v.UUID}) catch return ZipponError.WriteError, + .BoolArray => while (iter.next()) |v| writer.print("{any}", .{v.Bool}) catch return ZipponError.WriteError, + .UnixArray => while (iter.next()) |v| { + const datetime = DateTime.initUnix(v.Unix); + writer.writeByte('"') catch return ZipponError.WriteError; + switch (data_type) { + .date => datetime.format("YYYY/MM/DD", writer) catch return ZipponError.WriteError, + .time => datetime.format("HH:mm:ss.SSSS", writer) catch return ZipponError.WriteError, + .datetime => 
datetime.format("YYYY/MM/DD-HH:mm:ss.SSSS", writer) catch return ZipponError.WriteError, + else => unreachable, + } + writer.writeAll("\", ") catch return ZipponError.WriteError; + }, + else => unreachable, + } + writer.writeByte(']') catch return ZipponError.WriteError; +} + +/// Take a string in the JSON format and look for {|<[16]u8>|}, then will look into the map and check if it can find this UUID +/// If it find it, it ill replace the {|<[16]u8>|} will the value +pub fn updateWithRelation(writer: anytype, input: []const u8, map: std.AutoHashMap([16]u8, JsonString)) ZipponError!void { + var uuid_bytes: [16]u8 = undefined; + var start: usize = 0; + while (std.mem.indexOf(u8, input[start..], "{|<")) |pos| { + const pattern_start = start + pos + 3; + const pattern_end = pattern_start + 16; + + // Write the text before the pattern + writer.writeAll(input[start .. pattern_start - 3]) catch return ZipponError.WriteError; + + if (input[pattern_start - 4] == '[') { + start = try updateArray(writer, input, map, pattern_start - 3); + continue; } - // Write any remaining text - writer.writeAll(input[start..]) catch return ZipponError.WriteError; + @memcpy(uuid_bytes[0..], input[pattern_start..pattern_end]); + if (map.get(uuid_bytes)) |json_string| { + writer.writeAll(json_string.slice) catch return ZipponError.WriteError; + } else { + writer.writeAll(input[pattern_start - 3 .. pattern_end + 3]) catch return ZipponError.WriteError; + } + start = pattern_end + 5; } - fn updateArray(writer: anytype, input: []const u8, map: std.AutoHashMap([16]u8, JsonString), origin: usize) ZipponError!usize { - var uuid_bytes: [16]u8 = undefined; - var start = origin; - while (input.len > start + 23 and std.mem.eql(u8, input[start .. start + 3], "{|<") and std.mem.eql(u8, input[start + 19 .. start + 23], ">|},")) : (start += 23) { - @memcpy(uuid_bytes[0..], input[start + 3 .. start + 19]); - if (map.get(uuid_bytes)) |json_string| { - writer.writeAll(json_string.slice) catch return ZipponError.WriteError; - } else { - writer.writeAll(input[start .. start + 23]) catch return ZipponError.WriteError; - } + // Write any remaining text + writer.writeAll(input[start..]) catch return ZipponError.WriteError; +} + +fn updateArray(writer: anytype, input: []const u8, map: std.AutoHashMap([16]u8, JsonString), origin: usize) ZipponError!usize { + var uuid_bytes: [16]u8 = undefined; + var start = origin; + while (input.len > start + 23 and std.mem.eql(u8, input[start .. start + 3], "{|<") and std.mem.eql(u8, input[start + 19 .. start + 23], ">|},")) : (start += 23) { + @memcpy(uuid_bytes[0..], input[start + 3 .. start + 19]); + if (map.get(uuid_bytes)) |json_string| { + writer.writeAll(json_string.slice) catch return ZipponError.WriteError; + } else { + writer.writeAll(input[start .. 
start + 23]) catch return ZipponError.WriteError; } - return start; } -}; + return start; +} diff --git a/src/fileEngine.zig b/src/fileEngine.zig index 738f29b..8b4239b 100644 --- a/src/fileEngine.zig +++ b/src/fileEngine.zig @@ -4,10 +4,10 @@ const zid = @import("ZipponData"); const U64 = std.atomic.Value(u64); const Pool = std.Thread.Pool; const Allocator = std.mem.Allocator; -const SchemaEngine = @import("schemaEngine.zig").SchemaEngine; +const SchemaEngine = @import("schemaEngine.zig"); const SchemaStruct = @import("schemaEngine.zig").SchemaStruct; -const ThreadSyncContext = @import("threadEngine.zig").ThreadSyncContext; -const EntityWriter = @import("entityWriter.zig").EntityWriter; +const ThreadSyncContext = @import("thread/context.zig"); +const EntityWriter = @import("entityWriter.zig"); const dtype = @import("dtype"); const s2t = dtype.s2t; @@ -15,9 +15,9 @@ const UUID = dtype.UUID; const DateTime = dtype.DateTime; const DataType = dtype.DataType; -const AdditionalData = @import("dataStructure/additionalData.zig").AdditionalData; +const AdditionalData = @import("dataStructure/additionalData.zig"); const Filter = @import("dataStructure/filter.zig").Filter; -const RelationMap = @import("dataStructure/relationMap.zig").RelationMap; +const RelationMap = @import("dataStructure/relationMap.zig"); const JsonString = @import("dataStructure/relationMap.zig").JsonString; const ConditionValue = @import("dataStructure/filter.zig").ConditionValue; diff --git a/src/main.zig b/src/main.zig index ff59783..782eb9a 100644 --- a/src/main.zig +++ b/src/main.zig @@ -4,16 +4,16 @@ const send = utils.send; const Allocator = std.mem.Allocator; const Pool = std.Thread.Pool; -const FileEngine = @import("fileEngine.zig").FileEngine; -const SchemaEngine = @import("schemaEngine.zig").SchemaEngine; -const ThreadEngine = @import("threadEngine.zig").ThreadEngine; +const FileEngine = @import("fileEngine.zig"); +const SchemaEngine = @import("schemaEngine.zig"); +const ThreadEngine = @import("thread/engine.zig"); const cliTokenizer = @import("tokenizers/cli.zig").Tokenizer; const cliToken = @import("tokenizers/cli.zig").Token; const ziqlTokenizer = @import("tokenizers/ziql.zig").Tokenizer; const ziqlToken = @import("tokenizers/ziql.zig").Token; -const ziqlParser = @import("ziqlParser.zig").Parser; +const ziqlParser = @import("ziqlParser.zig"); const ZipponError = @import("errors.zig").ZipponError; @@ -86,7 +86,7 @@ pub const DBEngine = struct { pub fn init(potential_main_path: ?[]const u8, potential_schema_path: ?[]const u8) DBEngine { var self = DBEngine{}; - self.thread_engine = ThreadEngine.init(); + self.thread_engine = ThreadEngine.init() catch @panic("TODO"); const potential_main_path_or_environment_variable = potential_main_path orelse utils.getEnvVariable("ZIPPONDB_PATH"); if (potential_main_path_or_environment_variable) |main_path| { diff --git a/src/schemaEngine.zig b/src/schemaEngine.zig index 62127bd..621b774 100644 --- a/src/schemaEngine.zig +++ b/src/schemaEngine.zig @@ -1,18 +1,18 @@ const std = @import("std"); const zid = @import("ZipponData"); const Allocator = std.mem.Allocator; -const Parser = @import("schemaParser.zig").Parser; +const Parser = @import("schemaParser.zig"); const Tokenizer = @import("tokenizers/schema.zig").Tokenizer; const ZipponError = @import("errors.zig").ZipponError; const dtype = @import("dtype"); const DataType = dtype.DataType; -const AdditionalData = @import("dataStructure/additionalData.zig").AdditionalData; -const RelationMap = 
@import("dataStructure/relationMap.zig").RelationMap; +const AdditionalData = @import("dataStructure/additionalData.zig"); +const RelationMap = @import("dataStructure/relationMap.zig"); const JsonString = @import("dataStructure/relationMap.zig").JsonString; const ConditionValue = @import("dataStructure/filter.zig").ConditionValue; const UUID = dtype.UUID; -const UUIDFileIndex = @import("dataStructure/UUIDFileIndex.zig").UUIDIndexMap; -const FileEngine = @import("fileEngine.zig").FileEngine; +const UUIDFileIndex = @import("dataStructure/UUIDFileIndex.zig"); +const FileEngine = @import("fileEngine.zig"); // TODO: Create a schemaEngine directory and add this as core and the parser with it diff --git a/src/thread/context.zig b/src/thread/context.zig new file mode 100644 index 0000000..ac09487 --- /dev/null +++ b/src/thread/context.zig @@ -0,0 +1,43 @@ +const std = @import("std"); +const log = std.log.scoped(.thread); +const U64 = std.atomic.Value(u64); + +pub const Self = @This(); + +processed_struct: U64 = U64.init(0), +error_file: U64 = U64.init(0), +completed_file: U64 = U64.init(0), +max_struct: u64, +max_file: u64, + +pub fn init(max_struct: u64, max_file: u64) Self { + return Self{ + .max_struct = max_struct, + .max_file = max_file, + }; +} + +pub fn isComplete(self: *Self) bool { + return (self.completed_file.load(.acquire) + self.error_file.load(.acquire)) >= self.max_file; +} + +pub fn completeThread(self: *Self) void { + _ = self.completed_file.fetchAdd(1, .release); +} + +pub fn incrementAndCheckStructLimit(self: *Self) bool { + if (self.max_struct == 0) return false; + const new_count = self.processed_struct.fetchAdd(1, .monotonic); + return (new_count + 1) >= self.max_struct; +} + +pub fn checkStructLimit(self: *Self) bool { + if (self.max_struct == 0) return false; + const count = self.processed_struct.load(.monotonic); + return (count) >= self.max_struct; +} + +pub fn logError(self: *Self, message: []const u8, err: anyerror) void { + log.err("{s}: {any}", .{ message, err }); + _ = self.error_file.fetchAdd(1, .acquire); +} diff --git a/src/thread/engine.zig b/src/thread/engine.zig new file mode 100644 index 0000000..e575c6d --- /dev/null +++ b/src/thread/engine.zig @@ -0,0 +1,38 @@ +const std = @import("std"); +const Pool = std.Thread.Pool; +const Allocator = std.mem.Allocator; + +const CPU_CORE = @import("config").CPU_CORE; +const log = std.log.scoped(.thread); +const ZipponError = @import("../errors.zig").ZipponError; + +pub const Self = @This(); + +var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); +const allocator = arena.allocator(); + +thread_arena: *std.heap.ThreadSafeAllocator, +thread_pool: *Pool, + +pub fn init() ZipponError!Self { + const thread_arena = allocator.create(std.heap.ThreadSafeAllocator) catch return ZipponError.MemoryError; + thread_arena.* = std.heap.ThreadSafeAllocator{ + .child_allocator = allocator, + }; + + const thread_pool = allocator.create(Pool) catch return ZipponError.MemoryError; + thread_pool.init(Pool.Options{ + .allocator = thread_arena.allocator(), + .n_jobs = CPU_CORE, + }) catch return ZipponError.ThreadError; + + return Self{ + .thread_pool = thread_pool, + .thread_arena = thread_arena, + }; +} + +pub fn deinit(self: *Self) void { + self.thread_pool.deinit(); + arena.deinit(); +} diff --git a/src/threadEngine.zig b/src/threadEngine.zig deleted file mode 100644 index e9ea2a9..0000000 --- a/src/threadEngine.zig +++ /dev/null @@ -1,81 +0,0 @@ -// TODO: Put the ThreadSynx stuff and create a ThreadEngine with the arena, pool, 
and some methods - -const std = @import("std"); -const U64 = std.atomic.Value(u64); -const Pool = std.Thread.Pool; -const Allocator = std.mem.Allocator; - -const ZipponError = @import("errors.zig").ZipponError; -const CPU_CORE = @import("config").CPU_CORE; -const OUT_BUFFER_SIZE = @import("config").OUT_BUFFER_SIZE; -const log = std.log.scoped(.thread); - -const allocator = std.heap.page_allocator; - -var thread_arena: std.heap.ThreadSafeAllocator = undefined; -var thread_pool: Pool = undefined; - -pub const ThreadSyncContext = struct { - processed_struct: std.atomic.Value(u64) = std.atomic.Value(u64).init(0), - error_file: std.atomic.Value(u64) = std.atomic.Value(u64).init(0), - completed_file: std.atomic.Value(u64) = std.atomic.Value(u64).init(0), - max_struct: u64, - max_file: u64, - - pub fn init(max_struct: u64, max_file: u64) ThreadSyncContext { - return ThreadSyncContext{ - .max_struct = max_struct, - .max_file = max_file, - }; - } - - pub fn isComplete(self: *ThreadSyncContext) bool { - return (self.completed_file.load(.acquire) + self.error_file.load(.acquire)) >= self.max_file; - } - - pub fn completeThread(self: *ThreadSyncContext) void { - _ = self.completed_file.fetchAdd(1, .release); - } - - pub fn incrementAndCheckStructLimit(self: *ThreadSyncContext) bool { - if (self.max_struct == 0) return false; - const new_count = self.processed_struct.fetchAdd(1, .monotonic); - return (new_count + 1) >= self.max_struct; - } - - pub fn checkStructLimit(self: *ThreadSyncContext) bool { - if (self.max_struct == 0) return false; - const count = self.processed_struct.load(.monotonic); - return (count) >= self.max_struct; - } - - pub fn logError(self: *ThreadSyncContext, message: []const u8, err: anyerror) void { - log.err("{s}: {any}", .{ message, err }); - _ = self.error_file.fetchAdd(1, .acquire); - } -}; - -pub const ThreadEngine = @This(); - -thread_arena: *std.heap.ThreadSafeAllocator, -thread_pool: *Pool, - -pub fn init() ThreadEngine { - thread_arena = std.heap.ThreadSafeAllocator{ - .child_allocator = allocator, - }; - - thread_pool.init(std.Thread.Pool.Options{ - .allocator = thread_arena.allocator(), - .n_jobs = CPU_CORE, - }) catch @panic("=("); - - return ThreadEngine{ - .thread_pool = &thread_pool, - .thread_arena = &thread_arena, - }; -} - -pub fn deinit(_: ThreadEngine) void { - thread_pool.deinit(); -} diff --git a/src/ziqlParser.zig b/src/ziqlParser.zig index 509742d..020b8e0 100644 --- a/src/ziqlParser.zig +++ b/src/ziqlParser.zig @@ -66,6 +66,7 @@ const State = enum { }; pub const Parser = @This(); + toker: *Tokenizer, file_engine: *FileEngine, schema_engine: *SchemaEngine,
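
A few notes on the changes above, with small self-contained Zig sketches. First, the simplified UUIDFileIndex.put: std.AutoHashMap copies both key and value into its own storage on put, so the old per-entry arena allocations were dereferenced immediately and never needed. A minimal standalone illustration of the property being relied on (names are illustrative, not from the repo):

const std = @import("std");

test "AutoHashMap stores keys and values by value" {
    var map = std.AutoHashMap([16]u8, usize).init(std.testing.allocator);
    defer map.deinit();

    var key: [16]u8 = [_]u8{0} ** 16;
    try map.put(key, 42);
    key[0] = 1; // mutating the local copy does not affect the stored key

    try std.testing.expectEqual(@as(usize, 42), map.get([_]u8{0} ** 16).?);
}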
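
The FIXME on writeEntityCsv is worth spelling out: a string value containing a raw newline (or the delimiter itself) breaks the row framing, and the usual fix is RFC 4180-style quoting. A hypothetical helper along those lines, not part of this patch, which the .Str case could call instead of writing the value verbatim:

const std = @import("std");

// Quote a field when it contains the delimiter, a double quote, or a line
// break; embedded double quotes are escaped by doubling them (RFC 4180).
fn writeCsvField(writer: anytype, value: []const u8, delimiter: u8) !void {
    const must_quote = std.mem.indexOfAny(u8, value, "\"\r\n") != null or
        std.mem.indexOfScalar(u8, value, delimiter) != null;
    if (!must_quote) return writer.writeAll(value);

    try writer.writeByte('"');
    for (value) |c| {
        if (c == '"') try writer.writeAll("\"\"") else try writer.writeByte(c);
    }
    try writer.writeByte('"');
}

test writeCsvField {
    var buf: [64]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    try writeCsvField(fbs.writer(), "a\nb", ',');
    try std.testing.expectEqualStrings("\"a\nb\"", fbs.getWritten());
}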
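
The import changes across fileEngine.zig, main.zig, and schemaEngine.zig follow from the file-as-struct idiom the converted files now use: a Zig file with top-level fields is itself a struct type, so @import("x.zig") yields the type directly and the old @import("x.zig").X accessor becomes redundant. Illustrated with two hypothetical files:

// counter.zig -- the whole file is the struct type; no named wrapper needed.
count: u64 = 0,

pub fn increment(self: *@This()) void {
    self.count += 1;
}

// main.zig
const Counter = @import("counter.zig"); // the import result is the type

pub fn main() void {
    var c = Counter{};
    c.increment();
}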
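
thread/context.zig carries over the old ThreadSyncContext semantics unchanged: completed_file and error_file together are measured against max_file, so isComplete() makes progress whether workers succeed or fail, and max_struct == 0 disables the struct cap. A sketch of the intended call pattern, with a hypothetical single-threaded worker standing in for a pool job:

const std = @import("std");
const ThreadSyncContext = @import("thread/context.zig");

// Hypothetical per-file worker: stops early once the shared struct cap is
// reached, and always signals completion so isComplete() can observe it.
fn scanFile(sync: *ThreadSyncContext, rows_in_file: usize) void {
    defer sync.completeThread();
    for (0..rows_in_file) |_| {
        if (sync.incrementAndCheckStructLimit()) return;
    }
}

test "sync context reaches completion" {
    var sync = ThreadSyncContext.init(100, 2); // cap of 100 structs over 2 files
    scanFile(&sync, 80);
    scanFile(&sync, 80); // only 20 more rows are counted before the cap hits
    try std.testing.expect(sync.isComplete());
    try std.testing.expect(sync.checkStructLimit());
}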
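
thread/engine.zig also changes behavior slightly: init now returns ZipponError!Self instead of panicking internally (hence the catch @panic("TODO") added at the call site in main.zig), and the pool plus its ThreadSafeAllocator are allocated from a module-level arena so the pointers held in Self stay valid after init returns. A usage sketch, assuming the WaitGroup helpers on std.Thread.Pool (spawnWg/waitAndWork) from recent Zig releases; the work function is hypothetical:

const std = @import("std");
const ThreadEngine = @import("thread/engine.zig");

fn work(id: usize) void {
    std.log.info("job {d} running", .{id});
}

pub fn main() !void {
    var engine = try ThreadEngine.init();
    defer engine.deinit(); // joins the pool and frees the module-level arena

    var wg = std.Thread.WaitGroup{};
    for (0..4) |i| engine.thread_pool.spawnWg(&wg, work, .{i});
    engine.thread_pool.waitAndWork(&wg);
}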