Moved global errors to lib and fused them into a single one

Adrien Bouvais 2025-01-12 00:37:57 +01:00
parent c0e8b07025
commit 955aff0d09
16 changed files with 83 additions and 98 deletions
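
In short: the separate error sets that used to live in src/stuffs/errors.zig (ZiQlParserError, SchemaParserError, FileEngineError) are merged into a single ZipponError set, the file now sits in lib/errors.zig, and build.zig exposes it to every artifact as a module named "error". A condensed sketch of the resulting pattern, pieced together from the hunks below (trimmed, not the full file):

    // lib/errors.zig -- one flat error set for the whole project (trimmed)
    pub const ZipponError = error{
        SchemaFileNotFound,
        MemoryError,
        SynthaxError,
        // ... remaining tags, see the full hunk further down
    };

    // build.zig -- the set is wired up as a module named "error"
    exe.root_module.addImport("error", b.createModule(.{ .root_source_file = b.path("lib/errors.zig") }));

    // any consumer -- relative imports of errors.zig become one module import
    const ZipponError = @import("error").ZipponError;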

View File

@@ -4,7 +4,7 @@ const DBEngine = @import("src/main.zig").DBEngine;
const ziqlTokenizer = @import("src/tokenizers/ziql.zig").Tokenizer;
const ziqlToken = @import("src/tokenizers/ziql.zig").Token;
const ziqlParser = @import("src/ziqlParser.zig").Parser;
-const ZipponError = @import("src/stuffs/errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
const names = [_][]const u8{ "Alice", "Bob", "Charlie", "Dave", "Eve" };
const emails = [_][]const u8{ "alice@email.com", "bob@email.com", "charlie@email.com", "dave@email.com", "eve@email.com" };
@@ -31,7 +31,7 @@ pub fn myLog(
}
pub fn main() !void {
-const to_test = [_]usize{500};
+const to_test = [_]usize{500_000};
{
var line_buffer: [1024 * 1024]u8 = undefined;
var db_engine = DBEngine.init("benchmark", "schema/example");
@@ -75,7 +75,7 @@ pub fn main() !void {
for (users_count - 1) |_| {
try writer.print(
-"('{s}', '{s}', {d}, [ {d} ], {{}}, none, {s}, {s}, {s})",
+"('{s}', '{s}', {d}, [ {d} ], none, none, {s}, {s}, {s})",
.{
names[rng.uintAtMost(usize, names.len - 1)],
emails[rng.uintAtMost(usize, emails.len - 1)],
@@ -128,6 +128,7 @@ pub fn main() !void {
"GRAB User {bday > 2000/01/01}",
"GRAB User {age > 30 AND name = 'Charlie' AND bday > 2000/01/01}",
"GRAB User {best_friend IN {name = 'Charlie'}}",
+"DELETE User {}",
};
// Run benchmarks

View File

@@ -21,6 +21,7 @@ pub fn build(b: *std.Build) void {
exe.root_module.addImport("dtype", b.createModule(.{ .root_source_file = b.path("lib/types/out.zig") }));
exe.root_module.addImport("config", b.createModule(.{ .root_source_file = b.path("lib/config.zig") }));
exe.root_module.addImport("ZipponData", b.createModule(.{ .root_source_file = b.path("lib/zid.zig") }));
+exe.root_module.addImport("error", b.createModule(.{ .root_source_file = b.path("lib/errors.zig") }));
// Run
// -----------------------------------------------
@@ -30,7 +31,7 @@ pub fn build(b: *std.Build) void {
// Test
// -----------------------------------------------
const tests1 = b.addTest(.{
-.root_source_file = b.path("src/stuffs/UUIDFileIndex.zig"),
+.root_source_file = b.path("src/dataStructure/UUIDFileIndex.zig"),
.target = target,
.optimize = optimize,
.name = "CLI tokenizer",
@@ -78,10 +79,11 @@ pub fn build(b: *std.Build) void {
tests5.root_module.addImport("dtype", b.createModule(.{ .root_source_file = b.path("lib/types/out.zig") }));
tests5.root_module.addImport("config", b.createModule(.{ .root_source_file = b.path("lib/config.zig") }));
tests5.root_module.addImport("ZipponData", b.createModule(.{ .root_source_file = b.path("lib/zid.zig") }));
+tests5.root_module.addImport("error", b.createModule(.{ .root_source_file = b.path("lib/errors.zig") }));
const run_tests5 = b.addRunArtifact(tests5);
const tests6 = b.addTest(.{
-.root_source_file = b.path("src/stuffs/filter.zig"),
+.root_source_file = b.path("src/dataStructure/filter.zig"),
.target = target,
.optimize = optimize,
.name = "Filter tree",
@@ -90,6 +92,7 @@ pub fn build(b: *std.Build) void {
tests6.root_module.addImport("dtype", b.createModule(.{ .root_source_file = b.path("lib/types/out.zig") }));
tests6.root_module.addImport("config", b.createModule(.{ .root_source_file = b.path("lib/config.zig") }));
tests6.root_module.addImport("ZipponData", b.createModule(.{ .root_source_file = b.path("lib/zid.zig") }));
+tests6.root_module.addImport("error", b.createModule(.{ .root_source_file = b.path("lib/errors.zig") }));
const run_tests6 = b.addRunArtifact(tests6);
const test_step = b.step("test", "Run unit tests");
@@ -111,6 +114,7 @@ pub fn build(b: *std.Build) void {
benchmark.root_module.addImport("dtype", b.createModule(.{ .root_source_file = b.path("lib/types/out.zig") }));
benchmark.root_module.addImport("config", b.createModule(.{ .root_source_file = b.path("lib/config.zig") }));
benchmark.root_module.addImport("ZipponData", b.createModule(.{ .root_source_file = b.path("lib/zid.zig") }));
+benchmark.root_module.addImport("error", b.createModule(.{ .root_source_file = b.path("lib/errors.zig") }));
b.installArtifact(benchmark);
const run_benchmark = b.addRunArtifact(benchmark);

View File

@@ -4,7 +4,7 @@ pub const CPU_CORE = 16;
// Debug
pub const PRINT_STATE = false;
-pub const DONT_SEND = false;
+pub const DONT_SEND = true;
pub const DONT_SEND_ERROR = false;
pub const RESET_LOG_AT_RESTART = false; // If true, will reset the log file at the start of the db, otherwise just keep adding to it

View File

@@ -1,27 +1,4 @@
-// TODO: Only use a single and big ZipponError
-pub const ZiQlParserError = error{
-MemoryError,
-SynthaxError,
-MemberNotFound,
-MemberMissing,
-StructNotFound,
-FeatureMissing,
-ParsingValueError,
-ConditionError,
-WriteError,
-AndOrError,
-CantWriteEntity,
-};
-pub const SchemaParserError = error{
-SynthaxError,
-FeatureMissing,
-ValueParsingError,
-MemoryError,
-};
-pub const FileEngineError = error{
+pub const ZipponError = error{
SchemaFileNotFound,
SchemaNotConform,
DATAFolderNotFound,
@@ -30,22 +7,25 @@ pub const FileEngineError = error{
CantMakeFile,
CantOpenDir,
CantOpenFile,
-MemoryError,
StreamError,
-ReadError, // TODO: Only use stream
+ReadError,
InvalidUUID,
InvalidDate,
InvalidFileIndex,
DirIterError,
-WriteError,
FileStatError,
DeleteFileError,
RenameFileError,
-StructNotFound,
-MemberNotFound,
ZipponDataError,
AllocEncodError,
+MemoryError,
+SynthaxError,
ThreadError,
+CantWriteEntity,
+WriteError,
+ConditionError,
+ParsingValueError,
+MemberNotFound,
+MemberMissing,
+StructNotFound,
};
-pub const ZipponError = ZiQlParserError || FileEngineError || SchemaParserError;

View File

@@ -6,7 +6,7 @@ const DataType = dtype.DataType;
// TODO: Put this in a data structure directory
-const ZipponError = @import("../errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
/// This is the [] part
pub const AdditionalData = @This();

View File

@@ -10,7 +10,7 @@
const std = @import("std");
const s2t = @import("dtype").s2t;
-const ZipponError = @import("../errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
const DataType = @import("dtype").DataType;
const DateTime = @import("dtype").DateTime;
const UUID = @import("dtype").UUID;

View File

@@ -21,7 +21,7 @@
const std = @import("std");
const AdditionalData = @import("additionalData.zig").AdditionalData;
-const ZipponError = @import("../errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
pub const JsonString = struct {
slice: []const u8 = "",

View File

@@ -8,7 +8,7 @@ const DataType = dtype.DataType;
const DateTime = dtype.DateTime;
const UUID = dtype.UUID;
-const ZipponError = @import("errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
pub fn writeEntityTable(
writer: anytype,

View File

@@ -21,7 +21,7 @@ const RelationMap = @import("dataStructure/relationMap.zig");
const JsonString = @import("dataStructure/relationMap.zig").JsonString;
const ConditionValue = @import("dataStructure/filter.zig").ConditionValue;
-const ZipponError = @import("errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
const config = @import("config");
const BUFFER_SIZE = config.BUFFER_SIZE;

View File

@@ -15,7 +15,7 @@ const ziqlTokenizer = @import("tokenizers/ziql.zig").Tokenizer;
const ziqlToken = @import("tokenizers/ziql.zig").Token;
const ziqlParser = @import("ziqlParser.zig");
-const ZipponError = @import("errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
const config = @import("config");
const BUFFER_SIZE = config.BUFFER_SIZE;
@@ -86,7 +86,7 @@ pub const DBEngine = struct {
pub fn init(potential_main_path: ?[]const u8, potential_schema_path: ?[]const u8) DBEngine {
var self = DBEngine{};
-self.thread_engine = ThreadEngine.init() catch @panic("TODO");
+self.thread_engine = ThreadEngine.init();
const potential_main_path_or_environment_variable = potential_main_path orelse utils.getEnvVariable("ZIPPONDB_PATH");
if (potential_main_path_or_environment_variable) |main_path| {

View File

@@ -3,7 +3,6 @@ const zid = @import("ZipponData");
const Allocator = std.mem.Allocator;
const Parser = @import("schemaParser.zig");
const Tokenizer = @import("tokenizers/schema.zig").Tokenizer;
-const ZipponError = @import("errors.zig").ZipponError;
const dtype = @import("dtype");
const DataType = dtype.DataType;
const AdditionalData = @import("dataStructure/additionalData.zig");
@@ -14,6 +13,8 @@ const UUID = dtype.UUID;
const UUIDFileIndex = @import("dataStructure/UUIDFileIndex.zig");
const FileEngine = @import("fileEngine.zig");
+const ZipponError = @import("error").ZipponError;
// TODO: Create a schemaEngine directory and add this as core and the parser with it
const config = @import("config");

View File

@@ -10,7 +10,7 @@ const Loc = @import("tokenizers/shared/loc.zig").Loc;
const send = @import("utils.zig").send;
const printError = @import("utils.zig").printError;
-const SchemaParserError = @import("errors.zig").SchemaParserError;
+const ZipponError = @import("error").ZipponError;
const State = enum {
end,
@@ -60,15 +60,15 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.identifier => {
state = .expect_l_paren;
name = self.toker.getTokenSlice(token);
-member_list.append("id") catch return SchemaParserError.MemoryError;
-type_list.append(.self) catch return SchemaParserError.MemoryError;
+member_list.append("id") catch return ZipponError.MemoryError;
+type_list.append(.self) catch return ZipponError.MemoryError;
},
.eof => state = .end,
else => {
std.debug.print("{s}\n", .{self.toker.getTokenSlice(token)});
return printError(
"Error parsing schema: Expected a struct name",
-SchemaParserError.SynthaxError,
+ZipponError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,
@@ -80,7 +80,7 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.l_paren => state = .expect_member_name,
else => return printError(
"Error parsing schema: Expected (",
-SchemaParserError.SynthaxError,
+ZipponError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,
@@ -95,7 +95,7 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.r_paren => state = .add_struct,
else => return printError(
"Error parsing schema: Expected member name or )",
-SchemaParserError.SynthaxError,
+ZipponError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,
@@ -105,10 +105,10 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.add_struct => {
struct_array.append(try SchemaStruct.init(
name,
-member_list.toOwnedSlice() catch return SchemaParserError.MemoryError,
-type_list.toOwnedSlice() catch return SchemaParserError.MemoryError,
+member_list.toOwnedSlice() catch return ZipponError.MemoryError,
+type_list.toOwnedSlice() catch return ZipponError.MemoryError,
try links.clone(),
-)) catch return SchemaParserError.MemoryError;
+)) catch return ZipponError.MemoryError;
links.deinit();
links = std.StringHashMap([]const u8).init(self.allocator);
@@ -122,7 +122,7 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.expect_member_name => {
state = .expect_two_dot;
-member_list.append(self.toker.getTokenSlice(token)) catch return SchemaParserError.MemoryError;
+member_list.append(self.toker.getTokenSlice(token)) catch return ZipponError.MemoryError;
member_token = token;
},
@@ -130,7 +130,7 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.two_dot => state = .expect_value_type,
else => return printError(
"Error parsing schema: Expected :",
-SchemaParserError.SynthaxError,
+ZipponError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,
@@ -140,41 +140,41 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.expect_value_type => switch (token.tag) {
.type_int => {
state = .expect_comma;
-type_list.append(.int) catch return SchemaParserError.MemoryError;
+type_list.append(.int) catch return ZipponError.MemoryError;
},
.type_str => {
state = .expect_comma;
-type_list.append(.str) catch return SchemaParserError.MemoryError;
+type_list.append(.str) catch return ZipponError.MemoryError;
},
.type_float => {
state = .expect_comma;
-type_list.append(.float) catch return SchemaParserError.MemoryError;
+type_list.append(.float) catch return ZipponError.MemoryError;
},
.type_bool => {
state = .expect_comma;
-type_list.append(.bool) catch return SchemaParserError.MemoryError;
+type_list.append(.bool) catch return ZipponError.MemoryError;
},
.type_date => {
state = .expect_comma;
-type_list.append(.date) catch return SchemaParserError.MemoryError;
+type_list.append(.date) catch return ZipponError.MemoryError;
},
.type_time => {
state = .expect_comma;
-type_list.append(.time) catch return SchemaParserError.MemoryError;
+type_list.append(.time) catch return ZipponError.MemoryError;
},
.type_datetime => {
state = .expect_comma;
-type_list.append(.datetime) catch return SchemaParserError.MemoryError;
+type_list.append(.datetime) catch return ZipponError.MemoryError;
},
.identifier => {
state = .expect_comma;
-type_list.append(.link) catch return SchemaParserError.MemoryError;
-links.put(self.toker.getTokenSlice(member_token), self.toker.getTokenSlice(token)) catch return SchemaParserError.MemoryError;
+type_list.append(.link) catch return ZipponError.MemoryError;
+links.put(self.toker.getTokenSlice(member_token), self.toker.getTokenSlice(token)) catch return ZipponError.MemoryError;
},
.lr_bracket => state = .expext_array_type,
else => return printError(
"Error parsing schema: Expected data type",
-SchemaParserError.SynthaxError,
+ZipponError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,
@@ -184,40 +184,40 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.expext_array_type => switch (token.tag) {
.type_int => {
state = .expect_comma;
-type_list.append(.int_array) catch return SchemaParserError.MemoryError;
+type_list.append(.int_array) catch return ZipponError.MemoryError;
},
.type_str => {
state = .expect_comma;
-type_list.append(.str_array) catch return SchemaParserError.MemoryError;
+type_list.append(.str_array) catch return ZipponError.MemoryError;
},
.type_float => {
state = .expect_comma;
-type_list.append(.float_array) catch return SchemaParserError.MemoryError;
+type_list.append(.float_array) catch return ZipponError.MemoryError;
},
.type_bool => {
state = .expect_comma;
-type_list.append(.bool_array) catch return SchemaParserError.MemoryError;
+type_list.append(.bool_array) catch return ZipponError.MemoryError;
},
.type_date => {
state = .expect_comma;
-type_list.append(.date_array) catch return SchemaParserError.MemoryError;
+type_list.append(.date_array) catch return ZipponError.MemoryError;
},
.type_time => {
state = .expect_comma;
-type_list.append(.time_array) catch return SchemaParserError.MemoryError;
+type_list.append(.time_array) catch return ZipponError.MemoryError;
},
.type_datetime => {
state = .expect_comma;
-type_list.append(.datetime_array) catch return SchemaParserError.MemoryError;
+type_list.append(.datetime_array) catch return ZipponError.MemoryError;
},
.identifier => {
state = .expect_comma;
-type_list.append(.link_array) catch return SchemaParserError.MemoryError;
-links.put(self.toker.getTokenSlice(member_token), self.toker.getTokenSlice(token)) catch return SchemaParserError.MemoryError;
+type_list.append(.link_array) catch return ZipponError.MemoryError;
+links.put(self.toker.getTokenSlice(member_token), self.toker.getTokenSlice(token)) catch return ZipponError.MemoryError;
},
else => return printError(
"Error parsing schema: Expected data type",
-SchemaParserError.SynthaxError,
+ZipponError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,
@@ -228,7 +228,7 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
.comma => state = .expect_member_name_OR_r_paren,
else => return printError(
"Error parsing schema: Expected ,",
-SchemaParserError.SynthaxError,
+ZipponError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,

View File

@@ -1,38 +1,37 @@
const std = @import("std");
-const U64 = std.atomic.Value(u64);
const Pool = std.Thread.Pool;
-const Allocator = std.mem.Allocator;
+const ZipponError = @import("error").ZipponError;
const CPU_CORE = @import("config").CPU_CORE;
const log = std.log.scoped(.thread);
-const ZipponError = @import("../errors.zig").ZipponError;
-pub const Self = @This();
-var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
-const allocator = arena.allocator();
+const allocator = std.heap.page_allocator;
+var thread_arena: std.heap.ThreadSafeAllocator = undefined;
+var thread_pool: Pool = undefined;
+pub const ThreadEngine = @This();
thread_arena: *std.heap.ThreadSafeAllocator,
thread_pool: *Pool,
-pub fn init() ZipponError!Self {
-const thread_arena = allocator.create(std.heap.ThreadSafeAllocator) catch return ZipponError.MemoryError;
-thread_arena.* = std.heap.ThreadSafeAllocator{
+pub fn init() ThreadEngine {
+thread_arena = std.heap.ThreadSafeAllocator{
.child_allocator = allocator,
};
-const thread_pool = allocator.create(Pool) catch return ZipponError.MemoryError;
-thread_pool.init(Pool.Options{
+thread_pool.init(std.Thread.Pool.Options{
.allocator = thread_arena.allocator(),
.n_jobs = CPU_CORE,
-}) catch return ZipponError.ThreadError;
+}) catch @panic("=(");
-return Self{
-.thread_pool = thread_pool,
-.thread_arena = thread_arena,
+return ThreadEngine{
+.thread_pool = &thread_pool,
+.thread_arena = &thread_arena,
};
}
-pub fn deinit(self: *Self) void {
-self.thread_pool.deinit();
-arena.deinit();
+pub fn deinit(_: ThreadEngine) void {
+thread_pool.deinit();
}
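
The thread pool and its thread-safe allocator are now file-scope globals initialized in place, so init no longer allocates and no longer returns ZipponError!Self; that is why DBEngine.init above drops its catch @panic("TODO"). A minimal usage sketch under that reading (the import path is assumed, since the diff viewer does not show file names):

    const ThreadEngine = @import("threadEngine.zig"); // assumed path/name

    test "thread engine init/deinit (sketch)" {
        // After this commit, init cannot fail, so there is no error to handle.
        const engine = ThreadEngine.init();
        defer engine.deinit();
    }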

View File

@@ -1,5 +1,5 @@
const std = @import("std");
-const ZipponError = @import("errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
const config = @import("config");
const log = std.log.scoped(.utils);

View File

@@ -18,7 +18,7 @@ const AdditionalDataMember = @import("dataStructure/additionalData.zig").Additio
const send = @import("utils.zig").send;
const printError = @import("utils.zig").printError;
-const ZipponError = @import("errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
const PRINT_STATE = @import("config").PRINT_STATE;
const log = std.log.scoped(.ziqlParser);

View File

@@ -3,7 +3,7 @@ const Allocator = std.mem.Allocator;
const Parser = @import("src/ziqlParser.zig").Parser;
const Tokenizer = @import("src/tokenizers/ziql.zig").Tokenizer;
const DBEngine = @import("src/main.zig").DBEngine;
-const ZipponError = @import("src/stuffs/errors.zig").ZipponError;
+const ZipponError = @import("error").ZipponError;
const DB = struct {
path: []const u8,
@@ -150,7 +150,7 @@ test "3 struct both side" {
try testParsing(db, "DELETE Post {}");
try testParsing(db, "ADD User (name = 'Bob', email='bob@email.com', age=55, friends=none, posts=none, comments=none, bday=2000/01/01)");
try testParsing(db, "ADD Post (text = 'Hello every body', at=NOW, from=none, comments=none)");
-try testParsing(db, "ADD Post (text = 'Hello every body', at=NOW, from={}, comments=none) -> new_post -> UPDATE User {} TO (posts APPEND new_post)");
+//try testParsing(db, "ADD Post (text = 'Hello every body', at=NOW, from={}, comments=none) -> new_post -> UPDATE User {} TO (posts APPEND new_post)");
// try testParsing(db, "ADD Post (text = 'Hello every body', at=NOW, from={} APPEND TO posts, comments=none)"); Maybe I can use that to be like the above query
// ADD Post (text = 'Hello every body', at=NOW, from={} TO last_post, comments=none) And this for a single link
// try testParsing(db, "ADD Post (text = 'Hello every body', at=NOW, from={} APPEND TO [posts, last_post], comments=none)"); Can be an array to add it to multiple list