diff --git a/.gitignore b/.gitignore
index 39aa628..0034652 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,8 +1,8 @@
.zig-cache
.venv
ZipponDB
+data
engine
engine.o
zig-out
-TODO v0.1.md
-generate_dummy_data.py
+TODO v0.2.md
diff --git a/README.md b/README.md
index bd3fc9d..b03068d 100644
--- a/README.md
+++ b/README.md
@@ -154,11 +154,9 @@ There is 5 data types for the moment:
- `bool`: Boolean, can be `true` or `false`
- `string`: Character array between `''`
- `UUID`: Id in the UUID format, used for relationships, etc. All structs have an `id` member.
-
-Comming soon:
- `date`: A date in yyyy/mm/dd
-- `datetime`: A date time in yyyy/mm/dd/hh/mm/ss
-- `time`: A time in hh/mm/ss
+- `time`: A time in hh:mm:ss.mmmm
+- `datetime`: A datetime in yyyy/mm/dd-hh:mm:ss.mmmm

All data types can be an array of those types using `[]` in front of it. So `[]int` is an array of integers.
diff --git a/ZiQL.md b/ZiQL.md
index aea2477..4550ab7 100644
--- a/ZiQL.md
+++ b/ZiQL.md
@@ -27,6 +27,22 @@
GRAB User {name = 'Bob' AND {age > 10}}
^
```
+# Data types
+
+There are 8 data types for the moment:
+- `int`: 64 bit integer
+- `float`: 64 bit float. Needs a dot: `1.` is a float, `1` is an integer.
+- `bool`: Boolean, can be `true` or `false`
+- `string`: Character array between `''`
+- `UUID`: Id in the UUID format, used for relationships, etc. All structs have an `id` member.
+- `date`: A date in yyyy/mm/dd
+- `time`: A time in hh:mm:ss.mmmm
+- `datetime`: A datetime in yyyy/mm/dd-hh:mm:ss.mmmm
+
+All data types can be an array of those types using `[]` in front of it. So `[]int` is an array of integers.
+
+All data types can also be `null`, except arrays, which can only be empty.
+
# Examples

## GRAB
@@ -248,3 +264,35 @@
UPDATE User {name='Bob'} TO (comments REMOVE { at < '2023/12/31'})
```
I may include more options later.
+
+# Date
+
+***WIP***
+
+## Date
+
+To write a date, use this format: `yyyy/mm/dd`.
+Like that: `2024/10/19`.
+
+***Note: You can't use negative years.***
+
+## Time
+
+To write a time, use this format: `hh:mm:ss.mmmm`.
+Like that: `12:45:00.0000`.
+
+Milliseconds and seconds are optional, so these work too: `12:45:00` and `12:45`.
+
+## Datetime
+
+A mix of both: to write a datetime, use this format: `yyyy/mm/dd-hh:mm:ss.mmmm`.
+Like that: `2024/10/19-12:45:00.0000`.
+
+Milliseconds and seconds are optional, so these work too: `2024/10/19-12:45:00` and `2024/10/19-12:45`.
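On the implementation side, these literals end up in the `s2t` helpers (`s2t.parseDate`, `s2t.parseTime`, `s2t.parseDatetime` from `types/stringToType.zig`, used later in this diff). As a rough, hypothetical sketch of what splitting a `yyyy/mm/dd` literal involves (the real helper is not shown here and may differ):

```zig
const std = @import("std");

// Hypothetical sketch only: the real s2t.parseDate in types/stringToType.zig
// is not part of this diff and may be implemented differently.
fn parseDateParts(value: []const u8) !struct { year: i32, month: u8, day: u8 } {
    // Split "2024/10/19" on '/' into three numeric fields.
    var it = std.mem.splitScalar(u8, value, '/');
    return .{
        .year = try std.fmt.parseInt(i32, it.next() orelse return error.BadDate, 10),
        .month = try std.fmt.parseInt(u8, it.next() orelse return error.BadDate, 10),
        .day = try std.fmt.parseInt(u8, it.next() orelse return error.BadDate, 10),
    };
}
```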
+
+GRAB User {birthday > 2020/10/19 AND birthday < 2024/01/01}
+
+GRAB IOT {id = '0000-0000' AND .TemperatureSensor.Temperature.timestamp > 22:45:50.0000}
+GRAB IOT {.TemperatureSensor.TemperatureRecord IN TemperatureRecord{.timestamp > 22:45:50.0000 AND version = 3}}
+GRAB IOT {.TemperatureSensor.TemperatureRecord IN .{timestamp > 22:45:50.0000} AND .TemperatureSensor IN .{version = 3}}
diff --git a/example.zipponschema b/example.zipponschema
index 8782eb5..60b9cd6 100644
--- a/example.zipponschema
+++ b/example.zipponschema
@@ -2,6 +2,9 @@ User (
name: str,
age: int,
email: str,
+bday: date,
+last_order: datetime,
+a_time: time,
scores: []int,
friends: []str,
)
diff --git a/src/config.zig b/src/config.zig
index 41bfb19..204ce50 100644
--- a/src/config.zig
+++ b/src/config.zig
@@ -1 +1,6 @@
pub const BUFFER_SIZE = 1024 * 50; // Line limit when parsing file
+pub const MAX_FILE_SIZE = 5e+4; // 50KB. TODO: Put in config file
+
+// Testing
+
+pub const TEST_DATA_DIR = "test_data/v0.1.1"; // Maybe put that directly in the build
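The fileEngine.zig changes below swap the old inline error set for a shared `FileEngineError` imported from `stuffs/errors.zig`. That file is not included in this diff; reconstructed from the error names actually used below, it presumably looks roughly like this:

```zig
// stuffs/errors.zig (not shown in this diff) — reconstructed from usage
// in fileEngine.zig below; the real set may declare more members.
pub const FileEngineError = error{
    SchemaFileNotFound,
    SchemaNotConform,
    MemoryError,
    CantMakeDir,
    CantMakeFile,
    CantOpenDir,
    CantOpenFile,
    ReadError,
    WriteError,
    StreamError,
    DirIterError,
    FileStatError,
    InvalidUUID,
    InvalidFileIndex,
    DeleteFileError,
    RenameFileError,
    MemberNotFound,
    StructNotFound,
};
```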
diff --git a/src/fileEngine.zig b/src/fileEngine.zig
index 149530e..315436a 100644
@@ -2,6 +2,7 @@ const std = @import("std");
const utils = @import("stuffs/utils.zig");
const Allocator = std.mem.Allocator;
const UUID = @import("types/uuid.zig").UUID;
+const DateTime = @import("types/date.zig").DateTime;
const DataType = @import("types/dataType.zig").DataType;
const s2t = @import("types/stringToType.zig");
const FileTokenizer = @import("tokenizers/file.zig").Tokenizer;
@@ -12,28 +13,20 @@
const SchemaTokenizer = @import("tokenizers/schema.zig").Tokenizer;
const SchemaToken = @import("tokenizers/schema.zig").Token;
const AdditionalData = @import("stuffs/additionalData.zig").AdditionalData;

-const BUFFER_SIZE = @import("config.zig").BUFFER_SIZE;
+const FileEngineError = @import("stuffs/errors.zig").FileEngineError;

-// TODO: Use those errors everywhere in this file
-const FileEngineError = error{
-    SchemaFileNotFound,
-    SchemaNotConform,
-    DATAFolderNotFound,
-    StructFolderNotFound,
-    CantMakeDir,
-    CantMakeFile,
-};
+const BUFFER_SIZE = @import("config.zig").BUFFER_SIZE;
+const MAX_FILE_SIZE = @import("config.zig").MAX_FILE_SIZE;

/// Manage everything that is related to reading or writing files,
/// or even getting stats, whatever. If it touches files, it's here.
pub const FileEngine = struct {
    allocator: Allocator,
-    usable: bool,
-    path_to_ZipponDB_dir: []const u8, // TODO: Put in config file
-    max_file_size: usize = 5e+4, // 50kb TODO: Put in config file
+    path_to_ZipponDB_dir: []const u8,
    null_terminated_schema_buff: [:0]u8,
    struct_array: std.ArrayList(SchemaStruct),
+    // TODO: Check if all DATA folders are ok, meaning every struct dir exists, each contains at least one zippon file, and all files are named 0.zippondata (or .csv later)

    pub fn init(allocator: Allocator, path: []const u8) FileEngine {
        const path_to_ZipponDB_dir = path;
@@ -54,7 +47,6 @@ pub const FileEngine = struct {
            .path_to_ZipponDB_dir = path_to_ZipponDB_dir,
            .null_terminated_schema_buff = null_terminated_schema_buff,
            .struct_array = struct_array,
-            .usable = !std.mem.eql(u8, path, ""),
        };
    }
@@ -67,26 +59,33 @@ pub const FileEngine = struct {
        self.allocator.free(self.path_to_ZipponDB_dir);
    }

+    pub fn usable(self: FileEngine) bool {
+        return !std.mem.eql(u8, "", self.path_to_ZipponDB_dir);
+    }
+
    const ComparisonValue = union {
        int: i64,
        float: f64,
        str: []const u8,
        bool_: bool,
        id: UUID,
+        datetime: DateTime,
        int_array: std.ArrayList(i64),
        str_array: std.ArrayList([]const u8),
        float_array: std.ArrayList(f64),
        bool_array: std.ArrayList(bool),
        id_array: std.ArrayList(UUID),
+        datetime_array: std.ArrayList(DateTime),
    };

    /// Used to parse files. It takes a struct name and member name to know what to parse.
    /// An Operation from equal, different, superior, superior_or_equal, ...
    /// The DataType from int, float and str
+    /// TODO: Use tokens from the query for struct_name, member_name and value, to save memory
    pub const Condition = struct {
        struct_name: []const u8,
        member_name: []const u8 = undefined,
-        value: []const u8 = undefined,
+        value: []const u8 = undefined, // Could be just one with data_type if using union(enum), or could use ComparisonValue directly
        operation: enum { equal, different, superior, superior_or_equal, inferior, inferior_or_equal, in } = undefined, // Add more stuff like IN
        data_type: DataType = undefined,
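To make the `Condition` shape concrete: a query like `GRAB User {birthday > 2020/10/19}` would presumably be translated into a literal like the following (illustrative only; the actual construction happens in the ZiQL parser, which is not part of this diff):

```zig
// Illustrative only: field values a ZiQL parser might fill in for
// `GRAB User {birthday > 2020/10/19}`; not taken from the real parser.
const cond = FileEngine.Condition{
    .struct_name = "User",
    .member_name = "birthday",
    .value = "2020/10/19",
    .operation = .superior,
    .data_type = .date,
};
```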
@@ -97,101 +96,101 @@ pub const FileEngine = struct {
    // --------------------Other--------------------

-    pub fn readSchemaFile(allocator: Allocator, sub_path: []const u8, buffer: []u8) !usize {
-        const path = try std.fmt.allocPrint(allocator, "{s}/schema.zipponschema", .{sub_path});
+    pub fn readSchemaFile(allocator: Allocator, sub_path: []const u8, buffer: []u8) FileEngineError!usize {
+        const path = std.fmt.allocPrint(allocator, "{s}/schema.zipponschema", .{sub_path}) catch return FileEngineError.MemoryError;
        defer allocator.free(path);

-        const file = try std.fs.cwd().openFile(path, .{});
+        const file = std.fs.cwd().openFile(path, .{}) catch return FileEngineError.CantOpenFile;
        defer file.close();

-        const len = try file.readAll(buffer);
+        const len = file.readAll(buffer) catch return FileEngineError.ReadError;

        return len;
    }

-    pub fn writeDbMetrics(self: *FileEngine, buffer: *std.ArrayList(u8)) !void {
-        const path = try std.fmt.allocPrint(self.allocator, "{s}", .{self.path_to_ZipponDB_dir});
+    pub fn writeDbMetrics(self: *FileEngine, buffer: *std.ArrayList(u8)) FileEngineError!void {
+        const path = std.fmt.allocPrint(self.allocator, "{s}", .{self.path_to_ZipponDB_dir}) catch return FileEngineError.MemoryError;
        defer self.allocator.free(path);

-        const main_dir = try std.fs.cwd().openDir(path, .{ .iterate = true });
+        const main_dir = std.fs.cwd().openDir(path, .{ .iterate = true }) catch return FileEngineError.CantOpenDir;
        const writer = buffer.writer();
-        try writer.print("Database path: {s}\n", .{path});
-        const main_size = try utils.getDirTotalSize(main_dir);
-        try writer.print("Total size: {d:.2}Mb\n", .{@as(f64, @floatFromInt(main_size)) / 1e6});
+        writer.print("Database path: {s}\n", .{path}) catch return FileEngineError.WriteError;
+        const main_size = utils.getDirTotalSize(main_dir) catch 0;
+        writer.print("Total size: {d:.2}Mb\n", .{@as(f64, @floatFromInt(main_size)) / 1e6}) catch return FileEngineError.WriteError;

-        const log_dir = try main_dir.openDir("LOG", .{ .iterate = true });
-        const log_size = try utils.getDirTotalSize(log_dir);
-        try writer.print("LOG: {d:.2}Mb\n", .{@as(f64, @floatFromInt(log_size)) / 1e6});
+        const log_dir = main_dir.openDir("LOG", .{ .iterate = true }) catch return FileEngineError.CantOpenDir;
+        const log_size = utils.getDirTotalSize(log_dir) catch 0;
+        writer.print("LOG: {d:.2}Mb\n", .{@as(f64, @floatFromInt(log_size)) / 1e6}) catch return FileEngineError.WriteError;

-        const backup_dir = try main_dir.openDir("BACKUP", .{ .iterate = true });
-        const backup_size = try utils.getDirTotalSize(backup_dir);
-        try writer.print("BACKUP: {d:.2}Mb\n", .{@as(f64, @floatFromInt(backup_size)) / 1e6});
+        const backup_dir = main_dir.openDir("BACKUP", .{ .iterate = true }) catch return FileEngineError.CantOpenDir;
+        const backup_size = utils.getDirTotalSize(backup_dir) catch 0;
+        writer.print("BACKUP: {d:.2}Mb\n", .{@as(f64, @floatFromInt(backup_size)) / 1e6}) catch return FileEngineError.WriteError;

-        const data_dir = try main_dir.openDir("DATA", .{ .iterate = true });
-        const data_size = try utils.getDirTotalSize(data_dir);
-        try writer.print("DATA: {d:.2}Mb\n", .{@as(f64, @floatFromInt(data_size)) / 1e6});
+        const data_dir = main_dir.openDir("DATA", .{ .iterate = true }) catch return FileEngineError.CantOpenDir;
+        const data_size = utils.getDirTotalSize(data_dir) catch 0;
+        writer.print("DATA: {d:.2}Mb\n", .{@as(f64, @floatFromInt(data_size)) / 1e6}) catch return FileEngineError.WriteError;

        var iter = data_dir.iterate();
-        while (try iter.next()) |entry| {
+        while (iter.next() catch return FileEngineError.DirIterError) |entry| {
            if (entry.kind != .directory) continue;
-            const sub_dir = try data_dir.openDir(entry.name, .{ .iterate = true });
-            const size = try utils.getDirTotalSize(sub_dir);
-            try writer.print("  {s}: {d:.}Mb\n", .{ entry.name, @as(f64, @floatFromInt(size)) / 1e6 });
+            const sub_dir = data_dir.openDir(entry.name, .{ .iterate = true }) catch return FileEngineError.CantOpenDir;
+            const size = utils.getDirTotalSize(sub_dir) catch 0;
+            writer.print("  {s}: {d:.}Mb\n", .{ entry.name, @as(f64, @floatFromInt(size)) / 1e6 }) catch return FileEngineError.WriteError;
        }
    }

    // --------------------Init folder and files--------------------

    /// Create the main folder.
Including DATA, LOG and BACKUP - pub fn checkAndCreateDirectories(self: *FileEngine) !void { - var path_buff = try std.fmt.allocPrint(self.allocator, "{s}", .{self.path_to_ZipponDB_dir}); + pub fn checkAndCreateDirectories(self: *FileEngine) FileEngineError!void { + var path_buff = std.fmt.allocPrint(self.allocator, "{s}", .{self.path_to_ZipponDB_dir}) catch return FileEngineError.MemoryError; defer self.allocator.free(path_buff); const cwd = std.fs.cwd(); cwd.makeDir(path_buff) catch |err| switch (err) { error.PathAlreadyExists => {}, - else => return err, + else => return FileEngineError.CantMakeDir, }; self.allocator.free(path_buff); - path_buff = try std.fmt.allocPrint(self.allocator, "{s}/DATA", .{self.path_to_ZipponDB_dir}); + path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA", .{self.path_to_ZipponDB_dir}) catch return FileEngineError.MemoryError; cwd.makeDir(path_buff) catch |err| switch (err) { error.PathAlreadyExists => {}, - else => return err, + else => return FileEngineError.CantMakeDir, }; self.allocator.free(path_buff); - path_buff = try std.fmt.allocPrint(self.allocator, "{s}/BACKUP", .{self.path_to_ZipponDB_dir}); + path_buff = std.fmt.allocPrint(self.allocator, "{s}/BACKUP", .{self.path_to_ZipponDB_dir}) catch return FileEngineError.MemoryError; cwd.makeDir(path_buff) catch |err| switch (err) { error.PathAlreadyExists => {}, - else => return err, + else => return FileEngineError.CantMakeDir, }; self.allocator.free(path_buff); - path_buff = try std.fmt.allocPrint(self.allocator, "{s}/LOG", .{self.path_to_ZipponDB_dir}); + path_buff = std.fmt.allocPrint(self.allocator, "{s}/LOG", .{self.path_to_ZipponDB_dir}) catch return FileEngineError.MemoryError; cwd.makeDir(path_buff) catch |err| switch (err) { error.PathAlreadyExists => {}, - else => return err, + else => return FileEngineError.CantMakeDir, }; } /// Request a path to a schema file and then create the struct folder /// TODO: Check if some data already exist and if so ask if the user want to delete it and make a backup pub fn initDataFolder(self: *FileEngine, path_to_schema_file: []const u8) FileEngineError!void { - var schema_buf = self.allocator.alloc(u8, BUFFER_SIZE) catch @panic("Cant allocate the schema buffer"); + var schema_buf = self.allocator.alloc(u8, BUFFER_SIZE) catch return FileEngineError.MemoryError; defer self.allocator.free(schema_buf); const file = std.fs.cwd().openFile(path_to_schema_file, .{}) catch return FileEngineError.SchemaFileNotFound; defer file.close(); - const len = file.readAll(schema_buf) catch @panic("Can't read schema file"); + const len = file.readAll(schema_buf) catch return FileEngineError.ReadError; self.allocator.free(self.null_terminated_schema_buff); - self.null_terminated_schema_buff = self.allocator.dupeZ(u8, schema_buf[0..len]) catch @panic("Cant allocate null term buffer for the schema"); + self.null_terminated_schema_buff = self.allocator.dupeZ(u8, schema_buf[0..len]) catch return FileEngineError.MemoryError; var toker = SchemaTokenizer.init(self.null_terminated_schema_buff); var parser = SchemaParser.init(&toker, self.allocator); @@ -202,10 +201,10 @@ pub const FileEngine = struct { parser.parse(&self.struct_array) catch return error.SchemaNotConform; - const path = std.fmt.allocPrint(self.allocator, "{s}/DATA", .{self.path_to_ZipponDB_dir}) catch @panic("Cant allocate path"); + const path = std.fmt.allocPrint(self.allocator, "{s}/DATA", .{self.path_to_ZipponDB_dir}) catch return FileEngineError.MemoryError; defer self.allocator.free(path); - var data_dir = 
std.fs.cwd().openDir(path, .{}) catch return FileEngineError.DATAFolderNotFound; + var data_dir = std.fs.cwd().openDir(path, .{}) catch return FileEngineError.CantOpenDir; defer data_dir.close(); for (self.struct_array.items) |struct_item| { @@ -213,7 +212,7 @@ pub const FileEngine = struct { error.PathAlreadyExists => {}, else => return FileEngineError.CantMakeDir, }; - const struct_dir = data_dir.openDir(self.locToSlice(struct_item.name), .{}) catch return FileEngineError.StructFolderNotFound; + const struct_dir = data_dir.openDir(self.locToSlice(struct_item.name), .{}) catch return FileEngineError.CantOpenDir; _ = struct_dir.createFile("0.zippondata", .{}) catch |err| switch (err) { error.PathAlreadyExists => {}, @@ -221,7 +220,7 @@ pub const FileEngine = struct { }; } - self.writeSchemaFile(); + try self.writeSchemaFile(); } // --------------------Read and parse files-------------------- @@ -229,17 +228,18 @@ pub const FileEngine = struct { /// Take a list of UUID and, a buffer array and the additional data to write into the buffer the JSON to send /// TODO: Optimize /// FIXME: Array of string are not working - pub fn parseAndWriteToSend(self: *FileEngine, struct_name: []const u8, uuids: []UUID, buffer: *std.ArrayList(u8), additional_data: AdditionalData) !void { + pub fn parseAndWriteToSend(self: *FileEngine, struct_name: []const u8, uuids: []UUID, buffer: *std.ArrayList(u8), additional_data: AdditionalData) FileEngineError!void { const max_file_index = try self.maxFileIndex(struct_name); var current_index: usize = 0; - var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator"); + var path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_index }, + ) catch return FileEngineError.MemoryError; defer self.allocator.free(path_buff); - var file = std.fs.cwd().openFile(path_buff, .{}) catch { - std.debug.print("Path: {s}", .{path_buff}); - @panic("Can't open first file to init a data iterator"); - }; + var file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; defer file.close(); var output: [BUFFER_SIZE]u8 = undefined; // Maybe need to increase that as it limit the size of a line in a file @@ -252,7 +252,7 @@ pub const FileEngine = struct { var token: FileToken = undefined; var out_writer = buffer.writer(); - try out_writer.writeAll("["); + out_writer.writeAll("[") catch return FileEngineError.WriteError; // Write the start { @@ -281,17 +281,14 @@ pub const FileEngine = struct { reader = buffered.reader(); continue; }, // file read till the end - else => { - std.debug.print("Error while reading file: {any}\n", .{err}); - break; - }, + else => return FileEngineError.StreamError, }; - const null_terminated_string = try self.allocator.dupeZ(u8, output_fbs.getWritten()[37..]); + const null_terminated_string = self.allocator.dupeZ(u8, output_fbs.getWritten()[37..]) catch return FileEngineError.MemoryError; defer self.allocator.free(null_terminated_string); var data_toker = FileTokenizer.init(null_terminated_string); - const uuid = try UUID.parse(output_fbs.getWritten()[0..36]); + const uuid = UUID.parse(output_fbs.getWritten()[0..36]) catch return FileEngineError.InvalidUUID; founded = false; // Optimize this @@ -304,64 +301,65 @@ pub const FileEngine = struct { if (!founded) continue; - try out_writer.writeAll("{"); - try 
out_writer.writeAll("id:\""); - try out_writer.print("{s}", .{output_fbs.getWritten()[0..36]}); - try out_writer.writeAll("\", "); - for (self.structName2structMembers(struct_name), self.structName2DataType(struct_name)) |member_name, member_type| { + out_writer.writeAll("{") catch return FileEngineError.WriteError; + out_writer.writeAll("id:\"") catch return FileEngineError.WriteError; + out_writer.print("{s}", .{output_fbs.getWritten()[0..36]}) catch return FileEngineError.WriteError; + out_writer.writeAll("\", ") catch return FileEngineError.WriteError; + for (try self.structName2structMembers(struct_name), try self.structName2DataType(struct_name)) |member_name, member_type| { token = data_toker.next(); // FIXME: When relationship will be implemented, need to check if the len of NON link is 0 if (!(additional_data.member_to_find.items.len == 0) or !(additional_data.contains(self.locToSlice(member_name)))) continue; // write the member name and = sign - try out_writer.print("{s}: ", .{self.locToSlice(member_name)}); + out_writer.print("{s}: ", .{self.locToSlice(member_name)}) catch return FileEngineError.WriteError; switch (member_type) { .str => { const str_slice = data_toker.getTokenSlice(token); - try out_writer.print("\"{s}\"", .{str_slice[1 .. str_slice.len - 1]}); + out_writer.print("\"{s}\"", .{str_slice[1 .. str_slice.len - 1]}) catch return FileEngineError.WriteError; }, .str_array => { - try out_writer.writeAll(data_toker.getTokenSlice(token)); + out_writer.writeAll(data_toker.getTokenSlice(token)) catch return FileEngineError.WriteError; token = data_toker.next(); while (token.tag != .r_bracket) : (token = data_toker.next()) { - try out_writer.writeAll("\""); - try out_writer.writeAll(data_toker.getTokenSlice(token)[1..(token.loc.end - token.loc.start)]); - try out_writer.writeAll("\""); - try out_writer.writeAll(" "); + out_writer.writeAll("\"") catch return FileEngineError.WriteError; + out_writer.writeAll(data_toker.getTokenSlice(token)[1..(token.loc.end - token.loc.start)]) catch return FileEngineError.WriteError; + out_writer.writeAll("\"") catch return FileEngineError.WriteError; + out_writer.writeAll(" ") catch return FileEngineError.WriteError; } - try out_writer.writeAll(data_toker.getTokenSlice(token)); + out_writer.writeAll(data_toker.getTokenSlice(token)) catch return FileEngineError.WriteError; }, - .int_array, .float_array, .bool_array, .id_array => { + .int_array, .float_array, .bool_array, .id_array, .date_array, .time_array, .datetime_array => { while (token.tag != .r_bracket) : (token = data_toker.next()) { - try out_writer.writeAll(data_toker.getTokenSlice(token)); - try out_writer.writeAll(" "); + out_writer.writeAll(data_toker.getTokenSlice(token)) catch return FileEngineError.WriteError; + out_writer.writeAll(" ") catch return FileEngineError.WriteError; } - try out_writer.writeAll(data_toker.getTokenSlice(token)); + out_writer.writeAll(data_toker.getTokenSlice(token)) catch return FileEngineError.WriteError; }, - else => try out_writer.writeAll(data_toker.getTokenSlice(token)), //write the value as if + else => out_writer.writeAll(data_toker.getTokenSlice(token)) catch return FileEngineError.WriteError, //write the value as if } - try out_writer.writeAll(", "); + out_writer.writeAll(", ") catch return FileEngineError.WriteError; } - try out_writer.writeAll("}"); - try out_writer.writeAll(", "); + out_writer.writeAll("}") catch return FileEngineError.WriteError; + out_writer.writeAll(", ") catch return FileEngineError.WriteError; } - try 
out_writer.writeAll("]"); + out_writer.writeAll("]") catch return FileEngineError.WriteError; } /// Use a struct name to populate a list with all UUID of this struct /// TODO: Optimize this, I'm sure I can do better than that - pub fn getAllUUIDList(self: *FileEngine, struct_name: []const u8, uuid_array: *std.ArrayList(UUID)) !void { + pub fn getAllUUIDList(self: *FileEngine, struct_name: []const u8, uuid_array: *std.ArrayList(UUID)) FileEngineError!void { const max_file_index = try self.maxFileIndex(struct_name); var current_index: usize = 0; - var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator"); + var path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_index }, + ) catch return FileEngineError.MemoryError; defer self.allocator.free(path_buff); - var file = std.fs.cwd().openFile(path_buff, .{}) catch { - std.debug.print("Path: {s}", .{path_buff}); - @panic("Can't open first file to init a data iterator"); - }; + var file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; defer file.close(); var output: [BUFFER_SIZE]u8 = undefined; // Maybe need to increase that as it limit the size of a line in a file @@ -384,26 +382,24 @@ pub const FileEngine = struct { current_index += 1; self.allocator.free(path_buff); - path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_index }, + ) catch return FileEngineError.MemoryError; file.close(); // Do I need to close ? I think so - file = std.fs.cwd().openFile(path_buff, .{}) catch { - std.debug.print("Error trying to open {s}\n", .{path_buff}); - @panic("Can't open file to update a data iterator"); - }; + file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; buffered = std.io.bufferedReader(file.reader()); reader = buffered.reader(); continue; }, // file read till the end - else => { - std.debug.print("Error while reading file: {any}\n", .{err}); - break; - }, + else => return FileEngineError.StreamError, }; const uuid = try UUID.parse(output_fbs.getWritten()[0..36]); - try uuid_array.append(uuid); + uuid_array.append(uuid) catch return FileEngineError.MemoryError; } } @@ -416,20 +412,21 @@ pub const FileEngine = struct { /// The threselhold can be like if the average len is > 400 character. 
@@ -416,20 +412,21 @@
    /// The threshold can be like if the average len is > 400 characters. So UUID would take less than 10% of the storage
    /// - Save data in a more compact way
    /// - Multithreading, each thread takes a list of files and we mix them at the end
-    pub fn getUUIDListUsingCondition(self: *FileEngine, condition: Condition, uuid_array: *std.ArrayList(UUID)) !void {
+    pub fn getUUIDListUsingCondition(self: *FileEngine, condition: Condition, uuid_array: *std.ArrayList(UUID)) FileEngineError!void {
        const max_file_index = try self.maxFileIndex(condition.struct_name);
        var current_index: usize = 0;

-        var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, condition.struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
+        var path_buff = std.fmt.allocPrint(
+            self.allocator,
+            "{s}/DATA/{s}/{d}.zippondata",
+            .{ self.path_to_ZipponDB_dir, condition.struct_name, current_index },
+        ) catch return FileEngineError.MemoryError;
        defer self.allocator.free(path_buff);

-        var file = std.fs.cwd().openFile(path_buff, .{}) catch {
-            std.debug.print("Path: {s}", .{path_buff});
-            @panic("Can't open first file to init a data iterator");
-        };
+        var file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile;
        defer file.close();

-        var output: [BUFFER_SIZE]u8 = undefined; // Maybe need to increase that as it limit the size of a line in a file
+        var output: [BUFFER_SIZE]u8 = undefined;
        var output_fbs = std.io.fixedBufferStream(&output);
        const writer = output_fbs.writer();
@@ -442,25 +439,36 @@
            .str => compare_value = ComparisonValue{ .str = condition.value },
            .float => compare_value = ComparisonValue{ .float = s2t.parseFloat(condition.value) },
            .bool => compare_value = ComparisonValue{ .bool_ = s2t.parseBool(condition.value) },
-            .id => compare_value = ComparisonValue{ .id = try UUID.parse(condition.value) },
+            .id => compare_value = ComparisonValue{ .id = UUID.parse(condition.value) catch return FileEngineError.InvalidUUID },
+            .date => compare_value = ComparisonValue{ .datetime = s2t.parseDate(condition.value) },
+            .time => compare_value = ComparisonValue{ .datetime = s2t.parseTime(condition.value) },
+            .datetime => compare_value = ComparisonValue{ .datetime = s2t.parseDatetime(condition.value) },
            .int_array => compare_value = ComparisonValue{ .int_array = s2t.parseArrayInt(self.allocator, condition.value) },
            .str_array => compare_value = ComparisonValue{ .str_array = s2t.parseArrayStr(self.allocator, condition.value) },
            .float_array => compare_value = ComparisonValue{ .float_array = s2t.parseArrayFloat(self.allocator, condition.value) },
            .bool_array => compare_value = ComparisonValue{ .bool_array = s2t.parseArrayBool(self.allocator, condition.value) },
            .id_array => compare_value = ComparisonValue{ .id_array = s2t.parseArrayUUID(self.allocator, condition.value) },
+            .date_array => compare_value = ComparisonValue{ .datetime_array = s2t.parseArrayDate(self.allocator, condition.value) },
+            .time_array => compare_value = ComparisonValue{ .datetime_array = s2t.parseArrayTime(self.allocator, condition.value) },
+            .datetime_array => compare_value = ComparisonValue{ .datetime_array = s2t.parseArrayDatetime(self.allocator, condition.value) },
        }
        defer {
            switch (condition.data_type) {
                .int_array => compare_value.int_array.deinit(),
-                .str_array => compare_value.str_array.deinit(),
+                .str_array => {
+                    for (compare_value.str_array.items) |value| self.allocator.free(value);
+                    compare_value.str_array.deinit();
+                },
                .float_array =>
compare_value.float_array.deinit(), .bool_array => compare_value.bool_array.deinit(), .id_array => compare_value.id_array.deinit(), + .datetime_array => compare_value.datetime_array.deinit(), else => {}, } } var token: FileToken = undefined; + var found = false; while (true) { output_fbs.reset(); @@ -475,79 +483,96 @@ pub const FileEngine = struct { current_index += 1; self.allocator.free(path_buff); - path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, condition.struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, condition.struct_name, current_index }, + ) catch return FileEngineError.MemoryError; file.close(); // Do I need to close ? I think so - file = std.fs.cwd().openFile(path_buff, .{}) catch { - std.debug.print("Error trying to open {s}\n", .{path_buff}); - @panic("Can't open file to update a data iterator"); - }; + file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; buffered = std.io.bufferedReader(file.reader()); reader = buffered.reader(); continue; }, // file read till the end - else => { - std.debug.print("Error while reading file: {any}\n", .{err}); - break; - }, + else => return FileEngineError.StreamError, }; // Maybe use the stream directly to prevent duplicate the data // But I would need to change the Tokenizer a lot... - const null_terminated_string = try self.allocator.dupeZ(u8, output_fbs.getWritten()[37..]); + const null_terminated_string = self.allocator.dupeZ(u8, output_fbs.getWritten()[37..]) catch return FileEngineError.MemoryError; defer self.allocator.free(null_terminated_string); var data_toker = FileTokenizer.init(null_terminated_string); - const uuid = try UUID.parse(output_fbs.getWritten()[0..36]); + const uuid = UUID.parse(output_fbs.getWritten()[0..36]) catch return FileEngineError.InvalidUUID; // Skip unwanted token - for (self.structName2structMembers(condition.struct_name)) |mn| { + for (try self.structName2structMembers(condition.struct_name)) |mn| { if (std.mem.eql(u8, self.locToSlice(mn), condition.member_name)) break; _ = data_toker.next(); } token = data_toker.next(); - // TODO: Make sure in amount that the rest is unreachable by sending an error for wrong condition like superior between 2 string or array + const row_value = data_toker.getTokenSlice(token); + switch (condition.operation) { .equal => switch (condition.data_type) { - .int => if (compare_value.int == s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .float => if (compare_value.float == s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .str => if (std.mem.eql(u8, compare_value.str, data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .bool => if (compare_value.bool_ == s2t.parseBool(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .id => if (compare_value.id.compare(uuid)) try uuid_array.append(uuid), + .int => found = compare_value.int == s2t.parseInt(row_value), + .float => found = compare_value.float == s2t.parseFloat(row_value), + .str => found = std.mem.eql(u8, compare_value.str, row_value), + .bool => found = compare_value.bool_ == s2t.parseBool(row_value), + .id => found = compare_value.id.compare(uuid), + .date => found = compare_value.datetime.compareDate(s2t.parseDate(row_value)), + .time => found = 
compare_value.datetime.compareTime(s2t.parseTime(row_value)), + .datetime => found = compare_value.datetime.compareDatetime(s2t.parseDatetime(row_value)), else => unreachable, }, .different => switch (condition.data_type) { - .int => if (compare_value.int != s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .float => if (compare_value.float != s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .str => if (!std.mem.eql(u8, compare_value.str, data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .bool => if (compare_value.bool_ != s2t.parseBool(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), + .int => found = compare_value.int != s2t.parseInt(row_value), + .float => found = compare_value.float != s2t.parseFloat(row_value), + .str => found = !std.mem.eql(u8, compare_value.str, row_value), + .bool => found = compare_value.bool_ != s2t.parseBool(row_value), + .date => found = !compare_value.datetime.compareDate(s2t.parseDate(row_value)), + .time => found = !compare_value.datetime.compareTime(s2t.parseTime(row_value)), + .datetime => found = !compare_value.datetime.compareDatetime(s2t.parseDatetime(row_value)), else => unreachable, }, .superior_or_equal => switch (condition.data_type) { - .int => if (compare_value.int <= s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .float => if (compare_value.float <= s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), + .int => found = compare_value.int <= s2t.parseInt(data_toker.getTokenSlice(token)), + .float => found = compare_value.float <= s2t.parseFloat(data_toker.getTokenSlice(token)), + .date => found = compare_value.datetime.toUnix() <= s2t.parseDate(row_value).toUnix(), + .time => found = compare_value.datetime.toUnix() <= s2t.parseTime(row_value).toUnix(), + .datetime => found = compare_value.datetime.toUnix() <= s2t.parseDatetime(row_value).toUnix(), else => unreachable, }, .superior => switch (condition.data_type) { - .int => if (compare_value.int < s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .float => if (compare_value.float < s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), + .int => found = compare_value.int < s2t.parseInt(data_toker.getTokenSlice(token)), + .float => found = compare_value.float < s2t.parseFloat(data_toker.getTokenSlice(token)), + .date => found = compare_value.datetime.toUnix() < s2t.parseDate(row_value).toUnix(), + .time => found = compare_value.datetime.toUnix() < s2t.parseTime(row_value).toUnix(), + .datetime => found = compare_value.datetime.toUnix() < s2t.parseDatetime(row_value).toUnix(), else => unreachable, }, .inferior_or_equal => switch (condition.data_type) { - .int => if (compare_value.int >= s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .float => if (compare_value.float >= s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), + .int => found = compare_value.int >= s2t.parseInt(data_toker.getTokenSlice(token)), + .float => found = compare_value.float >= s2t.parseFloat(data_toker.getTokenSlice(token)), + .date => found = compare_value.datetime.toUnix() >= s2t.parseDate(row_value).toUnix(), + .time => found = compare_value.datetime.toUnix() >= s2t.parseTime(row_value).toUnix(), + .datetime => found = compare_value.datetime.toUnix() >= s2t.parseDatetime(row_value).toUnix(), else => unreachable, }, .inferior => switch (condition.data_type) { - .int => if 
(compare_value.int > s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), - .float => if (compare_value.float > s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid), + .int => found = compare_value.int > s2t.parseInt(data_toker.getTokenSlice(token)), + .float => found = compare_value.float > s2t.parseFloat(data_toker.getTokenSlice(token)), + .date => found = compare_value.datetime.toUnix() > s2t.parseDate(row_value).toUnix(), + .time => found = compare_value.datetime.toUnix() > s2t.parseTime(row_value).toUnix(), + .datetime => found = compare_value.datetime.toUnix() > s2t.parseDatetime(row_value).toUnix(), else => unreachable, }, @@ -555,19 +580,21 @@ pub const FileEngine = struct { .in => switch (condition.data_type) { .id_array => { for (compare_value.id_array.items) |elem| { - if (elem.compare(uuid)) try uuid_array.append(uuid); + if (elem.compare(uuid)) uuid_array.append(uuid) catch return FileEngineError.MemoryError; } }, else => unreachable, }, } + + if (found) uuid_array.append(uuid) catch return FileEngineError.MemoryError; } } // --------------------Change existing files-------------------- // Do I need a map here ? Cant I use something else ? - pub fn writeEntity(self: *FileEngine, struct_name: []const u8, data_map: std.StringHashMap([]const u8)) !UUID { + pub fn writeEntity(self: *FileEngine, struct_name: []const u8, data_map: std.StringHashMap([]const u8)) FileEngineError!UUID { const uuid = UUID.init(); const potential_file_index = try self.getFirstUsableIndexFile(struct_name); @@ -578,23 +605,31 @@ pub const FileEngine = struct { defer self.allocator.free(path); if (potential_file_index) |file_index| { - path = try std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, file_index }); - file = std.fs.cwd().openFile(path, .{ .mode = .read_write }) catch @panic("=("); + path = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, file_index }, + ) catch return FileEngineError.MemoryError; + file = std.fs.cwd().openFile(path, .{ .mode = .read_write }) catch return FileEngineError.CantOpenFile; } else { const max_index = try self.maxFileIndex(struct_name); - path = try std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, max_index + 1 }); - file = std.fs.cwd().createFile(path, .{}) catch @panic("Error creating new data file"); + path = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, max_index + 1 }, + ) catch return FileEngineError.MemoryError; + file = std.fs.cwd().createFile(path, .{}) catch return FileEngineError.CantMakeFile; } - try file.seekFromEnd(0); - try file.writer().print("{s}", .{uuid.format_uuid()}); + file.seekFromEnd(0) catch return FileEngineError.WriteError; // Not really a write error tho + file.writer().print("{s}", .{uuid.format_uuid()}) catch return FileEngineError.WriteError; - for (self.structName2structMembers(struct_name)) |member_name| { - try file.writer().print(" {s}", .{data_map.get(self.locToSlice(member_name)).?}); + for (try self.structName2structMembers(struct_name)) |member_name| { + file.writer().print(" {s}", .{data_map.get(self.locToSlice(member_name)).?}) catch return FileEngineError.WriteError; // Change that for csv } - try file.writer().print("\n", .{}); + file.writer().print("\n", .{}) catch return FileEngineError.WriteError; return uuid; } @@ -604,28 
+639,34 @@ pub const FileEngine = struct { /// TODO: Optmize a lot, I did that quickly to work but it is far from optimized. Idea: /// - Once all uuid found, stream until the end of the file without delimiter or uuid compare /// - Change map to array - pub fn updateEntities(self: *FileEngine, struct_name: []const u8, uuids: []UUID, new_data_map: std.StringHashMap([]const u8)) !void { - const max_file_index = self.maxFileIndex(struct_name) catch @panic("Cant get max index file when updating"); + pub fn updateEntities(self: *FileEngine, struct_name: []const u8, uuids: []UUID, new_data_map: std.StringHashMap([]const u8)) FileEngineError!void { + const max_file_index = try self.maxFileIndex(struct_name); var current_file_index: usize = 0; - var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + var path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; defer self.allocator.free(path_buff); - var path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + var path_buff2 = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; defer self.allocator.free(path_buff2); - var old_file = std.fs.cwd().openFile(path_buff, .{}) catch { - std.debug.print("Path: {s}", .{path_buff}); - @panic("Can't open first file to init a data iterator"); - }; + var old_file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; self.allocator.free(path_buff); - path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata.new", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; - var new_file = std.fs.cwd().createFile(path_buff, .{}) catch { - std.debug.print("Path: {s}", .{path_buff}); - @panic("Can't create new file to init a data iterator"); - }; + var new_file = std.fs.cwd().createFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; defer new_file.close(); var output: [BUFFER_SIZE]u8 = undefined; // Maybe need to increase that as it limit the size of a line in a file @@ -646,49 +687,56 @@ pub const FileEngine = struct { // Start by deleting and renaming the new file self.allocator.free(path_buff); - path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; self.allocator.free(path_buff2); - path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a 
DataIterator"); + path_buff2 = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata.new", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; old_file.close(); - try std.fs.cwd().deleteFile(path_buff); - try std.fs.cwd().rename(path_buff2, path_buff); + std.fs.cwd().deleteFile(path_buff) catch return FileEngineError.DeleteFileError; + std.fs.cwd().rename(path_buff2, path_buff) catch return FileEngineError.RenameFileError; if (current_file_index == max_file_index) break; current_file_index += 1; self.allocator.free(path_buff); - path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; self.allocator.free(path_buff2); - path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ + self.path_to_ZipponDB_dir, + struct_name, + current_file_index, + }) catch return FileEngineError.MemoryError; - old_file = std.fs.cwd().openFile(path_buff, .{}) catch { - std.debug.print("Error trying to open {s}\n", .{path_buff}); - @panic("Can't open file to update entities"); - }; + old_file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; - new_file = std.fs.cwd().createFile(path_buff2, .{}) catch { - std.debug.print("Error trying to create {s}\n", .{path_buff2}); - @panic("Can't create file to update entities"); - }; + new_file = std.fs.cwd().createFile(path_buff2, .{}) catch return FileEngineError.CantMakeFile; buffered = std.io.bufferedReader(old_file.reader()); reader = buffered.reader(); continue; }, // file read till the end - else => { - std.debug.print("Error while reading file: {any}\n", .{err}); - break; - }, + else => return FileEngineError.StreamError, }; - try new_file.writeAll(output_fbs.getWritten()); + new_file.writeAll(output_fbs.getWritten()) catch return FileEngineError.WriteError; // THis is the uuid of the current row - const uuid = try UUID.parse(output_fbs.getWritten()[0..36]); + const uuid = UUID.parse(output_fbs.getWritten()[0..36]) catch return FileEngineError.InvalidUUID; founded = false; // Optimize this @@ -702,82 +750,92 @@ pub const FileEngine = struct { if (!founded) { // stream until the delimiter output_fbs.reset(); - try new_file.writeAll(" "); - try reader.streamUntilDelimiter(writer, '\n', null); - try new_file.writeAll(output_fbs.getWritten()); - try new_file.writeAll("\n"); + new_file.writeAll(" ") catch return FileEngineError.WriteError; + reader.streamUntilDelimiter(writer, '\n', null) catch return FileEngineError.WriteError; + new_file.writeAll(output_fbs.getWritten()) catch return FileEngineError.WriteError; + new_file.writeAll("\n") catch return FileEngineError.WriteError; } else { - for (self.structName2structMembers(struct_name), self.structName2DataType(struct_name)) |member_name, member_type| { + for (try self.structName2structMembers(struct_name), try self.structName2DataType(struct_name)) |member_name, member_type| { // For all collum in the right order, check if the key is in 
the map, if so use it to write the new value, otherwise use the old file
                        output_fbs.reset();
                        switch (member_type) {
                            .str => {
-                                try reader.streamUntilDelimiter(writer, '\'', null);
-                                try reader.streamUntilDelimiter(writer, '\'', null);
+                                reader.streamUntilDelimiter(writer, '\'', null) catch return FileEngineError.StreamError;
+                                reader.streamUntilDelimiter(writer, '\'', null) catch return FileEngineError.StreamError;
                            },
-                            .int_array, .float_array, .bool_array, .id_array => try reader.streamUntilDelimiter(writer, ']', null),
-                            .str_array => try reader.streamUntilDelimiter(writer, ']', null), // FIXME: If the string itself contain ], this will be a problem
+                            .int_array, .float_array, .bool_array, .id_array => {
+                                reader.streamUntilDelimiter(writer, ']', null) catch return FileEngineError.StreamError;
+                            },
+                            .str_array => {
+                                reader.streamUntilDelimiter(writer, ']', null) catch return FileEngineError.StreamError;
+                            }, // FIXME: If the string itself contains ], this will be a problem
                            else => {
-                                try reader.streamUntilDelimiter(writer, ' ', null);
-                                try reader.streamUntilDelimiter(writer, ' ', null);
+                                reader.streamUntilDelimiter(writer, ' ', null) catch return FileEngineError.StreamError;
+                                reader.streamUntilDelimiter(writer, ' ', null) catch return FileEngineError.StreamError;
                            },
                        }

                        if (new_data_map.contains(self.locToSlice(member_name))) {
                            // Write the new data
-                            try new_file.writer().print(" {s}", .{new_data_map.get(self.locToSlice(member_name)).?});
+                            new_file.writer().print(" {s}", .{new_data_map.get(self.locToSlice(member_name)).?}) catch return FileEngineError.WriteError;
                        } else {
                            // Write the old data
                            switch (member_type) {
-                                .str => try new_file.writeAll(" \'"),
-                                .int_array => try new_file.writeAll(" "),
-                                .float_array => try new_file.writeAll(" "),
-                                .str_array => try new_file.writeAll(" "),
-                                .bool_array => try new_file.writeAll(" "),
-                                .id_array => try new_file.writeAll(" "),
-                                else => try new_file.writeAll(" "),
+                                .str => new_file.writeAll(" \'") catch return FileEngineError.WriteError,
+                                .int_array => new_file.writeAll(" ") catch return FileEngineError.WriteError,
+                                .float_array => new_file.writeAll(" ") catch return FileEngineError.WriteError,
+                                .str_array => new_file.writeAll(" ") catch return FileEngineError.WriteError,
+                                .bool_array => new_file.writeAll(" ") catch return FileEngineError.WriteError,
+                                .id_array => new_file.writeAll(" ") catch return FileEngineError.WriteError,
+                                else => new_file.writeAll(" ") catch return FileEngineError.WriteError,
                            }
-                            try new_file.writeAll(output_fbs.getWritten());
+                            new_file.writeAll(output_fbs.getWritten()) catch return FileEngineError.WriteError;
                            switch (member_type) {
-                                .str => try new_file.writeAll("\'"),
-                                .int_array, .float_array, .bool_array, .id_array => try new_file.writeAll("]"),
+                                .str => new_file.writeAll("\'") catch return FileEngineError.WriteError,
+                                .int_array, .float_array, .bool_array, .id_array => new_file.writeAll("]") catch return FileEngineError.WriteError,
                                else => {},
                            }
                        }
                    }
-                    try reader.streamUntilDelimiter(writer, '\n', null);
-                    try new_file.writeAll("\n");
+                    reader.streamUntilDelimiter(writer, '\n', null) catch return FileEngineError.WriteError;
+                    new_file.writeAll("\n") catch return FileEngineError.WriteError;
                }
            }
        }

    /// Take a list of UUID and a struct name and delete the rows with the same UUID
    /// TODO: Use B+Tree
-    pub fn deleteEntities(self: *FileEngine, struct_name: []const u8, uuids: []UUID) !usize {
+    pub fn deleteEntities(self: *FileEngine, struct_name: []const u8, uuids: []UUID) FileEngineError!usize {
        const
max_file_index = self.maxFileIndex(struct_name) catch @panic("Cant get max index file when updating"); var current_file_index: usize = 0; - var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + var path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; defer self.allocator.free(path_buff); - var path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + var path_buff2 = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; defer self.allocator.free(path_buff2); - var old_file = std.fs.cwd().openFile(path_buff, .{}) catch { - std.debug.print("Path: {s}", .{path_buff}); - @panic("Can't open first file to init a data iterator"); - }; + var old_file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; self.allocator.free(path_buff); - path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata.new", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; - var new_file = std.fs.cwd().createFile(path_buff, .{}) catch { - std.debug.print("Path: {s}", .{path_buff}); - @panic("Can't create new file to init a data iterator"); - }; + var new_file = std.fs.cwd().createFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; defer new_file.close(); var output: [BUFFER_SIZE]u8 = undefined; // Maybe need to increase that as it limit the size of a line in a file @@ -799,34 +857,44 @@ pub const FileEngine = struct { // Start by deleting and renaming the new file self.allocator.free(path_buff); - path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; self.allocator.free(path_buff2); - path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff2 = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata.new", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; old_file.close(); - try std.fs.cwd().deleteFile(path_buff); - try std.fs.cwd().rename(path_buff2, path_buff); + std.fs.cwd().deleteFile(path_buff) catch return FileEngineError.DeleteFileError; + std.fs.cwd().rename(path_buff2, path_buff) catch return FileEngineError.RenameFileError; if (current_file_index == max_file_index) break; current_file_index += 1; self.allocator.free(path_buff); - 
path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; self.allocator.free(path_buff2); - path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator"); + path_buff2 = std.fmt.allocPrint( + self.allocator, + "{s}/DATA/{s}/{d}.zippondata.new", + .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }, + ) catch return FileEngineError.MemoryError; - old_file = std.fs.cwd().openFile(path_buff, .{}) catch { - std.debug.print("Error trying to open {s}\n", .{path_buff}); - @panic("Can't open file to update entities"); - }; + old_file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile; - new_file = std.fs.cwd().createFile(path_buff2, .{}) catch { - std.debug.print("Error trying to create {s}\n", .{path_buff2}); - @panic("Can't create file to update entities"); - }; + new_file = std.fs.cwd().createFile(path_buff2, .{}) catch return FileEngineError.CantOpenFile; buffered = std.io.bufferedReader(old_file.reader()); reader = buffered.reader(); @@ -839,7 +907,7 @@ pub const FileEngine = struct { }; // THis is the uuid of the current row - const uuid = try UUID.parse(output_fbs.getWritten()[0..36]); + const uuid = UUID.parse(output_fbs.getWritten()[0..36]) catch return FileEngineError.InvalidUUID; founded = false; // Optimize this @@ -853,15 +921,15 @@ pub const FileEngine = struct { if (!founded) { // stream until the delimiter - try new_file.writeAll(output_fbs.getWritten()); + new_file.writeAll(output_fbs.getWritten()) catch return FileEngineError.WriteError; output_fbs.reset(); - try new_file.writeAll(" "); - try reader.streamUntilDelimiter(writer, '\n', null); - try new_file.writeAll(output_fbs.getWritten()); - try new_file.writeAll("\n"); + new_file.writeAll(" ") catch return FileEngineError.WriteError; + reader.streamUntilDelimiter(writer, '\n', null) catch return FileEngineError.WriteError; + new_file.writeAll(output_fbs.getWritten()) catch return FileEngineError.WriteError; + new_file.writeAll("\n") catch return FileEngineError.WriteError; } else { - try reader.streamUntilDelimiter(writer, '\n', null); + reader.streamUntilDelimiter(writer, '\n', null) catch return FileEngineError.WriteError; } } @@ -871,50 +939,60 @@ pub const FileEngine = struct { // --------------------Schema utils-------------------- /// Get the index of the first file that is bellow the size limit. 
If not found, return null
-    fn getFirstUsableIndexFile(self: FileEngine, struct_name: []const u8) !?usize {
-        const path = try std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}", .{ self.path_to_ZipponDB_dir, struct_name });
+    fn getFirstUsableIndexFile(self: FileEngine, struct_name: []const u8) FileEngineError!?usize {
+        const path = std.fmt.allocPrint(
+            self.allocator,
+            "{s}/DATA/{s}",
+            .{ self.path_to_ZipponDB_dir, struct_name },
+        ) catch return FileEngineError.MemoryError;
        defer self.allocator.free(path);

-        var member_dir = try std.fs.cwd().openDir(path, .{ .iterate = true });
+        var member_dir = std.fs.cwd().openDir(path, .{ .iterate = true }) catch return FileEngineError.CantOpenDir;
        defer member_dir.close();

        var iter = member_dir.iterate();
-        while (try iter.next()) |entry| {
-            const file_stat = try member_dir.statFile(entry.name);
-            if (file_stat.size < self.max_file_size) return try std.fmt.parseInt(usize, entry.name[0..(entry.name.len - 11)], 10);
+        while (iter.next() catch return FileEngineError.DirIterError) |entry| {
+            const file_stat = member_dir.statFile(entry.name) catch return FileEngineError.FileStatError;
+            if (file_stat.size < MAX_FILE_SIZE) {
+                return std.fmt.parseInt(usize, entry.name[0..(entry.name.len - 11)], 10) catch return FileEngineError.InvalidFileIndex; // TODO: Change the slice when we start using CSV
+            }
        }
        return null;
    }

    /// Iterate over all files of a struct and return the index of the last file.
    /// E.g. for a struct with 0.csv and 1.csv it returns 1.
-    fn maxFileIndex(self: FileEngine, struct_name: []const u8) !usize {
-        const path = try std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}", .{ self.path_to_ZipponDB_dir, struct_name });
+    fn maxFileIndex(self: FileEngine, struct_name: []const u8) FileEngineError!usize {
+        const path = std.fmt.allocPrint(
+            self.allocator,
+            "{s}/DATA/{s}",
+            .{ self.path_to_ZipponDB_dir, struct_name },
+        ) catch return FileEngineError.MemoryError;
        defer self.allocator.free(path);

-        const member_dir = try std.fs.cwd().openDir(path, .{ .iterate = true });
+        const member_dir = std.fs.cwd().openDir(path, .{ .iterate = true }) catch return FileEngineError.CantOpenDir;
        var count: usize = 0;

        var iter = member_dir.iterate();
-        while (try iter.next()) |entry| {
+        while (iter.next() catch return FileEngineError.DirIterError) |entry| {
            if (entry.kind != .file) continue;
            count += 1;
        }

        return count - 1;
    }

-    pub fn writeSchemaFile(self: *FileEngine) void {
-        var zippon_dir = std.fs.cwd().openDir(self.path_to_ZipponDB_dir, .{}) catch @panic("Cant open main folder!");
+    pub fn writeSchemaFile(self: *FileEngine) FileEngineError!void {
+        var zippon_dir = std.fs.cwd().openDir(self.path_to_ZipponDB_dir, .{}) catch return FileEngineError.CantOpenDir;
        defer zippon_dir.close();

        zippon_dir.deleteFile("schema.zipponschema") catch |err| switch (err) {
            error.FileNotFound => {},
-            else => @panic("Error other than file not found when writing the schema."),
+            else => return FileEngineError.DeleteFileError,
        };

-        var file = zippon_dir.createFile("schema.zipponschema", .{}) catch @panic("Can't create new schema file");
+        var file = zippon_dir.createFile("schema.zipponschema", .{}) catch return FileEngineError.CantMakeFile;
        defer file.close();
-        file.writeAll(self.null_terminated_schema_buff) catch @panic("Can't write new schema");
+        file.writeAll(self.null_terminated_schema_buff) catch return FileEngineError.WriteError;
    }

    pub fn locToSlice(self: *FileEngine, loc: SchemaToken.Loc) []const u8 {
@@ -922,34 +1000,41 @@
    }
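The `entry.name[0..(entry.name.len - 11)]` slice in `getFirstUsableIndexFile` above works because the `.zippondata` suffix is exactly 11 bytes, so the slice keeps only the numeric index. A quick illustrative test (not part of the patch):

```zig
const std = @import("std");

test "index slice drops the 11-byte .zippondata suffix" {
    // "12.zippondata" minus the 11-byte suffix leaves just "12".
    const name = "12.zippondata";
    const index = try std.fmt.parseInt(usize, name[0 .. name.len - 11], 10);
    try std.testing.expectEqual(@as(usize, 12), index);
}
```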
-    pub fn memberName2DataType(self: *FileEngine, struct_name: []const u8, member_name: []const u8) ?DataType {
+    pub fn memberName2DataType(self: *FileEngine, struct_name: []const u8, member_name: []const u8) FileEngineError!DataType {
         var i: u16 = 0;

-        for (self.structName2structMembers(struct_name)) |mn| {
-            if (std.mem.eql(u8, self.locToSlice(mn), member_name)) return self.structName2DataType(struct_name)[i];
+        for (try self.structName2structMembers(struct_name)) |mn| {
+            const dtypes = try self.structName2DataType(struct_name);
+            if (std.mem.eql(u8, self.locToSlice(mn), member_name)) return dtypes[i];
             i += 1;
         }

-        return null;
+        return FileEngineError.MemberNotFound;
     }

     /// Get the list of all member names for a struct name
-    pub fn structName2structMembers(self: *FileEngine, struct_name: []const u8) []SchemaToken.Loc {
+    pub fn structName2structMembers(self: *FileEngine, struct_name: []const u8) FileEngineError![]SchemaToken.Loc {
         var i: u16 = 0;

         while (i < self.struct_array.items.len) : (i += 1) if (std.mem.eql(u8, self.locToSlice(self.struct_array.items[i].name), struct_name)) break;

         if (i == self.struct_array.items.len) {
-            @panic("Struct name not found!");
+            return FileEngineError.StructNotFound;
         }

         return self.struct_array.items[i].members.items;
     }

-    pub fn structName2DataType(self: *FileEngine, struct_name: []const u8) []const DataType {
+    pub fn structName2DataType(self: *FileEngine, struct_name: []const u8) FileEngineError![]const DataType {
         var i: u16 = 0;

-        while (i < self.struct_array.items.len) : (i += 1) if (std.mem.eql(u8, self.locToSlice(self.struct_array.items[i].name), struct_name)) break;
+        while (i < self.struct_array.items.len) : (i += 1) {
+            if (std.mem.eql(u8, self.locToSlice(self.struct_array.items[i].name), struct_name)) break;
+        }
+
+        if (i == self.struct_array.items.len) {
+            return FileEngineError.StructNotFound;
+        }

         return self.struct_array.items[i].types.items;
     }
@@ -962,20 +1047,20 @@ pub const FileEngine = struct {
     }

     /// Check if a struct has the member name
-    pub fn isMemberNameInStruct(self: *FileEngine, struct_name: []const u8, member_name: []const u8) bool {
-        for (self.structName2structMembers(struct_name)) |mn| {
+    pub fn isMemberNameInStruct(self: *FileEngine, struct_name: []const u8, member_name: []const u8) FileEngineError!bool {
+        for (try self.structName2structMembers(struct_name)) |mn| { // No error returned here because the struct's existence should already have been checked
             if (std.mem.eql(u8, self.locToSlice(mn), member_name)) return true;
         }
         return false;
     }

     // Return true if the map has all the member names as keys and nothing more
-    pub fn checkIfAllMemberInMap(self: *FileEngine, struct_name: []const u8, map: *std.StringHashMap([]const u8)) bool {
-        const all_struct_member = self.structName2structMembers(struct_name);
+    pub fn checkIfAllMemberInMap(self: *FileEngine, struct_name: []const u8, map: *std.StringHashMap([]const u8)) FileEngineError!bool {
+        const all_struct_member = try self.structName2structMembers(struct_name);
         var count: u16 = 0;

         for (all_struct_member) |mn| {
-            if (map.contains(self.locToSlice(mn))) count += 1 else std.debug.print("Missing: {s}\n", .{self.locToSlice(mn)});
+            if (map.contains(self.locToSlice(mn))) count += 1 else std.debug.print("Missing: {s}\n", .{self.locToSlice(mn)}); // TODO: Handle the missing print better
         }

         return ((count == all_struct_member.len) and (count == map.count()));
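// Illustrative sketch, not part of this patch: the helpers above now return
// `FileEngineError!T` instead of optionals or panics, so callers are expected
// to propagate with `try` or translate the error themselves. The struct and
// member names below are placeholders:
//
//     fn exampleLookup(file_engine: *FileEngine) FileEngineError!void {
//         const dtype = try file_engine.memberName2DataType("User", "age");
//         const members = try file_engine.structName2structMembers("User");
//         _ = dtype;
//         _ = members;
//     }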
@@ -983,9 +1068,10 @@ pub const FileEngine = struct {
 };

 test "Get list of UUID using condition" {
+    const TEST_DATA_DIR = @import("config.zig").TEST_DATA_DIR;
     const allocator = std.testing.allocator;
-    const path = try allocator.dupe(u8, "ZipponDB");
+    const path = try allocator.dupe(u8, TEST_DATA_DIR);

     var file_engine = FileEngine.init(allocator, path);
     defer file_engine.deinit();
@@ -995,3 +1081,9 @@ test "Get list of UUID using condition" {
     const condition = FileEngine.Condition{ .struct_name = "User", .member_name = "email", .value = "adrien@mail.com", .operation = .equal, .data_type = .str };
     try file_engine.getUUIDListUsingCondition(condition, &uuid_array);
 }
+
+// FIXME:
+// You were adding proper errors to the file engine and implementing dates.
+// Next step is to keep building until all errors are gone.
+//
+// After that, implement date in the schema parser, and only allow parser errors there.
diff --git a/src/main.zig b/src/main.zig
index b1e1033..64f7a3c 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -23,6 +23,7 @@ const State = enum {
     end,
 };

+// TODO: If an argument is given when starting the binary, use it as the db path
 pub fn main() !void {
     var state: State = .expect_main_command;
@@ -44,10 +45,10 @@ pub fn main() !void {
         _ = std.fs.cwd().openDir(path, .{}) catch {
             std.debug.print("Error opening ZipponDB path using environment variable, please select the database using 'db use' or create a new one with 'db new'\n", .{});
             file_engine = FileEngine.init(allocator, try allocator.dupe(u8, ""));
-            to_init = true;
+            to_init = false;
         };
         if (to_init) {
-            file_engine = FileEngine.init(allocator, path_env_variable.?);
+            file_engine = FileEngine.init(allocator, path);
             try file_engine.checkAndCreateDirectories();
         }
     } else {
@@ -73,7 +74,7 @@ pub fn main() !void {
     while ((state != .end) and (state != .quit)) : (token = toker.next()) switch (state) {
         .expect_main_command => switch (token.tag) {
             .keyword_run => {
-                if (!file_engine.usable) {
+                if (!file_engine.usable()) {
                     send("Error: No database selected. Please use db new or db use.", .{});
                     state = .end;
                     continue;
@@ -82,7 +83,7 @@ pub fn main() !void {
             },
             .keyword_db => state = .expect_db_command,
             .keyword_schema => {
-                if (!file_engine.usable) {
+                if (!file_engine.usable()) {
                     send("Error: No database selected. Please use db new or db use.", .{});
                     state = .end;
                     continue;
@@ -116,7 +117,7 @@ pub fn main() !void {
             .keyword_new => state = .expect_path_to_new_db,
             .keyword_use => state = .expect_path_to_db,
             .keyword_metrics => {
-                if (!file_engine.usable) {
+                if (!file_engine.usable()) {
                     send("Error: No database selected. Please use db new or db use.", .{});
                     state = .end;
                     continue;
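// Illustrative sketch, not part of this patch: main.zig now calls
// `file_engine.usable()` instead of reading the removed `usable: bool` field.
// The actual method lives in fileEngine.zig and is not shown in this diff; a
// minimal implementation consistent with these call sites would be:
//
//     pub fn usable(self: FileEngine) bool {
//         return self.path_to_ZipponDB_dir.len != 0;
//     }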
diff --git a/src/schemaParser.zig b/src/schemaParser.zig
index 2206d0d..e323d65 100644
--- a/src/schemaParser.zig
+++ b/src/schemaParser.zig
@@ -6,10 +6,7 @@ const Token = @import("tokenizers/schema.zig").Token;
 const send = @import("stuffs/utils.zig").send;
 const printError = @import("stuffs/utils.zig").printError;

-const SchemaParserError = error{
-    SynthaxError,
-    FeatureMissing,
-};
+const SchemaParserError = @import("stuffs/errors.zig").SchemaParserError;

 const State = enum {
     end,
@@ -35,10 +32,6 @@ pub const Parser = struct {
         };
     }

-    // Maybe I the name and member can be Loc, with a start and end, and use the buffer to get back the value
-    // This is how Token works
-    // From my understanding this is the same here. I put slices, that can just a len and a pointer, put I con't save the value itself.
-    // Or maybe I do actually, and an array of pointer would be *[]u8
     pub const SchemaStruct = struct {
         allocator: Allocator,
         name: Token.Loc,
@@ -46,7 +39,12 @@ pub const Parser = struct {
         types: std.ArrayList(DataType),

         pub fn init(allocator: Allocator, name: Token.Loc) SchemaStruct {
-            return SchemaStruct{ .allocator = allocator, .name = name, .members = std.ArrayList(Token.Loc).init(allocator), .types = std.ArrayList(DataType).init(allocator) };
+            return SchemaStruct{
+                .allocator = allocator,
+                .name = name,
+                .members = std.ArrayList(Token.Loc).init(allocator),
+                .types = std.ArrayList(DataType).init(allocator),
+            };
         }

         pub fn deinit(self: *SchemaStruct) void {
@@ -78,15 +76,27 @@ pub const Parser = struct {
             .expect_struct_name_OR_end => switch (token.tag) {
                 .identifier => {
                     state = .expect_l_paren;
-                    struct_array.append(SchemaStruct.init(self.allocator, token.loc)) catch @panic("Error appending a struct name.");
+                    struct_array.append(SchemaStruct.init(self.allocator, token.loc)) catch return SchemaParserError.MemoryError;
                 },
                 .eof => state = .end,
-                else => return printError("Error parsing schema: Expected a struct name", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error parsing schema: Expected a struct name",
+                    SchemaParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .expect_l_paren => switch (token.tag) {
                 .l_paren => state = .expect_member_name,
-                else => return printError("Error parsing schema: Expected (", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error parsing schema: Expected (",
+                    SchemaParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .expect_member_name_OR_r_paren => switch (token.tag) {
@@ -98,67 +108,125 @@ pub const Parser = struct {
                     state = .expect_struct_name_OR_end;
                     index += 1;
                 },
-                else => return printError("Error parsing schema: Expected member name or )", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error parsing schema: Expected member name or )",
+                    SchemaParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .expect_member_name => {
                 state = .expect_two_dot;
-                struct_array.items[index].members.append(token.loc) catch @panic("Error appending a member name.");
+                struct_array.items[index].members.append(token.loc) catch return SchemaParserError.MemoryError;
             },

             .expect_two_dot => switch (token.tag) {
                 .two_dot => state = .expect_value_type,
-                else => return printError("Error parsing schema: Expected :", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error parsing schema: Expected :",
+                    SchemaParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .expect_value_type => switch (token.tag) {
                 .type_int => {
                     state = .expect_comma;
-                    struct_array.items[index].types.append(DataType.int) catch @panic("Error appending a type.");
+                    struct_array.items[index].types.append(DataType.int) catch return SchemaParserError.MemoryError;
                 },
                 .type_str => {
                     state = .expect_comma;
-                    struct_array.items[index].types.append(DataType.str) catch @panic("Error appending a type.");
+                    struct_array.items[index].types.append(DataType.str) catch return SchemaParserError.MemoryError;
                 },
                 .type_float => {
                     state = .expect_comma;
-                    struct_array.items[index].types.append(DataType.float) catch @panic("Error appending a type.");
+                    struct_array.items[index].types.append(DataType.float) catch return SchemaParserError.MemoryError;
                 },
                 .type_bool => {
                     state = .expect_comma;
-                    struct_array.items[index].types.append(DataType.bool) catch @panic("Error appending a type.");
+                    struct_array.items[index].types.append(DataType.bool) catch return SchemaParserError.MemoryError;
                 },
-                .type_date => @panic("Date not yet implemented"),
-                .identifier => @panic("Link not yet implemented"),
+                .type_date => {
+                    state = .expect_comma;
+                    struct_array.items[index].types.append(DataType.date) catch return SchemaParserError.MemoryError;
+                },
+                .type_time => {
+                    state = .expect_comma;
+                    struct_array.items[index].types.append(DataType.time) catch return SchemaParserError.MemoryError;
+                },
+                .type_datetime => {
+                    state = .expect_comma;
+                    struct_array.items[index].types.append(DataType.datetime) catch return SchemaParserError.MemoryError;
+                },
+                .identifier => return SchemaParserError.FeatureMissing,
                 .lr_bracket => state = .expext_array_type,
-                else => return printError("Error parsing schema: Expected data type", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error parsing schema: Expected data type",
+                    SchemaParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .expext_array_type => switch (token.tag) {
                 .type_int => {
                     state = .expect_comma;
-                    struct_array.items[index].types.append(DataType.int_array) catch @panic("Error appending a type.");
+                    struct_array.items[index].types.append(DataType.int_array) catch return SchemaParserError.MemoryError;
                 },
                 .type_str => {
                     state = .expect_comma;
-                    struct_array.items[index].types.append(DataType.str_array) catch @panic("Error appending a type.");
+                    struct_array.items[index].types.append(DataType.str_array) catch return SchemaParserError.MemoryError;
                 },
                 .type_float => {
                     state = .expect_comma;
-                    struct_array.items[index].types.append(DataType.float_array) catch @panic("Error appending a type.");
+                    struct_array.items[index].types.append(DataType.float_array) catch return SchemaParserError.MemoryError;
                 },
                 .type_bool => {
                     state = .expect_comma;
-                    struct_array.items[index].types.append(DataType.bool_array) catch @panic("Error appending a type.");
+                    struct_array.items[index].types.append(DataType.bool_array) catch return SchemaParserError.MemoryError;
                 },
-                .type_date => return printError("Error parsing schema: Data not yet implemented", SchemaParserError.FeatureMissing, self.toker.buffer, token.loc.start, token.loc.end),
-                .identifier => return printError("Error parsing schema: Relationship not yet implemented", SchemaParserError.FeatureMissing, self.toker.buffer, token.loc.start, token.loc.end),
-                else => return printError("Error parsing schema: Expected data type", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                .type_date => {
+                    state = .expect_comma;
+                    struct_array.items[index].types.append(DataType.date_array) catch return SchemaParserError.MemoryError;
+                },
+                .type_time => {
+                    state = .expect_comma;
+                    struct_array.items[index].types.append(DataType.time_array) catch return SchemaParserError.MemoryError;
+                },
+                .type_datetime => {
+                    state = .expect_comma;
+                    struct_array.items[index].types.append(DataType.datetime_array) catch return SchemaParserError.MemoryError;
+                },
+                .identifier => return printError(
+                    "Error parsing schema: Relationship not yet implemented",
+                    SchemaParserError.FeatureMissing,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
+                else => return printError(
+                    "Error parsing schema: Expected data type",
+                    SchemaParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .expect_comma => switch (token.tag) {
                 .comma => state = .expect_member_name_OR_r_paren,
-                else => return printError("Error parsing schema: Expected ,", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error parsing schema: Expected ,",
+                    SchemaParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             else => unreachable,
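// Illustrative sketch, not part of this patch: a schema that exercises the new
// `date`, `time` and `datetime` branches above, including the `[]` array form
// (the struct and its members are made up, in the style of example.zipponschema):
//
//     Event (
//         name: str,
//         start: datetime,
//         doors_open: time,
//         days: []date,
//     )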
diff --git a/src/stuffs/errors.zig b/src/stuffs/errors.zig
new file mode 100644
index 0000000..c0574f3
--- /dev/null
+++ b/src/stuffs/errors.zig
@@ -0,0 +1,42 @@
+pub const ZiQlParserError = error{
+    SynthaxError,
+    MemberNotFound,
+    MemberMissing,
+    StructNotFound,
+    FeatureMissing,
+    ParsingValueError,
+    ConditionError,
+};
+
+pub const SchemaParserError = error{
+    SynthaxError,
+    FeatureMissing,
+    ValueParsingError,
+    MemoryError,
+};
+
+pub const FileEngineError = error{
+    SchemaFileNotFound,
+    SchemaNotConform,
+    DATAFolderNotFound,
+    StructFolderNotFound,
+    CantMakeDir,
+    CantMakeFile,
+    CantOpenDir,
+    CantOpenFile,
+    MemoryError,
+    StreamError,
+    ReadError, // TODO: Only use stream
+    InvalidUUID,
+    InvalidDate,
+    InvalidFileIndex,
+    DirIterError,
+    WriteError,
+    FileStatError,
+    DeleteFileError,
+    RenameFileError,
+    StructNotFound,
+    MemberNotFound,
+};
+
+pub const ZipponError = ZiQlParserError || FileEngineError || SchemaParserError;
diff --git a/src/stuffs/utils.zig b/src/stuffs/utils.zig
index 69cbe50..5722b61 100644
--- a/src/stuffs/utils.zig
+++ b/src/stuffs/utils.zig
@@ -1,4 +1,5 @@
 const std = @import("std");
+const ZipponError = @import("errors.zig").ZipponError;

 const stdout = std.io.getStdOut().writer();
@@ -43,7 +44,7 @@ pub fn send(comptime format: []const u8, args: anytype) void {
 }

 /// Print an error and send it to the user pointing to the token
-pub fn printError(message: []const u8, err: anyerror, query: ?[]const u8, start: ?usize, end: ?usize) anyerror {
+pub fn printError(message: []const u8, err: ZipponError, query: ?[]const u8, start: ?usize, end: ?usize) ZipponError {
     const allocator = std.heap.page_allocator;
     var buffer = std.ArrayList(u8).init(allocator);
     defer buffer.deinit();
@@ -54,7 +55,7 @@ pub fn printError(message: []const u8, err: anyerror, query: ?[]const u8, start:
     writer.print("{s}\n", .{message}) catch {};

     if ((start != null) and (end != null) and (query != null)) {
-        const buffer_query = try allocator.dupe(u8, query.?);
+        const buffer_query = allocator.dupe(u8, query.?) catch return ZipponError.MemoryError;
         defer allocator.free(buffer_query);

         std.mem.replaceScalar(u8, buffer_query, '\n', ' ');
diff --git a/src/tokenizers/cli.zig b/src/tokenizers/cli.zig
index 57ab64d..c1fa9ae 100644
--- a/src/tokenizers/cli.zig
+++ b/src/tokenizers/cli.zig
@@ -116,7 +116,7 @@ pub const Tokenizer = struct {
             },

             .identifier => switch (c) {
-                'a'...'z', 'A'...'Z', '_', '0'...'9', '.' => continue,
+                'a'...'z', 'A'...'Z', '_', '0'...'9', '.', '/' => continue,
                 else => {
                     if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |tag| {
                         result.tag = tag;
diff --git a/src/tokenizers/file.zig b/src/tokenizers/file.zig
index 90fa83c..24458d4 100644
--- a/src/tokenizers/file.zig
+++ b/src/tokenizers/file.zig
@@ -13,6 +13,10 @@ pub const Token = struct {
         string_literal,
         int_literal,
         float_literal,
+        uuid_literal,
+        date_literal,
+        time_literal,
+        datetime_literal,
         l_bracket, // [
         r_bracket, // ]
     };
@@ -36,6 +40,9 @@ pub const Tokenizer = struct {
         string_literal,
         float,
         int,
+        uuid_literal,
+        date_literal,
+        time_literal,
     };

     pub fn getTokenSlice(self: *Tokenizer, token: Token) []const u8 {
@@ -65,6 +72,11 @@ pub const Tokenizer = struct {
                     state = .string_literal;
                     result.tag = .string_literal;
                 },
+                'a'...'z' => {
+                    state = .uuid_literal;
+                    result.tag = .uuid_literal;
+                },
+
                 '0'...'9', '-' => {
                     state = .int;
                     result.tag = .int_literal;
@@ -95,9 +107,22 @@ pub const Tokenizer = struct {
                         state = .float;
                         result.tag = .float_literal;
                     },
-                    '0'...'9' => continue,
+                    'a'...'z', '-' => {
+                        state = .uuid_literal;
+                        result.tag = .uuid_literal;
+                    },
+                    '/' => {
+                        state = .date_literal;
+                        result.tag = .date_literal;
+                    },
+                    ':' => {
+                        state = .time_literal;
+                        result.tag = .time_literal;
+                    },
+                    '_', '0'...'9' => continue,
                     else => break,
                 },
+
                 .float => switch (c) {
                     '0'...'9' => {
                         continue;
                     },
                     else => {
                         break;
                     },
                 },
+
+                .date_literal => switch (c) {
+                    '-' => {
+                        state = .time_literal;
+                        result.tag = .datetime_literal;
+                    },
+                    '0'...'9', '/' => continue,
+                    else => break,
+                },
+
+                .time_literal => switch (c) {
+                    '0'...'9', ':', '.' => continue,
+                    else => break,
+                },
+
+                .uuid_literal => switch (c) {
+                    '0'...'9', 'a'...'z', '-' => continue,
+                    else => break,
+                },
             }
         }
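// Illustrative sketch, not part of this patch: how the new tokenizer states
// classify a value while scanning. A literal starts in `.int`; '/' switches it
// to `.date_literal`, ':' to `.time_literal`, a letter to `.uuid_literal`, and
// a '-' inside a date upgrades the tag to datetime. In the style of the
// testTokenize tests in ziql.zig (file.zig has no such helper in this diff):
//
//     try testTokenize("1998/01/21", &.{.date_literal});
//     try testTokenize("17:55:31.0000", &.{.time_literal});
//     try testTokenize("1998/01/21-17:55:31.0000", &.{.datetime_literal});
//     try testTokenize("1a5527af-88fb-48c1-8d5c-49c9b73c2379", &.{.uuid_literal});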
diff --git a/src/tokenizers/ziql.zig b/src/tokenizers/ziql.zig
index 1067f24..a9b6faf 100644
--- a/src/tokenizers/ziql.zig
+++ b/src/tokenizers/ziql.zig
@@ -95,7 +95,6 @@ pub const Tokenizer = struct {
         string_literal,
         date_literal,
         time_literal,
-        datetime_literal,
         uuid_literal,
         identifier,
         equal,
@@ -103,9 +102,7 @@ pub const Tokenizer = struct {
         angle_bracket_left,
         angle_bracket_right,
         string_literal_backslash,
-        int_exponent,
         float,
-        float_exponent,
         int,
     };
@@ -204,7 +201,7 @@ pub const Tokenizer = struct {
                     state = .float;
                     result.tag = .float_literal;
                 },
-                '0'...'9' => {
+                '0'...'9', '-' => {
                     state = .int;
                     result.tag = .int_literal;
                 },
@@ -324,7 +321,7 @@ pub const Tokenizer = struct {
                         state = .float;
                         result.tag = .float_literal;
                     },
-                    'a'...'d', 'f'...'z' => {
+                    'a'...'z', '-' => {
                         state = .uuid_literal;
                         result.tag = .uuid_literal;
                     },
@@ -332,35 +329,15 @@ pub const Tokenizer = struct {
                         state = .date_literal;
                         result.tag = .date_literal;
                     },
-                    '-' => {
-                        if ((self.index - result.loc.start) == 2) {
-                            state = .time_literal;
-                            result.tag = .time_literal;
-                        } else { // Just in case a uuid have only number as fist part of its UUID
-                            state = .uuid_literal;
-                            result.tag = .uuid_literal;
-                        }
-                    },
-                    'e', 'E' => {
-                        state = .int_exponent;
-                        result.tag = .float_literal;
+                    ':' => {
+                        state = .time_literal;
+                        result.tag = .time_literal;
                     },
                     '_', '0'...'9' => continue,
                     else => break,
                 },

-                .int_exponent => switch (c) {
-                    '+', '-', '0'...'9' => {
-                        state = .float;
-                    },
-                    else => {
-                        self.index -= 1;
-                        break;
-                    },
-                },
+
                 .float => switch (c) {
-                    'e', 'E' => {
-                        state = .float_exponent;
-                    },
                     '_', '0'...'9' => {
                         continue;
                     },
@@ -368,31 +345,21 @@ pub const Tokenizer = struct {
                         break;
                     },
                 },
-                .float_exponent => switch (c) {
-                    '+', '-', '0'...'9' => {
-                        continue;
-                    },
-                    else => {
-                        self.index -= 1;
-                        break;
-                    },
-                },
+
                 .date_literal => switch (c) {
-                    '|' => {
-                        state = .datetime_literal;
+                    '-' => {
+                        state = .time_literal;
                         result.tag = .datetime_literal;
                     },
                     '0'...'9', '/' => continue,
                     else => break,
                 },
+
                 .time_literal => switch (c) {
-                    '0'...'9', '-', '.' => continue,
-                    else => break,
-                },
-                .datetime_literal => switch (c) {
-                    '0'...'9', '-', '.' => continue,
+                    '0'...'9', ':', '.' => continue,
                     else => break,
                 },
+
                 .uuid_literal => switch (c) {
                     '0'...'9', 'a'...'z', '-' => continue,
                     else => break,
@@ -421,9 +388,9 @@ test "basic query" {

 test "basic date" {
     try testTokenize("1a5527af-88fb-48c1-8d5c-49c9b73c2379", &.{.uuid_literal});
-    try testTokenize("21/01/1998", &.{.date_literal});
-    try testTokenize("17-55-31.0000", &.{.time_literal});
-    try testTokenize("21/01/1998|17-55-31.0000", &.{.datetime_literal});
+    try testTokenize("1998/01/21", &.{.date_literal});
+    try testTokenize("17:55:31.0000", &.{.time_literal});
+    try testTokenize("1998/01/21-17:55:31.0000", &.{.datetime_literal});
 }

 fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !void {
diff --git a/src/types/dataType.zig b/src/types/dataType.zig
index 7da5f1e..51ac343 100644
--- a/src/types/dataType.zig
+++ b/src/types/dataType.zig
@@ -1,13 +1,20 @@
 /// Supported dataType for the DB
+/// Maybe start using a union(enum)
 pub const DataType = enum {
     int,
     float,
     str,
     bool,
     id,
+    date,
+    time,
+    datetime,
     int_array,
     float_array,
     str_array,
     bool_array,
     id_array,
+    date_array,
+    time_array,
+    datetime_array,
 };
diff --git a/src/types/date.zig b/src/types/date.zig
index ab08bb3..eaa6490 100644
--- a/src/types/date.zig
+++ b/src/types/date.zig
@@ -2,7 +2,6 @@ const std = @import("std");

 const string = []const u8;
-const extras = @import("extras");
 const time = @This();

 pub const DateTime = struct {
@@ -13,9 +12,8 @@ pub const DateTime = struct {
     days: u8,
     months: u8,
     years: u16,
-    timezone: TimeZone,

-    const Self = @This();
+    const Self = @This(); // Not bad, this

     pub fn initUnixMs(unix: u64) Self {
         return epoch_unix.addMs(unix);
@@ -26,14 +24,15 @@ pub const DateTime = struct {
     }

     /// Caller asserts that this is > epoch
-    pub fn init(year: u16, month: u16, day: u16, hr: u16, min: u16, sec: u16) Self {
+    pub fn init(year: u16, month: u16, day: u16, hr: u16, min: u16, sec: u16, ms: u16) Self {
         return epoch_unix
             .addYears(year - epoch_unix.years)
             .addMonths(month)
             .addDays(day)
             .addHours(hr)
             .addMins(min)
-            .addSecs(sec);
+            .addSecs(sec)
+            .addMs(ms);
     }

     pub fn now() Self {
@@ -47,8 +46,7 @@ pub const DateTime = struct {
         .hours = 0,
         .days = 0,
         .months = 0,
-        .years = 1970, // Why ?
-        .timezone = .UTC,
+        .years = 0,
     };

     pub fn eql(self: Self, other: Self) bool {
@@ -58,9 +56,30 @@ pub const DateTime = struct {
             self.hours == other.hours and
             self.days == other.days and
             self.months == other.months and
-            self.years == other.years and
-            self.timezone == other.timezone and
-            self.weekday == other.weekday;
+            self.years == other.years;
+    }
+
+    pub fn compareDate(self: Self, other: Self) bool {
+        return self.days == other.days and
+            self.months == other.months and
+            self.years == other.years;
+    }
+
+    pub fn compareTime(self: Self, other: Self) bool {
+        return self.ms == other.ms and
+            self.seconds == other.seconds and
+            self.minutes == other.minutes and
+            self.hours == other.hours;
+    }
+
+    pub fn compareDatetime(self: Self, other: Self) bool {
+        return self.ms == other.ms and
+            self.seconds == other.seconds and
+            self.minutes == other.minutes and
+            self.hours == other.hours and
+            self.days == other.days and
+            self.months == other.months and
+            self.years == other.years;
     }

     // So as long as the count overflows the unit, it continues adding into the next unit, smart
@@ -390,20 +409,6 @@ pub const DateTime = struct {
             .ms = self.toUnixMilli() - other_in_the_past.toUnixMilli(),
         };
     }
-
-    pub fn era(self: Self) Era {
-        if (self.years >= 0) return .AD;
-        @compileError("TODO");
-    }
-
-    pub fn weekday(self: Self) WeekDay {
-        var i = self.daysSinceEpoch() % 7;
-        var result = WeekDay.Thu; // weekday of epoch_unix
-        while (i > 0) : (i -= 1) {
-            result = result.next();
-        }
-        return result;
-    }
 };

 pub const format = struct {
@@ -419,43 +424,6 @@ pub const format = struct {
     pub const llll = "ddd, " ++ lll;
 };

-pub const TimeZone = enum {
-    UTC,
-
-    usingnamespace extras.TagNameJsonStringifyMixin(@This());
-};
-
-pub const WeekDay = enum {
-    Sun,
-    Mon,
-    Tue,
-    Wed,
-    Thu,
-    Fri,
-    Sat,
-
-    pub fn next(self: WeekDay) WeekDay {
-        return switch (self) {
-            .Sun => .Mon,
-            .Mon => .Tue,
-            .Tue => .Wed,
-            .Wed => .Thu,
-            .Thu => .Fri,
-            .Fri => .Sat,
-            .Sat => .Sun,
-        };
-    }
-
-    usingnamespace extras.TagNameJsonStringifyMixin(@This());
-};
-
-pub const Era = enum {
-    // BC,
-    AD,
-
-    usingnamespace extras.TagNameJsonStringifyMixin(@This());
-};
-
 pub fn isLeapYear(year: u16) bool {
     var ret = false;
     if (year % 4 == 0) ret = true;
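// Illustrative sketch, not part of this patch: the new compare* helpers only
// look at the fields relevant to each data type, so a full datetime can be
// matched against a date-only value (assumes the DateTime above is in scope,
// inside a test block):
//
//     const a = DateTime.init(1998, 1, 21, 17, 55, 31, 0);
//     const b = DateTime.init(1998, 1, 21, 0, 0, 0, 0);
//     try std.testing.expect(a.compareDate(b)); // same day, time ignored
//     try std.testing.expect(!a.compareTime(b)); // times differ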
diff --git a/src/types/stringToType.zig b/src/types/stringToType.zig
index 6e4e7ba..6a11dfe 100644
--- a/src/types/stringToType.zig
+++ b/src/types/stringToType.zig
@@ -1,5 +1,6 @@
 const std = @import("std");
 const UUID = @import("uuid.zig").UUID;
+const DateTime = @import("date.zig").DateTime;

 // TODO: Put those functions somewhere else
 pub fn parseInt(value_str: []const u8) i64 {
@@ -36,6 +37,69 @@ pub fn parseBool(value_str: []const u8) bool {
     return (value_str[0] != '0');
 }

+// TODO: Optimize all date parsing
+pub fn parseDate(value_str: []const u8) DateTime {
+    const year: u16 = std.fmt.parseInt(u16, value_str[0..4], 10) catch 0;
+    const month: u16 = std.fmt.parseInt(u16, value_str[5..7], 10) catch 0;
+    const day: u16 = std.fmt.parseInt(u16, value_str[8..10], 10) catch 0;
+
+    return DateTime.init(year, month, day, 0, 0, 0, 0);
+}
+
+pub fn parseArrayDate(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(DateTime) {
+    var array = std.ArrayList(DateTime).init(allocator);
+
+    var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
+    while (it.next()) |x| {
+        array.append(parseDate(x)) catch {};
+    }
+
+    return array;
+}
+
+pub fn parseTime(value_str: []const u8) DateTime {
+    const hours: u16 = std.fmt.parseInt(u16, value_str[0..2], 10) catch 0;
+    const minutes: u16 = std.fmt.parseInt(u16, value_str[3..5], 10) catch 0;
+    const seconds: u16 = if (value_str.len > 6) std.fmt.parseInt(u16, value_str[6..8], 10) catch 0 else 0;
+    const milliseconds: u16 = if (value_str.len > 9) std.fmt.parseInt(u16, value_str[9..13], 10) catch 0 else 0;
+
+    return DateTime.init(0, 0, 0, hours, minutes, seconds, milliseconds);
+}
+
+pub fn parseArrayTime(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(DateTime) {
+    var array = std.ArrayList(DateTime).init(allocator);
+
+    var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
+    while (it.next()) |x| {
+        array.append(parseTime(x)) catch {};
+    }
+
+    return array;
+}
+
+pub fn parseDatetime(value_str: []const u8) DateTime {
+    const year: u16 = std.fmt.parseInt(u16, value_str[0..4], 10) catch 0;
+    const month: u16 = std.fmt.parseInt(u16, value_str[5..7], 10) catch 0;
+    const day: u16 = std.fmt.parseInt(u16, value_str[8..10], 10) catch 0;
+    const hours: u16 = std.fmt.parseInt(u16, value_str[11..13], 10) catch 0;
+    const minutes: u16 = std.fmt.parseInt(u16, value_str[14..16], 10) catch 0;
+    const seconds: u16 = if (value_str.len > 17) std.fmt.parseInt(u16, value_str[17..19], 10) catch 0 else 0;
+    const milliseconds: u16 = if (value_str.len > 20) std.fmt.parseInt(u16, value_str[20..24], 10) catch 0 else 0;
+
+    return DateTime.init(year, month, day, hours, minutes, seconds, milliseconds);
+}
+
+pub fn parseArrayDatetime(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(DateTime) {
+    var array = std.ArrayList(DateTime).init(allocator);
+
+    var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
+    while (it.next()) |x| {
+        array.append(parseDatetime(x)) catch {};
+    }
+
+    return array;
+}
+
 pub fn parseArrayBool(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(bool) {
     var array = std.ArrayList(bool).init(allocator);
@@ -59,65 +123,176 @@ pub fn parseArrayUUID(allocator: std.mem.Allocator, array_str: []const u8) std.A
     return array;
 }

-// FIXME: I think it will not work if there is a ' inside the string
+// FIXME: I think it will not work if there is a ' inside the string, even \', so that needs fixing
 pub fn parseArrayStr(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList([]const u8) {
     var array = std.ArrayList([]const u8).init(allocator);

     var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], "'");
+    _ = it.next(); // Skip the first token, which is empty
     while (it.next()) |x| {
         if (std.mem.eql(u8, " ", x)) continue;
-        const x_copy = allocator.dupe(u8, x) catch @panic("=(");
-        // FIXME: I think I need to add the '' on each side again
+        const x_copy = std.fmt.allocPrint(allocator, "'{s}'", .{x}) catch @panic("=(");
         array.append(x_copy) catch {};
     }

+    allocator.free(array.pop()); // Remove the last element, which is empty like the first one
+
     return array;
 }
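// Illustrative note, not part of this patch: the parsers above use fixed slice
// offsets rather than real scanning, which is why the formats are strict. For
// "yyyy/mm/dd-hh:mm:ss.mmmm": year = [0..4], month = [5..7], day = [8..10],
// hours = [11..13], minutes = [14..16], seconds = [17..19] (only when
// value_str.len > 17) and milliseconds = [20..24] (only when value_str.len > 20).
// parseTime uses the same layout shifted to the start of the string.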
-test "Data parsing" {
+test "Value parsing: Int" {
     const allocator = std.testing.allocator;

     // Int
-    const in1: [3][]const u8 = .{ "1", "42", "Hello" };
-    const expected_out1: [3]i64 = .{ 1, 42, 0 };
-    for (in1, 0..) |value, i| {
-        try std.testing.expect(parseInt(value) == expected_out1[i]);
+    const values: [3][]const u8 = .{ "1", "42", "Hello" };
+    const expected_values: [3]i64 = .{ 1, 42, 0 };
+    for (values, 0..) |value, i| {
+        try std.testing.expect(parseInt(value) == expected_values[i]);
     }

     // Int array
-    const in2 = "[1 14 44 42 hello]";
-    const out2 = parseArrayInt(allocator, in2);
-    defer out2.deinit();
-    const expected_out2: [5]i64 = .{ 1, 14, 44, 42, 0 };
-    try std.testing.expect(std.mem.eql(i64, out2.items, &expected_out2));
+    const array_str = "[1 14 44 42 hello]";
+    const array = parseArrayInt(allocator, array_str);
+    defer array.deinit();
+    const expected_array: [5]i64 = .{ 1, 14, 44, 42, 0 };
+    try std.testing.expect(std.mem.eql(i64, array.items, &expected_array));
+}

+test "Value parsing: Float" {
+    const allocator = std.testing.allocator;

     // Float
-    const in3: [3][]const u8 = .{ "1.3", "65.991", "Hello" };
-    const expected_out3: [3]f64 = .{ 1.3, 65.991, 0 };
-    for (in3, 0..) |value, i| {
-        try std.testing.expect(parseFloat(value) == expected_out3[i]);
+    const values: [3][]const u8 = .{ "1.3", "65.991", "Hello" };
+    const expected_values: [3]f64 = .{ 1.3, 65.991, 0 };
+    for (values, 0..) |value, i| {
+        try std.testing.expect(parseFloat(value) == expected_values[i]);
     }

     // Float array
-    const in4 = "[1.5 14.3 44.9999 42 hello]";
-    const out4 = parseArrayFloat(allocator, in4);
-    defer out4.deinit();
-    const expected_out4: [5]f64 = .{ 1.5, 14.3, 44.9999, 42, 0 };
-    try std.testing.expect(std.mem.eql(f64, out4.items, &expected_out4));
+    const array_str = "[1.5 14.3 44.9999 42 hello]";
+    const array = parseArrayFloat(allocator, array_str);
+    defer array.deinit();
+    const expected_array: [5]f64 = .{ 1.5, 14.3, 44.9999, 42, 0 };
+    try std.testing.expect(std.mem.eql(f64, array.items, &expected_array));
+}

-    // Bool
-    const in5: [3][]const u8 = .{ "1", "Hello", "0" };
-    const expected_out5: [3]bool = .{ true, true, false };
-    for (in5, 0..) |value, i| {
-        try std.testing.expect(parseBool(value) == expected_out5[i]);
+test "Value parsing: String" {
+    // Note that I don't parse strings, because I don't need to: a string is a string
+
+    const allocator = std.testing.allocator;
+
+    // String array
+    const array_str = "['Hello' 'How are you doing ?' '']";
+    const array = parseArrayStr(allocator, array_str);
+    defer {
+        for (array.items) |parsed| {
+            allocator.free(parsed);
+        }
+        array.deinit();
+    }
+    const expected_array: [3][]const u8 = .{ "'Hello'", "'How are you doing ?'", "''" };
+    for (array.items, expected_array) |parsed, expected| {
+        try std.testing.expect(std.mem.eql(u8, parsed, expected));
+    }
+}
+test "Value parsing: Bool" {
+    const allocator = std.testing.allocator;
+
+    const values: [3][]const u8 = .{ "1", "Hello", "0" };
+    const expected_values: [3]bool = .{ true, true, false };
+    for (values, 0..) |value, i| {
+        try std.testing.expect(parseBool(value) == expected_values[i]);
     }

     // Bool array
-    const in6 = "[1 0 0 1 1]";
-    const out6 = parseArrayBool(allocator, in6);
-    defer out6.deinit();
-    const expected_out6: [5]bool = .{ true, false, false, true, true };
-    try std.testing.expect(std.mem.eql(bool, out6.items, &expected_out6));
-
-    // TODO: Test the string array
+    const array_str = "[1 0 0 1 1]";
+    const array = parseArrayBool(allocator, array_str);
+    defer array.deinit();
+    const expected_array: [5]bool = .{ true, false, false, true, true };
+    try std.testing.expect(std.mem.eql(bool, array.items, &expected_array));
+}
+
+test "Value parsing: Date" {
+    const allocator = std.testing.allocator;
+
+    // Date
+    const values: [3][]const u8 = .{ "1920/01/01", "1998/01/21", "2024/12/31" };
+    const expected_values: [3]DateTime = .{
+        DateTime.init(1920, 1, 1, 0, 0, 0, 0),
+        DateTime.init(1998, 1, 21, 0, 0, 0, 0),
+        DateTime.init(2024, 12, 31, 0, 0, 0, 0),
+    };
+    for (values, 0..) |value, i| {
+        try std.testing.expect(expected_values[i].compareDate(parseDate(value)));
+    }
+
+    // Date array
+    const array_str = "[1920/01/01 1998/01/21 2024/12/31]";
+    const array = parseArrayDate(allocator, array_str);
+    defer array.deinit();
+    const expected_array: [3]DateTime = .{
+        DateTime.init(1920, 1, 1, 0, 0, 0, 0),
+        DateTime.init(1998, 1, 21, 0, 0, 0, 0),
+        DateTime.init(2024, 12, 31, 0, 0, 0, 0),
+    };
+    for (array.items, expected_array) |parsed, expected| {
+        try std.testing.expect(expected.compareDate(parsed));
+    }
+}
+
+test "Value parsing: Time" {
+    const allocator = std.testing.allocator;
+
+    const values: [4][]const u8 = .{ "12:45:00.0000", "18:12:53.7491", "02:30:10", "12:30" };
+    const expected_values: [4]DateTime = .{
+        DateTime.init(0, 0, 0, 12, 45, 0, 0),
+        DateTime.init(0, 0, 0, 18, 12, 53, 7491),
+        DateTime.init(0, 0, 0, 2, 30, 10, 0),
+        DateTime.init(0, 0, 0, 12, 30, 0, 0),
+    };
+    for (values, 0..) |value, i| {
+        try std.testing.expect(expected_values[i].compareTime(parseTime(value)));
+    }
+
+    // Time array
+    const array_str = "[12:45:00.0000 18:12:53.7491 02:30:10 12:30]";
+    const array = parseArrayTime(allocator, array_str);
+    defer array.deinit();
+    const expected_array: [4]DateTime = .{
+        DateTime.init(0, 0, 0, 12, 45, 0, 0),
+        DateTime.init(0, 0, 0, 18, 12, 53, 7491),
+        DateTime.init(0, 0, 0, 2, 30, 10, 0),
+        DateTime.init(0, 0, 0, 12, 30, 0, 0),
+    };
+    for (array.items, expected_array) |parsed, expected| {
+        try std.testing.expect(expected.compareTime(parsed));
+    }
+}
+
+test "Value parsing: Datetime" {
+    const allocator = std.testing.allocator;
+
+    const values: [4][]const u8 = .{ "1920/01/01-12:45:00.0000", "1920/01/01-18:12:53.7491", "1920/01/01-02:30:10", "1920/01/01-12:30" };
+    const expected_values: [4]DateTime = .{
+        DateTime.init(1920, 1, 1, 12, 45, 0, 0),
+        DateTime.init(1920, 1, 1, 18, 12, 53, 7491),
+        DateTime.init(1920, 1, 1, 2, 30, 10, 0),
+        DateTime.init(1920, 1, 1, 12, 30, 0, 0),
+    };
+    for (values, 0..) |value, i| {
+        try std.testing.expect(expected_values[i].compareDatetime(parseDatetime(value)));
+    }
+
+    // Datetime array
+    const array_str = "[1920/01/01-12:45:00.0000 1920/01/01-18:12:53.7491 1920/01/01-02:30:10 1920/01/01-12:30]";
+    const array = parseArrayDatetime(allocator, array_str);
+    defer array.deinit();
+    const expected_array: [4]DateTime = .{
+        DateTime.init(1920, 1, 1, 12, 45, 0, 0),
+        DateTime.init(1920, 1, 1, 18, 12, 53, 7491),
+        DateTime.init(1920, 1, 1, 2, 30, 10, 0),
+        DateTime.init(1920, 1, 1, 12, 30, 0, 0),
+    };
+    for (array.items, expected_array) |parsed, expected| {
+        try std.testing.expect(expected.compareDatetime(parsed));
+    }
+}
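// Illustrative sketch, not part of this patch: with ziqlParser.zig below pulling
// its error set from src/stuffs/errors.zig, a top-level caller can match on the
// merged ZipponError set. The handler itself is hypothetical:
//
//     fn report(err: ZipponError) void {
//         switch (err) {
//             ZipponError.MemoryError => std.debug.print("Out of memory\n", .{}),
//             ZipponError.StructNotFound, ZipponError.MemberNotFound => std.debug.print("Unknown struct or member\n", .{}),
//             else => std.debug.print("Query failed: {s}\n", .{@errorName(err)}),
//         }
//     }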
Available: GRAB ADD DELETE UPDATE", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .expect_struct_name => { // Check if the struct name is in the schema self.struct_name = try self.allocator.dupe(u8, self.toker.getTokenSlice(token)); - if (token.tag != .identifier) return printError("Error: Missing struct name", ZiQlParserError.StructNotFound, self.toker.buffer, token.loc.start, token.loc.end); - if (!self.file_engine.isStructNameExists(self.struct_name)) return printError("Error: struct name not found in schema.", ZiQlParserError.StructNotFound, self.toker.buffer, token.loc.start, token.loc.end); + if (token.tag != .identifier) return printError( + "Error: Missing struct name", + ZiQlParserError.StructNotFound, + self.toker.buffer, + token.loc.start, + token.loc.end, + ); + if (!self.file_engine.isStructNameExists(self.struct_name)) return printError( + "Error: struct name not found in schema.", + ZiQlParserError.StructNotFound, + self.toker.buffer, + token.loc.start, + token.loc.end, + ); switch (self.action) { .ADD => self.state = .expect_new_data, else => self.state = .expect_filter_or_additional_data, @@ -157,7 +168,13 @@ pub const Parser = struct { else => unreachable, }, .eof => self.state = .filter_and_send, - else => return printError("Error: Expect [ for additional data or { for a filter", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), + else => return printError( + "Error: Expect [ for additional data or { for a filter", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), } }, @@ -188,7 +205,13 @@ pub const Parser = struct { self.sendEntity(&array); self.state = .end; }, - else => return printError("Error: Expected filter.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), + else => return printError( + "Error: Expected filter.", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, // TODO: Optimize so it doesnt use parseFilter but just parse the file and directly check the condition. Here I end up parsing 2 times. 
@@ -198,10 +221,22 @@ pub const Parser = struct {
                 var array = std.ArrayList(UUID).init(self.allocator);
                 defer array.deinit();

                 token = try self.parseFilter(&array, self.struct_name, true);

-                if (token.tag != .keyword_to) return printError("Error: Expected TO", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
+                if (token.tag != .keyword_to) return printError(
+                    "Error: Expected TO",
+                    ZiQlParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                );

                 token = self.toker.next();
-                if (token.tag != .l_paren) return printError("Error: Expected (", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
+                if (token.tag != .l_paren) return printError(
+                    "Error: Expected (",
+                    ZiQlParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                );

                 var data_map = std.StringHashMap([]const u8).init(self.allocator);
                 defer data_map.deinit();
@@ -216,7 +251,13 @@ pub const Parser = struct {
                 try self.file_engine.getAllUUIDList(self.struct_name, &array);

                 token = self.toker.next();
-                if (token.tag != .l_paren) return printError("Error: Expected (", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
+                if (token.tag != .l_paren) return printError(
+                    "Error: Expected (",
+                    ZiQlParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                );

                 var data_map = std.StringHashMap([]const u8).init(self.allocator);
                 defer data_map.deinit();
@@ -225,7 +266,13 @@ pub const Parser = struct {
                 try self.file_engine.updateEntities(self.struct_name, array.items, data_map);
                 self.state = .end;
             },
-            else => return printError("Error: Expected filter or TO.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+            else => return printError(
+                "Error: Expected filter or TO.",
+                ZiQlParserError.SynthaxError,
+                self.toker.buffer,
+                token.loc.start,
+                token.loc.end,
+            ),
             },

             .filter_and_delete => switch (token.tag) {
@@ -247,7 +294,13 @@ pub const Parser = struct {
                     std.debug.print("Successfully deleted all {d} {s}\n", .{ deleted_count, self.struct_name });
                     self.state = .end;
                 },
-                else => return printError("Error: Expected filter.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error: Expected filter.",
+                    ZiQlParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .expect_new_data => switch (token.tag) {
@@ -255,7 +308,13 @@ pub const Parser = struct {
                 .l_paren => {
                     keep_next = true;
                     self.state = .parse_new_data_and_add_data;
                 },
-                else => return printError("Error: Expected new data starting with (", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error: Expected new data starting with (",
+                    ZiQlParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .parse_new_data_and_add_data => {
@@ -264,7 +323,17 @@ pub const Parser = struct {
                 var data_map = std.StringHashMap([]const u8).init(self.allocator);
                 defer data_map.deinit();
                 try self.parseNewData(&data_map);

                 // TODO: Print the entire list of missing members
-                if (!self.file_engine.checkIfAllMemberInMap(self.struct_name, &data_map)) return printError("Error: Missing member", ZiQlParserError.MemberMissing, self.toker.buffer, token.loc.start, token.loc.end);
+                if (!(self.file_engine.checkIfAllMemberInMap(self.struct_name, &data_map) catch {
+                    return ZiQlParserError.StructNotFound;
+                })) {
+                    return printError(
+                        "Error: Missing member",
+                        ZiQlParserError.MemberMissing,
+                        self.toker.buffer,
+                        token.loc.start,
+                        token.loc.end,
+                    );
+                }
                 const uuid = self.file_engine.writeEntity(self.struct_name, data_map) catch {
                     send("ZipponDB error: Couldn't write new data to file", .{});
                     continue;
                 };
@@ -309,12 +378,24 @@ pub const Parser = struct {
                 .r_brace => if (main) {
                     self.state = .end;
                 } else {
-                    return printError("Error: Expected } to end main condition or AND/OR to continue it", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
+                    return printError(
+                        "Error: Expected } to end main condition or AND/OR to continue it",
+                        ZiQlParserError.SynthaxError,
+                        self.toker.buffer,
+                        token.loc.start,
+                        token.loc.end,
+                    );
                 },

                 .r_paren => if (!main) {
                     self.state = .end;
                 } else {
-                    return printError("Error: Expected ) to end inside condition or AND/OR to continue it", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
+                    return printError(
+                        "Error: Expected ) to end inside condition or AND/OR to continue it",
+                        ZiQlParserError.SynthaxError,
+                        self.toker.buffer,
+                        token.loc.start,
+                        token.loc.end,
+                    );
                 },

                 .keyword_and => {
                     curent_operation = .and_;
@@ -324,7 +405,13 @@ pub const Parser = struct {
                     curent_operation = .or_;
                     self.state = .expect_right_uuid_array;
                 },
-                else => return printError("Error: Expected a condition including AND OR or the end of the filter with } or )", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                else => return printError(
+                    "Error: Expected a condition including AND OR or the end of the filter with } or )",
+                    ZiQlParserError.SynthaxError,
+                    self.toker.buffer,
+                    token.loc.start,
+                    token.loc.end,
+                ),
             },

             .expect_right_uuid_array => {
@@ -340,7 +427,13 @@ pub const Parser = struct {
                         keep_next = true;
                         try self.file_engine.getUUIDListUsingCondition(right_condition, &right_array);
                     }, // Create a new condition and compare it
-                    else => return printError("Error: Expected ( or member name.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                    else => return printError(
+                        "Error: Expected ( or member name.",
+                        ZiQlParserError.SynthaxError,
+                        self.toker.buffer,
+                        token.loc.start,
+                        token.loc.end,
+                    ),
                 }

                 switch (curent_operation) {
@@ -373,14 +466,37 @@ pub const Parser = struct {
             }) switch (self.state) {
                 .expect_member => switch (token.tag) {
                     .identifier => {
-                        if (!self.file_engine.isMemberNameInStruct(condition.struct_name, self.toker.getTokenSlice(token))) {
-                            return printError("Error: Member not part of struct.", ZiQlParserError.MemberNotFound, self.toker.buffer, token.loc.start, token.loc.end);
+                        if (!(self.file_engine.isMemberNameInStruct(condition.struct_name, self.toker.getTokenSlice(token)) catch {
+                            return printError(
+                                "Error: Struct not found.",
+                                ZiQlParserError.StructNotFound,
+                                self.toker.buffer,
+                                token.loc.start,
+                                token.loc.end,
+                            );
+                        })) {
+                            return printError(
+                                "Error: Member not part of struct.",
+                                ZiQlParserError.MemberNotFound,
+                                self.toker.buffer,
+                                token.loc.start,
+                                token.loc.end,
+                            );
                         }
-                        condition.data_type = self.file_engine.memberName2DataType(condition.struct_name, self.toker.getTokenSlice(token)) orelse @panic("Couldn't find the struct and member");
+                        condition.data_type = self.file_engine.memberName2DataType(
+                            condition.struct_name,
+                            self.toker.getTokenSlice(token),
+                        ) catch return ZiQlParserError.MemberNotFound;
                         condition.member_name = self.toker.getTokenSlice(token);
                         self.state = State.expect_operation;
                     },
"Error: Expected member name.", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .expect_operation => { @@ -391,81 +507,155 @@ pub const Parser = struct { .angle_bracket_left_equal => condition.operation = .inferior_or_equal, // <= .angle_bracket_right_equal => condition.operation = .superior_or_equal, // >= .bang_equal => condition.operation = .different, // != - else => return printError("Error: Expected condition. Including < > <= >= = !=", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), + else => return printError( + "Error: Expected condition. Including < > <= >= = !=", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), } self.state = State.expect_value; }, .expect_value => { switch (condition.data_type) { - .int => { - switch (token.tag) { - .int_literal => condition.value = self.toker.getTokenSlice(token), - else => return printError("Error: Expected int", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), - } + .int => switch (token.tag) { + .int_literal => condition.value = self.toker.getTokenSlice(token), + else => return printError( + "Error: Expected int", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, - .float => { - switch (token.tag) { - .float_literal => condition.value = self.toker.getTokenSlice(token), - else => return printError("Error: Expected float", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), - } + + .float => switch (token.tag) { + .float_literal => condition.value = self.toker.getTokenSlice(token), + else => return printError( + "Error: Expected float", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, - .str, .id => { - switch (token.tag) { - .string_literal => condition.value = self.toker.getTokenSlice(token), - else => return printError("Error: Expected string", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), - } + + .str, .id => switch (token.tag) { + .string_literal => condition.value = self.toker.getTokenSlice(token), + else => return printError( + "Error: Expected string", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, - .bool => { - switch (token.tag) { - .bool_literal_true, .bool_literal_false => condition.value = self.toker.getTokenSlice(token), - else => return printError("Error: Expected bool", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), - } + + .bool => switch (token.tag) { + .bool_literal_true, .bool_literal_false => condition.value = self.toker.getTokenSlice(token), + else => return printError( + "Error: Expected bool", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, + + .date => switch (token.tag) { + .date_literal => condition.value = self.toker.getTokenSlice(token), + else => return printError( + "Error: Expected date", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), + }, + + .time => switch (token.tag) { + .time_literal => condition.value = self.toker.getTokenSlice(token), + else => return printError( + "Error: Expected time", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), + }, + + .datetime => switch (token.tag) { + .datetime_literal => condition.value = self.toker.getTokenSlice(token), + else => 
+                        .datetime => switch (token.tag) {
+                            .datetime_literal => condition.value = self.toker.getTokenSlice(token),
+                            else => return printError(
+                                "Error: Expected datetime",
+                                ZiQlParserError.SynthaxError,
+                                self.toker.buffer,
+                                token.loc.start,
+                                token.loc.end,
+                            ),
+                        },
+
                         .int_array => {
                             const start_index = token.loc.start;
-                            token = self.toker.next();
-                            while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
-                                switch (token.tag) {
-                                    .int_literal => continue,
-                                    else => return printError("Error: Expected int or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
-                                }
-                            }
+                            token = try self.checkTokensInArray(.int_literal);
                             condition.value = self.toker.buffer[start_index..token.loc.end];
                         },
+
                         .float_array => {
                             const start_index = token.loc.start;
-                            token = self.toker.next();
-                            while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
-                                switch (token.tag) {
-                                    .float_literal => continue,
-                                    else => return printError("Error: Expected float or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
-                                }
-                            }
+                            token = try self.checkTokensInArray(.float_literal);
                             condition.value = self.toker.buffer[start_index..token.loc.end];
                         },
-                        .str_array, .id_array => {
+
+                        .id_array => {
                             const start_index = token.loc.start;
-                            token = self.toker.next();
-                            while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
-                                switch (token.tag) {
-                                    .string_literal => continue,
-                                    else => return printError("Error: Expected string or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
-                                }
-                            }
+                            token = try self.checkTokensInArray(.uuid_literal);
                             condition.value = self.toker.buffer[start_index..token.loc.end];
                         },
+
+                        .str_array => {
+                            const start_index = token.loc.start;
+                            token = try self.checkTokensInArray(.string_literal);
+                            condition.value = self.toker.buffer[start_index..token.loc.end];
+                        },
+
                         .bool_array => {
                             const start_index = token.loc.start;
                             token = self.toker.next();
                             while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
                                 switch (token.tag) {
                                     .bool_literal_false, .bool_literal_true => continue,
-                                    else => return printError("Error: Expected bool or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+                                    else => return printError(
+                                        "Error: Expected bool or ].",
+                                        ZiQlParserError.SynthaxError,
+                                        self.toker.buffer,
+                                        token.loc.start,
+                                        token.loc.end,
+                                    ),
                                 }
                             }
                             condition.value = self.toker.buffer[start_index..token.loc.end];
                         },
+
+                        .date_array => {
+                            const start_index = token.loc.start;
+                            token = try self.checkTokensInArray(.date_literal);
+                            condition.value = self.toker.buffer[start_index..token.loc.end];
+                        },
+
+                        .time_array => {
+                            const start_index = token.loc.start;
+                            token = try self.checkTokensInArray(.time_literal);
+                            condition.value = self.toker.buffer[start_index..token.loc.end];
+                        },
+
+                        .datetime_array => {
+                            const start_index = token.loc.start;
+                            token = try self.checkTokensInArray(.datetime_literal);
+                            condition.value = self.toker.buffer[start_index..token.loc.end];
+                        },
                     }
                     self.state = .end;
                 },
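// Illustrative sketch, not part of this patch: `checkTokensInArray` is called
// above but defined outside this hunk. From the inline loops it replaces, it
// presumably consumes tokens until `]`, failing on any unexpected tag, roughly:
//
//     fn checkTokensInArray(self: *Parser, tag: Token.Tag) ZipponError!Token {
//         var token = self.toker.next();
//         while (token.tag != .r_bracket) : (token = self.toker.next()) {
//             if (token.tag != tag) return printError(
//                 "Error: Wrong token type in array.",
//                 ZiQlParserError.SynthaxError,
//                 self.toker.buffer,
//                 token.loc.start,
//                 token.loc.end,
//             );
//         }
//         return token;
//     }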
=.", + ZiQlParserError.ConditionError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .different => switch (condition.data_type) { - .int, .float, .str, .bool, .id => {}, - else => return printError("Error: Only int, float, str, bool can be compare with !=.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end), + .int, .float, .str, .bool, .id, .date, .time, .datetime => {}, + else => return printError( + "Error: Only int, float, str, bool, date, time, datetime can be compare with !=.", + ZiQlParserError.ConditionError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .superior_or_equal => switch (condition.data_type) { - .int, .float => {}, - else => return printError("Error: Only int, float can be compare with <=.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end), + .int, .float, .date, .time, .datetime => {}, + else => return printError( + "Error: Only int, float, date, time, datetime can be compare with >=.", + ZiQlParserError.ConditionError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .superior => switch (condition.data_type) { - .int, .float => {}, - else => return printError("Error: Only int, float can be compare with <.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end), + .int, .float, .date, .time, .datetime => {}, + else => return printError( + "Error: Only int, float, date, time, datetime can be compare with >.", + ZiQlParserError.ConditionError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .inferior_or_equal => switch (condition.data_type) { - .int, .float => {}, - else => return printError("Error: Only int, float can be compare with >=.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end), + .int, .float, .date, .time, .datetime => {}, + else => return printError( + "Error: Only int, float, date, time, datetime can be compare with <=.", + ZiQlParserError.ConditionError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .inferior => switch (condition.data_type) { - .int, .float => {}, - else => return printError("Error: Only int, float can be compare with >.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end), + .int, .float, .date, .time, .datetime => {}, + else => return printError( + "Error: Only int, float, date, time, datetime can be compare with <.", + ZiQlParserError.ConditionError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, // TODO: Do it for IN and other stuff to @@ -527,7 +753,13 @@ pub const Parser = struct { .expect_count_of_entity_to_find => switch (token.tag) { .int_literal => { const count = std.fmt.parseInt(usize, self.toker.getTokenSlice(token), 10) catch { - return printError("Error while transforming this into a integer.", ZiQlParserError.ParsingValueError, self.toker.buffer, token.loc.start, token.loc.end); + return printError( + "Error while transforming this into a integer.", + ZiQlParserError.ParsingValueError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ); }; additional_data.entity_count_to_find = count; self.state = .expect_semicolon_OR_right_bracket; @@ -541,12 +773,34 @@ pub const Parser = struct { .expect_semicolon_OR_right_bracket => switch (token.tag) { .semicolon => self.state = .expect_member, .r_bracket => self.state = .end, - else => return printError("Error: Expect ';' or ']'.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, 
token.loc.end),
+ else => return printError(
+ "Error: Expected ';' or ']'.",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 .expect_member => switch (token.tag) {
 .identifier => {
- if (!self.file_engine.isMemberNameInStruct(self.struct_name, self.toker.getTokenSlice(token))) return printError("Member not found in struct.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
+ if (!(self.file_engine.isMemberNameInStruct(self.struct_name, self.toker.getTokenSlice(token)) catch {
+ return printError(
+ "Struct not found.",
+ ZiQlParserError.StructNotFound,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ );
+ })) {
+ return printError(
+ "Member not found in struct.",
+ ZiQlParserError.MemberNotFound,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ );
+ }
 try additional_data.member_to_find.append(
 AdditionalDataMember.init(
 self.allocator,
@@ -556,7 +810,13 @@
 self.state = .expect_comma_OR_r_bracket_OR_l_bracket;
 },
- else => return printError("Error: Expected a member name.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected a member name.",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 .expect_comma_OR_r_bracket_OR_l_bracket => switch (token.tag) {
@@ -568,13 +828,25 @@
 );
 self.state = .expect_comma_OR_r_bracket;
 },
- else => return printError("Error: Expected , or ] or [", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected , or ] or [",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 .expect_comma_OR_r_bracket => switch (token.tag) {
 .comma => self.state = .expect_member,
 .r_bracket => self.state = .end,
- else => return printError("Error: Expected , or ]", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected , or ]",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 else => unreachable,
@@ -597,34 +869,66 @@
 .expect_member => switch (token.tag) {
 .identifier => {
 member_name = self.toker.getTokenSlice(token);
- if (!self.file_engine.isMemberNameInStruct(self.struct_name, member_name)) return printError("Member not found in struct.", ZiQlParserError.MemberNotFound, self.toker.buffer, token.loc.start, token.loc.end);
+ if (!(self.file_engine.isMemberNameInStruct(self.struct_name, member_name) catch {
+ return ZiQlParserError.StructNotFound;
+ })) return printError(
+ "Member not found in struct.",
+ ZiQlParserError.MemberNotFound,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ );
 self.state = .expect_equal;
 },
- else => return printError("Error: Expected member name.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected member name.",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 .expect_equal => switch (token.tag) {
 // TODO: Implement stuff to manipulate arrays, like APPEND or REMOVE
 .equal => self.state = .expect_new_value,
- else => return printError("Error: Expected =", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: 
Expected =", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .expect_new_value => { - const data_type = self.file_engine.memberName2DataType(self.struct_name, member_name); - switch (data_type.?) { + const data_type = self.file_engine.memberName2DataType(self.struct_name, member_name) catch return ZiQlParserError.StructNotFound; + switch (data_type) { .int => switch (token.tag) { .int_literal, .keyword_null => { member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember"); self.state = .expect_comma_OR_end; }, - else => return printError("Error: Expected int", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), + else => return printError( + "Error: Expected int", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .float => switch (token.tag) { .float_literal, .keyword_null => { member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember"); self.state = .expect_comma_OR_end; }, - else => return printError("Error: Expected float", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), + else => return printError( + "Error: Expected float", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, .bool => switch (token.tag) { .bool_literal_true => { @@ -639,79 +943,215 @@ pub const Parser = struct { member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember"); self.state = .expect_comma_OR_end; }, - else => return printError("Error: Expected bool: true false", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end), + else => return printError( + "Error: Expected bool: true false", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), }, - .str, .id => switch (token.tag) { + .date => switch (token.tag) { + .date_literal, .keyword_null => { + member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember"); + self.state = .expect_comma_OR_end; + }, + else => return printError( + "Error: Expected date", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), + }, + .time => switch (token.tag) { + .time_literal, .keyword_null => { + member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember"); + self.state = .expect_comma_OR_end; + }, + else => return printError( + "Error: Expected time", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), + }, + .datetime => switch (token.tag) { + .datetime_literal, .keyword_null => { + member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember"); + self.state = .expect_comma_OR_end; + }, + else => return printError( + "Error: Expected datetime", + ZiQlParserError.SynthaxError, + self.toker.buffer, + token.loc.start, + token.loc.end, + ), + }, + .str => switch (token.tag) { .string_literal, .keyword_null => { member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember"); self.state = .expect_comma_OR_end; 
},
- else => return printError("Error: Expected string between ''", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected string between ''",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
+ },
+ .id => switch (token.tag) {
+ .uuid_literal, .keyword_null => {
+ member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
+ self.state = .expect_comma_OR_end;
+ },
+ else => return printError(
+ "Error: Expected uuid",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 // TODO: Maybe upgrade that to use multiple states
 .int_array => switch (token.tag) {
 .l_bracket => {
 const start_index = token.loc.start;
- token = self.toker.next();
- while (token.tag != .r_bracket) : (token = self.toker.next()) {
- switch (token.tag) {
- .int_literal => continue,
- else => return printError("Error: Expected int or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
- }
- }
- // Maybe change that as it just recreate a string that is already in the buffer
+ token = try self.checkTokensInArray(.int_literal);
 member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couldn't add string of array in data map");
 self.state = .expect_comma_OR_end;
 },
- else => return printError("Error: Expected [ to start an array", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected [ to start an array",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 .float_array => switch (token.tag) {
 .l_bracket => {
 const start_index = token.loc.start;
- token = self.toker.next();
- while (token.tag != .r_bracket) : (token = self.toker.next()) {
- switch (token.tag) {
- .float_literal => continue,
- else => return printError("Error: Expected float or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
- }
- }
- // Maybe change that as it just recreate a string that is already in the buffer
+ token = try self.checkTokensInArray(.float_literal);
 member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couldn't add string of array in data map");
 self.state = .expect_comma_OR_end;
 },
- else => return printError("Error: Expected [ to start an array", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected [ to start an array",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 .bool_array => switch (token.tag) {
 .l_bracket => {
 const start_index = token.loc.start;
+
+ // Same as the function checkTokensInArray.
+ // But that function only accepts one tag at a time, and we need two here.
 token = self.toker.next();
 while (token.tag != .r_bracket) : (token = self.toker.next()) {
 switch (token.tag) {
 .bool_literal_false, .bool_literal_true => continue,
- else => return printError("Error: Expected bool or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected bool or ].",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 }
 }
 // Maybe change that as it just recreates a string that is already in the buffer
 member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couldn't add string of array in data map");
 self.state = .expect_comma_OR_end;
 },
- else => return printError("Error: Expected [ to start an array", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected [ to start an array",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
- .str_array, .id_array => switch (token.tag) {
+ .str_array => switch (token.tag) {
 .l_bracket => {
 const start_index = token.loc.start;
- token = self.toker.next();
- while (token.tag != .r_bracket) : (token = self.toker.next()) {
- switch (token.tag) {
- .string_literal => continue,
- else => return printError("Error: Expected str or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
- }
- }
- // Maybe change that as it just recreate a string that is already in the buffer
+ token = try self.checkTokensInArray(.string_literal);
 member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couldn't add string of array in data map");
 self.state = .expect_comma_OR_end;
 },
- else => return printError("Error: Expected [ to start an array", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected [ to start an array",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
+ },
+ .id_array => switch (token.tag) {
+ .l_bracket => {
+ const start_index = token.loc.start;
+ token = try self.checkTokensInArray(.uuid_literal);
+ member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couldn't add string of array in data map");
+ self.state = .expect_comma_OR_end;
+ },
+ else => return printError(
+ "Error: Expected [ to start an array",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
+ },
+ .date_array => switch (token.tag) {
+ .l_bracket => {
+ const start_index = token.loc.start;
+ token = try self.checkTokensInArray(.date_literal);
+ member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couldn't add string of array in data map");
+ self.state = .expect_comma_OR_end;
+ },
+ else => return printError(
+ "Error: Expected [ to start an array",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
+ },
+ .time_array => switch (token.tag) {
+ .l_bracket => {
+ const start_index = token.loc.start;
+ token = try self.checkTokensInArray(.time_literal);
+ member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couldn't add string of array in data map");
+ self.state = .expect_comma_OR_end;
+ },
+ else => return printError(
+ "Error: Expected [ to start an array",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
+ },
+ .datetime_array => switch (token.tag) {
+ .l_bracket => {
+ const start_index = token.loc.start;
+ token = try self.checkTokensInArray(.datetime_literal);
+ member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couldn't add string of array in data map");
+ self.state = .expect_comma_OR_end;
+ },
+ else => return printError(
+ "Error: Expected [ to start an array",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 },
 }
 },
@@ -720,19 +1160,45 @@
 switch (token.tag) {
 .r_paren => self.state = .end,
 .comma => self.state = .expect_member,
- else => return printError("Error: Expect , or )", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
+ else => return printError(
+ "Error: Expected , or )",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
 }
 },
 else => unreachable,
 };
 }
+
+ // Utils
+
+ /// Check that all tokens in an array are of one specific type
+ fn checkTokensInArray(self: *Parser, comptime tag: Token.Tag) ZipponError!Token {
+ var token = self.toker.next();
+ while (token.tag != .r_bracket) : (token = self.toker.next()) {
+ switch (token.tag) {
+ tag => continue,
+ else => return printError(
+ "Error: Wrong type.",
+ ZiQlParserError.SynthaxError,
+ self.toker.buffer,
+ token.loc.start,
+ token.loc.end,
+ ),
+ }
+ }
+ return token;
+ }
 };
 test "ADD" {
- try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])");
- try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])");
- try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])");
+ try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[], bday=2000/01/01, a_time=12:04, last_order=2000/01/01-12:45)");
+ try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[], bday=2000/01/01, a_time=12:04:54, last_order=2000/01/01-12:45)");
+ try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=-55, scores=[ 1 ], friends=[], bday=2000/01/01, a_time=12:04:54.8741, last_order=2000/01/01-12:45)");
 }
 test "UPDATE" {
@@ -756,13 +1222,19 @@ test "GRAB with additional data" {
 test "GRAB filter with int" {
 try testParsing("GRAB User {age = 18}");
- try testParsing("GRAB User {age > 18}");
+ try testParsing("GRAB User {age > -18}");
 try testParsing("GRAB User {age < 18}");
 try testParsing("GRAB User {age <= 18}");
 try testParsing("GRAB User {age >= 18}");
 try testParsing("GRAB User {age != 18}");
 }
+test "GRAB filter with date" {
+ try testParsing("GRAB User {bday > 2000/01/01}");
+ try testParsing("GRAB User {a_time < 08:00}");
+ try testParsing("GRAB User {last_order > 2000/01/01-12:45}");
+}
 test "Specific query" {
 try testParsing("GRAB User");
 try testParsing("GRAB User {}");
@@ -780,9 +1252,10 @@ test "Synthax error" {
 }
 fn testParsing(source: [:0]const u8) !void {
+ const TEST_DATA_DIR = @import("config.zig").TEST_DATA_DIR;
 const allocator = std.testing.allocator;
- const path = try allocator.dupe(u8, "ZipponDB");
+ const path = try allocator.dupe(u8, TEST_DATA_DIR);
 var file_engine = FileEngine.init(allocator, path);
 defer file_engine.deinit();
@@ -794,9 +1267,10 @@
 fn expectParsingError(source: [:0]const u8, err: ZiQlParserError) !void {
+ const TEST_DATA_DIR = 
@import("config.zig").TEST_DATA_DIR; const allocator = std.testing.allocator; - const path = try allocator.dupe(u8, "ZipponDB"); + const path = try allocator.dupe(u8, TEST_DATA_DIR); var file_engine = FileEngine.init(allocator, path); defer file_engine.deinit(); diff --git a/test_data/v0.1.1/DATA/User/0.zippondata b/test_data/v0.1.1/DATA/User/0.zippondata new file mode 100644 index 0000000..0ab1b85 --- /dev/null +++ b/test_data/v0.1.1/DATA/User/0.zippondata @@ -0,0 +1,95 @@ +6f15128c-f6fc-4017-b047-618da3dd59ec 'Nicole Wright' 89 'icarter@example.org' 2006/01/17 1995/02/03-12:41:35.347682 00:34:40.853006 [9 81 75 41 66 39 69 28 2] [] +c55ee39e-d519-4801-a817-4905f1014686 'Andrea Salazar' 31 'jeremymiller@example.net' 1982/11/19 2000/01/27-11:05:27.447913 11:09:30.575356 [43 100 7 26 30 56] [] +62740397-5a9b-47ef-bfb8-3c5e61b79c8c 'Devin Burns' 44 'coliver@example.net' 2003/02/27 1975/07/24-15:05:53.420305 13:20:58.874282 [84 35 62 84 25 69 99 30] [] +4f31323f-3bf4-4091-a8cd-d4bc620dd9ff 'Lawrence Craig' 34 'dannyfisher@example.org' 1998/07/12 2002/04/10-09:39:48.189347 14:42:58.739696 [34 61 2 14 16] [] +7d6a71c3-b3d0-4165-a660-bf2085333b77 'Katherine King' 83 'mirandawatkins@example.org' 2018/01/15 2000/09/27-12:08:16.778628 02:58:57.603440 [96 56 9 8 11 78 81 92 33] [] +69bb8ec8-1dfb-430f-a921-502441bb80ce 'Holly Griffin' 56 'troyarias@example.net' 1988/12/14 1987/07/07-00:22:27.255196 13:54:28.097706 [67 12 38 41 73 84 27 30 27] [] +02d063ca-bf05-4d8f-a26a-2c425053ed5f 'Karen Ford' 25 'brewerjulie@example.org' 2021/06/05 2013/12/23-09:52:40.159762 13:58:02.129021 [32 38] [] +79f470f3-daa0-4e3c-b7ce-49c56ed71efa 'Heather Walter' 45 'nathan09@example.org' 2005/02/15 2011/04/08-12:46:07.231369 20:25:35.561851 [78 76] [] +b4ca1d16-9844-434c-b51e-f5f954631e5b 'Timothy Gilbert' 9 'prattangela@example.org' 1971/03/22 1995/11/30-19:55:29.967903 06:59:02.893410 [64 30 47 7 88 46 32 100 69] [] +6a194c32-1971-4544-bf0c-1f044095cc68 'Jose Becker' 79 'ymartinez@example.com' 2008/08/14 1984/10/03-17:12:17.833573 17:47:37.450798 [93 60 53 76 17 14 6 55] [] +2a7bfc0f-6026-4112-a7b2-665c7429a0b6 'Tony Contreras' 62 'lbrown@example.org' 2014/07/07 1991/08/19-17:41:13.825644 06:00:52.599448 [35] [] +ef5f2f67-8c26-400f-865d-b5c512938b5c 'Karen Gonzalez' 25 'klong@example.org' 1989/08/04 1970/02/27-07:08:05.564124 10:13:57.631108 [95 55 80 78 42 77 90 37 86] [] +b25015b6-f78b-4ec4-a2a4-67e12b59d20c 'Nicole Woodard' 70 'rhernandez@example.com' 2005/05/14 2000/06/30-17:04:05.448483 05:20:12.524938 [75 98 100 91 82 68] [] +6c7855e6-05b4-4f57-b306-db46fb05b973 'Monique Lee' 12 'cruzmatthew@example.com' 1989/05/20 2000/10/30-03:22:34.691532 00:58:43.618501 [8 51 50 24 29 53 42 65] [] +fd8d9292-609b-4e9f-b4bc-3f10a58b0b14 'Donald Bush' 42 'ucontreras@example.net' 2003/08/04 1974/09/14-07:30:16.101479 23:14:47.929777 [64] [] +4071a5a2-6350-422b-b2aa-935b128ebe87 'Michael Young' 17 'timothy68@example.net' 1970/05/16 1981/04/03-13:41:01.404102 13:05:47.903322 [70] [] +d2d3e163-b914-4fdb-b5a4-2c0082bd7f86 'Scott Johnson' 57 'amanda44@example.org' 2012/05/04 1990/10/31-16:30:09.218291 00:20:15.601723 [10 71] [] +fe49b76b-21d5-423a-b139-35e7c8f49d83 'Harry Frank' 69 'kayleeshaw@example.org' 1970/01/18 2013/01/16-17:05:34.108621 12:17:46.996227 [93 7 97 20 43] [] +c01a286d-936f-4d3f-adde-abe900b403c6 'Steven Jones' 67 'bonniewalls@example.com' 1988/07/03 1997/01/11-09:52:57.654011 01:37:24.771031 [] [] +7ca08195-c5ff-4143-a243-0ad1bac72f1e 'Gregory Thomas' 81 'tjordan@example.org' 1988/07/08 1997/03/01-23:06:34.417278 
18:32:20.997120 [60 38 95 33 70 18 62 40] [] +0ab42d73-5c9e-4269-b03d-fd722333a1e1 'Jason Taylor' 2 'katherine20@example.net' 1977/10/10 2005/01/13-16:03:48.152308 01:10:42.486388 [13 41 52] [] +34732e47-061c-4a36-bbf6-452c62258c07 'Courtney Buckley' 35 'pwatkins@example.net' 2001/10/27 2012/03/30-13:12:25.647147 06:10:25.862129 [25 50 98 54 19 21 99 22 26 6] [] +a115e144-6adc-4948-a13d-0c6a98240cf7 'John Young' 11 'ygarcia@example.com' 2001/06/11 1980/08/10-10:26:05.493420 13:46:04.978166 [11 80 70 25 17] [] +a8983cf9-7f9b-4009-9e43-a30cd783e538 'Lindsay Perry' 30 'dominique55@example.org' 2013/01/21 2003/05/26-05:31:54.921319 13:01:07.743739 [73 41 4 2 84 97 92 84] [] +e4ef6974-58c5-4233-88ec-fcc0409f656f 'Matthew Silva' 51 'colecarlos@example.org' 2001/12/27 1984/05/29-16:23:04.249520 03:48:15.293856 [94 2 3 81] [] +6f72bdcc-d9c0-4852-969e-e81a0af2f0a7 'Kathryn Thomas' 96 'sloankelly@example.org' 2023/07/22 2000/01/28-06:45:08.683359 23:16:37.477658 [1] [] +3f459011-74d2-465b-94a1-2f23f19615e0 'Bethany Walker' 66 'brooke05@example.com' 2002/11/26 1988/03/26-14:03:37.178294 10:57:40.636124 [75 97 89] [] +1aab1aa1-6148-4e71-8109-f1421d7a0256 'Nancy Ayala' 64 'oreed@example.com' 1985/01/14 1985/09/24-03:37:28.419038 12:58:02.017099 [26] [] +98dfba72-24c4-4780-b489-13ebd78c4006 'Andrea Carter' 91 'ylopez@example.org' 2006/11/13 1983/12/06-02:36:18.776250 11:44:44.393381 [81 5 94 100 36 67 70] [] +e8641c86-1e63-44d9-b610-8ad30b0b00dc 'Courtney Ramirez' 87 'byrdhenry@example.net' 1998/05/03 2014/05/05-15:03:02.703651 12:37:45.835634 [93] [] +87e7ee8f-7424-44a0-8ea2-354e80f13a67 'Jesse Diaz' 43 'rmclaughlin@example.com' 1992/02/19 1988/12/02-03:30:13.802912 13:43:02.317048 [78 63 41] [] +699ba43d-8a20-4cfe-aa96-305372110f9e 'Sarah Williams' 49 'donaldsimpson@example.net' 1989/10/08 2011/05/18-09:22:54.189722 11:07:05.033493 [52 71 84 82 21 66 99 49 91 57] [] +5eecc879-1acb-461b-8b81-2a0b6d8b0045 'Jesus Martinez' 17 'oedwards@example.org' 2008/12/08 2022/03/09-08:59:07.297182 21:52:17.891999 [17 35 4 54 88 12 64 20 31] [] +f1c6fd43-5d99-4025-a1c0-02264144d7de 'Stephen Lyons' 59 'diazkayla@example.org' 2013/10/22 2011/08/11-22:38:36.345329 07:49:48.479873 [32 56 38 56 71 3 52 64 20] [] +ff3f6544-1060-4c94-ad52-4ef2bb04f639 'Sheena Vaughn' 22 'danielhowell@example.com' 1974/07/21 2022/08/19-00:41:18.729117 03:22:58.505303 [100 81 15 7 25] [] +b672f982-9fff-4c3c-889d-b2d71a876a2e 'Nicholas Stewart' 29 'cwinters@example.com' 1984/02/10 1990/01/13-05:57:18.864281 21:27:16.731434 [] [] +16d18dcb-0c4e-4176-83ae-dde38194da9e 'Jacqueline Harris' 27 'hernandezmatthew@example.net' 1975/09/13 1976/11/05-10:43:58.720147 01:59:09.360304 [88 74 71 68 13 24 92] [] +e7b8f128-264a-48c4-ad7b-fe223a48942e 'Randall Roberts' 32 'amanda35@example.net' 2023/12/17 2021/04/23-16:33:49.938777 17:21:05.318298 [] [] +6c6bd267-3b42-4b12-b24a-c8ed6b47e373 'Kim Marshall' 54 'ramirezmary@example.org' 1972/05/28 1989/05/07-09:48:35.231589 06:57:10.569574 [33 22 59 34 7] [] +69b9ebcc-9e5c-4a9e-99b3-e97b79cd4a7c 'Kenneth Taylor' 76 'sarahgarrett@example.com' 1982/09/13 1974/07/20-18:24:31.972235 15:13:37.414090 [23] [] +4c682774-7b58-4a4a-a388-5b0bca50408e 'Melissa Singh' 40 'walkerkyle@example.net' 1996/06/02 1972/10/10-08:09:15.367654 18:47:13.558160 [19 13 86 2 17 35 48] [] +8a554cbe-90cf-441e-8313-6a6b5874ca7c 'Monique Taylor' 30 'bmiller@example.com' 2011/09/12 1999/09/04-04:11:54.860487 03:11:25.453971 [20 89] [] +b6d5db38-6708-4057-8d41-bda61e00de19 'Crystal Morse PhD' 37 'troyclark@example.com' 1978/08/29 
1983/07/29-12:15:00.474676 12:25:42.213966 [] [] +29687dea-ed28-4e7d-868b-fe95a1bd0ae1 'Jorge Camacho' 64 'ncalhoun@example.org' 2005/06/03 1987/11/11-04:25:06.339660 03:38:53.874215 [26 97 78 94 79 82 16 73] [] +badd9d70-15fd-4d92-9c63-1e516fd94bf5 'Ronald Bowman' 6 'jessicaroberts@example.org' 1989/12/10 1997/04/04-14:33:17.025758 22:23:38.684410 [49 41 72 91 56 22 89 5 66] [] +991f06d6-5c41-48f5-a6cf-bb0e8f4d0a2b 'Alex Mcintyre' 21 'omurillo@example.net' 2018/07/07 1980/12/30-07:08:33.241262 22:38:12.623654 [81 44 88 61 91 20 44 12 11 51] [] +7a8dfc49-3603-436d-88fb-f425fcb341f5 'Jennifer Tate' 100 'kleinashley@example.com' 1979/07/20 1989/11/03-09:09:07.664687 03:30:19.034533 [74 80 56] [] +4105ced1-f986-406c-92ce-b771c4475b92 'Lindsay Russell' 47 'jeremy04@example.org' 1975/04/25 2006/11/17-23:05:10.042363 11:30:19.238273 [87] [] +1fed0010-5a9f-44fe-a518-60935961b521 'William Carter' 47 'ryan63@example.org' 1983/12/29 1972/02/01-21:05:07.185256 15:58:24.650869 [31 73 95 36 50 11 47 71] [] +9eb2de04-c20b-4a8c-8726-4e1ebe849d4f 'Eric Brewer' 61 'allison33@example.com' 2010/04/17 1978/12/12-15:20:08.485655 06:39:24.067146 [] [] +c6d56661-2e08-4099-b98b-a0df14fda563 'Thomas Nguyen' 86 'robinsonmichael@example.net' 1989/06/03 1999/12/13-05:51:32.860085 07:41:36.279351 [] [] +ef6c7b92-d0f9-4541-b37c-1b19ec9c3d76 'Gregory Hawkins' 68 'christinabrown@example.net' 2020/12/05 1989/11/22-01:40:05.282515 04:25:25.661361 [41 92 0] [] +e92c2a5e-9886-4bc6-b041-03426fdda9b4 'Allison Hansen DDS' 79 'mooneydanielle@example.org' 2024/06/07 1983/07/15-16:53:03.604798 15:56:43.407431 [51 5 37 41] [] +1569cf7a-7c5a-489a-a7fc-17c868a4d955 'Nicole Walker' 69 'ysmith@example.com' 2023/04/16 1989/08/13-05:22:33.896806 05:29:40.942315 [77 98 34] [] +3a43b066-40ab-4aeb-84e1-33d8b0b38393 'Angela Nelson' 12 'yrobinson@example.org' 1973/06/10 2000/01/29-10:37:36.189531 12:02:18.070502 [] [] +ddf229ad-07f5-42a0-abcb-962286246665 'Michael Garza' 9 'cruzrodney@example.org' 1996/03/29 2018/09/18-20:28:23.590399 06:02:48.237654 [] [] +01870e4b-8ab8-478c-b422-dfd5cf0d5194 'Amy Jennings' 38 'luiscarey@example.com' 2011/09/10 1993/03/14-19:26:31.568862 22:12:11.540630 [58] [] +0fa38607-208c-4e0f-9007-ec8160ecc881 'Emily Wade' 59 'barajassusan@example.net' 1982/04/02 1983/11/15-03:15:31.100487 16:16:22.748024 [37 28 7 59 22 22 82] [] +6d5c1287-d51b-45c8-8d66-bd4abfd5ca75 'Mrs. 
Julie Oconnor MD' 43 'rwallace@example.net' 1983/10/24 1977/03/01-20:04:48.664772 19:51:54.960776 [66 0 1] [] +dfbc47e3-b74a-42ea-8e47-efa5b5b1eb34 'Jessica Aguilar' 29 'cjones@example.org' 1979/03/19 1993/08/27-19:45:10.648767 07:44:34.802907 [] [] +d39fc239-aa4b-44a7-8eed-cec344972ac9 'Kenneth Ramos' 21 'mcdonaldmichael@example.org' 1973/02/08 1987/01/18-07:15:40.134471 02:00:26.886714 [22 38 69 33 9 86 50 62 14] [] +275d0acb-2ddd-4af9-997f-2a863dd8bdcd 'Jonathan Mccoy' 44 'graywanda@example.net' 1981/12/18 1973/06/24-10:23:26.893595 19:25:38.105128 [79 78] [] +7bbe5d3f-41c3-447f-adc8-17b001ab57bf 'Mark Lamb' 74 'millertimothy@example.com' 2020/07/28 2019/08/24-05:29:49.061194 00:50:20.659030 [92 7 74 27 44 69 9 85] [] +192f07a6-5e51-4955-84c5-9a1e935ceb0c 'Jose Hahn' 88 'larrybrewer@example.net' 1996/10/11 1990/09/17-05:09:53.913448 11:05:19.668141 [21 63 63 13 63 38 68] [] +a1479495-4520-46b5-b8df-2a2e7a7caf12 'Christopher Moore' 35 'milleralbert@example.org' 1985/10/10 2003/11/26-05:20:51.267371 10:31:31.669712 [66] [] +2accecd9-8bdb-4be0-85a1-1294c4b421c1 'Joshua Wilson' 67 'chad03@example.net' 2011/12/16 2013/06/23-23:46:45.533988 17:53:55.824275 [71 50 50 34 63 42 41 83 87 46] [] +0632a861-fd7e-40e6-a3db-c37f9174bf72 'Craig Wells' 39 'blevinsjessica@example.org' 2024/06/17 2016/06/08-08:43:33.145082 17:38:33.801481 [62 47 11 67 19] [] +57f94d12-41b5-45a0-b509-286893029a68 'Catherine Nelson' 12 'apowell@example.net' 2011/08/26 1971/04/21-06:18:44.227037 16:34:12.817227 [25 90 24] [] +917dae19-9906-4082-a015-7e5601be33c0 'Louis Mcknight' 97 'whitneytyler@example.org' 1989/08/07 2004/04/22-06:09:24.295995 05:43:30.569809 [49] [] +36924d24-ad7a-4ef8-af65-17f37b073913 'Dustin Frost' 86 'carmen43@example.net' 1980/05/22 1980/01/12-00:38:56.239871 05:14:53.584038 [86 33 12] [] +4b9a27a1-6b55-43ec-9f97-64689550fe0f 'Amy Gill' 69 'peckwilliam@example.net' 1975/06/07 1970/05/12-05:29:50.339568 15:26:41.551220 [48 19 91 58] [] +70bc00b9-6d53-4d75-b77a-a5880726e9db 'Tony Edwards' 30 'simmonscharles@example.net' 1987/03/08 2000/09/12-23:37:21.850780 15:03:04.343320 [41 16 38 1 92 6 14 65 8] [] +9fe3cc77-e4b0-48d4-90f8-d02e51f65b7e 'Christopher Hampton' 21 'dkim@example.org' 2019/05/20 1982/05/26-19:48:52.386687 11:44:55.421839 [] [] +6530eceb-15cf-4b6e-a96c-9dc377652fde 'Samantha Cline' 75 'sallyguzman@example.org' 2022/08/20 2019/11/17-06:59:59.793431 13:34:09.266364 [89] [] +85d4c7ad-7d87-421d-836a-452e9fcb9444 'Lindsay Rodriguez' 85 'lrush@example.net' 2010/03/10 2013/03/25-05:13:23.089586 18:28:41.293076 [77 7 9] [] +88a030c0-0fb5-4f02-9943-6f1fc773a12b 'Sabrina Larsen DDS' 33 'fwilliams@example.net' 2007/04/05 2004/05/21-04:21:36.580782 07:08:17.851384 [38 2 18 82 35 55 78 53] [] +87389248-a10b-41a3-b11e-f0d831db5510 'Nancy Edwards' 67 'ycasey@example.net' 2020/10/25 1973/05/10-02:33:22.938409 06:10:05.932522 [72 39 10 62 38 91 48 92 2] [] +788bbcc3-20a1-43d1-8a76-a54699840cbf 'Nicole Wolf' 35 'wardshelby@example.org' 2024/05/04 2018/07/22-06:23:11.213613 09:40:51.342992 [77 80 44 50 55 1 30 25 50] [] +dc9d8f21-fd2b-404e-a003-70199d889569 'Steven Brown' 19 'scottlindsay@example.com' 1994/05/26 2005/07/27-04:34:06.373227 02:28:56.761866 [66] [] +3d06d0f8-fb81-48bb-bc21-3fb8527fa6ce 'Kimberly Graham' 9 'mmcmahon@example.org' 2009/12/12 1978/09/09-14:12:39.019120 14:24:54.144847 [63 11 12 51 82] [] +8baf6bcd-41c5-4639-a6ab-ba66dec4d9a8 'Brianna Kennedy' 46 'david56@example.net' 1992/07/24 2012/11/28-02:02:17.460570 01:17:41.532201 [84 34] [] +256a0165-3d27-43a4-95e9-c7daaab38797 'Mr. 
Kevin Cole' 47 'luisjohnson@example.org' 1975/12/27 2003/10/06-08:10:59.441154 01:43:16.135264 [94 43 29 61] []
+5ca72f62-efd9-48fc-b505-7c3aa10e2799 'Ronald Luna' 100 'frances66@example.org' 2000/04/29 1973/06/25-08:14:51.197299 22:14:59.321156 [14 42] []
+e9752afe-8f2f-45f2-8e25-6b4a0d1cff54 'Michelle Phillips' 27 'robert68@example.net' 2015/01/05 1970/03/12-07:51:11.411035 09:00:10.096165 [75 96 12 35 69 27 77 47 36 28] []
+47302274-1d43-4702-b272-7481d64f43e5 'Amanda Perez' 47 'cyoder@example.org' 2017/03/29 2023/11/06-22:14:01.256222 16:58:27.758300 [47 94 16 15 43 16 27 73] []
+7d463829-d6e8-4142-9779-cbabc609619b 'Adam Tucker' 29 'djefferson@example.net' 1974/10/16 1983/05/12-23:43:49.811048 01:38:16.354009 [20 82 14 11 38] []
+7caabde4-0c04-4204-9c19-61fe6d3880e5 'Sarah Martin' 93 'robertbowman@example.org' 1985/10/12 1971/03/26-09:04:14.956300 16:13:12.309921 [] []
+0ff2434c-9aab-4dac-8b7e-bfe3d9efbe24 'Tammy Wilson' 26 'millsbilly@example.com' 1998/03/20 1998/12/01-13:05:07.237752 13:07:56.172858 [36 3 60 41 95 76 4 10 16] []
+d7264837-69eb-4d72-a8da-ae45b083d30c 'Kenneth Day' 63 'coryvasquez@example.org' 1991/01/24 2010/02/11-06:24:35.603560 15:02:52.501315 [66 36 64 45 61 25 60 42 78] []
+3cf85fa9-7bf6-4bc9-a876-9f3fbc6cfab4 'Diane Kelley' 84 'kelly23@example.com' 1976/01/02 2007/06/29-05:48:26.768312 22:37:56.820676 [93 41 91 94 50 43 85 57 8] []
+f1068ced-afea-47f0-a341-b40b6fab8dfc 'Jeffrey Stein' 6 'petersonveronica@example.org' 2008/05/03 2015/04/24-20:53:18.082149 05:06:01.654974 [52 54 55 30 10] []
+3093f1b7-ff0e-4e07-b2e6-bdd6743cf256 'William Vaughan' 97 'zhill@example.com' 2024/07/02 1970/02/20-02:11:22.624148 14:25:26.208268 [96 21 95 19 96 39 84 39] []
+0264849d-20d1-46c5-8525-c722f399714e 'Sandra Smith' 79 'bclayton@example.net' 1971/10/14 1997/08/03-11:49:06.366284 17:56:01.969774 [10 83 23 49 77 60 70 51] []
+5ca111fe-02d0-4a29-b630-8fbcd7a0c8f9 'Jill Snyder' 82 'hunter72@example.net' 1996/08/10 2018/11/18-18:18:10.149410 20:23:18.321137 [] []
+ebd12579-694f-4ba4-96a0-71621711a95f 'Calvin Fernandez' 1 'andrew92@example.com' 1994/11/17 2001/02/06-20:30:11.550466 04:41:12.634956 [21 49 94 57] []
diff --git a/test_data/v0.1.1/generate_dummy_data.py b/test_data/v0.1.1/generate_dummy_data.py
new file mode 100644
index 0000000..0a256f4
--- /dev/null
+++ b/test_data/v0.1.1/generate_dummy_data.py
@@ -0,0 +1,54 @@
+import subprocess
+import random
+from faker import Faker
+from tqdm import tqdm
+
+fake = Faker()
+
+
+def random_array():
+    length = random.randint(-1, 10)
+    scores = [random.randint(-1, 100) for _ in range(length)]
+    return f"[{' '.join(map(str, scores))}]"
+
+def run(process, command):
+    """Sends a command to the Zig process and returns the output."""
+
+    process.stdin.write('run "' + command + '"\n')
+    process.stdin.flush()
+
+    output = ""
+    char = process.stdout.read(1)  # Read one character
+    while char:
+        if char == "\x03":  # Check for ETX
+            break
+        output += char
+        char = process.stdout.read(1)
+
+    return output.strip()
+
+for i in tqdm(range(100)):
+    # Start the Zig binary process
+    process = subprocess.Popen(
+        ["zig-out/bin/zippon"],
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        text=True  # For easier string handling
+    )
+
+    query = "ADD User ("
+    query += f"name = '{fake.name()}',"
+    query += f"age = {random.randint(0, 100)},"
+    query += f"email = '{fake.email()}',"
+    query += f"scores={random_array()},"
+    query += "friends = [],"
+    query += f"bday={fake.date(pattern='%Y/%m/%d')},"
+    query += 
f"last_order={fake.date_time().strftime('%Y/%m/%d-%H:%M:%S.%f')}," # Shouldn't create an error if the millisecond are too long, here it's 6 digit instead of 4 + query += f"a_time={fake.date_time().strftime('%H:%M:%S.%f')}" + query += f")" + + output = run(process, query) + print(output) + process.terminate() + diff --git a/test_data/v0.1.1/schema.zipponschema b/test_data/v0.1.1/schema.zipponschema new file mode 100644 index 0000000..60b9cd6 --- /dev/null +++ b/test_data/v0.1.1/schema.zipponschema @@ -0,0 +1,10 @@ +User ( + name: str, + age: int, + email: str, + bday: date, + last_order: datetime, + a_time: time, + scores: []int, + friends: []str, +)