Start cleaning before working on 0.2

This commit is contained in:
Adrien Bouvais 2024-10-16 20:26:40 +02:00
parent c93538f4b0
commit 29af5a7ac0
8 changed files with 1063 additions and 1092 deletions

View File

@ -85,8 +85,7 @@ pub fn main() !void {
var token = cliToker.next(); var token = cliToker.next();
state = .expect_main_command; state = .expect_main_command;
while ((state != .end) and (state != .quit)) : (token = cliToker.next()) { while ((state != .end) and (state != .quit)) : (token = cliToker.next()) switch (state) {
switch (state) {
.expect_main_command => switch (token.tag) { .expect_main_command => switch (token.tag) {
.keyword_run => { .keyword_run => {
if (!file_engine.usable) { if (!file_engine.usable) {
@ -262,11 +261,10 @@ pub fn main() !void {
}, },
.quit, .end => break, .quit, .end => break,
} };
} }
if (state == .quit) break; if (state == .quit) break;
} }
}
} }
pub fn runQuery(null_term_query_str: [:0]const u8, file_engine: *FileEngine) void { pub fn runQuery(null_term_query_str: [:0]const u8, file_engine: *FileEngine) void {

View File

@ -8,17 +8,25 @@ const SchemaStruct = @import("schemaParser.zig").Parser.SchemaStruct;
const SchemaParser = @import("schemaParser.zig").Parser; const SchemaParser = @import("schemaParser.zig").Parser;
const SchemaTokenizer = @import("tokenizers/schema.zig").Tokenizer; const SchemaTokenizer = @import("tokenizers/schema.zig").Tokenizer;
const SchemaToken = @import("tokenizers/schema.zig").Token; const SchemaToken = @import("tokenizers/schema.zig").Token;
const AdditionalData = @import("ziqlParser.zig").Parser.AdditionalData; const AdditionalData = @import("parsing-tools/additionalData.zig").AdditionalData;
//TODO: Create a union class and chose between file and memory // TODO: Use those errors everywhere in this file
const FileEngineError = error{
SchemaFileNotFound,
SchemaNotConform,
DATAFolderNotFound,
StructFolderNotFound,
CantMakeDir,
CantMakeFile,
};
/// Manage everything that is relate to read or write in files /// Manage everything that is relate to read or write in files
/// Or even get stats, whatever. If it touch files, it's here /// Or even get stats, whatever. If it touch files, it's here
pub const FileEngine = struct { pub const FileEngine = struct {
allocator: Allocator, allocator: Allocator,
usable: bool, usable: bool,
path_to_ZipponDB_dir: []const u8, // Make that into a list path_to_ZipponDB_dir: []const u8, // TODO: Put in config file
max_file_size: usize = 5e+4, // 50kb TODO: Change max_file_size: usize = 5e+4, // 50kb TODO: Put in config file
null_terminated_schema_buff: [:0]u8, null_terminated_schema_buff: [:0]u8,
struct_array: std.ArrayList(SchemaStruct), struct_array: std.ArrayList(SchemaStruct),
@ -81,10 +89,6 @@ pub const FileEngine = struct {
} }
}; };
pub fn setPath(self: *FileEngine, path: []const u8) void {
self.path_to_ZipponDB_dir = path;
}
/// Take a list of UUID and, a buffer array and the additional data to write into the buffer the JSON to send /// Take a list of UUID and, a buffer array and the additional data to write into the buffer the JSON to send
/// TODO: Optimize /// TODO: Optimize
/// FIXME: Array of string are not working /// FIXME: Array of string are not working
@ -161,7 +165,8 @@ pub const FileEngine = struct {
} }
} }
if (founded) { if (!founded) continue;
try out_writer.writeAll("{"); try out_writer.writeAll("{");
try out_writer.writeAll("id:\""); try out_writer.writeAll("id:\"");
try out_writer.print("{s}", .{output_fbs.getWritten()[0..36]}); try out_writer.print("{s}", .{output_fbs.getWritten()[0..36]});
@ -169,7 +174,8 @@ pub const FileEngine = struct {
for (self.structName2structMembers(struct_name), self.structName2DataType(struct_name)) |member_name, member_type| { for (self.structName2structMembers(struct_name), self.structName2DataType(struct_name)) |member_name, member_type| {
token = data_toker.next(); token = data_toker.next();
// FIXME: When relationship will be implemented, need to check if the len of NON link is 0 // FIXME: When relationship will be implemented, need to check if the len of NON link is 0
if ((additional_data.member_to_find.items.len == 0) or (self.isMemberNameInAdditionalData(self.locToSlice(member_name), additional_data))) { if (!(additional_data.member_to_find.items.len == 0) or !(additional_data.contains(self.locToSlice(member_name)))) continue;
// write the member name and = sign // write the member name and = sign
try out_writer.print("{s}: ", .{self.locToSlice(member_name)}); try out_writer.print("{s}: ", .{self.locToSlice(member_name)});
@ -200,23 +206,14 @@ pub const FileEngine = struct {
} }
try out_writer.writeAll(", "); try out_writer.writeAll(", ");
} }
}
try out_writer.writeAll("}"); try out_writer.writeAll("}");
try out_writer.writeAll(", "); try out_writer.writeAll(", ");
} }
}
// Write the end }
try out_writer.writeAll("]"); try out_writer.writeAll("]");
} }
fn isMemberNameInAdditionalData(_: *FileEngine, member_name: []const u8, additional_data: AdditionalData) bool {
for (additional_data.member_to_find.items) |elem| {
if (std.mem.eql(u8, member_name, elem.name)) return true;
}
return false;
}
/// Use a struct name to populate a list with all UUID of this struct /// Use a struct name to populate a list with all UUID of this struct
/// TODO: Optimize this, I'm sure I can do better than that
pub fn getAllUUIDList(self: *FileEngine, struct_name: []const u8, uuid_array: *std.ArrayList(UUID)) !void { pub fn getAllUUIDList(self: *FileEngine, struct_name: []const u8, uuid_array: *std.ArrayList(UUID)) !void {
const max_file_index = try self.maxFileIndex(struct_name); const max_file_index = try self.maxFileIndex(struct_name);
var current_index: usize = 0; var current_index: usize = 0;
@ -431,9 +428,7 @@ pub const FileEngine = struct {
} }
} }
// TODO: Clean a bit the code // Do I need a map here ? Cant I use something else ?
// Do I need multiple files too ? I mean it duplicate UUID a lot, if it's just to save a name like 'Bob', storing a long UUID is overkill
// I could just use a tabular data format with separator using space - Or maybe I encode the uuid to take a minimum space as I always know it size
pub fn writeEntity(self: *FileEngine, struct_name: []const u8, data_map: std.StringHashMap([]const u8)) !UUID { pub fn writeEntity(self: *FileEngine, struct_name: []const u8, data_map: std.StringHashMap([]const u8)) !UUID {
const uuid = UUID.init(); const uuid = UUID.init();
@ -566,7 +561,14 @@ pub const FileEngine = struct {
} }
} }
if (founded) { if (!founded) {
// stream until the delimiter
output_fbs.reset();
try new_file.writeAll(" ");
try reader.streamUntilDelimiter(writer, '\n', null);
try new_file.writeAll(output_fbs.getWritten());
try new_file.writeAll("\n");
} else {
for (self.structName2structMembers(struct_name), self.structName2DataType(struct_name)) |member_name, member_type| { for (self.structName2structMembers(struct_name), self.structName2DataType(struct_name)) |member_name, member_type| {
// For all collum in the right order, check if the key is in the map, if so use it to write the new value, otherwise use the old file // For all collum in the right order, check if the key is in the map, if so use it to write the new value, otherwise use the old file
output_fbs.reset(); output_fbs.reset();
@ -610,13 +612,6 @@ pub const FileEngine = struct {
try reader.streamUntilDelimiter(writer, '\n', null); try reader.streamUntilDelimiter(writer, '\n', null);
try new_file.writeAll("\n"); try new_file.writeAll("\n");
} else {
// stream until the delimiter
output_fbs.reset();
try new_file.writeAll(" ");
try reader.streamUntilDelimiter(writer, '\n', null);
try new_file.writeAll(output_fbs.getWritten());
try new_file.writeAll("\n");
} }
} }
} }
@ -807,40 +802,13 @@ pub const FileEngine = struct {
} }
} }
// Maybe make it so it use itself to search if it find a directory
fn getDirTotalSize(self: FileEngine, dir: std.fs.Dir) !u64 {
var total: u64 = 0;
var stat: std.fs.File.Stat = undefined;
var iter = dir.iterate();
while (try iter.next()) |entry| {
if (entry.kind == .directory) {
const sub_dir = try dir.openDir(entry.name, .{ .iterate = true });
total += try self.getDirTotalSize(sub_dir);
}
if (entry.kind != .file) continue;
stat = try dir.statFile(entry.name);
total += stat.size;
}
return total;
}
const FileError = error{
SchemaFileNotFound,
SchemaNotConform,
DATAFolderNotFound,
StructFolderNotFound,
CantMakeDir,
CantMakeFile,
};
/// Request a path to a schema file and then create the struct folder /// Request a path to a schema file and then create the struct folder
/// TODO: Check if some data already exist and if so ask if the user want to delete it and make a backup /// TODO: Check if some data already exist and if so ask if the user want to delete it and make a backup
pub fn initDataFolder(self: *FileEngine, path_to_schema_file: []const u8) FileError!void { pub fn initDataFolder(self: *FileEngine, path_to_schema_file: []const u8) FileEngineError!void {
var schema_buf = self.allocator.alloc(u8, 1024 * 50) catch @panic("Cant allocate the schema buffer"); var schema_buf = self.allocator.alloc(u8, 1024 * 50) catch @panic("Cant allocate the schema buffer");
defer self.allocator.free(schema_buf); defer self.allocator.free(schema_buf);
const file = std.fs.cwd().openFile(path_to_schema_file, .{}) catch return FileError.SchemaFileNotFound; const file = std.fs.cwd().openFile(path_to_schema_file, .{}) catch return FileEngineError.SchemaFileNotFound;
defer file.close(); defer file.close();
const len = file.readAll(schema_buf) catch @panic("Can't read schema file"); const len = file.readAll(schema_buf) catch @panic("Can't read schema file");
@ -860,19 +828,19 @@ pub const FileEngine = struct {
const path = std.fmt.allocPrint(self.allocator, "{s}/DATA", .{self.path_to_ZipponDB_dir}) catch @panic("Cant allocate path"); const path = std.fmt.allocPrint(self.allocator, "{s}/DATA", .{self.path_to_ZipponDB_dir}) catch @panic("Cant allocate path");
defer self.allocator.free(path); defer self.allocator.free(path);
var data_dir = std.fs.cwd().openDir(path, .{}) catch return FileError.DATAFolderNotFound; var data_dir = std.fs.cwd().openDir(path, .{}) catch return FileEngineError.DATAFolderNotFound;
defer data_dir.close(); defer data_dir.close();
for (self.struct_array.items) |struct_item| { for (self.struct_array.items) |struct_item| {
data_dir.makeDir(self.locToSlice(struct_item.name)) catch |err| switch (err) { data_dir.makeDir(self.locToSlice(struct_item.name)) catch |err| switch (err) {
error.PathAlreadyExists => {}, error.PathAlreadyExists => {},
else => return FileError.CantMakeDir, else => return FileEngineError.CantMakeDir,
}; };
const struct_dir = data_dir.openDir(self.locToSlice(struct_item.name), .{}) catch return FileError.StructFolderNotFound; const struct_dir = data_dir.openDir(self.locToSlice(struct_item.name), .{}) catch return FileEngineError.StructFolderNotFound;
_ = struct_dir.createFile("0.zippondata", .{}) catch |err| switch (err) { _ = struct_dir.createFile("0.zippondata", .{}) catch |err| switch (err) {
error.PathAlreadyExists => {}, error.PathAlreadyExists => {},
else => return FileError.CantMakeFile, else => return FileEngineError.CantMakeFile,
}; };
} }
@ -880,6 +848,7 @@ pub const FileEngine = struct {
} }
// Stuff for schema // Stuff for schema
// TODO: Check all those functions and remove if not use
pub fn readSchemaFile(allocator: Allocator, sub_path: []const u8, buffer: []u8) !usize { pub fn readSchemaFile(allocator: Allocator, sub_path: []const u8, buffer: []u8) !usize {
const path = try std.fmt.allocPrint(allocator, "{s}/schema.zipponschema", .{sub_path}); const path = try std.fmt.allocPrint(allocator, "{s}/schema.zipponschema", .{sub_path});
@ -1011,6 +980,7 @@ test "Get list of UUID using condition" {
// Series of functions to use just before creating an entity. // Series of functions to use just before creating an entity.
// Will transform the string of data into data of the right type./ // Will transform the string of data into data of the right type./
// TODO: Put those functions somewhere else
pub fn parseInt(value_str: []const u8) i64 { pub fn parseInt(value_str: []const u8) i64 {
return std.fmt.parseInt(i64, value_str, 10) catch return 0; return std.fmt.parseInt(i64, value_str, 10) catch return 0;
} }

View File

@ -0,0 +1,40 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
/// This is the [] part
/// IDK if saving it into the Parser struct is a good idea
/// Represents the `[]` part of a query (which members/how many entities to return).
/// NOTE(review): unclear whether storing this inside the Parser struct is the right home.
pub const AdditionalData = struct {
    entity_count_to_find: usize = 0,
    member_to_find: std.ArrayList(AdditionalDataMember),

    pub fn init(allocator: Allocator) AdditionalData {
        return .{ .member_to_find = std.ArrayList(AdditionalDataMember).init(allocator) };
    }

    pub fn deinit(self: *AdditionalData) void {
        // Each member owns a nested AdditionalData; release those first.
        for (self.member_to_find.items) |*member| member.additional_data.deinit();
        self.member_to_find.deinit();
    }

    /// True if `member_name` is one of the members requested in the `[]` clause.
    pub fn contains(additional_data: AdditionalData, member_name: []const u8) bool {
        return for (additional_data.member_to_find.items) |elem| {
            if (std.mem.eql(u8, member_name, elem.name)) break true;
        } else false;
    }
};

// This is `name` in: [name]
// It carries its own AdditionalData because queries can nest: [friend [1; name]]
pub const AdditionalDataMember = struct {
    name: []const u8,
    additional_data: AdditionalData,

    pub fn init(allocator: Allocator, name: []const u8) AdditionalDataMember {
        return .{ .name = name, .additional_data = AdditionalData.init(allocator) };
    }
};

View File

@ -0,0 +1,31 @@
const std = @import("std");
/// Look up the environment variable `variable`.
/// Returns a copy owned by the caller (free it with `allocator`),
/// or null if the variable is not set or any allocation fails.
pub fn getEnvVariables(allocator: std.mem.Allocator, variable: []const u8) ?[]const u8 {
    // getEnvMap copies the whole environment; it is freed on scope exit,
    // which is why the value must be duped before returning.
    var env_map = std.process.getEnvMap(allocator) catch return null;
    defer env_map.deinit();

    // Direct hash-map lookup instead of the previous linear scan over every entry.
    const value = env_map.get(variable) orelse return null;

    return allocator.dupe(u8, value) catch null;
}
/// Recursively compute the total size in bytes of all regular files
/// under `dir`, including files inside nested directories.
pub fn getDirTotalSize(dir: std.fs.Dir) !u64 {
    var total: u64 = 0;
    var iter = dir.iterate();
    while (try iter.next()) |entry| {
        if (entry.kind == .directory) {
            // Bug fix: the sub-directory handle was never closed, leaking one
            // file descriptor per directory visited. `defer` inside this block
            // closes it right after the recursive call returns.
            var sub_dir = try dir.openDir(entry.name, .{ .iterate = true });
            defer sub_dir.close();
            total += try getDirTotalSize(sub_dir);
        }
        if (entry.kind != .file) continue;
        const stat = try dir.statFile(entry.name);
        total += stat.size;
    }
    return total;
}

View File

@ -6,6 +6,7 @@ const Token = @import("tokenizers/schema.zig").Token;
const stdout = std.io.getStdOut().writer(); const stdout = std.io.getStdOut().writer();
// Fuse this with the same function in the ZiQL parser
fn send(comptime format: []const u8, args: anytype) void { fn send(comptime format: []const u8, args: anytype) void {
stdout.print(format, args) catch |err| { stdout.print(format, args) catch |err| {
std.log.err("Can't send: {any}", .{err}); std.log.err("Can't send: {any}", .{err});
@ -68,8 +69,7 @@ pub const Parser = struct {
while ((state != .end) and (state != .invalid)) : ({ while ((state != .end) and (state != .invalid)) : ({
token = if (!keep_next) self.toker.next() else token; token = if (!keep_next) self.toker.next() else token;
keep_next = false; keep_next = false;
}) { }) switch (state) {
switch (state) {
.expect_struct_name_OR_end => switch (token.tag) { .expect_struct_name_OR_end => switch (token.tag) {
.identifier => { .identifier => {
state = .expect_l_paren; state = .expect_l_paren;
@ -184,8 +184,7 @@ pub const Parser = struct {
}, },
else => unreachable, else => unreachable,
} };
}
// if invalid, empty the list // if invalid, empty the list
if (state == .invalid) { if (state == .invalid) {

View File

@ -123,9 +123,94 @@ pub const UUID = struct {
// Zero UUID // Zero UUID
pub const zero: UUID = .{ .bytes = .{0} ** 16 }; pub const zero: UUID = .{ .bytes = .{0} ** 16 };
// Convenience function to return a new v4 UUID. // TODO: Optimize both
pub fn newV4() UUID { pub fn OR(arr1: *std.ArrayList(UUID), arr2: *std.ArrayList(UUID)) !void {
return UUID.init(); for (0..arr2.items.len) |i| {
if (!containUUID(arr1.*, arr2.items[i])) {
try arr1.append(arr2.items[i]);
}
}
}
/// Keep in `arr1` only the UUIDs also present in `arr2` (set intersection, in place).
pub fn AND(arr1: *std.ArrayList(UUID), arr2: *std.ArrayList(UUID)) !void {
    var i: usize = 0;
    while (i < arr1.items.len) {
        if (containUUID(arr2.*, arr1.items[i])) {
            i += 1;
        } else {
            // Removing shifts the next candidate into index i, so don't advance.
            _ = arr1.orderedRemove(i);
        }
    }
}
test "OR & AND" {
    const allocator = std.testing.allocator;

    // Right-hand operand shared by both the AND and the OR case below.
    var right_arr = std.ArrayList(UUID).init(allocator);
    defer right_arr.deinit();
    try right_arr.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try right_arr.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));
    try right_arr.append(try UUID.parse("00000000-0000-0000-0000-000000000005"));
    try right_arr.append(try UUID.parse("00000000-0000-0000-0000-000000000006"));
    try right_arr.append(try UUID.parse("00000000-0000-0000-0000-000000000007"));

    // AND case: left is ...0000 through ...0004; only ...0000 and ...0001 overlap right_arr.
    var left_arr1 = std.ArrayList(UUID).init(allocator);
    defer left_arr1.deinit();
    try left_arr1.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try left_arr1.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));
    try left_arr1.append(try UUID.parse("00000000-0000-0000-0000-000000000002"));
    try left_arr1.append(try UUID.parse("00000000-0000-0000-0000-000000000003"));
    try left_arr1.append(try UUID.parse("00000000-0000-0000-0000-000000000004"));

    // Expected intersection.
    var expected_arr1 = std.ArrayList(UUID).init(allocator);
    defer expected_arr1.deinit();
    try expected_arr1.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try expected_arr1.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));

    // AND mutates left_arr1 in place.
    try AND(&left_arr1, &right_arr);
    try std.testing.expect(compareUUIDArray(left_arr1, expected_arr1));

    // OR case: same left-hand contents as the AND case, fresh list.
    var left_arr2 = std.ArrayList(UUID).init(allocator);
    defer left_arr2.deinit();
    try left_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try left_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));
    try left_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000002"));
    try left_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000003"));
    try left_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000004"));

    // Expected union: ...0000 through ...0007 with no duplicates.
    var expected_arr2 = std.ArrayList(UUID).init(allocator);
    defer expected_arr2.deinit();
    try expected_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try expected_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));
    try expected_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000002"));
    try expected_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000003"));
    try expected_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000004"));
    try expected_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000005"));
    try expected_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000006"));
    try expected_arr2.append(try UUID.parse("00000000-0000-0000-0000-000000000007"));

    // OR mutates left_arr2 in place.
    try OR(&left_arr2, &right_arr);
    try std.testing.expect(compareUUIDArray(left_arr2, expected_arr2));
}
/// Linear scan: true if `value` is already present in `arr`.
fn containUUID(arr: std.ArrayList(UUID), value: UUID) bool {
    for (arr.items) |elem| {
        if (value.compare(elem)) return true;
    }
    return false;
}
/// Order-insensitive equality check between two UUID lists.
/// NOTE(review): this only checks equal length plus one-directional containment,
/// so lists containing duplicate UUIDs can compare equal even when the
/// multisets differ — confirm callers never pass lists with duplicates.
fn compareUUIDArray(arr1: std.ArrayList(UUID), arr2: std.ArrayList(UUID)) bool {
    if (arr1.items.len != arr2.items.len) {
        // Debug aid: report the mismatching lengths before failing.
        std.debug.print("Not same array len when comparing UUID. arr1: {d} arr2: {d}\n", .{ arr1.items.len, arr2.items.len });
        return false;
    }
    for (0..arr1.items.len) |i| {
        if (!containUUID(arr2, arr1.items[i])) return false;
    }
    return true;
} }
test "parse and format" { test "parse and format" {

View File

@ -1,14 +0,0 @@
const std = @import("std");
pub fn getEnvVariables(allocator: std.mem.Allocator, variable: []const u8) ?[]const u8 {
var env_map = std.process.getEnvMap(allocator) catch return null;
defer env_map.deinit();
var iter = env_map.iterator();
while (iter.next()) |entry| {
if (std.mem.eql(u8, entry.key_ptr.*, variable)) return allocator.dupe(u8, entry.value_ptr.*) catch return null;
}
return null;
}

View File

@ -4,51 +4,25 @@ const Condition = @import("fileEngine.zig").FileEngine.Condition;
const Tokenizer = @import("tokenizers/ziql.zig").Tokenizer; const Tokenizer = @import("tokenizers/ziql.zig").Tokenizer;
const Token = @import("tokenizers/ziql.zig").Token; const Token = @import("tokenizers/ziql.zig").Token;
const UUID = @import("types/uuid.zig").UUID; const UUID = @import("types/uuid.zig").UUID;
const AND = @import("types/uuid.zig").AND;
const OR = @import("types/uuid.zig").OR;
const AdditionalData = @import("parsing-tools/additionalData.zig").AdditionalData;
const AdditionalDataMember = @import("parsing-tools/additionalData.zig").AdditionalDataMember;
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const stdout = std.io.getStdOut().writer(); const stdout = std.io.getStdOut().writer();
fn send(comptime format: []const u8, args: anytype) void { const ZiQlParserError = error{
stdout.print(format, args) catch |err| { SynthaxError,
std.log.err("Can't send: {any}", .{err}); MemberNotFound,
stdout.print("\x03\n", .{}) catch {}; MemberMissing,
}; StructNotFound,
FeatureMissing,
ParsingValueError,
ConditionError,
};
stdout.print("\x03\n", .{}) catch {}; const State = enum {
}
pub const Parser = struct {
allocator: Allocator,
state: State,
toker: *Tokenizer,
additional_data: AdditionalData,
struct_name: []const u8 = undefined,
file_engine: *FileEngine,
action: enum { GRAB, ADD, UPDATE, DELETE } = undefined,
pub fn init(allocator: Allocator, toker: *Tokenizer, file_engine: *FileEngine) Parser {
// Do I need to init a FileEngine at each Parser, can't I put it in the CLI parser instead ?
return Parser{
.allocator = allocator,
.toker = toker,
.state = .start,
.additional_data = AdditionalData.init(allocator),
.file_engine = file_engine,
};
}
pub fn deinit(self: *Parser) void {
self.additional_data.deinit();
self.allocator.free(self.struct_name);
}
const Options = struct {
members_for_ordering: std.ArrayList([]const u8), // The list in the right order of member name to use to order the result
sense_for_ordering: enum { ASC, DESC },
};
const State = enum {
start, start,
invalid, invalid,
end, end,
@ -87,48 +61,60 @@ pub const Parser = struct {
expect_comma_OR_end, expect_comma_OR_end,
add_member_to_map, add_member_to_map,
add_array_to_map, add_array_to_map,
};
fn send(comptime format: []const u8, args: anytype) void {
stdout.print(format, args) catch |err| {
std.log.err("Can't send: {any}", .{err});
stdout.print("\x03\n", .{}) catch {};
}; };
const ZiQlParserError = error{ stdout.print("\x03\n", .{}) catch {};
SynthaxError, }
MemberNotFound,
MemberMissing,
StructNotFound,
FeatureMissing,
ParsingValueError,
ConditionError,
};
/// This is the [] part pub const Parser = struct {
/// IDK if saving it into the Parser struct is a good idea allocator: Allocator,
pub const AdditionalData = struct { state: State,
entity_count_to_find: usize = 0, toker: *Tokenizer,
member_to_find: std.ArrayList(AdditionalDataMember),
pub fn init(allocator: Allocator) AdditionalData {
return AdditionalData{ .member_to_find = std.ArrayList(AdditionalDataMember).init(allocator) };
}
pub fn deinit(self: *AdditionalData) void {
for (0..self.member_to_find.items.len) |i| {
self.member_to_find.items[i].additional_data.deinit();
}
self.member_to_find.deinit();
}
};
// This is name in: [name]
// There is an additional data because it can be [friend [1; name]]
const AdditionalDataMember = struct {
name: []const u8,
additional_data: AdditionalData, additional_data: AdditionalData,
struct_name: []const u8 = undefined,
file_engine: *FileEngine,
pub fn init(allocator: Allocator, name: []const u8) AdditionalDataMember { action: enum { GRAB, ADD, UPDATE, DELETE } = undefined,
const additional_data = AdditionalData.init(allocator);
return AdditionalDataMember{ .name = name, .additional_data = additional_data }; pub fn init(allocator: Allocator, toker: *Tokenizer, file_engine: *FileEngine) Parser {
} // Do I need to init a FileEngine at each Parser, can't I put it in the CLI parser instead ?
return Parser{
.allocator = allocator,
.toker = toker,
.state = .start,
.additional_data = AdditionalData.init(allocator),
.file_engine = file_engine,
}; };
}
pub fn deinit(self: *Parser) void {
self.additional_data.deinit();
self.allocator.free(self.struct_name);
}
// TODO: Update to use ASC and DESC
// Maybe create a Sender struct or something like that
fn sendEntity(self: *Parser, uuid_list: *std.ArrayList(UUID)) void {
var buffer = std.ArrayList(u8).init(self.allocator);
defer buffer.deinit();
// Pop some element if the array is too long
if ((self.additional_data.entity_count_to_find != 0) and (self.additional_data.entity_count_to_find < uuid_list.items.len)) {
const to_pop = uuid_list.items.len - self.additional_data.entity_count_to_find;
for (0..to_pop) |_| _ = uuid_list.pop();
}
// Im gonna need a function in the file engine to parse and write in the buffer
self.file_engine.parseAndWriteToSend(self.struct_name, uuid_list.items, &buffer, self.additional_data) catch @panic("Error parsing data to send");
send("{s}", .{buffer.items});
}
pub fn parse(self: *Parser) !void { pub fn parse(self: *Parser) !void {
var token = self.toker.next(); var token = self.toker.next();
@ -137,8 +123,7 @@ pub const Parser = struct {
while (self.state != State.end) : ({ while (self.state != State.end) : ({
token = if (!keep_next) self.toker.next() else token; token = if (!keep_next) self.toker.next() else token;
keep_next = false; keep_next = false;
}) { }) switch (self.state) {
switch (self.state) {
.start => switch (token.tag) { .start => switch (token.tag) {
.keyword_grab => { .keyword_grab => {
self.action = .GRAB; self.action = .GRAB;
@ -180,10 +165,7 @@ pub const Parser = struct {
.DELETE => .filter_and_delete, .DELETE => .filter_and_delete,
else => unreachable, else => unreachable,
}, },
.eof => { .eof => self.state = .filter_and_send,
self.state = .filter_and_send;
keep_next = true;
},
else => return self.printError("Error: Expect [ for additional data or { for a filter", &token, ZiQlParserError.SynthaxError), else => return self.printError("Error: Expect [ for additional data or { for a filter", &token, ZiQlParserError.SynthaxError),
} }
}, },
@ -301,31 +283,7 @@ pub const Parser = struct {
}, },
else => unreachable, else => unreachable,
} };
}
}
// TODO: Update to use ASC and DESC
fn sendEntity(self: *Parser, uuid_list: *std.ArrayList(UUID)) void {
var buffer = std.ArrayList(u8).init(self.allocator);
defer buffer.deinit();
// Pop some element if the array is too long
if ((self.additional_data.entity_count_to_find != 0) and (self.additional_data.entity_count_to_find < uuid_list.items.len)) {
const to_pop = uuid_list.items.len - self.additional_data.entity_count_to_find;
for (0..to_pop) |_| _ = uuid_list.pop();
}
// Im gonna need a function in the file engine to parse and write in the buffer
self.file_engine.parseAndWriteToSend(self.struct_name, uuid_list.items, &buffer, self.additional_data) catch @panic("Error parsing data to send");
send("{s}", .{buffer.items});
}
// TODO: The parser that check what is between ||
// For now only |ASC name, age|
fn parseOptions(self: *Parser) void {
_ = self;
} }
/// Take an array of UUID and populate it with what match what is between {} /// Take an array of UUID and populate it with what match what is between {}
@ -342,8 +300,7 @@ pub const Parser = struct {
while (self.state != State.end) : ({ while (self.state != State.end) : ({
token = if (!keep_next) self.toker.next() else token; token = if (!keep_next) self.toker.next() else token;
keep_next = false; keep_next = false;
}) { }) switch (self.state) {
switch (self.state) {
.expect_left_condition => switch (token.tag) { .expect_left_condition => switch (token.tag) {
.r_brace => { .r_brace => {
try self.file_engine.getAllUUIDList(struct_name, left_array); try self.file_engine.getAllUUIDList(struct_name, left_array);
@ -407,8 +364,7 @@ pub const Parser = struct {
}, },
else => unreachable, else => unreachable,
} };
}
return token; return token;
} }
@ -423,8 +379,7 @@ pub const Parser = struct {
while (self.state != State.end) : ({ while (self.state != State.end) : ({
token = if (!keep_next) self.toker.next() else token; token = if (!keep_next) self.toker.next() else token;
keep_next = false; keep_next = false;
}) { }) switch (self.state) {
switch (self.state) {
.expect_member => switch (token.tag) { .expect_member => switch (token.tag) {
.identifier => { .identifier => {
if (!self.file_engine.isMemberNameInStruct(condition.struct_name, self.toker.getTokenSlice(token))) { if (!self.file_engine.isMemberNameInStruct(condition.struct_name, self.toker.getTokenSlice(token))) {
@ -525,10 +480,10 @@ pub const Parser = struct {
}, },
else => unreachable, else => unreachable,
} };
}
// Check if the condition is valid // Check if the condition is valid
// TODO: Mqke q function outside the Parser
switch (condition.operation) { switch (condition.operation) {
.equal => switch (condition.data_type) { .equal => switch (condition.data_type) {
.int, .float, .str, .bool, .id => {}, .int, .float, .str, .bool, .id => {},
@ -577,8 +532,7 @@ pub const Parser = struct {
while (self.state != .end) : ({ while (self.state != .end) : ({
token = if ((!keep_next) and (self.state != .end)) self.toker.next() else token; token = if ((!keep_next) and (self.state != .end)) self.toker.next() else token;
keep_next = false; keep_next = false;
}) { }) switch (self.state) {
switch (self.state) {
.expect_count_of_entity_to_find => switch (token.tag) { .expect_count_of_entity_to_find => switch (token.tag) {
.int_literal => { .int_literal => {
const count = std.fmt.parseInt(usize, self.toker.getTokenSlice(token), 10) catch { const count = std.fmt.parseInt(usize, self.toker.getTokenSlice(token), 10) catch {
@ -633,8 +587,7 @@ pub const Parser = struct {
}, },
else => unreachable, else => unreachable,
} };
}
} }
/// Take the tokenizer and return a map of the ADD action. /// Take the tokenizer and return a map of the ADD action.
@ -649,8 +602,7 @@ pub const Parser = struct {
while (self.state != .end) : ({ while (self.state != .end) : ({
token = if (!keep_next) self.toker.next() else token; token = if (!keep_next) self.toker.next() else token;
keep_next = false; keep_next = false;
}) { }) switch (self.state) {
switch (self.state) {
.expect_member => switch (token.tag) { .expect_member => switch (token.tag) {
.identifier => { .identifier => {
member_name = self.toker.getTokenSlice(token); member_name = self.toker.getTokenSlice(token);
@ -782,11 +734,11 @@ pub const Parser = struct {
}, },
else => unreachable, else => unreachable,
} };
}
} }
/// Print an error and send it to the user pointing to the token /// Print an error and send it to the user pointing to the token
/// TODO: There is a duplicate of this somewhere, make it a single function
fn printError(self: *Parser, message: []const u8, token: *Token, err: ZiQlParserError) ZiQlParserError { fn printError(self: *Parser, message: []const u8, token: *Token, err: ZiQlParserError) ZiQlParserError {
stdout.print("\n", .{}) catch {}; stdout.print("\n", .{}) catch {};
stdout.print("{s}\n", .{message}) catch {}; stdout.print("{s}\n", .{message}) catch {};
@ -810,96 +762,6 @@ pub const Parser = struct {
} }
}; };
// TODO: Optimize both
fn OR(arr1: *std.ArrayList(UUID), arr2: *std.ArrayList(UUID)) !void {
for (0..arr2.items.len) |i| {
if (!containUUID(arr1.*, arr2.items[i])) {
try arr1.append(arr2.items[i]);
}
}
}
fn AND(arr1: *std.ArrayList(UUID), arr2: *std.ArrayList(UUID)) !void {
var i: usize = 0;
for (0..arr1.items.len) |_| {
if (!containUUID(arr2.*, arr1.items[i])) {
_ = arr1.orderedRemove(i);
} else {
i += 1;
}
}
}
test "OR & AND" {
    const allocator = std.testing.allocator;

    // Shared right-hand operand: overlaps the left operands on ids 0 and 1.
    var common = std.ArrayList(UUID).init(allocator);
    defer common.deinit();
    try common.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try common.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));
    try common.append(try UUID.parse("00000000-0000-0000-0000-000000000005"));
    try common.append(try UUID.parse("00000000-0000-0000-0000-000000000006"));
    try common.append(try UUID.parse("00000000-0000-0000-0000-000000000007"));

    // AND keeps only the overlap (ids 0 and 1).
    var and_input = std.ArrayList(UUID).init(allocator);
    defer and_input.deinit();
    try and_input.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try and_input.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));
    try and_input.append(try UUID.parse("00000000-0000-0000-0000-000000000002"));
    try and_input.append(try UUID.parse("00000000-0000-0000-0000-000000000003"));
    try and_input.append(try UUID.parse("00000000-0000-0000-0000-000000000004"));

    var and_expected = std.ArrayList(UUID).init(allocator);
    defer and_expected.deinit();
    try and_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try and_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));

    try AND(&and_input, &common);
    try std.testing.expect(compareUUIDArray(and_input, and_expected));

    // OR yields the union of both operands (ids 0 through 7, no 8).
    var or_input = std.ArrayList(UUID).init(allocator);
    defer or_input.deinit();
    try or_input.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try or_input.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));
    try or_input.append(try UUID.parse("00000000-0000-0000-0000-000000000002"));
    try or_input.append(try UUID.parse("00000000-0000-0000-0000-000000000003"));
    try or_input.append(try UUID.parse("00000000-0000-0000-0000-000000000004"));

    var or_expected = std.ArrayList(UUID).init(allocator);
    defer or_expected.deinit();
    try or_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000000"));
    try or_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000001"));
    try or_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000002"));
    try or_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000003"));
    try or_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000004"));
    try or_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000005"));
    try or_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000006"));
    try or_expected.append(try UUID.parse("00000000-0000-0000-0000-000000000007"));

    try OR(&or_input, &common);
    try std.testing.expect(compareUUIDArray(or_input, or_expected));
}
/// Linear membership test: true if any element of `arr` compares equal to
/// `value` via `UUID.compare`.
fn containUUID(arr: std.ArrayList(UUID), value: UUID) bool {
    for (arr.items) |elem| {
        if (value.compare(elem)) return true;
    }
    return false;
}
/// Order-insensitive equality of two UUID lists: lengths must match and every
/// element of the first must appear in the second. NOTE(review): with equal
/// lengths this assumes neither list holds duplicates — TODO confirm callers
/// only pass de-duplicated sets. Logs a debug message on length mismatch.
fn compareUUIDArray(arr1: std.ArrayList(UUID), arr2: std.ArrayList(UUID)) bool {
    if (arr1.items.len != arr2.items.len) {
        std.debug.print("Not same array len when comparing UUID. arr1: {d} arr2: {d}\n", .{ arr1.items.len, arr2.items.len });
        return false;
    }
    return for (arr1.items) |elem| {
        if (!containUUID(arr2, elem)) break false;
    } else true;
}
test "ADD" { test "ADD" {
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])"); try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])");
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])"); try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])");
@ -941,13 +803,13 @@ test "Specific query" {
} }
test "Synthax error" { test "Synthax error" {
try expectParsingError("GRAB {}", Parser.ZiQlParserError.StructNotFound); try expectParsingError("GRAB {}", ZiQlParserError.StructNotFound);
try expectParsingError("GRAB User {qwe = 'qwe'}", Parser.ZiQlParserError.MemberNotFound); try expectParsingError("GRAB User {qwe = 'qwe'}", ZiQlParserError.MemberNotFound);
try expectParsingError("ADD User (name='Bob')", Parser.ZiQlParserError.MemberMissing); try expectParsingError("ADD User (name='Bob')", ZiQlParserError.MemberMissing);
try expectParsingError("GRAB User {name='Bob'", Parser.ZiQlParserError.SynthaxError); try expectParsingError("GRAB User {name='Bob'", ZiQlParserError.SynthaxError);
try expectParsingError("GRAB User {age = 50 name='Bob'}", Parser.ZiQlParserError.SynthaxError); try expectParsingError("GRAB User {age = 50 name='Bob'}", ZiQlParserError.SynthaxError);
try expectParsingError("GRAB User {age <14 AND (age>55}", Parser.ZiQlParserError.SynthaxError); try expectParsingError("GRAB User {age <14 AND (age>55}", ZiQlParserError.SynthaxError);
try expectParsingError("GRAB User {name < 'Hello'}", Parser.ZiQlParserError.ConditionError); try expectParsingError("GRAB User {name < 'Hello'}", ZiQlParserError.ConditionError);
} }
fn testParsing(source: [:0]const u8) !void { fn testParsing(source: [:0]const u8) !void {
@ -964,7 +826,7 @@ fn testParsing(source: [:0]const u8) !void {
try parser.parse(); try parser.parse();
} }
fn expectParsingError(source: [:0]const u8, err: Parser.ZiQlParserError) !void { fn expectParsingError(source: [:0]const u8, err: ZiQlParserError) !void {
const allocator = std.testing.allocator; const allocator = std.testing.allocator;
const path = try allocator.dupe(u8, "ZipponDB"); const path = try allocator.dupe(u8, "ZipponDB");