Implement send all and send JSON

Now sends the JSON using the additional data.

Also implemented the empty filter and the missing filter, as in GRAB User {}
and GRAB User.
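
As a rough illustration (values made up), both forms now send back every User entity as JSON-ish text:

[{ name: "Bob", age: 55, }, ]

The keys are not quoted yet and trailing commas remain; see the TODOs in parseAndWriteToSend.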
Adrien Bouvais 2024-10-12 19:02:23 +02:00
parent e6357c4d74
commit ac4186529d
3 changed files with 236 additions and 28 deletions

View File

@@ -8,6 +8,7 @@ const SchemaStruct = @import("schemaParser.zig").Parser.SchemaStruct;
const SchemaParser = @import("schemaParser.zig").Parser;
const SchemaTokenizer = @import("tokenizers/schema.zig").Tokenizer;
const SchemaToken = @import("tokenizers/schema.zig").Token;
const AdditionalData = @import("ziqlParser.zig").Parser.AdditionalData;
// TODO: Create a union class and choose between file and memory
@@ -77,6 +78,182 @@ pub const FileEngine = struct {
}
};
// TODO: A function that takes a list of UUIDs and writes the message to send into the buffer
// Like the others, write it line by line, then if the UUID is found, write the data
// The output needs to be in JSON format, so change '' into ""
// Maybe I will change '' to "" everywhere
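// For illustration (values made up, storage format assumed from the parsing below):
// a stored line like <uuid> 'Bob' 55 [ 1 2 ] for members (name, age, scores)
// would currently come out as: { name: "Bob", age: 55, scores: [ 1 2 ], },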
pub fn parseAndWriteToSend(self: *FileEngine, struct_name: []const u8, uuids: []UUID, buffer: *std.ArrayList(u8), additional_data: AdditionalData) !void {
const max_file_index = try self.maxFileIndex(struct_name);
var current_index: usize = 0;
var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
defer self.allocator.free(path_buff);
var file = std.fs.cwd().openFile(path_buff, .{}) catch {
std.debug.print("Path: {s}", .{path_buff});
@panic("Can't open first file to init a data iterator");
};
defer file.close();
var output: [1024 * 50]u8 = undefined; // Maybe need to increase that, as it limits the size of a line in a file
var output_fbs = std.io.fixedBufferStream(&output);
const writer = output_fbs.writer();
var buffered = std.io.bufferedReader(file.reader());
var reader = buffered.reader();
var found = false;
var token: FileToken = undefined;
var out_writer = buffer.writer();
try out_writer.writeAll("[");
// Write the opening [
while (true) {
output_fbs.reset();
reader.streamUntilDelimiter(writer, '\n', null) catch |err| switch (err) {
error.EndOfStream => {
// At end of file, check whether all files were parsed; if not, point the reader at the next file
// TODO: Be able to give an array of file index from the B+Tree to only parse them
output_fbs.reset(); // clear buffer before exit
if (current_index == max_file_index) break;
current_index += 1;
self.allocator.free(path_buff);
path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
file.close(); // Do I need to close? I think so
file = std.fs.cwd().openFile(path_buff, .{}) catch {
std.debug.print("Error trying to open {s}\n", .{path_buff});
@panic("Can't open file to update a data iterator");
};
buffered = std.io.bufferedReader(file.reader());
reader = buffered.reader();
continue;
}, // file read till the end
else => {
std.debug.print("Error while reading file: {any}\n", .{err});
break;
},
};
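// The first 36 bytes of the line are the UUID and byte 36 is a separating space,
// so the member values start at byte 37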
const null_terminated_string = try self.allocator.dupeZ(u8, output_fbs.getWritten()[37..]);
defer self.allocator.free(null_terminated_string);
var data_toker = FileTokenizer.init(null_terminated_string);
const uuid = try UUID.parse(output_fbs.getWritten()[0..36]);
found = false;
// Optimize this
for (uuids) |elem| {
if (elem.compare(uuid)) {
found = true;
break;
}
}
if (found) {
try out_writer.writeAll("{");
for (self.structName2structMembers(struct_name), self.structName2DataType(struct_name)) |member_name, member_type| {
token = data_toker.next();
// FIXME: When relationships are implemented, need to check whether the len of NON-link members is 0
if ((additional_data.member_to_find.items.len == 0) or (self.isMemberNameInAdditionalData(self.locToSlice(member_name), additional_data))) {
// Write the member name and a colon
try out_writer.print("{s}: ", .{self.locToSlice(member_name)});
switch (member_type) {
.str => {
const str_slice = data_toker.getTokenSlice(token);
try out_writer.print("\"{s}\"", .{str_slice[1 .. str_slice.len - 1]});
},
.str_array => {}, // TODO: Write [ then "" then the text, repeat
.int_array, .float_array, .bool_array, .id_array => {
while (token.tag != .r_bracket) : (token = data_toker.next()) {
try out_writer.writeAll(data_toker.getTokenSlice(token));
try out_writer.writeAll(" ");
}
try out_writer.writeAll(data_toker.getTokenSlice(token));
},
else => try out_writer.writeAll(data_toker.getTokenSlice(token)), // write the value as-is
}
try out_writer.writeAll(", ");
}
}
try out_writer.writeAll("}");
try out_writer.writeAll(", ");
}
}
// Write the closing ]
try out_writer.writeAll("]");
}
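// e.g. for GRAB User [name] {age < 18}, member_to_find holds "name", so
// parseAndWriteToSend only serializes that member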
fn isMemberNameInAdditionalData(_: *FileEngine, member_name: []const u8, additional_data: AdditionalData) bool {
for (additional_data.member_to_find.items) |elem| {
if (std.mem.eql(u8, member_name, elem.name)) return true;
}
return false;
}
/// Use a struct name to populate a list with all UUIDs of this struct
pub fn getAllUUIDList(self: *FileEngine, struct_name: []const u8, uuid_array: *std.ArrayList(UUID)) !void {
const max_file_index = try self.maxFileIndex(struct_name);
var current_index: usize = 0;
var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
defer self.allocator.free(path_buff);
var file = std.fs.cwd().openFile(path_buff, .{}) catch {
std.debug.print("Path: {s}", .{path_buff});
@panic("Can't open first file to init a data iterator");
};
defer file.close();
var output: [1024 * 50]u8 = undefined; // Maybe need to increase that, as it limits the size of a line in a file
var output_fbs = std.io.fixedBufferStream(&output);
const writer = output_fbs.writer();
var buffered = std.io.bufferedReader(file.reader());
var reader = buffered.reader();
while (true) {
output_fbs.reset();
reader.streamUntilDelimiter(writer, '\n', null) catch |err| switch (err) {
error.EndOfStream => {
// At end of file, check whether all files were parsed; if not, point the reader at the next file
// TODO: Be able to give an array of file index from the B+Tree to only parse them
output_fbs.reset(); // clear buffer before exit
if (current_index == max_file_index) break;
current_index += 1;
self.allocator.free(path_buff);
path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
file.close(); // Do I need to close? I think so
file = std.fs.cwd().openFile(path_buff, .{}) catch {
std.debug.print("Error trying to open {s}\n", .{path_buff});
@panic("Can't open file to update a data iterator");
};
buffered = std.io.bufferedReader(file.reader());
reader = buffered.reader();
continue;
}, // file read till the end
else => {
std.debug.print("Error while reading file: {any}\n", .{err});
break;
},
};
const uuid = try UUID.parse(output_fbs.getWritten()[0..36]);
try uuid_array.append(uuid);
}
}
/// Take a condition and an array of UUIDs and fill the array with all UUIDs that match the condition
/// TODO: Change the UUID function to be a B+Tree
/// TODO: Optimize the shit out of this, it is way too slow right now. Here are some ideas

View File

@@ -21,6 +21,7 @@ pub const Token = struct {
.{ "false", .bool_literal_false },
.{ "AND", .keyword_and },
.{ "OR", .keyword_or },
.{ "TO", .keyword_to },
});
pub fn getKeyword(bytes: []const u8) ?Tag {
@@ -39,6 +40,7 @@ pub const Token = struct {
keyword_null,
keyword_and,
keyword_or,
keyword_to,
string_literal,
int_literal,

View File

@@ -58,6 +58,7 @@ pub const Parser = struct {
filter_and_send,
filter_and_update,
filter_and_delete,
send_all,
// For the main parse function
expect_struct_name,
@@ -178,6 +179,7 @@ pub const Parser = struct {
.DELETE => .filter_and_delete,
else => unreachable,
},
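// A query with no filter at all, like GRAB User, reaches end-of-input here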
.eof => self.state = .send_all,
else => return self.printError("Error: Expected [ for additional data or { for a filter", &token, ZiQlParserError.SynthaxError),
}
},
@@ -192,6 +194,17 @@ pub const Parser = struct {
};
},
.send_all => {
var array = std.ArrayList(UUID).init(self.allocator);
defer array.deinit();
try self.file_engine.getAllUUIDList(self.struct_name, &array);
// TODO: Use the additional data to reduce the array
self.sendEntity(&array);
self.state = .end;
},
.filter_and_send => {
var array = std.ArrayList(UUID).init(self.allocator);
defer array.deinit();
@@ -199,7 +212,7 @@ pub const Parser = struct {
// TODO: Use the additional data to reduce the array
self.sendEntity(array.items);
self.sendEntity(&array);
self.state = .end;
},
@@ -211,7 +224,7 @@ pub const Parser = struct {
// TODO: Use the additional data to reduce the array
if (token.tag != .equal_angle_bracket_right) return self.printError("Error: Expected =>", &token, ZiQlParserError.SynthaxError);
if (token.tag != .keyword_to) return self.printError("Error: Expected TO", &token, ZiQlParserError.SynthaxError);
token = self.toker.next();
if (token.tag != .l_paren) return self.printError("Error: Expected (", &token, ZiQlParserError.SynthaxError);
@@ -264,14 +277,21 @@ pub const Parser = struct {
}
}
// TODO: Use that when I want to return data to the user, need to understand how it works.
// I think for now put the ordering using additional data here
// Maybe add a struct Communicator to handle all communication between the user and the CLI
fn sendEntity(self: *Parser, uuid_array: []UUID) void {
_ = self;
_ = uuid_array;
// TODO: Once ASC and DESC are implemented, order before cutting when the list is too long
fn sendEntity(self: *Parser, uuid_list: *std.ArrayList(UUID)) void {
var buffer = std.ArrayList(u8).init(self.allocator);
defer buffer.deinit();
//send("Number of uuid to send: {d}\n", .{uuid_array.len});
// Pop elements if the array is too long
if ((self.additional_data.entity_count_to_find != 0) and (self.additional_data.entity_count_to_find < uuid_list.items.len)) {
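// e.g. GRAB User [10] with 25 matching UUIDs (illustrative numbers): to_pop = 15, keeping the first 10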
const to_pop = uuid_list.items.len - self.additional_data.entity_count_to_find;
for (0..to_pop) |_| _ = uuid_list.pop();
}
// I'm going to need a function in the file engine to parse and write into the buffer
self.file_engine.parseAndWriteToSend(self.struct_name, uuid_list.items, &buffer, self.additional_data) catch @panic("Error parsing data to send");
send("{s}", .{buffer.items});
}
// TODO: The parser that checks what is between ||
@@ -296,31 +316,37 @@ pub const Parser = struct {
keep_next = false;
}) {
switch (self.state) {
.expect_left_condition => {
token = try self.parseCondition(&left_condition, &token);
try self.file_engine.getUUIDListUsingCondition(left_condition, left_array);
self.state = State.expect_ANDOR_OR_end;
keep_next = true;
.expect_left_condition => switch (token.tag) {
.r_brace => {
try self.file_engine.getAllUUIDList(struct_name, left_array);
self.state = .end;
},
else => {
token = try self.parseCondition(&left_condition, &token);
try self.file_engine.getUUIDListUsingCondition(left_condition, left_array);
self.state = .expect_ANDOR_OR_end;
keep_next = true;
},
},
.expect_ANDOR_OR_end => switch (token.tag) {
.r_brace => if (main) {
self.state = State.end;
self.state = .end;
} else {
return self.printError("Error: Expected } to end main condition or AND/OR to continue it", &token, ZiQlParserError.SynthaxError);
},
.r_paren => if (!main) {
self.state = State.end;
self.state = .end;
} else {
return self.printError("Error: Expected ) to end inside condition or AND/OR to continue it", &token, ZiQlParserError.SynthaxError);
},
.keyword_and => {
curent_operation = .and_;
self.state = State.expect_right_uuid_array;
self.state = .expect_right_uuid_array;
},
.keyword_or => {
curent_operation = .or_;
self.state = State.expect_right_uuid_array;
self.state = .expect_right_uuid_array;
},
else => return self.printError("Error: Expected AND or OR to continue the condition, or } or ) to end the filter", &token, ZiQlParserError.SynthaxError),
},
@@ -812,32 +838,30 @@ fn compareUUIDArray(arr1: std.ArrayList(UUID), arr2: std.ArrayList(UUID)) bool {
test "ADD" {
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])");
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])");
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[])");
}
test "UPDATE" {
try testParsing("UPDATE User {name = 'Bob'} => (email='new@gmail.com')");
try testParsing("UPDATE User {name = 'Bob'} TO (email='new@gmail.com')");
}
test "DELETE" {
try testParsing("DELETE User {name='Bob'}");
}
test "GRAB filter with string" {
try testParsing("GRAB User {name = 'Bob'}");
try testParsing("GRAB User {name != 'Brittany Rogers'}");
}
test "GRAB with additional data" {
try testParsing("GRAB User [1] {age < 18}");
try testParsing("GRAB User [name] {age < 18}");
try testParsing("GRAB User [100; name] {age < 18}");
}
test "GRAB filter with string" {
// TODO: Use a fixed dataset for testing, chosen in build.zig
// It should also check that the right number of entities is found
try testParsing("GRAB User {name = 'Brittany Rogers'}");
try testParsing("GRAB User {name != 'Brittany Rogers'}");
}
test "GRAB filter with int" {
// TODO: Use a fixed dataset for testing, chosen in build.zig
// It should also check that the right number of entities is found
try testParsing("GRAB User {age = 18}");
try testParsing("GRAB User {age > 18}");
try testParsing("GRAB User {age < 18}");
@@ -846,6 +870,11 @@ test "GRAB filter with int" {
try testParsing("GRAB User {age != 18}");
}
test "Specific query" {
try testParsing("GRAB User");
try testParsing("GRAB User {}");
}
fn testParsing(source: [:0]const u8) !void {
const allocator = std.testing.allocator;