Implemented UPDATE and errors for the ZiQL parser
Created a function used by UPDATE that takes a list of UUIDs and a map of new values. It can be optimized later but works for now. Also started adding proper error handling with a custom error set, starting with ZiQlParserError.
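A minimal sketch of the query shape this enables, assuming an illustrative User struct with name and age members (these names are not part of this commit): the parser resolves the filter between {} to a list of UUIDs, expects =>, then parses the new values between () into a string map that is handed to FileEngine.updateEntities.

    UPDATE User {name = 'Bob'} => (age = 26)

updateEntities then streams each data file into a .zippondata.new file, rewriting the rows whose UUID is in the list, and finally renames the new file over the old one.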
This commit is contained in:
parent fbcca0dc09
commit 94e3905009
11 src/cli.zig
@@ -92,7 +92,7 @@ pub fn main() !void {
.string_literal => {
|
||||
const null_term_query_str = try allocator.dupeZ(u8, line_str[token.loc.start + 1 .. token.loc.end - 1]);
|
||||
defer allocator.free(null_term_query_str);
|
||||
try runQuery(null_term_query_str, &file_engine);
|
||||
runQuery(null_term_query_str, &file_engine);
|
||||
state = .end;
|
||||
},
|
||||
.keyword_help => {
|
||||
@@ -165,7 +165,7 @@ pub fn main() !void {
}
|
||||
}
|
||||
|
||||
pub fn runQuery(null_term_query_str: [:0]const u8, file_engine: *FileEngine) !void {
|
||||
pub fn runQuery(null_term_query_str: [:0]const u8, file_engine: *FileEngine) void {
|
||||
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
|
||||
const allocator = gpa.allocator();
|
||||
|
||||
@@ -175,12 +175,15 @@ pub fn runQuery(null_term_query_str: [:0]const u8, file_engine: *FileEngine) !vo
defer {
|
||||
parser.deinit();
|
||||
switch (gpa.deinit()) {
|
||||
.ok => std.log.debug("No memory leak baby !\n", .{}),
|
||||
.ok => {},
|
||||
.leak => std.log.debug("We fucked it up bro...\n", .{}),
|
||||
}
|
||||
}
|
||||
|
||||
try parser.parse();
|
||||
parser.parse() catch |err| switch (err) {
|
||||
error.SynthaxError => {},
|
||||
else => {},
|
||||
};
|
||||
}
|
||||
|
||||
// TODO: Put that in the FileEngine
|
||||
|
@@ -54,10 +54,12 @@ pub const FileEngine = struct {
float: f64,
|
||||
str: []const u8,
|
||||
bool_: bool,
|
||||
id: UUID,
|
||||
int_array: std.ArrayList(i64),
|
||||
str_array: std.ArrayList([]const u8),
|
||||
float_array: std.ArrayList(f64),
|
||||
bool_array: std.ArrayList(bool),
|
||||
id_array: std.ArrayList(UUID),
|
||||
};
|
||||
|
||||
/// use to parse file. It take a struct name and member name to know what to parse.
|
||||
@@ -67,7 +69,7 @@ pub const FileEngine = struct {
struct_name: []const u8,
|
||||
member_name: []const u8 = undefined,
|
||||
value: []const u8 = undefined,
|
||||
operation: enum { equal, different, superior, superior_or_equal, inferior, inferior_or_equal } = undefined, // Add more stuff like IN
|
||||
operation: enum { equal, different, superior, superior_or_equal, inferior, inferior_or_equal, in } = undefined, // Add more stuff like IN
|
||||
data_type: DataType = undefined,
|
||||
|
||||
pub fn init(struct_name: []const u8) Condition {
|
||||
@@ -76,20 +78,25 @@ pub const FileEngine = struct {
};
|
||||
|
||||
/// Take a condition and an array of UUID and fill the array with all UUID that match the condition
|
||||
/// TODO: Change the UUID function to be a B+Tree
|
||||
/// TODO: Optimize the shit out of this, it it way too slow rn. Here some ideas
|
||||
/// - Array can take a very long time to parse, maybe put them in a seperate file. But string can be too...
|
||||
/// - Use the stream directly in the tokenizer
|
||||
/// - Use a fixed size and split into other file. Like one file for one member (Because very long, like an array of 1000 value) and another one for everything else
|
||||
/// The threselhold can be like if the average len is > 400 character. So UUID would take less that 10% of the storage
|
||||
/// - Save data in a more compact way
|
||||
/// - Multithreading, each thread take a list of files and we mix them at the end
|
||||
pub fn getUUIDListUsingCondition(self: *FileEngine, condition: Condition, uuid_array: *std.ArrayList(UUID)) !void {
|
||||
const max_file_index = try self.maxFileIndex(condition.struct_name);
|
||||
var current_index: usize = 0;
|
||||
|
||||
var sub_path = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, condition.struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
defer self.allocator.free(sub_path);
|
||||
var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, condition.struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
defer self.allocator.free(path_buff);
|
||||
|
||||
var file = std.fs.cwd().openFile(sub_path, .{}) catch @panic("Can't open first file to init a data iterator");
|
||||
var file = std.fs.cwd().openFile(path_buff, .{}) catch {
|
||||
std.debug.print("Path: {s}", .{path_buff});
|
||||
@panic("Can't open first file to init a data iterator");
|
||||
};
|
||||
defer file.close();
|
||||
|
||||
var output: [1024 * 50]u8 = undefined; // Maybe need to increase that as it limit the size of a line in a file
|
||||
@@ -105,10 +112,12 @@ pub const FileEngine = struct {
.str => compare_value = ComparisonValue{ .str = condition.value },
|
||||
.float => compare_value = ComparisonValue{ .float = parseFloat(condition.value) },
|
||||
.bool => compare_value = ComparisonValue{ .bool_ = parseBool(condition.value) },
|
||||
.id => compare_value = ComparisonValue{ .id = try UUID.parse(condition.value) },
|
||||
.int_array => compare_value = ComparisonValue{ .int_array = parseArrayInt(self.allocator, condition.value) },
|
||||
.str_array => compare_value = ComparisonValue{ .str_array = parseArrayStr(self.allocator, condition.value) },
|
||||
.float_array => compare_value = ComparisonValue{ .float_array = parseArrayFloat(self.allocator, condition.value) },
|
||||
.bool_array => compare_value = ComparisonValue{ .bool_array = parseArrayBool(self.allocator, condition.value) },
|
||||
.id_array => compare_value = ComparisonValue{ .id_array = parseArrayUUID(self.allocator, condition.value) },
|
||||
}
|
||||
defer {
|
||||
switch (condition.data_type) {
|
||||
@@ -116,6 +125,7 @@ pub const FileEngine = struct {
.str_array => compare_value.str_array.deinit(),
|
||||
.float_array => compare_value.float_array.deinit(),
|
||||
.bool_array => compare_value.bool_array.deinit(),
|
||||
.id_array => compare_value.id_array.deinit(),
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
@@ -127,19 +137,21 @@ pub const FileEngine = struct {
output_fbs.reset();
|
||||
reader.streamUntilDelimiter(writer, '\n', null) catch |err| switch (err) {
|
||||
error.EndOfStream => {
|
||||
// When end of file, check if all file was parse, if not update the reader to the next file
|
||||
// TODO: Be able to give an array of file index from the B+Tree to only parse them
|
||||
output_fbs.reset(); // clear buffer before exit
|
||||
|
||||
if (current_index == max_file_index) break;
|
||||
|
||||
current_index += 1;
|
||||
|
||||
self.allocator.free(sub_path);
|
||||
sub_path = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, condition.struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
self.allocator.free(path_buff);
|
||||
path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, condition.struct_name, current_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
|
||||
file.close(); // Do I need to close ? I think so
|
||||
file = std.fs.cwd().openFile(sub_path, .{}) catch {
|
||||
std.debug.print("Error trying to open {s}\n", .{sub_path});
|
||||
@panic("Can't open first file to init a data iterator");
|
||||
file = std.fs.cwd().openFile(path_buff, .{}) catch {
|
||||
std.debug.print("Error trying to open {s}\n", .{path_buff});
|
||||
@panic("Can't open file to update a data iterator");
|
||||
};
|
||||
|
||||
buffered = std.io.bufferedReader(file.reader());
|
||||
@@ -167,65 +179,64 @@ pub const FileEngine = struct {
|
||||
token = data_toker.next();
|
||||
|
||||
// TODO: Add error for wrong condition like superior between 2 string or array
|
||||
// TODO: Make sure in amount that the rest is unreachable by sending an error for wrong condition like superior between 2 string or array
|
||||
switch (condition.operation) {
|
||||
.equal => {
|
||||
switch (condition.data_type) {
|
||||
.equal => switch (condition.data_type) {
|
||||
.int => if (compare_value.int == parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float == parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.str => if (std.mem.eql(u8, compare_value.str, data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.bool => if (compare_value.bool_ == parseBool(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.id => if (compare_value.id.compare(uuid)) try uuid_array.append(uuid),
|
||||
// TODO: Implement for array too
|
||||
else => {},
|
||||
}
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.different => {
|
||||
switch (condition.data_type) {
|
||||
.different => switch (condition.data_type) {
|
||||
.int => if (compare_value.int != parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float != parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.str => if (!std.mem.eql(u8, compare_value.str, data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.bool => if (compare_value.bool_ != parseBool(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
// TODO: Implement for array too
|
||||
else => {},
|
||||
}
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.superior_or_equal => {
|
||||
switch (condition.data_type) {
|
||||
.superior_or_equal => switch (condition.data_type) {
|
||||
.int => if (compare_value.int <= parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float <= parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
// TODO: Implement for array too
|
||||
else => {},
|
||||
}
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.superior => {
|
||||
switch (condition.data_type) {
|
||||
.superior => switch (condition.data_type) {
|
||||
.int => if (compare_value.int < parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float < parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
// TODO: Implement for array too
|
||||
else => {},
|
||||
}
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.inferior_or_equal => {
|
||||
switch (condition.data_type) {
|
||||
.inferior_or_equal => switch (condition.data_type) {
|
||||
.int => if (compare_value.int >= parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float >= parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
// TODO: Implement for array too
|
||||
else => {},
|
||||
}
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.inferior => {
|
||||
switch (condition.data_type) {
|
||||
.inferior => switch (condition.data_type) {
|
||||
.int => if (compare_value.int > parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float > parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
// TODO: Implement for array too
|
||||
else => {},
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
// TODO: Do it for other array
|
||||
.in => switch (condition.data_type) {
|
||||
.id_array => {
|
||||
for (compare_value.id_array.items) |elem| {
|
||||
if (elem.compare(uuid)) try uuid_array.append(uuid);
|
||||
}
|
||||
},
|
||||
else => unreachable,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -265,6 +276,161 @@ pub const FileEngine = struct {
return uuid;
|
||||
}
|
||||
|
||||
/// Function to update the file with updated data. Take a list of uuid and a list of string map. The map is in the format key: member; value: new value.
|
||||
/// It create a new index.zippondata.new file in the same folder, stream the output of the old file to it until a uuid is found, then write the new row and continue until the end
|
||||
/// TODO: Optmize a lot, I did that quickly to work but it is far from optimized. Idea:
|
||||
/// - Once all uuid found, stream until the end of the file without delimiter or uuid compare
|
||||
/// - Change map to array
|
||||
pub fn updateEntities(self: *FileEngine, struct_name: []const u8, uuids: std.ArrayList(UUID), new_data_map: std.StringHashMap([]const u8)) !void {
|
||||
const max_file_index = self.maxFileIndex(struct_name) catch @panic("Cant get max index file when updating");
|
||||
var current_file_index: usize = 0;
|
||||
|
||||
var path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
defer self.allocator.free(path_buff);
|
||||
|
||||
var path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
defer self.allocator.free(path_buff2);
|
||||
|
||||
var old_file = std.fs.cwd().openFile(path_buff, .{}) catch {
|
||||
std.debug.print("Path: {s}", .{path_buff});
|
||||
@panic("Can't open first file to init a data iterator");
|
||||
};
|
||||
|
||||
self.allocator.free(path_buff);
|
||||
path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
|
||||
var new_file = std.fs.cwd().createFile(path_buff, .{}) catch {
|
||||
std.debug.print("Path: {s}", .{path_buff});
|
||||
@panic("Can't create new file to init a data iterator");
|
||||
};
|
||||
defer new_file.close();
|
||||
|
||||
var output: [1024 * 50]u8 = undefined; // Maybe need to increase that as it limit the size of a line in a file
|
||||
var output_fbs = std.io.fixedBufferStream(&output);
|
||||
const writer = output_fbs.writer();
|
||||
|
||||
var buffered = std.io.bufferedReader(old_file.reader());
|
||||
var reader = buffered.reader();
|
||||
var founded = false;
|
||||
|
||||
while (true) {
|
||||
output_fbs.reset();
|
||||
reader.streamUntilDelimiter(writer, ' ', null) catch |err| switch (err) {
|
||||
error.EndOfStream => {
|
||||
// When end of file, check if all file was parse, if not update the reader to the next file
|
||||
// TODO: Be able to give an array of file index from the B+Tree to only parse them
|
||||
output_fbs.reset(); // clear buffer before exit
|
||||
|
||||
// Start by deleting and renaming the new file
|
||||
self.allocator.free(path_buff);
|
||||
path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
|
||||
self.allocator.free(path_buff2);
|
||||
path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
|
||||
old_file.close();
|
||||
try std.fs.cwd().deleteFile(path_buff);
|
||||
try std.fs.cwd().rename(path_buff2, path_buff);
|
||||
|
||||
if (current_file_index == max_file_index) break;
|
||||
|
||||
current_file_index += 1;
|
||||
|
||||
self.allocator.free(path_buff);
|
||||
path_buff = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
|
||||
self.allocator.free(path_buff2);
|
||||
path_buff2 = std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}/{d}.zippondata.new", .{ self.path_to_ZipponDB_dir, struct_name, current_file_index }) catch @panic("Can't create sub_path for init a DataIterator");
|
||||
|
||||
old_file = std.fs.cwd().openFile(path_buff, .{}) catch {
|
||||
std.debug.print("Error trying to open {s}\n", .{path_buff});
|
||||
@panic("Can't open file to update entities");
|
||||
};
|
||||
|
||||
new_file = std.fs.cwd().createFile(path_buff2, .{}) catch {
|
||||
std.debug.print("Error trying to create {s}\n", .{path_buff2});
|
||||
@panic("Can't create file to update entities");
|
||||
};
|
||||
|
||||
buffered = std.io.bufferedReader(old_file.reader());
|
||||
reader = buffered.reader();
|
||||
continue;
|
||||
}, // file read till the end
|
||||
else => {
|
||||
std.debug.print("Error while reading file: {any}\n", .{err});
|
||||
break;
|
||||
},
|
||||
};
|
||||
|
||||
try new_file.writeAll(output_fbs.getWritten());
|
||||
|
||||
// THis is the uuid of the current row
|
||||
const uuid = try UUID.parse(output_fbs.getWritten()[0..36]); // FIXME: After the first loop, the first char is \n, which is invalid
|
||||
founded = false;
|
||||
|
||||
// Optimize this
|
||||
for (uuids.items) |elem| {
|
||||
if (elem.compare(uuid)) {
|
||||
founded = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (founded) {
|
||||
for (self.structName2structMembers(struct_name), self.structName2DataType(struct_name)) |member_name, member_type| {
|
||||
// For all collum in the right order, check if the key is in the map, if so use it to write the new value, otherwise use the old file
|
||||
output_fbs.reset();
|
||||
switch (member_type) {
|
||||
.str => {
|
||||
try reader.streamUntilDelimiter(writer, '\'', null);
|
||||
try reader.streamUntilDelimiter(writer, '\'', null);
|
||||
},
|
||||
.int_array, .float_array, .bool_array, .id_array => try reader.streamUntilDelimiter(writer, ']', null),
|
||||
.str_array => try reader.streamUntilDelimiter(writer, ']', null), // FIXME: If the string itself contain ], this will be a problem
|
||||
else => {
|
||||
try reader.streamUntilDelimiter(writer, ' ', null);
|
||||
try reader.streamUntilDelimiter(writer, ' ', null);
|
||||
},
|
||||
}
|
||||
|
||||
if (new_data_map.contains(self.locToSlice(member_name))) {
|
||||
// Write the new data
|
||||
try new_file.writer().print(" {s}", .{new_data_map.get(self.locToSlice(member_name)).?});
|
||||
} else {
|
||||
// Write the old data
|
||||
switch (member_type) {
|
||||
.str => try new_file.writeAll(" \'"),
|
||||
.int_array => try new_file.writeAll(" "),
|
||||
.float_array => try new_file.writeAll(" "),
|
||||
.str_array => try new_file.writeAll(" "),
|
||||
.bool_array => try new_file.writeAll(" "),
|
||||
.id_array => try new_file.writeAll(" "),
|
||||
else => try new_file.writeAll(" "),
|
||||
}
|
||||
|
||||
try new_file.writeAll(output_fbs.getWritten());
|
||||
|
||||
switch (member_type) {
|
||||
.str => try new_file.writeAll("\'"),
|
||||
.int_array, .float_array, .bool_array, .id_array => try new_file.writeAll("]"),
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try reader.streamUntilDelimiter(writer, '\n', null);
|
||||
try new_file.writeAll("\n");
|
||||
} else {
|
||||
// stream until the delimiter
|
||||
output_fbs.reset();
|
||||
try new_file.writeAll(" ");
|
||||
try reader.streamUntilDelimiter(writer, '\n', null);
|
||||
try new_file.writeAll(output_fbs.getWritten());
|
||||
try new_file.writeAll("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Use a filename in the format 1.zippondata and return the 1
|
||||
/// Note that if I change the extension of the data file, I need to update that as it use a fixed len for the extension
|
||||
fn fileName2Index(_: FileEngine, file_name: []const u8) usize {
|
||||
@@ -535,6 +701,18 @@ pub fn parseArrayBool(allocator: std.mem.Allocator, array_str: []const u8) std.A
return array;
|
||||
}
|
||||
|
||||
pub fn parseArrayUUID(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(UUID) {
|
||||
var array = std.ArrayList(UUID).init(allocator);
|
||||
|
||||
var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
|
||||
while (it.next()) |x| {
|
||||
const uuid = UUID.parse(x) catch continue;
|
||||
array.append(uuid) catch continue;
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
|
||||
// FIXME: This will not work if their is a space in one string. E.g ['Hello world'] will be split between Hello and world but it shouldn't
|
||||
pub fn parseArrayStr(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList([]const u8) {
|
||||
var array = std.ArrayList([]const u8).init(allocator);
|
||||
|
@@ -4,8 +4,10 @@ pub const DataType = enum {
float,
|
||||
str,
|
||||
bool,
|
||||
id,
|
||||
int_array,
|
||||
float_array,
|
||||
str_array,
|
||||
bool_array,
|
||||
id_array,
|
||||
};
|
||||
|
@@ -56,6 +56,8 @@ pub const Parser = struct {
// Endpoint
|
||||
parse_new_data_and_add_data,
|
||||
filter_and_send,
|
||||
filter_and_update,
|
||||
filter_and_delete,
|
||||
|
||||
// For the main parse function
|
||||
expect_struct_name,
|
||||
@@ -87,6 +89,15 @@ pub const Parser = struct {
add_array_to_map,
|
||||
};
|
||||
|
||||
const ZiQlParserError = error{
|
||||
SynthaxError,
|
||||
MemberNotFound,
|
||||
MemberMissing,
|
||||
StructNotFound,
|
||||
FeatureMissing,
|
||||
ParsingValueError,
|
||||
};
|
||||
|
||||
/// This is the [] part
|
||||
/// IDK if saving it into the Parser struct is a good idea
|
||||
pub const AdditionalData = struct {
|
||||
@@ -144,16 +155,13 @@ pub const Parser = struct {
self.action = .DELETE;
|
||||
self.state = .expect_struct_name;
|
||||
},
|
||||
else => {
|
||||
self.printError("Error: Expected action keyword. Available: GRAB ADD DELETE UPDATE", &token);
|
||||
self.state = .end;
|
||||
},
|
||||
else => return self.printError("Error: Expected action keyword. Available: GRAB ADD DELETE UPDATE", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.expect_struct_name => {
|
||||
// Check if the struct name is in the schema
|
||||
self.struct_name = try self.allocator.dupe(u8, self.toker.getTokenSlice(token));
|
||||
if (!self.file_engine.isStructNameExists(self.struct_name)) self.printError("Error: struct name not found in schema.", &token);
|
||||
if (!self.file_engine.isStructNameExists(self.struct_name)) return self.printError("Error: struct name not found in schema.", &token, ZiQlParserError.StructNotFound);
|
||||
switch (self.action) {
|
||||
.ADD => self.state = .expect_new_data,
|
||||
else => self.state = .expect_filter_or_additional_data,
|
||||
@@ -164,40 +172,75 @@ pub const Parser = struct {
keep_next = true;
|
||||
switch (token.tag) {
|
||||
.l_bracket => self.state = .parse_additional_data,
|
||||
.l_brace => self.state = .filter_and_send,
|
||||
else => self.printError("Error: Expect [ for additional data or { for a filter", &token),
|
||||
.l_brace => self.state = switch (self.action) {
|
||||
.GRAB => .filter_and_send,
|
||||
.UPDATE => .filter_and_update,
|
||||
.DELETE => .filter_and_delete,
|
||||
else => unreachable,
|
||||
},
|
||||
else => return self.printError("Error: Expect [ for additional data or { for a filter", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
},
|
||||
|
||||
.parse_additional_data => {
|
||||
try self.parseAdditionalData(&self.additional_data);
|
||||
self.state = .filter_and_send;
|
||||
self.state = switch (self.action) {
|
||||
.GRAB => .filter_and_send,
|
||||
.UPDATE => .filter_and_update,
|
||||
.DELETE => .filter_and_delete,
|
||||
else => unreachable,
|
||||
};
|
||||
},
|
||||
|
||||
.filter_and_send => {
|
||||
var array = std.ArrayList(UUID).init(self.allocator);
|
||||
defer array.deinit();
|
||||
try self.parseFilter(&array, self.struct_name, true);
|
||||
_ = try self.parseFilter(&array, self.struct_name, true);
|
||||
|
||||
// TODO: Use the additional data to reduce the array
|
||||
|
||||
self.sendEntity(array.items);
|
||||
self.state = .end;
|
||||
},
|
||||
|
||||
// TODO: Optimize so it doesnt use parseFilter but just parse the file and directly check the condition. Here I end up parsing 2 times.
|
||||
.filter_and_update => {
|
||||
var array = std.ArrayList(UUID).init(self.allocator);
|
||||
defer array.deinit();
|
||||
token = try self.parseFilter(&array, self.struct_name, true);
|
||||
|
||||
// TODO: Use the additional data to reduce the array
|
||||
|
||||
if (token.tag != .equal_angle_bracket_right) return self.printError("Error: Expected =>", &token, ZiQlParserError.SynthaxError);
|
||||
|
||||
token = self.toker.next();
|
||||
if (token.tag != .l_paren) return self.printError("Error: Expected (", &token, ZiQlParserError.SynthaxError);
|
||||
|
||||
var data_map = std.StringHashMap([]const u8).init(self.allocator);
|
||||
defer data_map.deinit();
|
||||
try self.parseNewData(&data_map);
|
||||
|
||||
try self.file_engine.updateEntities(self.struct_name, array, data_map);
|
||||
self.state = .end;
|
||||
},
|
||||
|
||||
.filter_and_delete => return self.printError("Error: Delete not yet implemented", &token, ZiQlParserError.FeatureMissing),
|
||||
|
||||
.expect_new_data => switch (token.tag) {
|
||||
.l_paren => {
|
||||
keep_next = true;
|
||||
self.state = .parse_new_data_and_add_data;
|
||||
},
|
||||
else => self.printError("Error: Expecting new data starting with (", &token),
|
||||
else => return self.printError("Error: Expecting new data starting with (", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.parse_new_data_and_add_data => switch (self.action) {
|
||||
.ADD => {
|
||||
.parse_new_data_and_add_data => {
|
||||
var data_map = std.StringHashMap([]const u8).init(self.allocator);
|
||||
defer data_map.deinit();
|
||||
self.parseNewData(&data_map);
|
||||
try self.parseNewData(&data_map);
|
||||
|
||||
// TODO: Print the list of missing
|
||||
if (!self.file_engine.checkIfAllMemberInMap(self.struct_name, &data_map)) self.printError("Error: Missing member", &token);
|
||||
if (!self.file_engine.checkIfAllMemberInMap(self.struct_name, &data_map)) return self.printError("Error: Missing member", &token, ZiQlParserError.MemberMissing);
|
||||
const uuid = self.file_engine.writeEntity(self.struct_name, data_map) catch {
|
||||
send("ZipponDB error: Couln't write new data to file", .{});
|
||||
continue;
|
||||
@@ -205,9 +248,6 @@ pub const Parser = struct {
send("Successfully added new {s} with UUID: {s}", .{ self.struct_name, uuid.format_uuid() });
|
||||
self.state = .end;
|
||||
},
|
||||
.UPDATE => {}, // TODO:
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
}
|
||||
@@ -232,7 +272,8 @@ pub const Parser = struct {
|
||||
/// Take an array of UUID and populate it with what match what is between {}
|
||||
/// Main is to know if between {} or (), main is true if between {}, otherwise between () inside {}
|
||||
fn parseFilter(self: *Parser, left_array: *std.ArrayList(UUID), struct_name: []const u8, main: bool) !void {
|
||||
/// TODO: Optimize this so it can use multiple condition at the same time instead of parsing the all file for each condition
|
||||
fn parseFilter(self: *Parser, left_array: *std.ArrayList(UUID), struct_name: []const u8, main: bool) !Token {
|
||||
var token = self.toker.next();
|
||||
var keep_next = false;
|
||||
self.state = State.expect_left_condition;
|
||||
@@ -246,26 +287,22 @@ pub const Parser = struct {
}) {
|
||||
switch (self.state) {
|
||||
.expect_left_condition => {
|
||||
token = self.parseCondition(&left_condition, &token);
|
||||
token = try self.parseCondition(&left_condition, &token);
|
||||
try self.file_engine.getUUIDListUsingCondition(left_condition, left_array);
|
||||
self.state = State.expect_ANDOR_OR_end;
|
||||
keep_next = true;
|
||||
},
|
||||
|
||||
.expect_ANDOR_OR_end => switch (token.tag) {
|
||||
.r_brace => {
|
||||
if (main) {
|
||||
.r_brace => if (main) {
|
||||
self.state = State.end;
|
||||
} else {
|
||||
self.printError("Error: Expected } to end main condition or AND/OR to continue it", &token);
|
||||
}
|
||||
return self.printError("Error: Expected } to end main condition or AND/OR to continue it", &token, ZiQlParserError.SynthaxError);
|
||||
},
|
||||
.r_paren => {
|
||||
if (!main) {
|
||||
.r_paren => if (!main) {
|
||||
self.state = State.end;
|
||||
} else {
|
||||
self.printError("Error: Expected ) to end inside condition or AND/OR to continue it", &token);
|
||||
}
|
||||
return self.printError("Error: Expected ) to end inside condition or AND/OR to continue it", &token, ZiQlParserError.SynthaxError);
|
||||
},
|
||||
.keyword_and => {
|
||||
curent_operation = .and_;
|
||||
@@ -275,7 +312,7 @@ pub const Parser = struct {
curent_operation = .or_;
|
||||
self.state = State.expect_right_uuid_array;
|
||||
},
|
||||
else => self.printError("Error: Expected a condition including AND or OR or } or )", &token),
|
||||
else => return self.printError("Error: Expected a condition including AND OR or the end of the filter with } or )", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.expect_right_uuid_array => {
|
||||
@@ -283,15 +320,15 @@ pub const Parser = struct {
defer right_array.deinit();
|
||||
|
||||
switch (token.tag) {
|
||||
.l_paren => try self.parseFilter(&right_array, struct_name, false), // run parserFilter to get the right array
|
||||
.l_paren => _ = try self.parseFilter(&right_array, struct_name, false), // run parserFilter to get the right array
|
||||
.identifier => {
|
||||
var right_condition = Condition.init(struct_name);
|
||||
|
||||
token = self.parseCondition(&right_condition, &token);
|
||||
token = try self.parseCondition(&right_condition, &token);
|
||||
keep_next = true;
|
||||
try self.file_engine.getUUIDListUsingCondition(right_condition, &right_array);
|
||||
}, // Create a new condition and compare it
|
||||
else => self.printError("Error: Expecting ( or member name.", &token),
|
||||
else => return self.printError("Error: Expecting ( or member name.", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
|
||||
switch (curent_operation) {
|
||||
@@ -302,18 +339,19 @@ pub const Parser = struct {
try OR(left_array, &right_array);
|
||||
},
|
||||
}
|
||||
std.debug.print("Token here {any}\n", .{token});
|
||||
self.state = .expect_ANDOR_OR_end;
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
/// Parse to get a Condition< Which is a struct that is use by the FileEngine to retreive data.
|
||||
/// In the query, it is this part name = 'Bob' or age <= 10
|
||||
fn parseCondition(self: *Parser, condition: *Condition, token_ptr: *Token) Token {
|
||||
fn parseCondition(self: *Parser, condition: *Condition, token_ptr: *Token) !Token {
|
||||
var keep_next = false;
|
||||
self.state = .expect_member;
|
||||
var token = token_ptr.*;
|
||||
@@ -326,13 +364,13 @@ pub const Parser = struct {
.expect_member => switch (token.tag) {
|
||||
.identifier => {
|
||||
if (!self.file_engine.isMemberNameInStruct(condition.struct_name, self.toker.getTokenSlice(token))) {
|
||||
self.printError("Error: Member not part of struct.", &token);
|
||||
return self.printError("Error: Member not part of struct.", &token, ZiQlParserError.MemberNotFound);
|
||||
}
|
||||
condition.data_type = self.file_engine.memberName2DataType(condition.struct_name, self.toker.getTokenSlice(token)) orelse @panic("Couldn't find the struct and member");
|
||||
condition.member_name = self.toker.getTokenSlice(token);
|
||||
self.state = State.expect_operation;
|
||||
},
|
||||
else => self.printError("Error: Expected member name.", &token),
|
||||
else => return self.printError("Error: Expected member name.", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.expect_operation => {
|
||||
@@ -343,7 +381,7 @@ pub const Parser = struct {
.angle_bracket_left_equal => condition.operation = .inferior_or_equal, // <=
|
||||
.angle_bracket_right_equal => condition.operation = .superior_or_equal, // >=
|
||||
.bang_equal => condition.operation = .different, // !=
|
||||
else => self.printError("Error: Expected condition. Including < > <= >= = !=", &token),
|
||||
else => return self.printError("Error: Expected condition. Including < > <= >= = !=", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
self.state = State.expect_value;
|
||||
},
|
||||
@@ -353,25 +391,25 @@ pub const Parser = struct {
.int => {
|
||||
switch (token.tag) {
|
||||
.int_literal => condition.value = self.toker.getTokenSlice(token),
|
||||
else => self.printError("Error: Expected int", &token),
|
||||
else => return self.printError("Error: Expected int", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
},
|
||||
.float => {
|
||||
switch (token.tag) {
|
||||
.float_literal => condition.value = self.toker.getTokenSlice(token),
|
||||
else => self.printError("Error: Expected float", &token),
|
||||
else => return self.printError("Error: Expected float", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
},
|
||||
.str => {
|
||||
.str, .id => {
|
||||
switch (token.tag) {
|
||||
.string_literal => condition.value = self.toker.getTokenSlice(token),
|
||||
else => self.printError("Error: Expected string", &token),
|
||||
else => return self.printError("Error: Expected string", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
},
|
||||
.bool => {
|
||||
switch (token.tag) {
|
||||
.bool_literal_true, .bool_literal_false => condition.value = self.toker.getTokenSlice(token),
|
||||
else => self.printError("Error: Expected bool", &token),
|
||||
else => return self.printError("Error: Expected bool", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
},
|
||||
.int_array => {
|
||||
@@ -380,7 +418,7 @@ pub const Parser = struct {
while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.int_literal => continue,
|
||||
else => self.printError("Error: Expected int or ].", &token),
|
||||
else => return self.printError("Error: Expected int or ].", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
}
|
||||
condition.value = self.toker.buffer[start_index..token.loc.end];
|
||||
@@ -391,18 +429,18 @@ pub const Parser = struct {
while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.float_literal => continue,
|
||||
else => self.printError("Error: Expected float or ].", &token),
|
||||
else => return self.printError("Error: Expected float or ].", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
}
|
||||
condition.value = self.toker.buffer[start_index..token.loc.end];
|
||||
},
|
||||
.str_array => {
|
||||
.str_array, .id_array => {
|
||||
const start_index = token.loc.start;
|
||||
token = self.toker.next();
|
||||
while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.string_literal => continue,
|
||||
else => self.printError("Error: Expected string or ].", &token),
|
||||
else => return self.printError("Error: Expected string or ].", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
}
|
||||
condition.value = self.toker.buffer[start_index..token.loc.end];
|
||||
@@ -413,7 +451,7 @@ pub const Parser = struct {
while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.bool_literal_false, .bool_literal_true => continue,
|
||||
else => self.printError("Error: Expected bool or ].", &token),
|
||||
else => return self.printError("Error: Expected bool or ].", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
}
|
||||
condition.value = self.toker.buffer[start_index..token.loc.end];
|
||||
@@ -444,9 +482,7 @@ pub const Parser = struct {
switch (token.tag) {
|
||||
.int_literal => {
|
||||
const count = std.fmt.parseInt(usize, self.toker.getTokenSlice(token), 10) catch {
|
||||
self.printError("Error while transforming this into a integer.", &token);
|
||||
self.state = .invalid;
|
||||
continue;
|
||||
return self.printError("Error while transforming this into a integer.", &token, ZiQlParserError.ParsingValueError);
|
||||
};
|
||||
additional_data.entity_count_to_find = count;
|
||||
self.state = .expect_semicolon_OR_right_bracket;
|
||||
@@ -461,12 +497,12 @@ pub const Parser = struct {
.expect_semicolon_OR_right_bracket => switch (token.tag) {
|
||||
.semicolon => self.state = .expect_member,
|
||||
.r_bracket => self.state = .end,
|
||||
else => self.printError("Error: Expect ';' or ']'.", &token),
|
||||
else => return self.printError("Error: Expect ';' or ']'.", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.expect_member => switch (token.tag) {
|
||||
.identifier => {
|
||||
if (!self.file_engine.isMemberNameInStruct(self.struct_name, self.toker.getTokenSlice(token))) self.printError("Member not found in struct.", &token);
|
||||
if (!self.file_engine.isMemberNameInStruct(self.struct_name, self.toker.getTokenSlice(token))) return self.printError("Member not found in struct.", &token, ZiQlParserError.SynthaxError);
|
||||
try additional_data.member_to_find.append(
|
||||
AdditionalDataMember.init(
|
||||
self.allocator,
|
||||
@@ -476,7 +512,7 @@ pub const Parser = struct {
|
||||
self.state = .expect_comma_OR_r_bracket_OR_l_bracket;
|
||||
},
|
||||
else => self.printError("Error: Expected a member name.", &token),
|
||||
else => return self.printError("Error: Expected a member name.", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.expect_comma_OR_r_bracket_OR_l_bracket => switch (token.tag) {
|
||||
@@ -488,13 +524,13 @@ pub const Parser = struct {
);
|
||||
self.state = .expect_comma_OR_r_bracket;
|
||||
},
|
||||
else => self.printError("Error: Expected , or ] or [", &token),
|
||||
else => return self.printError("Error: Expected , or ] or [", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.expect_comma_OR_r_bracket => switch (token.tag) {
|
||||
.comma => self.state = .expect_member,
|
||||
.r_bracket => self.state = .end,
|
||||
else => self.printError("Error: Expected , or ]", &token),
|
||||
else => return self.printError("Error: Expected , or ]", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
@@ -505,7 +541,7 @@ pub const Parser = struct {
/// Take the tokenizer and return a map of the query for the ADD command.
|
||||
/// Keys are the member name and value are the string of the value in the query. E.g. 'Adrien' or '10'
|
||||
/// Entry token need to be (
|
||||
fn parseNewData(self: *Parser, member_map: *std.StringHashMap([]const u8)) void {
|
||||
fn parseNewData(self: *Parser, member_map: *std.StringHashMap([]const u8)) !void {
|
||||
var token = self.toker.next();
|
||||
var keep_next = false;
|
||||
var member_name: []const u8 = undefined; // Maybe use allocator.alloc
|
||||
@@ -519,16 +555,16 @@ pub const Parser = struct {
.expect_member => switch (token.tag) {
|
||||
.identifier => {
|
||||
member_name = self.toker.getTokenSlice(token);
|
||||
if (!self.file_engine.isMemberNameInStruct(self.struct_name, member_name)) self.printError("Member not found in struct.", &token);
|
||||
if (!self.file_engine.isMemberNameInStruct(self.struct_name, member_name)) return self.printError("Member not found in struct.", &token, ZiQlParserError.MemberNotFound);
|
||||
self.state = .expect_equal;
|
||||
},
|
||||
else => self.printError("Error: Expected member name.", &token),
|
||||
else => return self.printError("Error: Expected member name.", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.expect_equal => switch (token.tag) {
|
||||
// TODO: Add more comparison like IN or other stuff
|
||||
.equal => self.state = .expect_new_value,
|
||||
else => self.printError("Error: Expected =", &token),
|
||||
else => return self.printError("Error: Expected =", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
|
||||
.expect_new_value => {
|
||||
@@ -539,14 +575,14 @@ pub const Parser = struct {
member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => self.printError("Error: Expected int", &token),
|
||||
else => return self.printError("Error: Expected int", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
.float => switch (token.tag) {
|
||||
.float_literal, .keyword_null => {
|
||||
member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => self.printError("Error: Expected float", &token),
|
||||
else => return self.printError("Error: Expected float", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
.bool => switch (token.tag) {
|
||||
.bool_literal_true => {
|
||||
@@ -561,14 +597,14 @@ pub const Parser = struct {
member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => self.printError("Error: Expected bool: true false", &token),
|
||||
else => return self.printError("Error: Expected bool: true false", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
.str => switch (token.tag) {
|
||||
.str, .id => switch (token.tag) {
|
||||
.string_literal, .keyword_null => {
|
||||
member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => self.printError("Error: Expected string between ''", &token),
|
||||
else => return self.printError("Error: Expected string between ''", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
// TODO: Maybe upgrade that to use multiple state
|
||||
.int_array => switch (token.tag) {
|
||||
@@ -578,14 +614,14 @@ pub const Parser = struct {
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.int_literal => continue,
|
||||
else => self.printError("Error: Expected int or ].", &token),
|
||||
else => return self.printError("Error: Expected int or ].", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
}
|
||||
// Maybe change that as it just recreate a string that is already in the buffer
|
||||
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couln't add string of array in data map");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => self.printError("Error: Expected [ to start an array", &token),
|
||||
else => return self.printError("Error: Expected [ to start an array", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
.float_array => switch (token.tag) {
|
||||
.l_bracket => {
|
||||
@ -594,14 +630,14 @@ pub const Parser = struct {
|
||||
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.float_literal => continue,
|
||||
else => self.printError("Error: Expected float or ].", &token),
|
||||
else => return self.printError("Error: Expected float or ].", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
}
|
||||
// Maybe change that as it just recreate a string that is already in the buffer
|
||||
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couln't add string of array in data map");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => self.printError("Error: Expected [ to start an array", &token),
|
||||
else => return self.printError("Error: Expected [ to start an array", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
.bool_array => switch (token.tag) {
|
||||
.l_bracket => {
|
||||
@@ -610,30 +646,30 @@ pub const Parser = struct {
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.bool_literal_false, .bool_literal_true => continue,
|
||||
else => self.printError("Error: Expected bool or ].", &token),
|
||||
else => return self.printError("Error: Expected bool or ].", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
}
|
||||
// Maybe change that as it just recreate a string that is already in the buffer
|
||||
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couln't add string of array in data map");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => self.printError("Error: Expected [ to start an array", &token),
|
||||
else => return self.printError("Error: Expected [ to start an array", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
.str_array => switch (token.tag) {
|
||||
.str_array, .id_array => switch (token.tag) {
|
||||
.l_bracket => {
|
||||
const start_index = token.loc.start;
|
||||
token = self.toker.next();
|
||||
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.string_literal => continue,
|
||||
else => self.printError("Error: Expected str or ].", &token),
|
||||
else => return self.printError("Error: Expected str or ].", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
}
|
||||
// Maybe change that as it just recreate a string that is already in the buffer
|
||||
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couln't add string of array in data map");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => self.printError("Error: Expected [ to start an array", &token),
|
||||
else => return self.printError("Error: Expected [ to start an array", &token, ZiQlParserError.SynthaxError),
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -642,7 +678,7 @@ pub const Parser = struct {
switch (token.tag) {
|
||||
.r_paren => self.state = .end,
|
||||
.comma => self.state = .expect_member,
|
||||
else => self.printError("Error: Expect , or )", &token),
|
||||
else => return self.printError("Error: Expect , or )", &token, ZiQlParserError.SynthaxError),
|
||||
}
|
||||
},
|
||||
|
||||
@@ -651,8 +687,9 @@ pub const Parser = struct {
}
|
||||
}
|
||||
|
||||
fn printError(self: *Parser, message: []const u8, token: *Token) void {
|
||||
fn printError(self: *Parser, message: []const u8, token: *Token, err: ZiQlParserError) ZiQlParserError {
|
||||
stdout.print("\n", .{}) catch {};
|
||||
stdout.print("{s}\n", .{message}) catch {};
|
||||
stdout.print("{s}\n", .{self.toker.buffer}) catch {};
|
||||
|
||||
// Calculate the number of spaces needed to reach the start position.
|
||||
@@ -668,9 +705,8 @@ pub const Parser = struct {
}
|
||||
stdout.print(" \n", .{}) catch {}; // Align with the message
|
||||
|
||||
stdout.print("{s}\n", .{message}) catch {};
|
||||
|
||||
send("", .{});
|
||||
return err;
|
||||
}
|
||||
};