Renamed parser.core to ziql.parser
parent
6338cb6364
commit
a9c39d9aa5
@@ -1,7 +1,7 @@
|
||||
const std = @import("std");
|
||||
const dtype = @import("dtype");
|
||||
const DBEngine = @import("src/cli/core.zig");
|
||||
const ziqlParser = @import("src/parser/core.zig");
|
||||
const ziqlParser = @import("src/ziql/parser.zig");
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
const names = [_][]const u8{ "Alice", "Bob", "Charlie", "Dave", "Eve" };
|
||||
|
398
src/ziql/parser.zig
Normal file
@@ -0,0 +1,398 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const FileEngine = @import("../file/core.zig");
|
||||
const SchemaEngine = @import("../schema/core.zig");
|
||||
const Tokenizer = @import("tokenizer.zig").Tokenizer;
|
||||
|
||||
const dtype = @import("dtype");
|
||||
const UUID = dtype.UUID;
|
||||
|
||||
const Filter = @import("../dataStructure/filter.zig").Filter;
|
||||
const Condition = @import("../dataStructure/filter.zig").Condition;
|
||||
const ConditionValue = @import("../dataStructure/filter.zig").ConditionValue;
|
||||
const ComparisonOperator = @import("../dataStructure/filter.zig").ComparisonOperator;
|
||||
|
||||
const AdditionalData = @import("../dataStructure/additionalData.zig").AdditionalData;
|
||||
const AdditionalDataMember = @import("../dataStructure/additionalData.zig").AdditionalDataMember;
|
||||
const send = @import("../utils.zig").send;
|
||||
const printError = @import("../utils.zig").printError;
|
||||
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
const PRINT_STATE = @import("config").PRINT_STATE;
|
||||
|
||||
const log = std.log.scoped(.ziqlParser);
|
||||
|
||||
pub const State = enum {
|
||||
start,
|
||||
invalid,
|
||||
end,
|
||||
|
||||
// Endpoint
|
||||
parse_new_data_and_add_data,
|
||||
filter_and_send,
|
||||
filter_and_update,
|
||||
filter_and_delete,
|
||||
|
||||
// For the main parse function
|
||||
expect_struct_name,
|
||||
expect_filter,
|
||||
parse_additional_data,
|
||||
expect_filter_or_additional_data,
|
||||
expect_new_data,
|
||||
expect_right_arrow,
|
||||
|
||||
// For the additional data parser
|
||||
expect_limit,
|
||||
expect_semicolon_OR_right_bracket,
|
||||
expect_member,
|
||||
expect_comma_OR_r_bracket_OR_l_bracket,
|
||||
expect_comma_OR_r_bracket,
|
||||
|
||||
// For the filter parser
|
||||
expect_condition,
|
||||
expect_operation, // Operations are = != < <= > >=
|
||||
expect_value,
|
||||
expect_ANDOR_OR_end,
|
||||
expect_right_uuid_array,
|
||||
|
||||
// For the new data
|
||||
expect_member_OR_value,
|
||||
expect_equal,
|
||||
expect_new_value,
|
||||
expect_comma_OR_end,
|
||||
add_member_to_map,
|
||||
add_array_to_map,
|
||||
};
|
||||
|
||||
pub const Self = @This();
|
||||
|
||||
pub usingnamespace @import("parts/comparison.zig");
|
||||
pub usingnamespace @import("parts/condition.zig");
|
||||
pub usingnamespace @import("parts/newData.zig");
|
||||
pub usingnamespace @import("parts/value.zig");
|
||||
pub usingnamespace @import("parts/filter.zig");
|
||||
pub usingnamespace @import("parts/additionalData.zig");
|
||||
pub usingnamespace @import("utils.zig");
|
||||
|
||||
var toker: Tokenizer = undefined;
|
||||
|
||||
toker: *Tokenizer = undefined,
|
||||
file_engine: *FileEngine,
|
||||
schema_engine: *SchemaEngine,
|
||||
|
||||
pub fn init(file_engine: *FileEngine, schema_engine: *SchemaEngine) Self {
|
||||
return Self{
|
||||
.file_engine = file_engine,
|
||||
.schema_engine = schema_engine,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn parse(self: *Self, buffer: [:0]const u8) ZipponError!void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
const allocator = arena.allocator();
|
||||
|
||||
toker = Tokenizer.init(buffer);
|
||||
self.toker = &toker;
|
||||
|
||||
var state: State = .start;
|
||||
var additional_data = AdditionalData.init(allocator);
|
||||
var struct_name: []const u8 = undefined;
|
||||
var action: enum { GRAB, ADD, UPDATE, DELETE } = undefined;
|
||||
|
||||
var token = self.toker.next();
|
||||
var keep_next = false; // Used in the loop to avoid fetching the next token on continue: set it to true when needed; it is reset on every iteration
|
||||
|
||||
while (state != State.end) : ({
|
||||
token = if (!keep_next) self.toker.next() else token;
|
||||
keep_next = false;
|
||||
if (PRINT_STATE) std.debug.print("parse: {any}\n", .{state});
|
||||
}) switch (state) {
|
||||
.start => switch (token.tag) {
|
||||
.keyword_grab => {
|
||||
action = .GRAB;
|
||||
state = .expect_struct_name;
|
||||
},
|
||||
.keyword_add => {
|
||||
action = .ADD;
|
||||
state = .expect_struct_name;
|
||||
},
|
||||
.keyword_update => {
|
||||
action = .UPDATE;
|
||||
state = .expect_struct_name;
|
||||
},
|
||||
.keyword_delete => {
|
||||
action = .DELETE;
|
||||
state = .expect_struct_name;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected action keyword. Available: GRAB ADD DELETE UPDATE",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_struct_name => {
|
||||
// Check if the struct name is in the schema
|
||||
struct_name = self.toker.getTokenSlice(token);
|
||||
if (token.tag != .identifier) return printError(
|
||||
"Error: Missing struct name.",
|
||||
ZipponError.StructNotFound,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
if (!self.schema_engine.isStructNameExists(struct_name)) return printError(
|
||||
"Error: struct name not found in schema.",
|
||||
ZipponError.StructNotFound,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
switch (action) {
|
||||
.ADD => state = .expect_new_data,
|
||||
else => state = .expect_filter_or_additional_data,
|
||||
}
|
||||
},
|
||||
|
||||
.expect_filter_or_additional_data => {
|
||||
keep_next = true;
|
||||
switch (token.tag) {
|
||||
.l_bracket => state = .parse_additional_data,
|
||||
.l_brace, .eof => state = switch (action) {
|
||||
.GRAB => .filter_and_send,
|
||||
.UPDATE => .filter_and_update,
|
||||
.DELETE => .filter_and_delete,
|
||||
else => unreachable,
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expect [ for additional data or { for a filter",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
}
|
||||
},
|
||||
|
||||
.parse_additional_data => {
|
||||
try self.parseAdditionalData(allocator, &additional_data, struct_name);
|
||||
state = switch (action) {
|
||||
.GRAB => .filter_and_send,
|
||||
.UPDATE => .filter_and_update,
|
||||
.DELETE => .filter_and_delete,
|
||||
else => unreachable,
|
||||
};
|
||||
},
|
||||
|
||||
.filter_and_send => switch (token.tag) {
|
||||
.l_brace => {
|
||||
var filter = try self.parseFilter(allocator, struct_name, false);
|
||||
defer filter.deinit();
|
||||
|
||||
const json_string = try self.file_engine.parseEntities(struct_name, filter, &additional_data, allocator);
|
||||
send("{s}", .{json_string});
|
||||
state = .end;
|
||||
},
|
||||
.eof => {
|
||||
const json_string = try self.file_engine.parseEntities(struct_name, null, &additional_data, allocator);
|
||||
send("{s}", .{json_string});
|
||||
state = .end;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected filter.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
// TODO: Optimize so it doesn't use parseFilter but parses the file and checks the condition directly. Currently the data ends up being parsed twice.
|
||||
.filter_and_update => switch (token.tag) {
|
||||
.l_brace => {
|
||||
var filter = try self.parseFilter(allocator, struct_name, false);
|
||||
defer filter.deinit();
|
||||
|
||||
token = self.toker.last();
|
||||
|
||||
if (token.tag != .keyword_to) return printError(
|
||||
"Error: Expected TO",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
|
||||
token = self.toker.next();
|
||||
if (token.tag != .l_paren) return printError(
|
||||
"Error: Expected (",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
|
||||
const sstruct = try self.schema_engine.structName2SchemaStruct(struct_name);
|
||||
var members = std.ArrayList([]const u8).init(allocator);
|
||||
defer members.deinit();
|
||||
members.appendSlice(sstruct.members[1..]) catch return ZipponError.MemoryError;
|
||||
|
||||
var data_map = std.StringHashMap(ConditionValue).init(allocator);
|
||||
defer data_map.deinit();
|
||||
try self.parseNewData(allocator, &data_map, struct_name, &members);
|
||||
|
||||
var buff = std.ArrayList(u8).init(allocator);
|
||||
defer buff.deinit();
|
||||
|
||||
try self.file_engine.updateEntities(struct_name, filter, data_map, &buff.writer(), &additional_data);
|
||||
send("{s}", .{buff.items});
|
||||
state = .end;
|
||||
},
|
||||
.keyword_to => {
|
||||
token = self.toker.next();
|
||||
if (token.tag != .l_paren) return printError(
|
||||
"Error: Expected (",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
|
||||
const sstruct = try self.schema_engine.structName2SchemaStruct(struct_name);
|
||||
var members = std.ArrayList([]const u8).init(allocator);
|
||||
defer members.deinit();
|
||||
members.appendSlice(sstruct.members[1..]) catch return ZipponError.MemoryError;
|
||||
|
||||
var data_map = std.StringHashMap(ConditionValue).init(allocator);
|
||||
defer data_map.deinit();
|
||||
try self.parseNewData(allocator, &data_map, struct_name, &members);
|
||||
|
||||
var buff = std.ArrayList(u8).init(allocator);
|
||||
defer buff.deinit();
|
||||
|
||||
try self.file_engine.updateEntities(struct_name, null, data_map, &buff.writer(), &additional_data);
|
||||
send("{s}", .{buff.items});
|
||||
state = .end;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected filter or TO.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.filter_and_delete => switch (token.tag) {
|
||||
.l_brace => {
|
||||
var filter = try self.parseFilter(allocator, struct_name, false);
|
||||
defer filter.deinit();
|
||||
|
||||
var buff = std.ArrayList(u8).init(allocator);
|
||||
defer buff.deinit();
|
||||
|
||||
try self.file_engine.deleteEntities(struct_name, filter, &buff.writer(), &additional_data);
|
||||
send("{s}", .{buff.items});
|
||||
state = .end;
|
||||
},
|
||||
.eof => {
|
||||
var buff = std.ArrayList(u8).init(allocator);
|
||||
defer buff.deinit();
|
||||
|
||||
try self.file_engine.deleteEntities(struct_name, null, &buff.writer(), &additional_data);
|
||||
send("{s}", .{buff.items});
|
||||
state = .end;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected filter.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_new_data => switch (token.tag) {
|
||||
.l_paren => {
|
||||
keep_next = true;
|
||||
state = .parse_new_data_and_add_data;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected new data starting with (",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.parse_new_data_and_add_data => {
|
||||
const sstruct = try self.schema_engine.structName2SchemaStruct(struct_name);
|
||||
var order = std.ArrayList([]const u8).init(allocator);
|
||||
defer order.deinit();
|
||||
order.appendSlice(sstruct.members[1..]) catch return ZipponError.MemoryError;
|
||||
|
||||
var buff = std.ArrayList(u8).init(allocator);
|
||||
defer buff.deinit();
|
||||
buff.writer().writeAll("[") catch return ZipponError.WriteError;
|
||||
|
||||
var maps = std.ArrayList(std.StringHashMap(ConditionValue)).init(allocator);
|
||||
defer maps.deinit();
|
||||
|
||||
var local_arena = std.heap.ArenaAllocator.init(allocator);
|
||||
defer local_arena.deinit();
|
||||
const local_allocator = local_arena.allocator();
|
||||
|
||||
var data_map = std.StringHashMap(ConditionValue).init(allocator);
|
||||
defer data_map.deinit();
|
||||
|
||||
while (true) { // This could be multithreaded, as it takes a long time for big benchmarks
|
||||
data_map.clearRetainingCapacity();
|
||||
try self.parseNewData(local_allocator, &data_map, struct_name, &order);
|
||||
|
||||
var error_message_buffer = std.ArrayList(u8).init(local_allocator);
|
||||
defer error_message_buffer.deinit();
|
||||
|
||||
const error_message_buffer_writer = error_message_buffer.writer();
|
||||
error_message_buffer_writer.writeAll("Error missing: ") catch return ZipponError.WriteError;
|
||||
|
||||
if (!(self.schema_engine.checkIfAllMemberInMap(struct_name, &data_map, &error_message_buffer) catch {
|
||||
return ZipponError.StructNotFound;
|
||||
})) {
|
||||
_ = error_message_buffer.pop();
|
||||
_ = error_message_buffer.pop();
|
||||
return printError(
|
||||
error_message_buffer.items,
|
||||
ZipponError.MemberMissing,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
|
||||
maps.append(data_map.cloneWithAllocator(local_allocator) catch return ZipponError.MemoryError) catch return ZipponError.MemoryError;
|
||||
|
||||
if (maps.items.len >= 1_000) {
|
||||
try self.file_engine.addEntity(struct_name, maps.items, &buff.writer());
|
||||
maps.clearRetainingCapacity();
|
||||
_ = local_arena.reset(.retain_capacity);
|
||||
}
|
||||
|
||||
token = self.toker.last_token;
|
||||
if (token.tag == .l_paren) continue;
|
||||
break;
|
||||
}
|
||||
|
||||
try self.file_engine.addEntity(struct_name, maps.items, &buff.writer());
|
||||
|
||||
buff.writer().writeAll("]") catch return ZipponError.WriteError;
|
||||
send("{s}", .{buff.items});
|
||||
state = .end;
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
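For illustration, a minimal sketch of driving the relocated parser from a caller, based on the init and parse signatures above; the engine pointers and the query string are assumptions for the example.

const FileEngine = @import("src/file/core.zig");
const SchemaEngine = @import("src/schema/core.zig");
const Parser = @import("src/ziql/parser.zig");

/// Sketch only: the engines are assumed to be initialized elsewhere (e.g. by
/// DBEngine) and passed in as pointers; the query string is an example.
fn runExampleQuery(file_engine: *FileEngine, schema_engine: *SchemaEngine) !void {
    var parser = Parser.init(file_engine, schema_engine);
    try parser.parse("GRAB User {name = 'Bob'}");
}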
133
src/ziql/parts/additionalData.zig
Normal file
@@ -0,0 +1,133 @@
|
||||
const std = @import("std");
|
||||
const config = @import("config");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Token = @import("../tokenizer.zig").Token;
|
||||
const AdditionalData = @import("../../dataStructure/additionalData.zig").AdditionalData;
|
||||
const printError = @import("../../utils.zig").printError;
|
||||
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
const Self = @import("../parser.zig");
|
||||
|
||||
/// When this function is called, the next token should be [
|
||||
/// Check if an int is here -> check if ; is here -> check if member is here -> check if [ is here -> loop
|
||||
pub fn parseAdditionalData(
|
||||
self: Self,
|
||||
allocator: Allocator,
|
||||
additional_data: *AdditionalData,
|
||||
struct_name: []const u8,
|
||||
) ZipponError!void {
|
||||
var token = self.toker.next();
|
||||
var keep_next = false;
|
||||
var state: Self.State = .expect_limit;
|
||||
var last_member: []const u8 = undefined;
|
||||
|
||||
while (state != .end) : ({
|
||||
token = if ((!keep_next) and (state != .end)) self.toker.next() else token;
|
||||
keep_next = false;
|
||||
if (config.PRINT_STATE) std.debug.print("parseAdditionalData: {any}\n", .{state});
|
||||
}) switch (state) {
|
||||
.expect_limit => switch (token.tag) {
|
||||
.int_literal => {
|
||||
additional_data.limit = std.fmt.parseInt(usize, self.toker.getTokenSlice(token), 10) catch {
|
||||
return printError(
|
||||
"Error while transforming limit into a integer.",
|
||||
ZipponError.ParsingValueError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
};
|
||||
state = .expect_semicolon_OR_right_bracket;
|
||||
},
|
||||
else => {
|
||||
state = .expect_member;
|
||||
keep_next = true;
|
||||
},
|
||||
},
|
||||
|
||||
.expect_semicolon_OR_right_bracket => switch (token.tag) {
|
||||
.semicolon => state = .expect_member,
|
||||
.r_bracket => state = .end,
|
||||
else => return printError(
|
||||
"Error: Expect ';' or ']'.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_member => switch (token.tag) {
|
||||
.identifier => {
|
||||
if (!(self.schema_engine.isMemberNameInStruct(struct_name, self.toker.getTokenSlice(token)) catch {
|
||||
return printError(
|
||||
"Struct not found.",
|
||||
ZipponError.StructNotFound,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
})) {
|
||||
return printError(
|
||||
"Member not found in struct.",
|
||||
ZipponError.MemberNotFound,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
try additional_data.addMember(
|
||||
self.toker.getTokenSlice(token),
|
||||
try self.schema_engine.memberName2DataIndex(struct_name, self.toker.getTokenSlice(token)),
|
||||
);
|
||||
last_member = self.toker.getTokenSlice(token);
|
||||
|
||||
state = .expect_comma_OR_r_bracket_OR_l_bracket;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected a member name.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_comma_OR_r_bracket_OR_l_bracket => switch (token.tag) {
|
||||
.comma => state = .expect_member,
|
||||
.r_bracket => state = .end,
|
||||
.l_bracket => {
|
||||
const sstruct = try self.schema_engine.structName2SchemaStruct(struct_name);
|
||||
try parseAdditionalData(
|
||||
self,
|
||||
allocator,
|
||||
&additional_data.childrens.items[additional_data.childrens.items.len - 1].additional_data,
|
||||
sstruct.links.get(last_member).?,
|
||||
);
|
||||
state = .expect_comma_OR_r_bracket;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected , or ] or [",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_comma_OR_r_bracket => switch (token.tag) {
|
||||
.comma => state = .expect_member,
|
||||
.r_bracket => state = .end,
|
||||
else => return printError(
|
||||
"Error: Expected , or ]",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
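For illustration, a sketch of the bracket syntax this state machine accepts, using an assumed User schema with a linked friends member.

// Hypothetical query, schema assumed:
//   [10; ...]           -> limit of 10
//   name, friends       -> members to return
//   friends [5; name]   -> nested additional data, parsed recursively for the linked member
const example_additional_data = "GRAB User [10; name, friends [5; name]] {}";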
31
src/ziql/parts/comparison.zig
Normal file
@@ -0,0 +1,31 @@
|
||||
const std = @import("std");
|
||||
const Token = @import("../tokenizer.zig").Token;
|
||||
const ComparisonOperator = @import("../../dataStructure/filter.zig").ComparisonOperator;
|
||||
const printError = @import("../../utils.zig").printError;
|
||||
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
const Self = @import("../parser.zig");
|
||||
|
||||
pub fn parseComparisonOperator(
|
||||
self: Self,
|
||||
token: Token,
|
||||
) ZipponError!ComparisonOperator {
|
||||
return switch (token.tag) {
|
||||
.equal => .equal, // =
|
||||
.angle_bracket_left => .inferior, // <
|
||||
.angle_bracket_right => .superior, // >
|
||||
.angle_bracket_left_equal => .inferior_or_equal, // <=
|
||||
.angle_bracket_right_equal => .superior_or_equal, // >=
|
||||
.bang_equal => .different, // !=
|
||||
.keyword_in => .in,
|
||||
.keyword_not_in => .not_in,
|
||||
else => return printError(
|
||||
"Error: Expected condition. Including < > <= >= = !=",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
};
|
||||
}
|
89
src/ziql/parts/condition.zig
Normal file
@@ -0,0 +1,89 @@
|
||||
const std = @import("std");
|
||||
const config = @import("config");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Token = @import("../tokenizer.zig").Token;
|
||||
const Condition = @import("../../dataStructure/filter.zig").Condition;
|
||||
const printError = @import("../../utils.zig").printError;
|
||||
const log = std.log.scoped(.ziqlParser);
|
||||
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
const Self = @import("../parser.zig");
|
||||
|
||||
/// Parse to get a Condition, a struct that is used by the FileEngine to retrieve data.
|
||||
/// In the query, it is the part like: name = 'Bob' or age <= 10
|
||||
pub fn parseCondition(
|
||||
self: Self,
|
||||
allocator: Allocator,
|
||||
token_ptr: *Token,
|
||||
struct_name: []const u8,
|
||||
) ZipponError!Condition {
|
||||
var keep_next = false;
|
||||
var state: Self.State = .expect_member;
|
||||
var token = token_ptr.*;
|
||||
var member_name: []const u8 = undefined;
|
||||
|
||||
var condition = Condition{};
|
||||
|
||||
while (state != .end) : ({
|
||||
token = if (!keep_next) self.toker.next() else token;
|
||||
keep_next = false;
|
||||
if (config.PRINT_STATE) std.debug.print("parseCondition: {any}\n", .{state});
|
||||
}) switch (state) {
|
||||
.expect_member => switch (token.tag) {
|
||||
.identifier => {
|
||||
if (!(self.schema_engine.isMemberNameInStruct(struct_name, self.toker.getTokenSlice(token)) catch {
|
||||
return printError(
|
||||
"Error: Struct not found.",
|
||||
ZipponError.StructNotFound,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
})) {
|
||||
return printError(
|
||||
"Error: Member not part of struct.",
|
||||
ZipponError.MemberNotFound,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
condition.data_type = self.schema_engine.memberName2DataType(
|
||||
struct_name,
|
||||
self.toker.getTokenSlice(token),
|
||||
) catch return ZipponError.MemberNotFound;
|
||||
condition.data_index = self.schema_engine.memberName2DataIndex(
|
||||
struct_name,
|
||||
self.toker.getTokenSlice(token),
|
||||
) catch return ZipponError.MemberNotFound;
|
||||
member_name = self.toker.getTokenSlice(token);
|
||||
state = .expect_operation;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected member name.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_operation => {
|
||||
condition.operation = try self.parseComparisonOperator(token);
|
||||
state = .expect_value;
|
||||
},
|
||||
|
||||
.expect_value => {
|
||||
log.debug("Parse condition value of member {s}", .{member_name});
|
||||
condition.value = try self.parseConditionValue(allocator, struct_name, member_name, condition.data_type, &token);
|
||||
state = .end;
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
try self.checkConditionValidity(condition, token);
|
||||
|
||||
return condition;
|
||||
}
|
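For illustration, a hedged sketch of the Condition that would be produced for a condition such as age >= 20, assuming age is an int member of the struct.

const Condition = @import("../../dataStructure/filter.zig").Condition;

// Sketch only: field values mirror the steps above; the member position is assumed.
const sketched_condition = Condition{
    .data_type = .int, // from schema_engine.memberName2DataType
    .data_index = 1, // from schema_engine.memberName2DataIndex (assumed position of age)
    .operation = .superior_or_equal, // from parseComparisonOperator
    // .value would come from parseConditionValue, e.g. ConditionValue.initInt("20")
};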
132
src/ziql/parts/filter.zig
Normal file
@@ -0,0 +1,132 @@
|
||||
const std = @import("std");
|
||||
const dtype = @import("dtype");
|
||||
const config = @import("config");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Filter = @import("../../dataStructure/filter.zig").Filter;
|
||||
const printError = @import("../../utils.zig").printError;
|
||||
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
const Self = @import("../parser.zig");
|
||||
|
||||
/// Take an array of UUID and populate it with whatever matches the filter between {}
|
||||
/// is_sub says whether we are between (): false means the top-level filter between {}, true means a sub-filter between () inside {}
|
||||
pub fn parseFilter(
|
||||
self: Self,
|
||||
allocator: Allocator,
|
||||
struct_name: []const u8,
|
||||
is_sub: bool,
|
||||
) ZipponError!Filter {
|
||||
var filter = try Filter.init(allocator);
|
||||
errdefer filter.deinit();
|
||||
|
||||
var keep_next = false;
|
||||
var token = self.toker.next();
|
||||
var state: Self.State = .expect_condition;
|
||||
|
||||
while (state != .end) : ({
|
||||
token = if (keep_next) token else self.toker.next();
|
||||
keep_next = false;
|
||||
if (config.PRINT_STATE) std.debug.print("parseFilter: {any}\n", .{state});
|
||||
}) switch (state) {
|
||||
.expect_condition => switch (token.tag) {
|
||||
.r_brace => {
|
||||
if (!is_sub) {
|
||||
state = .end;
|
||||
} else {
|
||||
return printError(
|
||||
"Error: Expected ) not }",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
},
|
||||
.r_paren => {
|
||||
if (is_sub) {
|
||||
state = .end;
|
||||
} else {
|
||||
return printError(
|
||||
"Error: Expected } not )",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
},
|
||||
.l_paren => {
|
||||
var sub_filter = try parseFilter(self, allocator, struct_name, true);
|
||||
filter.addSubFilter(&sub_filter);
|
||||
token = self.toker.last();
|
||||
keep_next = true;
|
||||
state = .expect_ANDOR_OR_end;
|
||||
},
|
||||
.identifier => {
|
||||
const condition = try self.parseCondition(allocator, &token, struct_name);
|
||||
try filter.addCondition(condition);
|
||||
token = self.toker.last();
|
||||
keep_next = true;
|
||||
state = .expect_ANDOR_OR_end;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected ( or condition.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_ANDOR_OR_end => switch (token.tag) {
|
||||
.r_brace => {
|
||||
if (!is_sub) {
|
||||
state = .end;
|
||||
} else {
|
||||
return printError(
|
||||
"Error: Expected ) not }",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
},
|
||||
.r_paren => {
|
||||
if (is_sub) {
|
||||
state = .end;
|
||||
} else {
|
||||
return printError(
|
||||
"Error: Expected } not )",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
},
|
||||
.keyword_and => {
|
||||
try filter.addLogicalOperator(.AND);
|
||||
state = .expect_condition;
|
||||
},
|
||||
.keyword_or => {
|
||||
try filter.addLogicalOperator(.OR);
|
||||
state = .expect_condition;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected AND, OR, or }",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.end => {},
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
return filter;
|
||||
}
|
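For illustration, a sketch of a filter that exercises the recursion above; the member names are assumptions.

// Hypothetical filter: the parenthesized group is parsed by a recursive
// parseFilter call with is_sub = true and attached as a sub-filter, while
// AND and OR become logical operators between conditions.
const example_filter = "GRAB User { name = 'Bob' AND ( age > 20 OR age < 5 ) }";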
116
src/ziql/parts/newData.zig
Normal file
@@ -0,0 +1,116 @@
|
||||
const std = @import("std");
|
||||
const config = @import("config");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ConditionValue = @import("../../dataStructure/filter.zig").ConditionValue;
|
||||
const printError = @import("../../utils.zig").printError;
|
||||
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
const Self = @import("../parser.zig");
|
||||
|
||||
/// Take the tokenizer and return a map of the ADD action.
|
||||
/// Keys are the member names and values are the value strings from the query, e.g. 'Adrien' or '10'
|
||||
/// The entry token needs to be (
|
||||
pub fn parseNewData(
|
||||
self: Self,
|
||||
allocator: Allocator,
|
||||
map: *std.StringHashMap(ConditionValue),
|
||||
struct_name: []const u8,
|
||||
order: *std.ArrayList([]const u8),
|
||||
) !void {
|
||||
var token = self.toker.next();
|
||||
var keep_next = false;
|
||||
var reordering: bool = false;
|
||||
var member_name: []const u8 = undefined;
|
||||
var state: Self.State = .expect_member_OR_value;
|
||||
var i: usize = 0;
|
||||
|
||||
while (state != .end) : ({
|
||||
token = if (!keep_next) self.toker.next() else token;
|
||||
keep_next = false;
|
||||
if (config.PRINT_STATE) std.debug.print("parseNewData: {any}\n", .{state});
|
||||
}) switch (state) {
|
||||
.expect_member_OR_value => switch (token.tag) {
|
||||
.identifier => {
|
||||
if (!reordering) {
|
||||
order.*.clearRetainingCapacity();
|
||||
reordering = true;
|
||||
}
|
||||
member_name = self.toker.getTokenSlice(token);
|
||||
if (!(self.schema_engine.isMemberNameInStruct(struct_name, member_name) catch {
|
||||
return ZipponError.StructNotFound;
|
||||
})) return printError(
|
||||
"Member not found in struct.",
|
||||
ZipponError.MemberNotFound,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
order.*.append(allocator.dupe(u8, member_name) catch return ZipponError.MemoryError) catch return ZipponError.MemoryError;
|
||||
state = .expect_equal;
|
||||
},
|
||||
.string_literal,
|
||||
.int_literal,
|
||||
.float_literal,
|
||||
.date_literal,
|
||||
.time_literal,
|
||||
.datetime_literal,
|
||||
.bool_literal_true,
|
||||
.bool_literal_false,
|
||||
.uuid_literal,
|
||||
.l_bracket,
|
||||
.l_brace,
|
||||
.keyword_none,
|
||||
.keyword_now,
|
||||
=> {
|
||||
member_name = order.items[i];
|
||||
i += 1;
|
||||
keep_next = true;
|
||||
state = .expect_new_value;
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected member name.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_equal => switch (token.tag) {
|
||||
// TODO: Implement operations to manipulate arrays, like APPEND or REMOVE
|
||||
.equal => state = .expect_new_value,
|
||||
else => return printError(
|
||||
"Error: Expected =",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_new_value => {
|
||||
const data_type = self.schema_engine.memberName2DataType(struct_name, member_name) catch return ZipponError.StructNotFound;
|
||||
map.put(member_name, try self.parseConditionValue(allocator, struct_name, member_name, data_type, &token)) catch return ZipponError.MemoryError;
|
||||
if (data_type == .link or data_type == .link_array) {
|
||||
token = self.toker.last_token;
|
||||
keep_next = true;
|
||||
}
|
||||
state = .expect_comma_OR_end;
|
||||
},
|
||||
|
||||
.expect_comma_OR_end => switch (token.tag) {
|
||||
.r_paren => state = .end,
|
||||
.comma => state = .expect_member_OR_value,
|
||||
else => return printError(
|
||||
"Error: Expect , or )",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
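For illustration, a sketch of the two input styles this function accepts, on an assumed User schema: named member = value pairs, which also reset the member order, and bare values matched positionally against that order.

// Hypothetical ADD, schema assumed: the first tuple names its members and
// fixes the order; the second gives only values and is matched by position.
const example_add = "ADD User (name = 'Bob', age = 10) ('Alice', 20)";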
289
src/ziql/parts/value.zig
Normal file
@@ -0,0 +1,289 @@
|
||||
const std = @import("std");
|
||||
const dtype = @import("dtype");
|
||||
const UUID = dtype.UUID;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Token = @import("../tokenizer.zig").Token;
|
||||
const Filter = @import("../../dataStructure/filter.zig").Filter;
|
||||
const ConditionValue = @import("../../dataStructure/filter.zig").ConditionValue;
|
||||
const AdditionalData = @import("../../dataStructure/additionalData.zig").AdditionalData;
|
||||
const printError = @import("../../utils.zig").printError;
|
||||
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
const Self = @import("../parser.zig");
|
||||
|
||||
/// Run just after a comparison operator like = or > or >= to get the corresponding ConditionValue to compare against
|
||||
pub fn parseConditionValue(self: Self, allocator: Allocator, struct_name: []const u8, member_name: []const u8, data_type: dtype.DataType, token: *Token) ZipponError!ConditionValue {
|
||||
const start_index = token.loc.start;
|
||||
const expected_tag: ?Token.Tag = switch (data_type) {
|
||||
.int => .int_literal,
|
||||
.float => .float_literal,
|
||||
.str => .string_literal,
|
||||
.self => .uuid_literal,
|
||||
.int_array => .int_literal,
|
||||
.float_array => .float_literal,
|
||||
.str_array => .string_literal,
|
||||
.bool, .bool_array, .link, .link_array, .date, .time, .datetime, .date_array, .time_array, .datetime_array => null, // handle separately
|
||||
};
|
||||
|
||||
// Check that all the following tokens are of the expected tag
|
||||
if (expected_tag) |tag| {
|
||||
if (data_type.is_array()) {
|
||||
token.* = try self.checkTokensInArray(tag);
|
||||
} else {
|
||||
if (token.tag != tag) {
|
||||
return printError(
|
||||
"Error: Wrong type", // TODO: Print the expected type
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
}
|
||||
} else switch (data_type) {
|
||||
.bool => if (token.tag != .bool_literal_true and token.tag != .bool_literal_false) {
|
||||
return printError(
|
||||
"Error: Expected bool",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
},
|
||||
.bool_array => {
|
||||
token.* = self.toker.next();
|
||||
while (token.tag != .r_bracket) : (token.* = self.toker.next()) {
|
||||
if (token.tag != .bool_literal_true and token.tag != .bool_literal_false) {
|
||||
return printError(
|
||||
"Error: Expected bool or ]",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
.date => if (token.tag != .date_literal and token.tag != .keyword_now) {
|
||||
return printError(
|
||||
"Error: Expected date",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
},
|
||||
.date_array => {
|
||||
token.* = self.toker.next();
|
||||
while (token.tag != .r_bracket) : (token.* = self.toker.next()) {
|
||||
if (token.tag != .date_literal and token.tag != .keyword_now) {
|
||||
return printError(
|
||||
"Error: Expected date",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
.time => if (token.tag != .time_literal and token.tag != .keyword_now) {
|
||||
return printError(
|
||||
"Error: Expected time",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
},
|
||||
.time_array => {
|
||||
token.* = self.toker.next();
|
||||
while (token.tag != .r_bracket) : (token.* = self.toker.next()) {
|
||||
if (token.tag != .time_literal and token.tag != .keyword_now) {
|
||||
return printError(
|
||||
"Error: Expected time",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
.datetime => if (token.tag != .datetime_literal and token.tag != .keyword_now) {
|
||||
return printError(
|
||||
"Error: Expected datetime",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
},
|
||||
.datetime_array => {
|
||||
token.* = self.toker.next();
|
||||
while (token.tag != .r_bracket) : (token.* = self.toker.next()) {
|
||||
if (token.tag != .datetime_literal and token.tag != .keyword_now) {
|
||||
return printError(
|
||||
"Error: Expected datetime",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
.link, .link_array => {},
|
||||
else => unreachable,
|
||||
}
|
||||
|
||||
// And finally create the ConditionValue
|
||||
switch (data_type) {
|
||||
.int => return ConditionValue.initInt(self.toker.buffer[start_index..token.loc.end]),
|
||||
.float => return ConditionValue.initFloat(self.toker.buffer[start_index..token.loc.end]),
|
||||
.str => return ConditionValue.initStr(self.toker.buffer[start_index + 1 .. token.loc.end - 1]),
|
||||
.date => return ConditionValue.initDate(self.toker.buffer[start_index..token.loc.end]),
|
||||
.time => return ConditionValue.initTime(self.toker.buffer[start_index..token.loc.end]),
|
||||
.datetime => return ConditionValue.initDateTime(self.toker.buffer[start_index..token.loc.end]),
|
||||
.bool => return ConditionValue.initBool(self.toker.buffer[start_index..token.loc.end]),
|
||||
.int_array => return try ConditionValue.initArrayInt(allocator, self.toker.buffer[start_index..token.loc.end]),
|
||||
.str_array => return try ConditionValue.initArrayStr(allocator, self.toker.buffer[start_index..token.loc.end]),
|
||||
.bool_array => return try ConditionValue.initArrayBool(allocator, self.toker.buffer[start_index..token.loc.end]),
|
||||
.float_array => return try ConditionValue.initArrayFloat(allocator, self.toker.buffer[start_index..token.loc.end]),
|
||||
.date_array => return try ConditionValue.initArrayDate(allocator, self.toker.buffer[start_index..token.loc.end]),
|
||||
.time_array => return try ConditionValue.initArrayTime(allocator, self.toker.buffer[start_index..token.loc.end]),
|
||||
.datetime_array => return try ConditionValue.initArrayDateTime(allocator, self.toker.buffer[start_index..token.loc.end]),
|
||||
.link => switch (token.tag) {
|
||||
.keyword_none => { // TODO: Stop creating a map when empty; it could be null or similar. Or keep a single such map in memory so it is not created every time
|
||||
const map = allocator.create(std.AutoHashMap(UUID, void)) catch return ZipponError.MemoryError;
|
||||
map.* = std.AutoHashMap(UUID, void).init(allocator);
|
||||
map.put(dtype.Zero, {}) catch return ZipponError.MemoryError;
|
||||
_ = self.toker.next();
|
||||
return ConditionValue.initLink(map);
|
||||
},
|
||||
.uuid_literal => {
|
||||
const uuid = UUID.parse(self.toker.buffer[start_index..token.loc.end]) catch return ZipponError.InvalidUUID;
|
||||
if (!self.schema_engine.isUUIDExist(struct_name, uuid)) return printError(
|
||||
"Error: UUID do not exist in database.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
|
||||
const map = allocator.create(std.AutoHashMap(UUID, void)) catch return ZipponError.MemoryError;
|
||||
map.* = std.AutoHashMap(UUID, void).init(allocator);
|
||||
map.put(uuid, {}) catch return ZipponError.MemoryError;
|
||||
_ = self.toker.next();
|
||||
return ConditionValue.initLink(map);
|
||||
},
|
||||
.l_brace, .l_bracket => {
|
||||
var filter: ?Filter = null;
|
||||
defer if (filter != null) filter.?.deinit();
|
||||
|
||||
var additional_data_arena = std.heap.ArenaAllocator.init(allocator);
|
||||
defer additional_data_arena.deinit();
|
||||
var additional_data = AdditionalData.init(additional_data_arena.allocator());
|
||||
|
||||
if (token.tag == .l_bracket) {
|
||||
try self.parseAdditionalData(allocator, &additional_data, struct_name);
|
||||
token.* = self.toker.next();
|
||||
}
|
||||
|
||||
additional_data.limit = 1;
|
||||
|
||||
const link_sstruct = try self.schema_engine.linkedStructName(struct_name, member_name);
|
||||
if (token.tag == .l_brace) filter = try self.parseFilter(
|
||||
allocator,
|
||||
link_sstruct.name,
|
||||
false,
|
||||
) else return printError(
|
||||
"Error: Expected filter",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
|
||||
filter = switch (filter.?.root.*) {
|
||||
.empty => null,
|
||||
else => filter,
|
||||
};
|
||||
|
||||
// Here I have the filter and additionalData
|
||||
const map = allocator.create(std.AutoHashMap(UUID, void)) catch return ZipponError.MemoryError;
|
||||
map.* = std.AutoHashMap(UUID, void).init(allocator);
|
||||
try self.file_engine.populateVoidUUIDMap(
|
||||
link_sstruct.name,
|
||||
filter,
|
||||
map,
|
||||
&additional_data,
|
||||
);
|
||||
return ConditionValue.initLink(map);
|
||||
},
|
||||
|
||||
else => return printError(
|
||||
"Error: Expected uuid or none",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
.link_array => switch (token.tag) {
|
||||
.keyword_none => {
|
||||
const map = allocator.create(std.AutoHashMap(UUID, void)) catch return ZipponError.MemoryError;
|
||||
map.* = std.AutoHashMap(UUID, void).init(allocator);
|
||||
_ = self.toker.next();
|
||||
return ConditionValue.initArrayLink(map);
|
||||
},
|
||||
.l_brace, .l_bracket => {
|
||||
var filter: ?Filter = null;
|
||||
defer if (filter != null) filter.?.deinit();
|
||||
|
||||
var additional_data_arena = std.heap.ArenaAllocator.init(allocator);
|
||||
defer additional_data_arena.deinit();
|
||||
var additional_data = AdditionalData.init(additional_data_arena.allocator());
|
||||
|
||||
if (token.tag == .l_bracket) {
|
||||
try self.parseAdditionalData(allocator, &additional_data, struct_name);
|
||||
token.* = self.toker.next();
|
||||
}
|
||||
|
||||
const link_sstruct = try self.schema_engine.linkedStructName(struct_name, member_name);
|
||||
if (token.tag == .l_brace) filter = try self.parseFilter(allocator, link_sstruct.name, false) else return printError(
|
||||
"Error: Expected filter",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
|
||||
filter = switch (filter.?.root.*) {
|
||||
.empty => null,
|
||||
else => filter,
|
||||
};
|
||||
|
||||
// Here I have the filter and additionalData
|
||||
const map = allocator.create(std.AutoHashMap(UUID, void)) catch return ZipponError.MemoryError;
|
||||
map.* = std.AutoHashMap(UUID, void).init(allocator);
|
||||
try self.file_engine.populateVoidUUIDMap(
|
||||
struct_name,
|
||||
filter,
|
||||
map,
|
||||
&additional_data,
|
||||
);
|
||||
return ConditionValue.initArrayLink(map);
|
||||
},
|
||||
else => return printError(
|
||||
"Error: Expected uuid or none",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
.self => unreachable,
|
||||
}
|
||||
}
|
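For illustration, a sketch of the accepted link values, assuming a schema with a best_friend (link) and a friends (link array) member.

// Hypothetical link values, schema assumed:
//   best_friend = NONE            -> map containing only the zero UUID
//   best_friend = {name = 'Bob'}  -> UUIDs matching the filter, limited to 1
//   friends = [10]{age > 20}      -> optional additional data, then a filter
const example_link_update = "UPDATE User {} TO (best_friend = {name = 'Bob'})";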
447
src/ziql/tokenizer.zig
Normal file
@@ -0,0 +1,447 @@
|
||||
// From https://github.com/ziglang/zig/blob/master/lib/std/zig/tokenizer.zig
|
||||
const std = @import("std");
|
||||
const Loc = @import("../dataStructure/loc.zig");
|
||||
|
||||
pub const Token = struct {
|
||||
tag: Tag,
|
||||
loc: Loc,
|
||||
|
||||
pub const keywords = std.StaticStringMap(Tag).initComptime(.{
|
||||
.{ "GRAB", .keyword_grab },
|
||||
.{ "UPDATE", .keyword_update },
|
||||
.{ "DELETE", .keyword_delete },
|
||||
.{ "ADD", .keyword_add },
|
||||
.{ "IN", .keyword_in },
|
||||
.{ "AND", .keyword_and },
|
||||
.{ "OR", .keyword_or },
|
||||
.{ "TO", .keyword_to },
|
||||
.{ "NONE", .keyword_none },
|
||||
.{ "NOW", .keyword_now },
|
||||
.{ "APPEND", .keyword_append },
|
||||
.{ "POP", .keyword_pop },
|
||||
.{ "REMOVE", .keyword_remove },
|
||||
.{ "REMOVEAT", .keyword_remove_at },
|
||||
.{ "grab", .keyword_grab },
|
||||
.{ "update", .keyword_update },
|
||||
.{ "delete", .keyword_delete },
|
||||
.{ "add", .keyword_add },
|
||||
.{ "in", .keyword_in },
|
||||
.{ "and", .keyword_and },
|
||||
.{ "or", .keyword_or },
|
||||
.{ "to", .keyword_to },
|
||||
.{ "none", .keyword_none },
|
||||
.{ "true", .bool_literal_true },
|
||||
.{ "false", .bool_literal_false },
|
||||
.{ "now", .keyword_now },
|
||||
});
|
||||
|
||||
pub fn getKeyword(bytes: []const u8) ?Tag {
|
||||
return keywords.get(bytes);
|
||||
}
|
||||
|
||||
pub const Tag = enum {
|
||||
eof,
|
||||
invalid,
|
||||
|
||||
keyword_grab,
|
||||
keyword_update,
|
||||
keyword_delete,
|
||||
keyword_add,
|
||||
keyword_in,
|
||||
keyword_not_in,
|
||||
keyword_and,
|
||||
keyword_or,
|
||||
keyword_to,
|
||||
keyword_none,
|
||||
keyword_now,
|
||||
keyword_append,
|
||||
keyword_pop,
|
||||
keyword_remove,
|
||||
keyword_remove_at,
|
||||
|
||||
string_literal,
|
||||
int_literal,
|
||||
float_literal,
|
||||
date_literal,
|
||||
time_literal,
|
||||
datetime_literal,
|
||||
bool_literal_true,
|
||||
bool_literal_false,
|
||||
uuid_literal,
|
||||
identifier,
|
||||
equal,
|
||||
bang, // !
|
||||
pipe, // |
|
||||
l_paren, // (
|
||||
r_paren, // )
|
||||
l_bracket, // [
|
||||
r_bracket, // ]
|
||||
l_brace, // {
|
||||
r_brace, // }
|
||||
semicolon, // ;
|
||||
comma, // ,
|
||||
angle_bracket_left, // <
|
||||
angle_bracket_right, // >
|
||||
angle_bracket_left_equal, // <=
|
||||
angle_bracket_right_equal, // >=
|
||||
equal_angle_bracket_right, // =>
|
||||
period, // .
|
||||
bang_equal, // !=
|
||||
};
|
||||
};
|
||||
|
||||
pub const Tokenizer = struct {
|
||||
buffer: [:0]const u8,
|
||||
index: usize,
|
||||
last_token: Token = undefined,
|
||||
|
||||
pub fn getTokenSlice(self: *Tokenizer, token: Token) []const u8 {
|
||||
return self.buffer[token.loc.start..token.loc.end];
|
||||
}
|
||||
|
||||
pub fn last(self: Tokenizer) Token {
|
||||
return self.last_token;
|
||||
}
|
||||
|
||||
pub fn init(buffer: [:0]const u8) Tokenizer {
|
||||
// Skip the UTF-8 BOM if present.
|
||||
return .{
|
||||
.buffer = buffer,
|
||||
.index = if (std.mem.startsWith(u8, buffer, "\xEF\xBB\xBF")) 3 else 0,
|
||||
};
|
||||
}
|
||||
|
||||
const State = enum {
|
||||
start,
|
||||
invalid,
|
||||
string_literal,
|
||||
date_literal,
|
||||
time_literal,
|
||||
uuid_literal,
|
||||
identifier,
|
||||
equal,
|
||||
bang,
|
||||
angle_bracket_left,
|
||||
angle_bracket_right,
|
||||
string_literal_backslash,
|
||||
float,
|
||||
int,
|
||||
};
|
||||
|
||||
pub fn next(self: *Tokenizer) Token {
|
||||
var state: State = .start;
|
||||
var result: Token = .{
|
||||
.tag = undefined,
|
||||
.loc = .{
|
||||
.start = self.index,
|
||||
.end = undefined,
|
||||
},
|
||||
};
|
||||
while (true) : (self.index += 1) {
|
||||
const c = self.buffer[self.index];
|
||||
switch (state) {
|
||||
.start => switch (c) {
|
||||
0 => {
|
||||
if (self.index == self.buffer.len) return .{
|
||||
.tag = .eof,
|
||||
.loc = .{
|
||||
.start = self.index,
|
||||
.end = self.index,
|
||||
},
|
||||
};
|
||||
state = .invalid;
|
||||
},
|
||||
' ', '\n', '\t', '\r' => {
|
||||
result.loc.start = self.index + 1;
|
||||
},
|
||||
'\'' => {
|
||||
state = .string_literal;
|
||||
result.tag = .string_literal;
|
||||
},
|
||||
'a'...'z', 'A'...'Z', '_' => {
|
||||
state = .identifier;
|
||||
result.tag = .identifier;
|
||||
},
|
||||
'=' => {
|
||||
state = .equal;
|
||||
},
|
||||
'!' => {
|
||||
state = .bang;
|
||||
},
|
||||
'|' => {
|
||||
result.tag = .pipe;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
'(' => {
|
||||
result.tag = .l_paren;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
')' => {
|
||||
result.tag = .r_paren;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
'[' => {
|
||||
result.tag = .l_bracket;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
']' => {
|
||||
result.tag = .r_bracket;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
';' => {
|
||||
result.tag = .semicolon;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
',' => {
|
||||
result.tag = .comma;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
'<' => {
|
||||
state = .angle_bracket_left;
|
||||
},
|
||||
'>' => {
|
||||
state = .angle_bracket_right;
|
||||
},
|
||||
'{' => {
|
||||
result.tag = .l_brace;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
'}' => {
|
||||
result.tag = .r_brace;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
'.' => {
|
||||
state = .float;
|
||||
result.tag = .float_literal;
|
||||
},
|
||||
'0'...'9', '-' => {
|
||||
state = .int;
|
||||
result.tag = .int_literal;
|
||||
},
|
||||
else => {
|
||||
state = .invalid;
|
||||
},
|
||||
},
|
||||
|
||||
.invalid => {
|
||||
// TODO make a better invalid handler
|
||||
@panic("Unknow char!!!");
|
||||
},
|
||||
|
||||
.identifier => switch (c) {
|
||||
'a'...'z', 'A'...'Z', '_', '0'...'9' => continue,
|
||||
'-' => {
|
||||
state = .uuid_literal;
|
||||
result.tag = .uuid_literal;
|
||||
},
|
||||
else => {
|
||||
if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |tag| {
|
||||
result.tag = tag;
|
||||
} else {
|
||||
result.tag = .identifier;
|
||||
}
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.string_literal => switch (c) {
|
||||
0 => {
|
||||
if (self.index != self.buffer.len) {
|
||||
state = .invalid;
|
||||
continue;
|
||||
}
|
||||
result.tag = .invalid;
|
||||
break;
|
||||
},
|
||||
'\n' => {
|
||||
result.tag = .invalid;
|
||||
break;
|
||||
},
|
||||
'\\' => {
|
||||
state = .string_literal_backslash;
|
||||
},
|
||||
'\'' => {
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
0x01...0x09, 0x0b...0x1f, 0x7f => {
|
||||
state = .invalid;
|
||||
},
|
||||
else => continue,
|
||||
},
|
||||
|
||||
.string_literal_backslash => switch (c) {
|
||||
0, '\n' => {
|
||||
result.tag = .invalid;
|
||||
break;
|
||||
},
|
||||
else => {
|
||||
state = .string_literal;
|
||||
},
|
||||
},
|
||||
|
||||
.bang => switch (c) {
|
||||
'=' => {
|
||||
result.tag = .bang_equal;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
'I' => {
|
||||
if (self.buffer.len > self.index + 1 and self.buffer[self.index + 1] == 'N') {
|
||||
result.tag = .keyword_not_in;
|
||||
self.index += 2; // Skip 'I' and 'N'
|
||||
break;
|
||||
} else {
|
||||
result.tag = .bang;
|
||||
break;
|
||||
}
|
||||
},
|
||||
else => {
|
||||
result.tag = .bang;
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.equal => switch (c) {
|
||||
'>' => {
|
||||
result.tag = .equal_angle_bracket_right;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
else => {
|
||||
result.tag = .equal;
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.angle_bracket_left => switch (c) {
|
||||
'=' => {
|
||||
result.tag = .angle_bracket_left_equal;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
else => {
|
||||
result.tag = .angle_bracket_left;
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.angle_bracket_right => switch (c) {
|
||||
'=' => {
|
||||
result.tag = .angle_bracket_right_equal;
|
||||
self.index += 1;
|
||||
break;
|
||||
},
|
||||
else => {
|
||||
result.tag = .angle_bracket_right;
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.int => switch (c) {
|
||||
'.' => {
|
||||
state = .float;
|
||||
result.tag = .float_literal;
|
||||
},
|
||||
'a'...'z', '-' => {
|
||||
state = .uuid_literal;
|
||||
result.tag = .uuid_literal;
|
||||
},
|
||||
'/' => {
|
||||
state = .date_literal;
|
||||
result.tag = .date_literal;
|
||||
},
|
||||
':' => {
|
||||
state = .time_literal;
|
||||
result.tag = .time_literal;
|
||||
},
|
||||
'_', '0'...'9' => continue,
|
||||
else => break,
|
||||
},
|
||||
|
||||
.float => switch (c) {
|
||||
'_', '0'...'9' => {
|
||||
continue;
|
||||
},
|
||||
else => {
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.date_literal => switch (c) {
|
||||
'-' => {
|
||||
state = .time_literal;
|
||||
result.tag = .datetime_literal;
|
||||
},
|
||||
'0'...'9', '/' => continue,
|
||||
else => break,
|
||||
},
|
||||
|
||||
.time_literal => switch (c) {
|
||||
'0'...'9', ':', '.' => continue,
|
||||
else => break,
|
||||
},
|
||||
|
||||
.uuid_literal => switch (c) {
|
||||
'0'...'9', 'a'...'z', '-' => continue,
|
||||
else => break,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
result.loc.end = self.index;
|
||||
self.last_token = result;
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
test "keywords" {
|
||||
try testTokenize("GRAB UPDATE ADD DELETE IN", &.{ .keyword_grab, .keyword_update, .keyword_add, .keyword_delete, .keyword_in });
|
||||
}
|
||||
|
||||
test "basic query" {
|
||||
try testTokenize("GRAB User {}", &.{ .keyword_grab, .identifier, .l_brace, .r_brace });
|
||||
try testTokenize("GRAB User { name = 'Adrien'}", &.{ .keyword_grab, .identifier, .l_brace, .identifier, .equal, .string_literal, .r_brace });
|
||||
try testTokenize("GRAB User { age = 1.5}", &.{ .keyword_grab, .identifier, .l_brace, .identifier, .equal, .float_literal, .r_brace });
|
||||
try testTokenize("GRAB User { admin = true}", &.{ .keyword_grab, .identifier, .l_brace, .identifier, .equal, .bool_literal_true, .r_brace });
|
||||
try testTokenize("GRAB User [1; name] {}", &.{ .keyword_grab, .identifier, .l_bracket, .int_literal, .semicolon, .identifier, .r_bracket, .l_brace, .r_brace });
|
||||
try testTokenize("GRAB User{}|ASCENDING name|", &.{ .keyword_grab, .identifier, .l_brace, .r_brace, .pipe, .identifier, .identifier, .pipe });
|
||||
try testTokenize("DELETE User[1]{name='Adrien'}|ASCENDING name, age|", &.{ .keyword_delete, .identifier, .l_bracket, .int_literal, .r_bracket, .l_brace, .identifier, .equal, .string_literal, .r_brace, .pipe, .identifier, .identifier, .comma, .identifier, .pipe });
|
||||
}
|
||||
|
||||
test "basic date" {
|
||||
try testTokenize("1a5527af-88fb-48c1-8d5c-49c9b73c2379", &.{.uuid_literal});
|
||||
try testTokenize("1998/01/21", &.{.date_literal});
|
||||
try testTokenize("17:55:31.0000", &.{.time_literal});
|
||||
try testTokenize("1998/01/21-17:55:31.0000", &.{.datetime_literal});
|
||||
}
|
||||
|
||||
test "not in keyword" {
|
||||
try testTokenize("!IN", &.{.keyword_not_in});
|
||||
try testTokenize("!IN(", &.{ .keyword_not_in, .l_paren });
|
||||
try testTokenize("!Ind", &.{ .bang, .identifier });
|
||||
}
|
||||
|
||||
fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !void {
|
||||
var tokenizer = Tokenizer.init(source);
|
||||
for (expected_token_tags) |expected_token_tag| {
|
||||
const token = tokenizer.next();
|
||||
try std.testing.expectEqual(expected_token_tag, token.tag);
|
||||
}
|
||||
// Last token should always be eof, even when the last token was invalid,
|
||||
// in which case the tokenizer is in an invalid state, which can only be
|
||||
// recovered by opinionated means outside the scope of this implementation.
|
||||
const last_token = tokenizer.next();
|
||||
try std.testing.expectEqual(Token.Tag.eof, last_token.tag);
|
||||
try std.testing.expectEqual(source.len, last_token.loc.start);
|
||||
try std.testing.expectEqual(source.len, last_token.loc.end);
|
||||
}
|
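An extra example in the spirit of the tests above, consuming tokens with the public Tokenizer API until eof; the import path assumes the src/ziql layout introduced by this commit, relative to the repo root as in test.zig.

const std = @import("std");
const Tokenizer = @import("src/ziql/tokenizer.zig").Tokenizer;

test "count tokens of an UPDATE query (illustrative)" {
    var toker = Tokenizer.init("UPDATE User {name = 'Bob'} TO (age = 10)");
    var count: usize = 0;
    while (true) {
        const token = toker.next();
        if (token.tag == .eof) break;
        count += 1;
    }
    // UPDATE, User, {, name, =, 'Bob', }, TO, (, age, =, 10, )
    try std.testing.expectEqual(@as(usize, 13), count);
}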
122
src/ziql/utils.zig
Normal file
@@ -0,0 +1,122 @@
|
||||
const std = @import("std");
|
||||
const config = @import("config");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Token = @import("tokenizer.zig").Token;
|
||||
const Condition = @import("../dataStructure/filter.zig").Condition;
|
||||
const printError = @import("../utils.zig").printError;
|
||||
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
const Self = @import("parser.zig");
|
||||
|
||||
/// Check that all tokens in an array are of one specific type
|
||||
pub fn checkTokensInArray(self: Self, tag: Token.Tag) ZipponError!Token {
|
||||
var token = self.toker.next();
|
||||
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
if (token.tag != tag) return printError(
|
||||
"Error: Wrong type.",
|
||||
ZipponError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
);
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
/// Check that what is being compared is valid; for example, comparing whether a string is superior to another string is not.
|
||||
pub fn checkConditionValidity(
|
||||
self: Self,
|
||||
condition: Condition,
|
||||
token: Token,
|
||||
) ZipponError!void {
|
||||
switch (condition.operation) {
|
||||
.equal => switch (condition.data_type) {
|
||||
.int, .float, .str, .bool, .date, .time, .datetime => {},
|
||||
else => return printError(
|
||||
"Error: Only int, float, str, bool, date, time, datetime can be compare with =",
|
||||
ZipponError.ConditionError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.different => switch (condition.data_type) {
|
||||
.int, .float, .str, .bool, .date, .time, .datetime => {},
|
||||
else => return printError(
|
||||
"Error: Only int, float, str, bool, date, time, datetime can be compare with !=",
|
||||
ZipponError.ConditionError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.superior_or_equal => switch (condition.data_type) {
|
||||
.int, .float, .date, .time, .datetime => {},
|
||||
else => return printError(
|
||||
"Error: Only int, float, date, time, datetime can be compare with >=",
|
||||
ZipponError.ConditionError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.superior => switch (condition.data_type) {
|
||||
.int, .float, .date, .time, .datetime => {},
|
||||
else => return printError(
|
||||
"Error: Only int, float, date, time, datetime can be compare with >",
|
||||
ZipponError.ConditionError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.inferior_or_equal => switch (condition.data_type) {
|
||||
.int, .float, .date, .time, .datetime => {},
|
||||
else => return printError(
|
||||
"Error: Only int, float, date, time, datetime can be compare with <=",
|
||||
ZipponError.ConditionError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.inferior => switch (condition.data_type) {
|
||||
.int, .float, .date, .time, .datetime => {},
|
||||
else => return printError(
|
||||
"Error: Only int, float, date, time, datetime can be compare with <",
|
||||
ZipponError.ConditionError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.in => switch (condition.data_type) {
|
||||
.link => {},
|
||||
else => return printError(
|
||||
"Error: Only link can be compare with IN.",
|
||||
ZipponError.ConditionError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.not_in => switch (condition.data_type) {
|
||||
.link => {},
|
||||
else => return printError(
|
||||
"Error: Only link can be compare with !IN.",
|
||||
ZipponError.ConditionError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
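For illustration, a sketch of what this validity check accepts and rejects, assuming User has a str member called name.

// Hypothetical checks, schema assumed:
//   name = 'Bob' -> accepted: = and != are allowed for str
//   name > 'Bob' -> rejected with ZipponError.ConditionError, since > is only
//                   allowed for int, float, date, time and datetime members
const accepted_query = "GRAB User {name = 'Bob'}";
const rejected_query = "GRAB User {name > 'Bob'}";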
4
test.zig
@@ -1,7 +1,7 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Parser = @import("src/parser/core.zig").Parser;
|
||||
const Tokenizer = @import("src/parser/tokenizer.zig").Tokenizer;
|
||||
const Parser = @import("src/ziql/parser.zig");
|
||||
const Tokenizer = @import("src/ziql/tokenizer.zig").Tokenizer;
|
||||
const DBEngine = @import("src/cli/core.zig");
|
||||
const ZipponError = @import("error").ZipponError;
|
||||
|
||||
|