Pass tests with added stuff for relationships
commit 6e7d1d150c
parent 13cba58dbc

@@ -22,14 +22,14 @@ pub const DataType = enum {
float_array,
str_array,
bool_array,
link_array,
date_array,
time_array,
datetime_array,
link_array,

pub fn is_array(self: DataType) bool {
return switch (self) {
.int_array, .float_array, .link_array, .str_array, .bool_array, .date_array, .time_array, .datetime_array => true,
.int_array, .float_array, .str_array, .bool_array, .date_array, .time_array, .datetime_array => true,
else => false,
};
}
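
For reference, a small illustrative test of how is_array is typically consumed (a sketch, not part of the commit; it assumes the scalar variants such as .str seen in the schema parser hunk further down belong to this same DataType enum, and that std is imported at file scope):

```zig
test "DataType.is_array (illustrative sketch)" {
    // Array variants report true; scalar variants fall through to `else => false`.
    try std.testing.expect(DataType.int_array.is_array());
    try std.testing.expect(!DataType.str.is_array());
}
```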

@@ -197,7 +197,7 @@ test "Value parsing: Int" {
// Int array
const array_str = "[1 14 44 42 hello]";
const array = parseArrayInt(allocator, array_str);
const array = try parseArrayInt(allocator, array_str);
defer allocator.free(array);
const expected_array: [5]i32 = .{ 1, 14, 44, 42, 0 };
try std.testing.expect(std.mem.eql(i32, array, &expected_array));

@@ -214,7 +214,7 @@ test "Value parsing: Float" {
// Float array
const array_str = "[1.5 14.3 44.9999 42 hello]";
const array = parseArrayFloat(allocator, array_str);
const array = try parseArrayFloat(allocator, array_str);
defer allocator.free(array);
const expected_array: [5]f64 = .{ 1.5, 14.3, 44.9999, 42, 0 };
try std.testing.expect(std.mem.eql(f64, array, &expected_array));

@@ -227,10 +227,11 @@ test "Value parsing: String" {
// string array
const array_str = "['Hello' 'How are you doing ?' '']";
const array = parseArrayStr(allocator, array_str);
const array = try parseArrayStr(allocator, array_str);
defer allocator.free(array);
const expected_array: [3][]const u8 = .{ "'Hello'", "'How are you doing ?'", "''" };
const expected_array: [3][]const u8 = .{ "Hello", "How are you doing ?", "" };
for (array, expected_array) |parsed, expected| {
std.debug.print("{s} : {s}\n", .{ parsed, expected });
try std.testing.expect(std.mem.eql(u8, parsed, expected));
}
}

@@ -246,7 +247,7 @@ test "Value parsing: Bool array" {
// Bool array
const array_str = "[1 0 0 1 1]";
const array = parseArrayBool(allocator, array_str);
const array = try parseArrayBool(allocator, array_str);
defer allocator.free(array);
const expected_array: [5]bool = .{ true, false, false, true, true };
try std.testing.expect(std.mem.eql(bool, array, &expected_array));

@@ -267,7 +268,7 @@ test "Value parsing: Date" {
// Date array
const array_str = "[1920/01/01 1998/01/21 2024/12/31]";
const array = parseArrayDate(allocator, array_str);
const array = try parseArrayDate(allocator, array_str);
defer allocator.free(array);
const expected_array: [3]DateTime = .{
DateTime.init(1920, 1, 1, 0, 0, 0, 0),

@@ -295,7 +296,7 @@ test "Value parsing: Time" {
// Time array
const array_str = "[12:45:00.0000 18:12:53.7491 02:30:10 12:30]";
const array = parseArrayTime(allocator, array_str);
const array = try parseArrayTime(allocator, array_str);
defer allocator.free(array);
const expected_array: [4]DateTime = .{
DateTime.init(0, 0, 0, 12, 45, 0, 0),

@@ -324,7 +325,7 @@ test "Value parsing: Datetime" {
// Datetime array
const array_str = "[1920/01/01-12:45:00.0000 1920/01/01-18:12:53.7491 1920/01/01-02:30:10 1920/01/01-12:30]";
const array = parseArrayDatetime(allocator, array_str);
const array = try parseArrayDatetime(allocator, array_str);
defer allocator.free(array);
const expected_array: [4]DateTime = .{
DateTime.init(1920, 1, 1, 12, 45, 0, 0),

@@ -284,7 +284,7 @@ pub const FileEngine = struct {
// Combine results
for (thread_writer_list) |list| {
for (list.items) |uuid| map.put(uuid, void) catch return ZipponError.MemoryError;
for (list.items) |uuid| _ = map.getOrPut(uuid) catch return ZipponError.MemoryError;
}
}
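
A note on the getOrPut change above: with a void value type the map behaves as a set, so inserting the same UUID from several threads' result lists is a harmless no-op instead of an overwrite. A minimal self-contained sketch of that semantics (an illustration, not from the commit; it assumes std is imported at file scope):

```zig
test "AutoHashMap as a UUID set (illustrative sketch)" {
    var set = std.AutoHashMap([16]u8, void).init(std.testing.allocator);
    defer set.deinit();

    const id = [_]u8{0} ** 16; // stand-in for row[0].UUID
    _ = try set.getOrPut(id);
    _ = try set.getOrPut(id); // duplicate insert: entry already exists, count unchanged

    try std.testing.expectEqual(@as(u32, 1), set.count());
}
```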

@@ -315,7 +315,10 @@ pub const FileEngine = struct {
while (iter.next() catch return) |row| {
if (filter == null or filter.?.evaluate(row)) {
list.*.append(row[0].UUID);
list.*.append(row[0].UUID) catch |err| {
sync_context.logError("Error initializing DataIterator", err);
return;
};

if (sync_context.incrementAndCheckStructLimit()) break;
}

@@ -681,8 +684,7 @@ pub const FileEngine = struct {
defer new_writer.deinit();

while (iter.next() catch |err| {
sync_context.logError("Error initializing DataWriter", err);
zid.deleteFile(new_path, dir) catch {};
sync_context.logError("Parsing files", err);
return;
}) |row| {
if (filter == null or filter.?.evaluate(row)) {

@@ -701,21 +703,12 @@ pub const FileEngine = struct {
return;
};

writer.writeByte('{') catch |err| {
sync_context.logError("Error initializing DataWriter", err);
zid.deleteFile(new_path, dir) catch {};
return;
};
writer.print("\"{s}\"", .{UUID.format_bytes(row[0].UUID)}) catch |err| {
sync_context.logError("Error initializing DataWriter", err);
zid.deleteFile(new_path, dir) catch {};
return;
};
writer.writeAll("},") catch |err| {
writer.print("{{\"{s}\"}},", .{UUID.format_bytes(row[0].UUID)}) catch |err| {
sync_context.logError("Error initializing DataWriter", err);
zid.deleteFile(new_path, dir) catch {};
return;
};

if (sync_context.incrementAndCheckStructLimit()) break;
} else {
new_writer.write(row) catch |err| {

@@ -804,6 +797,7 @@ pub const FileEngine = struct {
}

// Combine results
// TODO: Make a struct for writing
writer.writeByte('[') catch return FileEngineError.WriteError;
for (thread_writer_list) |list| {
writer.writeAll(list.items) catch return FileEngineError.WriteError;

@@ -836,11 +830,11 @@ pub const FileEngine = struct {
};
defer iter.deinit();

const new_path = std.fmt.allocPrint(allocator, "{d}.zid.new", .{file_index}) catch |err| {
sync_context.logError("Error creating new file path", err);
var new_path_buffer: [128]u8 = undefined;
const new_path = std.fmt.bufPrint(&new_path_buffer, "{d}.zid.new", .{file_index}) catch |err| {
sync_context.logError("Error creating file path", err);
return;
};
defer allocator.free(new_path);

zid.createFile(new_path, dir) catch |err| {
sync_context.logError("Error creating new file", err);

@@ -853,11 +847,15 @@ pub const FileEngine = struct {
};
defer new_writer.deinit();

while (iter.next() catch return) |row| {
while (iter.next() catch |err| {
sync_context.logError("Error during iter", err);
return;
}) |row| {
if (filter == null or filter.?.evaluate(row)) {
writer.writeByte('{') catch return;
writer.print("\"{s}\"", .{UUID.format_bytes(row[0].UUID)}) catch return;
writer.writeAll("},") catch return;
writer.print("{{\"{s}\"}},", .{UUID.format_bytes(row[0].UUID)}) catch |err| {
sync_context.logError("Error writing", err);
return;
};

if (sync_context.incrementAndCheckStructLimit()) break;
} else {

@@ -899,7 +897,7 @@ pub const FileEngine = struct {
.str => return zid.Data.initStr(value),
.link, .self => {
const uuid = UUID.parse(value) catch return FileEngineError.InvalidUUID;
return zid.Data{ .UUID = uuid.bytes };
return zid.Data.initUUID(uuid.bytes);
},
.int_array => {
const array = s2t.parseArrayInt(allocator, value) catch return FileEngineError.MemoryError;

@@ -925,12 +923,6 @@ pub const FileEngine = struct {

return zid.Data.initBoolArray(zid.allocEncodArray.Bool(allocator, array) catch return FileEngineError.AllocEncodError);
},
.link_array => {
const array = s2t.parseArrayUUIDBytes(allocator, value) catch return FileEngineError.MemoryError;
defer allocator.free(array);

return zid.Data.initUUIDArray(zid.allocEncodArray.UUID(allocator, array) catch return FileEngineError.AllocEncodError);
},
.date_array => {
const array = s2t.parseArrayDateUnix(allocator, value) catch return FileEngineError.MemoryError;
defer allocator.free(array);

@@ -949,6 +941,12 @@ pub const FileEngine = struct {

return zid.Data.initUnixArray(zid.allocEncodArray.Unix(allocator, array) catch return FileEngineError.AllocEncodError);
},
.link_array => {
const array = s2t.parseArrayUUIDBytes(allocator, value) catch return FileEngineError.MemoryError;
defer allocator.free(array);

return zid.Data.initUUIDArray(zid.allocEncodArray.UUID(allocator, array) catch return FileEngineError.AllocEncodError);
},
}
}

@@ -973,6 +971,8 @@ pub const FileEngine = struct {
datas[i] = try string2Data(allocator, dt, map.get(member).?);
}

log.debug("New ordered data: {any}\n", .{datas});

return datas;
}

@@ -991,7 +991,7 @@ pub const FileEngine = struct {
const file_stat = member_dir.statFile(entry.name) catch return FileEngineError.FileStatError;
if (file_stat.size < MAX_FILE_SIZE) {
// Can't I just return i? Files are supposed to be ordered. I think I already checked and they are not.
std.debug.print("{s}\n\n", .{entry.name[0..(entry.name.len - 4)]});
log.debug("{s}\n\n", .{entry.name});
return std.fmt.parseInt(usize, entry.name[0..(entry.name.len - 4)], 10) catch return FileEngineError.InvalidFileIndex; // INFO: Hardcoded len of file extension
}
}

@@ -285,7 +285,11 @@ pub fn main() !void {
.expect_path_to_db => switch (token.tag) {
.identifier => {
db_engine.deinit();
db_engine = DBEngine.init(allocator, try allocator.dupe(u8, toker.getTokenSlice(token)), null);
db_engine = DBEngine.init(
allocator,
try allocator.dupe(u8, toker.getTokenSlice(token)),
null,
);
state = .end;
},
else => {

@@ -337,6 +341,7 @@ pub fn main() !void {
const main_path = try allocator.dupe(u8, db_engine.file_engine.path_to_ZipponDB_dir);
db_engine.deinit();
db_engine = DBEngine.init(allocator, main_path, toker.getTokenSlice(token));
try db_engine.file_engine.writeSchemaFile(db_engine.schema_engine.null_terminated_schema_buff);
state = .end;
},
else => {

@@ -80,7 +80,8 @@ pub const Parser = struct {
.float => .Float,
.str => .Str,
.bool => .Bool,
.link, .self => .UUID,
.link => .UUID,
.self => .UUID,
.date => .Unix,
.time => .Unix,
.datetime => .Unix,

@@ -88,10 +89,10 @@ pub const Parser = struct {
.float_array => .FloatArray,
.str_array => .StrArray,
.bool_array => .BoolArray,
.link_array => .UUIDArray,
.date_array => .UnixArray,
.time_array => .UnixArray,
.datetime_array => .UnixArray,
.link_array => .UUIDArray,
}) catch return SchemaParserError.MemoryError;
}
return schema.toOwnedSlice() catch return SchemaParserError.MemoryError;

src/stuffs/UUIDTree.zig (new file, 77 lines)
@@ -0,0 +1,77 @@
const std = @import("std");
const UUID = @import("dtype").UUID;
const ArenaAllocator = std.heap.ArenaAllocator;

// Maybe use that later; the point is that it takes only 16 comparisons per UUID and saves a lot of memory.
// But now that I think about it, 16 comparisons vs 1, you get me.
pub const UUIDTree = struct {
    arena: *ArenaAllocator,
    root_node: Node,
    len: usize,

    pub fn init(allocator: std.mem.Allocator) UUIDTree {
        var arena = ArenaAllocator.init(allocator);
        return UUIDTree{ .arena = &arena, .root_node = Node.init(&arena, 0), .len = 0 };
    }

    pub fn deinit(self: *UUIDTree) void {
        self.arena.deinit();
    }

    pub fn add(self: *UUIDTree, uuid: UUID) void {
        if (self.root_node.add(uuid, self.arena)) self.len += 1;
    }

    pub fn isIn(self: UUIDTree, uuid: UUID) bool {
        return self.root_node.evaluate(uuid);
    }
};

const Node = struct {
    depth: u4, // Because a UUID is 16 bytes long and a u4 has 16 different values
    map: std.AutoHashMap(u8, ?Node),

    fn init(arena: *ArenaAllocator, depth: u4) Node {
        const allocator = arena.*.allocator();
        return Node{
            .depth = depth,
            .map = std.AutoHashMap(u8, ?Node).init(allocator),
        };
    }
    fn evaluate(self: Node, _: UUID) bool {
        return switch (self.depth) {
            15 => true,
            else => false,
        };
    }

    fn add(self: *Node, uuid: UUID, arena: *ArenaAllocator) bool {
        switch (self.depth) {
            15 => {
                const c = uuid.bytes[self.depth];
                std.debug.print("{b}\n", .{c});

                if (self.map.get(c)) |_| {
                    std.debug.print("UUID already in map\n", .{});
                    return false;
                } else {
                    self.map.put(c, null) catch return false;
                    return true;
                }
            },
            else => {
                const c = uuid.bytes[self.depth];
                std.debug.print("{b}\n", .{c});

                // Could use getOrPut for perf I think
                if (self.map.getPtr(c)) |next_node| {
                    return next_node.*.?.add(uuid, arena);
                } else {
                    var new_node = Node.init(arena, self.depth + 1);
                    self.map.put(c, new_node) catch return false;
                    return new_node.add(uuid, arena);
                }
            },
        }
    }
};
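
The file comment above describes the idea: walk one node per UUID byte so that membership costs at most 16 lookups while shared prefixes share storage. As a point of comparison, here is a self-contained sketch of that byte-trie pattern (an illustration with made-up names, independent of the UUIDTree API above):

```zig
const std = @import("std");

const TrieNode = struct {
    children: std.AutoHashMap(u8, *TrieNode),
    terminal: bool = false,

    fn init(allocator: std.mem.Allocator) TrieNode {
        return .{ .children = std.AutoHashMap(u8, *TrieNode).init(allocator) };
    }
};

// Insert a 16-byte key, creating one child node per byte of the path.
fn trieAdd(allocator: std.mem.Allocator, root: *TrieNode, key: [16]u8) !void {
    var node = root;
    for (key) |byte| {
        const entry = try node.children.getOrPut(byte);
        if (!entry.found_existing) {
            const child = try allocator.create(TrieNode);
            child.* = TrieNode.init(allocator);
            entry.value_ptr.* = child;
        }
        node = entry.value_ptr.*;
    }
    node.terminal = true;
}

// Membership test: at most 16 map lookups, one per byte.
fn trieContains(root: *TrieNode, key: [16]u8) bool {
    var node = root;
    for (key) |byte| {
        node = node.children.get(byte) orelse return false;
    }
    return node.terminal;
}

test "byte trie membership (illustrative sketch)" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit(); // frees every node and every inner hash map at once
    const allocator = arena.allocator();

    var root = TrieNode.init(allocator);
    const a = [_]u8{1} ** 16;
    const b = [_]u8{2} ** 16;

    try trieAdd(allocator, &root, a);
    try std.testing.expect(trieContains(&root, a));
    try std.testing.expect(!trieContains(&root, b));
}
```

The UUIDTree above stores ?Node values in its maps rather than heap pointers; the sketch only illustrates the per-byte traversal and the arena-owned lifetime, not that exact layout.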

@@ -24,6 +24,7 @@ const ComparisonOperator = enum {
inferior,
inferior_or_equal,
in,
not_in,

pub fn str(self: ComparisonOperator) []const u8 {
return switch (self) {

@@ -34,6 +35,7 @@ const ComparisonOperator = enum {
.inferior => "<",
.inferior_or_equal => "<=",
.in => "IN",
.not_in => "!IN",
};
}
};

@@ -55,14 +57,14 @@ pub const ConditionValue = union(enum) {
float: f64,
str: []const u8,
bool_: bool,
link: UUID,
self: UUID,
unix: u64,
int_array: std.ArrayList(i32),
str_array: std.ArrayList([]const u8),
float_array: std.ArrayList(f64),
bool_array: std.ArrayList(bool),
link_array: std.ArrayList(UUID),
unix_array: std.ArrayList(u64),
link: *std.AutoHashMap([16]u8, void),

pub fn deinit(self: ConditionValue) void {
switch (self) {

@@ -70,7 +72,6 @@ pub const ConditionValue = union(enum) {
.str_array => self.str_array.deinit(),
.float_array => self.float_array.deinit(),
.bool_array => self.bool_array.deinit(),
.link_array => self.link_array.deinit(),
.unix_array => self.unix_array.deinit(),
else => {},
}

@@ -88,6 +89,10 @@ pub const ConditionValue = union(enum) {
return ConditionValue{ .str = value };
}

pub fn initSelf(value: UUID) ConditionValue {
return ConditionValue{ .self = value };
}

pub fn initBool(value: []const u8) ConditionValue {
return ConditionValue{ .bool_ = s2t.parseBool(value) };
}

@@ -132,6 +137,10 @@ pub const ConditionValue = union(enum) {
pub fn initArrayDateTime(allocator: std.mem.Allocator, value: []const u8) ConditionValue {
return ConditionValue{ .unix_array = s2t.parseArrayDatetimeUnix(allocator, value) };
}

pub fn initLink(value: *std.AutoHashMap([16]u8, void)) ConditionValue {
return ConditionValue{ .link = value };
}
};

pub const Condition = struct {

@@ -360,3 +369,29 @@ test "Evaluate" {

_ = filter.evaluate(&data);
}

test "ConditionValue: link" {
const allocator = std.testing.allocator;

// Create a hash map for storing UUIDs
var hash_map = std.AutoHashMap([16]u8, void).init(allocator);
defer hash_map.deinit();

// Create a UUID to add to the hash map
const uuid1 = try UUID.parse("123e4567-e89b-12d3-a456-426614174000");
const uuid2 = try UUID.parse("223e4567-e89b-12d3-a456-426614174000");

// Add UUIDs to the hash map
try hash_map.put(uuid1.bytes, {});
try hash_map.put(uuid2.bytes, {});

// Create a ConditionValue with the link
var value = ConditionValue.initLink(&hash_map);

// Check that the hash map contains the correct number of UUIDs
try std.testing.expectEqual(@as(usize, 2), value.link.count());

// Check that specific UUIDs are in the hash map
try std.testing.expect(value.link.contains(uuid1.bytes));
try std.testing.expect(value.link.contains(uuid2.bytes));
}

@@ -12,11 +12,21 @@ pub const Token = struct {
.{ "DELETE", .keyword_delete },
.{ "ADD", .keyword_add },
.{ "IN", .keyword_in },
.{ "true", .bool_literal_true },
.{ "false", .bool_literal_false },
.{ "AND", .keyword_and },
.{ "OR", .keyword_or },
.{ "TO", .keyword_to },
.{ "NONE", .keyword_none },
.{ "grab", .keyword_grab },
.{ "update", .keyword_update },
.{ "delete", .keyword_delete },
.{ "add", .keyword_add },
.{ "in", .keyword_in },
.{ "and", .keyword_and },
.{ "or", .keyword_or },
.{ "to", .keyword_to },
.{ "none", .keyword_none },
.{ "true", .bool_literal_true },
.{ "false", .bool_literal_false },
});
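
With the lowercase entries added above, keyword lookup accepts both spellings. A tiny illustrative check (a sketch, not part of the commit; it assumes it sits in the same file so std and Token are in scope):

```zig
test "getKeyword accepts upper and lower case (illustrative sketch)" {
    try std.testing.expect(Token.getKeyword("delete").? == Token.getKeyword("DELETE").?);
    try std.testing.expect(Token.getKeyword("none").? == Token.getKeyword("NONE").?);
    try std.testing.expect(Token.getKeyword("not_a_keyword") == null);
}
```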

pub fn getKeyword(bytes: []const u8) ?Tag {

@@ -36,6 +46,7 @@ pub const Token = struct {
keyword_and,
keyword_or,
keyword_to,
keyword_none,

string_literal,
int_literal,
@@ -563,6 +563,9 @@ pub const Parser = struct {
.bool, .bool_array, .link, .link_array => null, // handle separately
};

var filter: ?Filter = null;
var additional_data = AdditionalData.init(self.allocator);

if (expected_tag) |tag| {
if (condition.data_type.is_array()) {
token = try self.checkTokensInArray(tag);

@@ -603,21 +606,51 @@ pub const Parser = struct {
}
}
} else if (condition.data_type == .link) {
// If token is ", this means a single UUID
// If token is { or [, this means a new filter
switch (token.tag) {
.l_bracket => {
try self.parseAdditionalData(
&additional_data,
struct_name,
);
},
else => {},
}

switch (token.tag) {
.l_brace => {
filter = try self.parseFilter(struct_name, false);
},
else => return printError(
"Error: Expected new filter",
ZiQlParserError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,
),
}
}
}

condition.value = switch (condition.data_type) {
.int => ConditionValue.initInt(self.toker.buffer[start_index..token.loc.end]),
.float => ConditionValue.initFloat(self.toker.buffer[start_index..token.loc.end]),
.str => ConditionValue.initStr(self.toker.buffer[start_index + 1 .. token.loc.end - 1]),
.date => ConditionValue.initDate(self.toker.buffer[start_index..token.loc.end]),
.time => ConditionValue.initTime(self.toker.buffer[start_index..token.loc.end]),
.datetime => ConditionValue.initDateTime(self.toker.buffer[start_index..token.loc.end]),
.bool => ConditionValue.initBool(self.toker.buffer[start_index..token.loc.end]),
switch (condition.data_type) {
.int => condition.value = ConditionValue.initInt(self.toker.buffer[start_index..token.loc.end]),
.float => condition.value = ConditionValue.initFloat(self.toker.buffer[start_index..token.loc.end]),
.str => condition.value = ConditionValue.initStr(self.toker.buffer[start_index + 1 .. token.loc.end - 1]),
.date => condition.value = ConditionValue.initDate(self.toker.buffer[start_index..token.loc.end]),
.time => condition.value = ConditionValue.initTime(self.toker.buffer[start_index..token.loc.end]),
.datetime => condition.value = ConditionValue.initDateTime(self.toker.buffer[start_index..token.loc.end]),
.bool => condition.value = ConditionValue.initBool(self.toker.buffer[start_index..token.loc.end]),
.link => {
var map = std.AutoHashMap([16]u8, void).init(self.allocator);
try self.file_engine.populateUUIDMap(
struct_name,
filter,
&map,
&additional_data,
);
condition.value = ConditionValue.initLink(&map);
},
else => unreachable, // TODO: Make for link and array =/
};
}
state = .end;
},

@@ -692,6 +725,17 @@ pub const Parser = struct {
),
},

.in => switch (condition.data_type) {
.link => {},
else => return printError(
"Error: Only link can be compared with IN.",
ZiQlParserError.ConditionError,
self.toker.buffer,
token.loc.start,
token.loc.end,
),
},

else => unreachable,
}

@@ -870,19 +914,18 @@ pub const Parser = struct {
.int => .int_literal,
.float => .float_literal,
.str => .string_literal,
.link, .self => .uuid_literal,
.self => .uuid_literal,
.date => .date_literal,
.time => .time_literal,
.datetime => .datetime_literal,
.int_array => .int_literal,
.float_array => .float_literal,
.link_array => .uuid_literal,
.str_array => .string_literal,
.date_array => .date_literal,
.time_array => .time_literal,
.datetime_array => .datetime_literal,
// Handle bool and arrays separately
.bool, .bool_array => null,
.bool, .bool_array, .link, .link_array => null,
};

if (expected_tag) |tag| {

@@ -957,6 +1000,26 @@ pub const Parser = struct {
}
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch return ZipponError.MemoryError;
},
.link => {
switch (token.tag) {
.keyword_none => {
member_map.put(member_name, "00000000-0000-0000-0000-000000000000") catch return ZipponError.MemoryError;
},
.uuid_literal => {
// TODO: Check if the uuid is in the struct, otherwise return an error
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch return ZipponError.MemoryError;
},
.l_brace => {}, // TODO: Get the filter and return the first value found
else => return printError(
"Error: Expected uuid or none",
ZiQlParserError.SynthaxError,
self.toker.buffer,
token.loc.start,
token.loc.end,
),
}
},
.link_array => {},
else => unreachable,
}
}

@@ -1001,9 +1064,12 @@ pub const Parser = struct {
};

test "ADD" {
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[], bday=2000/01/01, a_time=12:04, last_order=2000/01/01-12:45)");
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], friends=[], bday=2000/01/01, a_time=12:04:54, last_order=2000/01/01-12:45)");
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=-55, scores=[ 1 ], friends=[], bday=2000/01/01, a_time=12:04:54.8741, last_order=2000/01/01-12:45)");
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], best_friend=none, bday=2000/01/01, a_time=12:04, last_order=2000/01/01-12:45)");
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], best_friend=none, bday=2000/01/01, a_time=12:04:54, last_order=2000/01/01-12:45)");
try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=-55, scores=[ 1 ], best_friend=none, bday=2000/01/01, a_time=12:04:54.8741, last_order=2000/01/01-12:45)");

// This needs to take the first User named Bob as it is a unique link
//try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=-55, scores=[ 1 ], best_friend={name = 'Bob'}, bday=2000/01/01, a_time=12:04:54.8741, last_order=2000/01/01-12:45)");
}

test "GRAB filter with string" {

@@ -1042,6 +1108,10 @@ test "Specific query" {
try testParsing("GRAB User [1]");
}

//test "Relationship" {
// try testParsing("GRAB User {best_friend IN {name = 'Bob'}}");
//}

test "DELETE" {
try testParsing("DELETE User {name='Bob'}");
}