Queries with relationships now write the UUID bytes between {|<>|} markers.
That way I can parse the file again, build a map from each UUID to its sub-JSON, and then iterate over the first JSON, replacing every UUID placeholder with the new data from the map.
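A minimal sketch (not part of this commit) of how that second pass could look, assuming the placeholder wraps the raw 16 UUID bytes and that the map from UUID to serialized sub-JSON has already been built; the function name, error names, and map type are illustrative only:

const std = @import("std");

// Hypothetical second pass: copy `json` to `writer`, replacing every
// "{|<...>|}" placeholder with the sub-JSON mapped to the UUID it wraps.
fn replaceUuidPlaceholders(
    writer: anytype,
    json: []const u8,
    uuid_to_json: *const std.AutoHashMap([16]u8, []const u8),
) !void {
    var i: usize = 0;
    while (std.mem.indexOfPos(u8, json, i, "{|<")) |start| {
        // Copy everything before the placeholder untouched.
        try writer.writeAll(json[i..start]);

        const uuid_start = start + "{|<".len;
        const end = std.mem.indexOfPos(u8, json, uuid_start, ">|}") orelse
            return error.UnterminatedPlaceholder;
        if (end - uuid_start != 16) return error.BadPlaceholder;

        // The EntityWriter change below prints the raw UUID bytes, so read them back as-is.
        const uuid: [16]u8 = json[uuid_start..][0..16].*;

        if (uuid_to_json.get(uuid)) |sub_json| {
            try writer.writeAll(sub_json); // splice in the entity found on the second parse
        } else {
            try writer.writeAll("{}"); // unknown link: fall back to an empty object
        }
        i = end + ">|}".len;
    }
    try writer.writeAll(json[i..]); // tail after the last placeholder
}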
This commit is contained in:
parent 7fb0b5f63e
commit e7056efec9
59 lib/zid.zig
@@ -332,7 +332,64 @@ pub const allocEncodArray = struct {
 pub const DataIterator = struct {
     allocator: std.mem.Allocator,
     file: std.fs.File,
-    reader: std.io.BufferedReader(4096, std.fs.File.Reader), // Use ArrayList reader maybe ?
+    reader: std.io.BufferedReader(4096, std.fs.File.Reader),
+
+    schema: []const DType,
+    data: []Data,
+
+    index: usize = 0,
+    file_len: usize,
+    str_index: usize = 0,
+    array_index: usize = 0,
+
+    pub fn init(allocator: std.mem.Allocator, name: []const u8, dir: ?std.fs.Dir, schema: []const DType) !DataIterator {
+        const d_ = dir orelse std.fs.cwd();
+        const file = try d_.openFile(name, .{ .mode = .read_only });
+
+        return DataIterator{
+            .allocator = allocator,
+            .file = file,
+            .schema = schema,
+            .reader = std.io.bufferedReader(file.reader()),
+            .data = try allocator.alloc(Data, schema.len),
+            .file_len = try file.getEndPos(),
+        };
+    }
+
+    pub fn deinit(self: *DataIterator) void {
+        self.allocator.free(self.data);
+        self.file.close();
+    }
+
+    pub fn next(self: *DataIterator) !?[]Data {
+        self.str_index = 0;
+        self.array_index = 0;
+        if (self.index >= self.file_len) return null;
+
+        var i: usize = 0;
+        while (i < self.schema.len) : (i += 1) {
+            self.data[i] = switch (self.schema[i]) {
+                .Str => try self.schema[i].readStr(self.reader.reader(), &self.str_index),
+                .IntArray,
+                .FloatArray,
+                .BoolArray,
+                .StrArray,
+                .UUIDArray,
+                .UnixArray,
+                => try self.schema[i].readArray(self.reader.reader(), &self.array_index),
+                else => try self.schema[i].read(self.reader.reader()),
+            };
+            self.index += self.data[i].size();
+        }
+
+        return self.data;
+    }
+};
+
+pub const DataIteratorFullBuffer = struct {
+    allocator: std.mem.Allocator,
+    file: std.fs.File,
+    reader: std.io.BufferedReader(4096, std.fs.File.Reader),
 
     schema: []const DType,
     data: []Data,
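For context, a usage sketch of the streaming DataIterator added above; the import path, the data file name, and the concrete DType members in the schema are assumptions for illustration:

const std = @import("std");
const zid = @import("lib/zid.zig"); // illustrative import path

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // Hypothetical schema: one DType per column stored in the file.
    const schema = [_]zid.DType{ .Str, .IntArray };

    var iter = try zid.DataIterator.init(allocator, "0.zid", null, &schema);
    defer iter.deinit();

    // next() returns one decoded record ([]Data, one entry per schema column)
    // through the 4096-byte BufferedReader, until the end of the file.
    while (try iter.next()) |row| {
        std.debug.print("{any}\n", .{row});
    }
}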
@@ -9,6 +9,8 @@ const UUID = dtype.UUID;
 
 const ZipponError = @import("stuffs/errors.zig").ZipponError;
 
+// TODO: Try std.json
+
 pub const EntityWriter = struct {
     pub fn writeEntityTable(
         writer: anytype,
@@ -47,7 +49,7 @@ pub const EntityWriter = struct {
             .UUID => |v| {
                 const uuid = try UUID.parse("00000000-0000-0000-0000-000000000000"); // Maybe pass that comptime to prevent parsing it everytime
                 if (!std.meta.eql(v, uuid.bytes)) {
-                    try writer.print("\"{s}\"", .{UUID.format_bytes(v)});
+                    try writer.print("\"{{|<{s}>|}}\"", .{v});
                 } else {
                     try writer.print("{{}}", .{});
                 }
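To make the placeholder concrete: with this change a non-empty link is serialized as a quoted token wrapping the raw 16 UUID bytes (elided here, since they are not printable), for example:

"best_friend": "{|<...16 raw UUID bytes...>|}"

The second pass described in the commit message then swaps that token for the linked entity's sub-JSON.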
@@ -456,7 +456,6 @@ pub const FileEngine = struct {
         data_types: []const DataType,
         sync_context: *ThreadSyncContext,
     ) void {
-        log.debug("{any}\n", .{@TypeOf(writer)});
         var data_buffer: [BUFFER_SIZE]u8 = undefined;
         var fa = std.heap.FixedBufferAllocator.init(&data_buffer);
         defer fa.reset();
@@ -513,8 +512,6 @@ pub const FileEngine = struct {
             const path = std.fmt.bufPrint(&path_buffer, "{s}/DATA/{s}/{d}.zid", .{ self.path_to_ZipponDB_dir, struct_name, file_index }) catch return FileEngineError.MemoryError;
             const data = try self.orderedNewData(allocator, struct_name, map);
 
-            std.debug.print("{any}", .{data});
-
             var data_writer = zid.DataWriter.init(path, null) catch return FileEngineError.ZipponDataError;
             defer data_writer.deinit();
 
@@ -797,7 +797,6 @@ pub const Parser = struct {
 
             .expect_new_value => {
                 const data_type = self.schema_engine.memberName2DataType(struct_name, member_name) catch return ZiQlParserError.StructNotFound;
-                std.debug.print("DATA TYPE: {any}\n", .{data_type});
                 map.put(member_name, try self.parseConditionValue(allocator, struct_name, data_type, &token)) catch return ZipponError.MemoryError;
                 if (data_type == .link or data_type == .link_array) {
                     token = self.toker.last_token;
@@ -1060,6 +1059,8 @@ pub const Parser = struct {
     }
 };
 
+// TODO: Check if what is send is expected
+
 test "ADD" {
     try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], best_friend=none, friends=none, bday=2000/01/01, a_time=12:04, last_order=2000/01/01-12:45)");
     try testParsing("ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 666 123 331 ], best_friend=none, friends=none, bday=2000/11/01, a_time=12:04:54, last_order=2000/01/01-12:45)");
@@ -1110,19 +1111,29 @@ test "Specific query" {
     try testParsing("GRAB User [1]");
 }
 
-// TODO: next step is to make this work
+// FIXME: This make the following query return only 1 thing, to check
 test "UPDATE relationship" {
     try testParsing("UPDATE User [1] {name='Bob'} TO (best_friend = {name='Boba'} )");
     try testParsing("GRAB User {}");
 }
 
-// Not yet working but dont trow an error
-test "GRAB Relationship" {
+// WORKING!!!!
+test "GRAB Relationship Filter" {
     try testParsing("GRAB User {best_friend IN {name = 'Bob'}}");
     try testParsing("GRAB User {best_friend IN {name = 'Boba'}}");
 }
 
+// Make work
+test "GRAB Relationship AdditionalData" {
+    try testParsing("GRAB User [name, friends] {}");
+    try testParsing("GRAB User [name, best_friend] {}");
+    try testParsing("GRAB User [2; name, best_friend] {best_friend != none}");
+}
+
+test "GRAB Relationship dot" {
+    try testParsing("GRAB User.best_friend {}");
+}
+
 test "DELETE" {
     try testParsing("DELETE User {}");
 }