Opti
Brought tests back. Removed a useless arena, for roughly a 40% perf improvement. Removed the shared Loc.
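In short: each engine used to heap-allocate its own std.heap.ArenaAllocator at init and hand out arena.allocator(); this commit drops that and stores the caller's allocator directly, moving short-lived work into per-call arenas. A minimal sketch of the shape of the change, with simplified names (the real fields and error handling are in the DBEngine and FileEngine hunks below):

const std = @import("std");

const Engine = struct {
    allocator: std.mem.Allocator,

    // Before: init created an ArenaAllocator on the heap and kept it for the
    // engine's whole lifetime. After: just keep the caller's allocator.
    pub fn init(allocator: std.mem.Allocator) Engine {
        return .{ .allocator = allocator };
    }

    // Nothing to free anymore; temporary allocations live in per-call arenas.
    pub fn deinit(_: *Engine) void {}
};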
This commit is contained in:
parent 98f0c69e61
commit 6fa8d6b7c6
138 benchmark.zig
@@ -67,7 +67,7 @@ test "benchmark" {
 // Maybe I can make it a test to use the testing alloc
 pub fn benchmark(allocator: std.mem.Allocator) !void {
-    const to_test = [_]usize{ 5, 50, 500, 5_000, 50_000, 500_000, 5_000_000, 10_000_000 };
+    const to_test = [_]usize{ 5_000, 100_000, 1_000_000, 10_000_000 };
     var line_buffer: [1024 * 1024]u8 = undefined;
     for (to_test) |users_count| {
         var db_engine = DBEngine.init(allocator, "benchmarkDB", "schema/benchmark");
@@ -75,50 +75,114 @@ pub fn benchmark(allocator: std.mem.Allocator) !void {

         // Empty db
         {
-            const null_term_query_str = try std.fmt.bufPrintZ(&line_buffer, "DELETE User {{}}", .{});
+            var null_term_query_str = try std.fmt.bufPrintZ(&line_buffer, "DELETE User {{}}", .{});
             db_engine.runQuery(null_term_query_str);
+
+            null_term_query_str = try std.fmt.bufPrintZ(&line_buffer, "DELETE Category {{}}", .{});
+            db_engine.runQuery(null_term_query_str);
+
+            null_term_query_str = try std.fmt.bufPrintZ(&line_buffer, "DELETE Item {{}}", .{});
+            db_engine.runQuery(null_term_query_str);
+
+            null_term_query_str = try std.fmt.bufPrintZ(&line_buffer, "DELETE Order {{}}", .{});
+            db_engine.runQuery(null_term_query_str);
         }

         // Populate with random dummy value
         {
             std.debug.print("\n=====================================\n\n", .{});
-            std.debug.print("Populating with {d} users.\n", .{users_count});

             var prng = std.Random.DefaultPrng.init(0);
             const rng = prng.random();
-            const populate_start_time = std.time.nanoTimestamp();
-
-            var array = std.ArrayList(u8).init(allocator);
-            defer array.deinit();
-            var writer = array.writer();
-
-            try writer.print(
-                "ADD User (name = '{s}', email='{s}', orders=none)",
-                .{
-                    names[rng.uintAtMost(usize, names.len - 1)],
-                    emails[rng.uintAtMost(usize, emails.len - 1)],
-                },
-            );
-
-            for (0..users_count - 1) |_| {
-                try writer.print(
-                    "('{s}', '{s}', none)",
-                    .{
-                        names[rng.uintAtMost(usize, names.len - 1)],
-                        emails[rng.uintAtMost(usize, emails.len - 1)],
-                    },
-                );
-            }
-
-            const null_term_query_str = try std.fmt.allocPrintZ(allocator, "{s}", .{array.items});
-            defer allocator.free(null_term_query_str);
-
-            db_engine.runQuery(null_term_query_str);
-
-            const populate_end_time = std.time.nanoTimestamp();
-            const populate_duration = @as(f64, @floatFromInt(populate_end_time - populate_start_time)) / 1e9;
-
-            std.debug.print("Populate duration: {d:.6} seconds\n\n", .{populate_duration});
+
+            // Category
+            {
+                const null_term_query_str = try std.fmt.bufPrintZ(&line_buffer, "ADD Category (name = 'Book') ('Food') ('Toy') ('Other')", .{});
+                db_engine.runQuery(null_term_query_str);
+            }
+
+            // Item
+            {
+                // TODO: Cache some filter. Here I end up parse Category everytime
+                const null_term_query_str = try std.fmt.bufPrintZ(
+                    &line_buffer, // I dont like 'category = {name='Book'}'. Maybe att a IS keyword ?
+                    \\ADD Item
+                    \\(name='Book1', price=12.45, category = {{name='Book'}})
+                    \\(name='Book2', price=10.45, category = {{name='Book'}})
+                    \\(name='Book3', price=12.45, category = {{name='Book'}})
+                    \\(name='Book4', price=2.00, category = {{name='Book'}})
+                    \\(name='Book5', price=59.99, category = {{name='Book'}})
+                    \\(name='Book6', price=10.45, category = {{name='Book'}})
+                    \\(name='Book7', price=10.45, category = {{name='Book'}})
+                    \\
+                    \\(name='Food1', price=1.45, category = {{name='Food'}})
+                    \\(name='Food2', price=1.45, category = {{name='Food'}})
+                    \\(name='Food3', price=1.45, category = {{name='Food'}})
+                    \\(name='Food4', price=1.45, category = {{name='Food'}})
+                    \\(name='Food6', price=1.45, category = {{name='Food'}})
+                    \\(name='Food7', price=1.45, category = {{name='Food'}})
+                    \\(name='Food8', price=1.45, category = {{name='Food'}})
+                    \\
+                    \\(name='Toy1', price=10.45, category = {{name='Toy'}})
+                    \\(name='Toy2', price=4.45, category = {{name='Toy'}})
+                    \\(name='Toy3', price=6.45, category = {{name='Toy'}})
+                    \\(name='Toy4', price=1.45, category = {{name='Toy'}})
+                    \\
+                    \\(name='Other', price=0.99, category = {{name='Other'}})
+                ,
+                    .{},
+                );
+                db_engine.runQuery(null_term_query_str);
+            }
+
+            // User
+            {
+                std.debug.print("Populating with {d} users.\n", .{users_count});
+                const populate_start_time = std.time.nanoTimestamp();
+
+                var array = std.ArrayList(u8).init(allocator);
+                defer array.deinit();
+                var writer = array.writer();
+
+                try writer.print(
+                    "ADD User (name = '{s}', email='{s}', orders=none)",
+                    .{
+                        names[rng.uintAtMost(usize, names.len - 1)],
+                        emails[rng.uintAtMost(usize, emails.len - 1)],
+                    },
+                );
+
+                for (0..users_count - 1) |_| {
+                    try writer.print(
+                        "('{s}', '{s}', none)",
+                        .{
+                            names[rng.uintAtMost(usize, names.len - 1)],
+                            emails[rng.uintAtMost(usize, emails.len - 1)],
+                        },
+                    );
+                }
+
+                const null_term_query_str = try std.fmt.allocPrintZ(allocator, "{s}", .{array.items});
+                defer allocator.free(null_term_query_str);
+
+                db_engine.runQuery(null_term_query_str);
+
+                const populate_end_time = std.time.nanoTimestamp();
+                const populate_duration = @as(f64, @floatFromInt(populate_end_time - populate_start_time)) / 1e9;
+
+                std.debug.print("Populate duration: {d:.6} seconds\n\n", .{populate_duration});
+            }
+
+            // Order
+            {
+                // Linked array not yet implemented and array manipulation not tested
+                const null_term_query_str = try std.fmt.bufPrintZ(
+                    &line_buffer, // I dont like 'category = {name='Book'}'. Maybe att a IS keyword ?
+                    \\ADD Order (from={{}}, at=NOW, items={{name IN ['Food1', 'Food2']}}, quantity=[5 22])
+                ,
+                    .{},
+                );
+                db_engine.runQuery(null_term_query_str);
+            }

         var buffer = std.ArrayList(u8).init(allocator);
         defer buffer.deinit();
@@ -127,15 +191,15 @@ pub fn benchmark(allocator: std.mem.Allocator) !void {
             std.debug.print("--------------------------------------\n\n", .{});
         }

-        //{
-        //    for (db_engine.schema_engine.struct_array) |sstruct| {
-        //        const mb: f64 = @as(f64, @floatFromInt(sstruct.uuid_file_index.arena.queryCapacity())) / 1024.0 / 1024.0;
-        //        std.debug.print("Sstruct: {s}\n", .{sstruct.name});
-        //        std.debug.print("Memory: {d:.2}Mb\n", .{mb});
-        //        std.debug.print("Count: {d}\n\n", .{sstruct.uuid_file_index.map.count()});
-        //        std.debug.print("--------------------------------------\n\n", .{});
-        //    }
-        //}
+        if (false) {
+            for (db_engine.schema_engine.struct_array) |sstruct| {
+                const mb: f64 = @as(f64, @floatFromInt(sstruct.uuid_file_index.arena.queryCapacity())) / 1024.0 / 1024.0;
+                std.debug.print("Sstruct: {s}\n", .{sstruct.name});
+                std.debug.print("Memory: {d:.2}Mb\n", .{mb});
+                std.debug.print("Count: {d}\n\n", .{sstruct.uuid_file_index.map.count()});
+                std.debug.print("--------------------------------------\n\n", .{});
+            }
+        }

         // Run query
         {
@@ -145,6 +209,10 @@ pub fn benchmark(allocator: std.mem.Allocator) !void {
             "GRAB User [1] {}",
             "GRAB User [name] {}",
             "GRAB User {name = 'Charlie'}",
+            "GRAB Category {}",
+            "GRAB Item {}",
+            "GRAB Order {}",
+            "GRAB Order [from, items, quantity, at] {at > 2024}",
             "DELETE User {}",
         };
13 build.zig
@@ -2,7 +2,7 @@ const std = @import("std");

 pub fn build(b: *std.Build) void {
     const target = b.standardTargetOptions(.{});
-    const optimize = b.standardOptimizeOption(.{ .preferred_optimize_mode = .ReleaseSmall });
+    const optimize = b.standardOptimizeOption(.{});

     // Run
     // -----------------------------------------------
@@ -40,7 +40,7 @@ pub fn build(b: *std.Build) void {
     const run_tests1 = b.addRunArtifact(tests1);

     const tests2 = b.addTest(.{
-        .root_source_file = b.path("src/tokenizers/cli.zig"),
+        .root_source_file = b.path("src/cli/tokenizer.zig"),
         .target = target,
         .optimize = optimize,
         .name = "CLI tokenizer",
@@ -49,7 +49,7 @@ pub fn build(b: *std.Build) void {
     const run_tests2 = b.addRunArtifact(tests2);

     const tests3 = b.addTest(.{
-        .root_source_file = b.path("src/tokenizers/ziql.zig"),
+        .root_source_file = b.path("src/ziql/tokenizer.zig"),
         .target = target,
         .optimize = optimize,
         .name = "ZiQL tokenizer",
@@ -58,7 +58,7 @@ pub fn build(b: *std.Build) void {
     const run_tests3 = b.addRunArtifact(tests3);

     const tests4 = b.addTest(.{
-        .root_source_file = b.path("src/tokenizers/schema.zig"),
+        .root_source_file = b.path("src/schema/tokenizer.zig"),
         .target = target,
         .optimize = optimize,
         .name = "Schema tokenizer",
@@ -103,7 +103,7 @@ pub fn build(b: *std.Build) void {
         test_step.dependOn(&run_tests6.step);
     }

-    // Test
+    // Test zid
     // -----------------------------------------------
     {
         const tests1 = b.addTest(.{
@@ -142,7 +142,6 @@ pub fn build(b: *std.Build) void {
     }

     // Release
-    // TODO: Make a small, fast and safe release
     // -----------------------------------------------
     {
         const release_step = b.step("release", "Create release binaries for multiple platforms");
@@ -159,7 +158,7 @@ pub fn build(b: *std.Build) void {
         };

         for (targets) |tar| {
-            const exe_name = b.fmt("ZipponDB-{s}-{s}", .{
+            const exe_name = b.fmt("zippondb-{s}-{s}", .{
                 @tagName(tar.query.cpu_arch.?),
                 @tagName(tar.query.os_tag.?),
             });
@@ -20,7 +20,6 @@ var value_buffer: [1024]u8 = undefined;

 usingnamespace @import("parser.zig");

-arena: *std.heap.ArenaAllocator = undefined,
 allocator: Allocator = undefined,
 state: DBEngineState = .Init,
 file_engine: FileEngine = undefined,
@@ -28,16 +27,7 @@ schema_engine: SchemaEngine = undefined,
 thread_engine: ThreadEngine = undefined,

 pub fn init(allocator: Allocator, potential_main_path: ?[]const u8, potential_schema_path: ?[]const u8) Self {
-    var self = Self{};
-
-    const arena = allocator.create(std.heap.ArenaAllocator) catch {
-        log.err("Error when init Engine DB allocator", .{});
-        self.state = .MissingAllocator;
-        return self;
-    };
-    arena.* = std.heap.ArenaAllocator.init(allocator);
-    self.arena = arena;
-    self.allocator = arena.allocator();
+    var self = Self{ .allocator = allocator };

     self.thread_engine = ThreadEngine.init(self.allocator) catch {
         log.err("Error initializing thread engine", .{});
@@ -159,7 +149,4 @@ pub fn deinit(self: *Self) void {
     self.thread_engine.deinit();
     self.schema_engine.deinit();
     self.file_engine.deinit();
-    const parent_allocator = self.arena.child_allocator;
-    self.arena.deinit();
-    parent_allocator.destroy(self.arena);
 }
@@ -135,7 +135,7 @@ pub fn parse(self: *Self, null_term_line_str: [:0]const u8) !bool {
         .expect_path_to_db => switch (token.tag) {
             .identifier => {
                 self.deinit();
-                self.* = Self.init(self.arena.child_allocator, toker.getTokenSlice(token), null);
+                self.* = Self.init(self.allocator, toker.getTokenSlice(token), null);
                 state = .end;
             },
             else => {
@@ -194,7 +194,7 @@ pub fn parse(self: *Self, null_term_line_str: [:0]const u8) !bool {
             .identifier => {
                 const main_path = try allocator.dupe(u8, self.file_engine.path_to_ZipponDB_dir);
                 self.deinit();
-                self.* = Self.init(self.arena.child_allocator, main_path, toker.getTokenSlice(token));
+                self.* = Self.init(self.allocator, main_path, toker.getTokenSlice(token));
                 try self.file_engine.writeSchemaFile(self.schema_engine.null_terminated);
                 state = .end;
             },
@@ -1,6 +1,10 @@
 // From https://github.com/ziglang/zig/blob/master/lib/std/zig/tokenizer.zig
 const std = @import("std");
-const Loc = @import("../dataStructure/loc.zig");
+
+const Loc = struct {
+    start: usize,
+    end: usize,
+};

 pub const Token = struct {
     tag: Tag,
@@ -1,4 +0,0 @@
-pub const Loc = @This();
-
-start: usize,
-end: usize,
@@ -20,25 +20,18 @@ pub usingnamespace @import("read.zig");
 pub usingnamespace @import("write.zig");
 pub usingnamespace @import("dump.zig");

-arena: *std.heap.ArenaAllocator,
 allocator: std.mem.Allocator,
 path_to_ZipponDB_dir: []const u8,
 thread_pool: *Pool, // same pool as the ThreadEngine
 schema_engine: SchemaEngine = undefined, // This is init after the FileEngine and I attach after. Do I need to init after tho ?

 pub fn init(allocator: std.mem.Allocator, path: []const u8, thread_pool: *Pool) ZipponError!Self {
-    const arena = allocator.create(std.heap.ArenaAllocator) catch return ZipponError.MemoryError;
-    arena.* = std.heap.ArenaAllocator.init(allocator);
     return Self{
-        .arena = arena,
-        .allocator = arena.allocator(),
+        .allocator = allocator,
         .path_to_ZipponDB_dir = std.fmt.bufPrint(&path_to_ZipponDB_dir_buffer, "{s}", .{path}) catch return ZipponError.MemoryError,
         .thread_pool = thread_pool,
     };
 }

-pub fn deinit(self: *Self) void {
-    const parent_allocator = self.arena.child_allocator;
-    self.arena.deinit();
-    parent_allocator.destroy(self.arena);
-}
+// The allocator is only use to make arena when parsing and everything is deinit after parsing
+pub fn deinit(_: *Self) void {}
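The no-op deinit above works because FileEngine no longer owns long-lived memory: each parsing function creates an arena for the duration of the call and frees everything at once. The recurring shape, as it appears in the hunks below:

var arena = std.heap.ArenaAllocator.init(self.allocator);
defer arena.deinit();
var safe_allocator = std.heap.ThreadSafeAllocator{ .child_allocator = arena.allocator() };
const allocator = safe_allocator.allocator();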
@@ -42,7 +42,8 @@ pub fn populateFileIndexUUIDMap(
 ) ZipponError!void {
     var arena = std.heap.ArenaAllocator.init(self.allocator);
     defer arena.deinit();
-    const allocator = arena.allocator();
+    var safe_allocator = std.heap.ThreadSafeAllocator{ .child_allocator = arena.allocator() };
+    const allocator = safe_allocator.allocator();

     const dir = try self.printOpenDir("{s}/DATA/{s}", .{ self.path_to_ZipponDB_dir, sstruct.name }, .{});
     const to_parse = try self.allFileIndex(allocator, sstruct.name);
@@ -106,7 +107,8 @@ pub fn populateVoidUUIDMap(
 ) ZipponError!void {
     var arena = std.heap.ArenaAllocator.init(self.allocator);
     defer arena.deinit();
-    const allocator = arena.allocator();
+    var safe_allocator = std.heap.ThreadSafeAllocator{ .child_allocator = arena.allocator() };
+    const allocator = safe_allocator.allocator();

     const sstruct = try self.schema_engine.structName2SchemaStruct(struct_name);

@@ -195,7 +197,7 @@ pub fn parseEntities(
 ) ZipponError![]const u8 {
     var arena = std.heap.ArenaAllocator.init(self.allocator);
     defer arena.deinit();
-    var safe_allocator = std.heap.ThreadSafeAllocator{ .child_allocator = self.allocator };
+    var safe_allocator = std.heap.ThreadSafeAllocator{ .child_allocator = arena.allocator() };
     const allocator = safe_allocator.allocator();

     var buff = std.ArrayList(u8).init(entry_allocator);
@@ -71,12 +71,13 @@ pub fn updateEntities(
     struct_name: []const u8,
     filter: ?Filter,
     map: std.StringHashMap(ValueOrArray),
-    writer: anytype,
+    writer: anytype, // TODO: Stop using writer and use an allocator + toOwnedSlice like parseEntities
     additional_data: *AdditionalData,
 ) ZipponError!void {
     var arena = std.heap.ArenaAllocator.init(self.allocator);
     defer arena.deinit();
-    const allocator = arena.allocator();
+    var safe_allocator = std.heap.ThreadSafeAllocator{ .child_allocator = arena.allocator() };
+    const allocator = safe_allocator.allocator();

     const sstruct = try self.schema_engine.structName2SchemaStruct(struct_name);

@@ -229,7 +230,7 @@ pub fn deleteEntities(
 ) ZipponError!void {
     var arena = std.heap.ArenaAllocator.init(self.allocator);
     defer arena.deinit();
-    var safe_allocator = std.heap.ThreadSafeAllocator{ .child_allocator = self.allocator };
+    var safe_allocator = std.heap.ThreadSafeAllocator{ .child_allocator = arena.allocator() };
     const allocator = safe_allocator.allocator();

     const sstruct = try self.schema_engine.structName2SchemaStruct(struct_name);
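Note the consistency fix across these five functions: parseEntities and deleteEntities previously wrapped self.allocator in the ThreadSafeAllocator, so threaded allocations bypassed the per-call arena, while the other functions used the bare arena. After this commit all of them wrap arena.allocator(). A minimal self-contained sketch of the pattern, assuming concurrent workers allocate through it (std.heap.ArenaAllocator is not itself thread-safe, hence the ThreadSafeAllocator shim):

const std = @import("std");

test "thread-safe per-call arena" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit(); // one free for everything the workers allocated

    var safe = std.heap.ThreadSafeAllocator{ .child_allocator = arena.allocator() };
    const allocator = safe.allocator();

    // Workers on a thread pool can all allocate through `allocator`;
    // the ThreadSafeAllocator serializes access to the arena underneath.
    const buf = try allocator.alloc(u8, 64);
    _ = buf;
}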
@@ -34,11 +34,11 @@ pub fn init(parent_allocator: Allocator, path: []const u8, file_engine: *FileEng
     const null_terminated = std.fmt.bufPrintZ(&schema_buffer, "{s}", .{buffer[0..len]}) catch return ZipponError.MemoryError;

     var toker = Tokenizer.init(null_terminated);
-    var parser = Parser.init(&toker, allocator);
+    var parser = Parser.init(&toker);

     var struct_array = std.ArrayList(SchemaStruct).init(allocator);
     errdefer struct_array.deinit();
-    parser.parse(&struct_array) catch return ZipponError.SchemaNotConform;
+    parser.parse(allocator, &struct_array) catch return ZipponError.SchemaNotConform;

     log.debug("SchemaEngine init with {d} SchemaStruct.", .{struct_array.items.len});
@@ -6,7 +6,6 @@ const DataType = @import("dtype").DataType;
 const UUID = @import("dtype").UUID;
 const Toker = @import("tokenizer.zig").Tokenizer;
 const Token = @import("tokenizer.zig").Token;
-const Loc = @import("../dataStructure/loc.zig");
 const send = @import("../utils.zig").send;
 const printError = @import("../utils.zig").printError;
@@ -29,27 +28,23 @@ const State = enum {
 pub const Parser = @This();

 toker: *Toker,
-allocator: Allocator,

-pub fn init(toker: *Toker, allocator: Allocator) Parser {
-    return .{
-        .allocator = allocator,
-        .toker = toker,
-    };
+pub fn init(toker: *Toker) Parser {
+    return .{ .toker = toker };
 }

-pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
+pub fn parse(self: *Parser, allocator: Allocator, struct_array: *std.ArrayList(SchemaStruct)) !void {
     var state: State = .expect_struct_name_OR_end;
     var keep_next = false;

     var member_token: Token = undefined;

     var name: []const u8 = undefined;
-    var member_list = std.ArrayList([]const u8).init(self.allocator);
+    var member_list = std.ArrayList([]const u8).init(allocator);
     defer member_list.deinit();
-    var type_list = std.ArrayList(DataType).init(self.allocator);
+    var type_list = std.ArrayList(DataType).init(allocator);
     defer type_list.deinit();
-    var links = std.StringHashMap([]const u8).init(self.allocator);
+    var links = std.StringHashMap([]const u8).init(allocator);
     defer links.deinit();

     var token = self.toker.next();
@@ -105,18 +100,17 @@ pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {

             .add_struct => {
                 struct_array.append(try SchemaStruct.init(
-                    struct_array.allocator,
+                    allocator,
                     name,
                     member_list.toOwnedSlice() catch return ZipponError.MemoryError,
                     type_list.toOwnedSlice() catch return ZipponError.MemoryError,
                     try links.clone(),
                 )) catch return ZipponError.MemoryError;

-                links.deinit();
-                links = std.StringHashMap([]const u8).init(self.allocator);
+                links.clearRetainingCapacity();

-                member_list = std.ArrayList([]const u8).init(self.allocator);
-                type_list = std.ArrayList(DataType).init(self.allocator);
+                member_list = std.ArrayList([]const u8).init(allocator);
+                type_list = std.ArrayList(DataType).init(allocator);

                 state = .expect_struct_name_OR_end;
                 keep_next = true;
@@ -1,6 +1,10 @@
 // From https://github.com/ziglang/zig/blob/master/lib/std/zig/tokenizer.zig
 const std = @import("std");
-const Loc = @import("../dataStructure/loc.zig");
+
+const Loc = struct {
+    start: usize,
+    end: usize,
+};

 pub const Token = struct {
     tag: Tag,
@@ -39,3 +39,12 @@ pub fn deinit(self: *ThreadEngine) void {
     parent_allocator.destroy(self.thread_arena);
     parent_allocator.destroy(self.thread_pool);
 }
+
+// Not tested, for later when config is runtime
+pub fn setCpuCore(self: *ThreadEngine, cpu_core: usize) void {
+    self.thread_pool.deinit();
+    self.thread_pool.init(std.Thread.Pool.Options{
+        .allocator = self.thread_arena.allocator(),
+        .n_jobs = cpu_core,
+    });
+}
@@ -1,6 +1,10 @@
 // From https://github.com/ziglang/zig/blob/master/lib/std/zig/tokenizer.zig
 const std = @import("std");
-const Loc = @import("../dataStructure/loc.zig");
+
+const Loc = struct {
+    start: usize,
+    end: usize,
+};

 pub const Token = struct {
     tag: Tag,
83 test.zig
@@ -11,7 +11,7 @@ const DB = struct {
 };

 test "Synthax error" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try expectParsingError(db, "ADD User (name = 'Bob', email='bob@email.com', age=-55, scores=[ 1 ], best_friend=7db1f06d-a5a7-4917-8cc6-4d490191c9c1, bday=2000/01/01, a_time=12:04:54.8741, last_order=2000/01/01-12:45)", ZipponError.SynthaxError);
     try expectParsingError(db, "GRAB {}", ZipponError.StructNotFound);
     try expectParsingError(db, "GRAB User {qwe = 'qwe'}", ZipponError.MemberNotFound);
@@ -23,12 +23,12 @@ test "Synthax error" {
 }

 test "Clear" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "DELETE User {}");
 }

 test "ADD" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 1 ], best_friend=none, friends=none, bday=2000/01/01, a_time=12:04, last_order=2000/01/01-12:45)");
     try testParsing(db, "ADD User (name = 'Bob', email='bob@email.com', age=55, scores=[ 666 123 331 ], best_friend=none, friends=none, bday=2000/11/01, a_time=12:04:54, last_order=2000/01/01-12:45)");
     try testParsing(db, "ADD User (name = 'Bob', email='bob@email.com', age=-55, scores=[ 33 ], best_friend=none, friends=none, bday=2000/01/04, a_time=12:04:54.8741, last_order=2000/01/01-12:45)");
@@ -42,7 +42,7 @@ test "ADD" {
 }

 test "ADD batch" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "ADD User (name = 'ewq', email='ewq@email.com', age=22, scores=[ ], best_friend=none, friends=none, bday=2000/01/01, a_time=12:04, last_order=2000/01/01-12:45) (name = 'Roger', email='roger@email.com', age=10, scores=[ 1 11 111 123 562345 123451234 34623465234 12341234 ], best_friend=none, friends=none, bday=2000/01/01, a_time=12:04, last_order=2000/01/01-12:45)");
     try testParsing(db, "ADD User (name = 'qwe', email='qwe@email.com', age=57, scores=[ ], best_friend=none, friends=none, bday=2000/01/01, a_time=12:04, last_order=2000/01/01-12:45) ('Rodrigo', 'bob@email.com', 55, [ 1 ], {name = 'qwe'}, none, 2000/01/01, 12:04, 2000/01/01-12:45)");

@@ -51,26 +51,26 @@ test "ADD batch" {
 }

 test "GRAB filter with string" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User {name = 'Bob'}");
     try testParsing(db, "GRAB User {name != 'Brittany Rogers'}");
 }

 test "GRAB with additional data" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User [1] {age < 18}");
     try testParsing(db, "GRAB User [id, name] {age < 18}");
     try testParsing(db, "GRAB User [100; name, age] {age < 18}");
 }

 test "UPDATE" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "UPDATE User [1] {name = 'Bob'} TO (email='new@gmail.com')");
     try testParsing(db, "GRAB User {}");
 }

 test "GRAB filter with int" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User {age = 18}");
     try testParsing(db, "GRAB User {age > -18}");
     try testParsing(db, "GRAB User {age < 18}");
@@ -80,45 +80,45 @@ test "GRAB filter with int" {
 }

 test "GRAB filter with date" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User {bday > 2000/01/01}");
     try testParsing(db, "GRAB User {a_time < 08:00}");
     try testParsing(db, "GRAB User {last_order > 2000/01/01-12:45}");
 }

 test "Specific query" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User");
     try testParsing(db, "GRAB User {}");
     try testParsing(db, "GRAB User [1]");
 }

 test "UPDATE relationship" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "UPDATE User [1] {name='Bob'} TO (best_friend = {name='Boba'} )");
     try testParsing(db, "GRAB User {}");
 }

 test "GRAB Relationship Filter" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User {best_friend IN {name = 'Bob'}}");
     try testParsing(db, "GRAB User {best_friend IN {name = 'Boba'}}");
 }

 test "GRAB Relationship AdditionalData" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User [name, friends] {}");
     try testParsing(db, "GRAB User [name, best_friend] {}");
 }

 test "GRAB Relationship Sub AdditionalData" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User [name, friends [name]] {}");
     try testParsing(db, "GRAB User [name, best_friend [name, friends [age]]] {}");
 }

 test "GRAB Relationship AdditionalData Filtered" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "GRAB User [2; name, best_friend] {name = 'Bob'}");
     try testParsing(db, "GRAB User [2; name, best_friend] {best_friend IN {}}");
     try testParsing(db, "GRAB User [2; name, best_friend] {best_friend !IN {}}");
|
||||
|
||||
test "GRAB Relationship dot" {
|
||||
// DO I add this ? I'm not sure about this feature
|
||||
const db = DB{ .path = "test1", .schema = "schema/example" };
|
||||
const db = DB{ .path = "test1", .schema = "schema/test" };
|
||||
// try testParsing(db, "GRAB User.best_friend {}");
|
||||
// try testParsing(db, "GRAB User.best_friend.best_friend {}");
|
||||
// try testParsing(db, "GRAB User.best_friend.posts {}");
|
||||
@@ -135,12 +135,12 @@ test "GRAB Relationship dot" {
 }

 test "DELETE" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
+    const db = DB{ .path = "test1", .schema = "schema/test" };
     try testParsing(db, "DELETE User {}");
 }

 test "3 struct base" {
-    const db = DB{ .path = "test2", .schema = "schema/3struct" };
+    const db = DB{ .path = "test2", .schema = "schema/test-3struct" };
     try testParsing(db, "DELETE User {}");
     try testParsing(db, "DELETE Post {}");
     try testParsing(db, "ADD User (name = 'Bob', email='bob@email.com', age=55, friends=none, posts=none, comments=none, bday=2000/01/01)");
@@ -150,7 +150,7 @@ test "3 struct base" {
 }

 test "3 struct both side" {
-    const db = DB{ .path = "test2", .schema = "schema/3struct" };
+    const db = DB{ .path = "test2", .schema = "schema/test-3struct" };
     try testParsing(db, "DELETE User {}");
     try testParsing(db, "DELETE Post {}");
     try testParsing(db, "ADD User (name = 'Bob', email='bob@email.com', age=55, friends=none, posts=none, comments=none, bday=2000/01/01)");
@@ -165,60 +165,27 @@ test "3 struct both side" {
 }

 fn testParsing(db: DB, source: [:0]const u8) !void {
-    var db_engine = DBEngine.init(db.path, db.schema);
+    const allocator = std.testing.allocator;
+    var db_engine = DBEngine.init(allocator, db.path, db.schema);
     defer db_engine.deinit();

     var toker = Tokenizer.init(source);
     var parser = Parser.init(
         &toker,
         &db_engine.file_engine,
         &db_engine.schema_engine,
     );

-    try parser.parse();
+    try parser.parse(allocator, source);
 }

 fn expectParsingError(db: DB, source: [:0]const u8, err: ZipponError) !void {
-    var db_engine = DBEngine.init(db.path, db.schema);
+    const allocator = std.testing.allocator;
+    var db_engine = DBEngine.init(allocator, db.path, db.schema);
     defer db_engine.deinit();

     var toker = Tokenizer.init(source);
     var parser = Parser.init(
         &toker,
         &db_engine.file_engine,
         &db_engine.schema_engine,
     );

-    try std.testing.expectError(err, parser.parse());
-}
-
-test "Parse filter" {
-    const db = DB{ .path = "test1", .schema = "schema/example" };
-    try testParseFilter(db, "name = 'Adrien'}");
-    try testParseFilter(db, "name = 'Adrien' AND age > 11}");
-    try testParseFilter(db, "name = 'Adrien' AND (age < 11 OR age > 40)}");
-    try testParseFilter(db, "(name = 'Adrien') AND (age < 11 OR age > 40)}");
-    try testParseFilter(db, "(name = 'Adrien' OR name = 'Bob') AND (age < 11 OR age > 40)}");
-    try testParseFilter(db, "(name = 'Adrien' OR name = 'Bob') AND (age < 11 OR age > 40 AND (age != 20))}");
-}
-
-fn testParseFilter(db: DB, source: [:0]const u8) !void {
-    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
-    defer arena.deinit();
-    const allocator = arena.allocator();
-
-    var db_engine = DBEngine.init(db.path, db.schema);
-    defer db_engine.deinit();
-
-    var toker = Tokenizer.init(source);
-    var parser = Parser.init(
-        &toker,
-        &db_engine.file_engine,
-        &db_engine.schema_engine,
-    );
-
-    var filter = try parser.parseFilter(allocator, "User", false);
-    defer filter.deinit();
-    std.debug.print("{s}\n", .{source});
-    filter.debugPrint();
+    try std.testing.expectError(err, parser.parse(allocator, source));
 }
@@ -6,13 +6,13 @@ const Allocator = std.mem.Allocator;
 const BORDER = "=" ** 80;

 const log = std.log.scoped(.cli);
-pub const std_options = .{
+pub const std_options = std.Options{
     .logFn = myLog,
 };

 pub fn myLog(
     comptime message_level: std.log.Level,
-    comptime scope: @Type(.EnumLiteral),
+    comptime scope: @Type(.enum_literal),
     comptime format: []const u8,
     args: anytype,
 ) void {