Random ADD for benchmark
parent 900b9e5305
commit e3264d8553
@@ -28,26 +28,21 @@ pub fn myLog(
 }

 pub fn main() !void {
+const to_test = [_]usize{ 1, 10, 100, 1_000, 10_000, 100_000, 1_000_000 };
 var line_buffer: [1024 * 1024]u8 = undefined;
 // Initialize your DBEngine here
 var db_engine = DBEngine.init("benchmark", "schema/example");
 defer db_engine.deinit();

-// Reset the database
-{
-const null_term_query_str = try std.fmt.bufPrintZ(&line_buffer, "DELETE User {{}}", .{});
-var toker = ziqlTokenizer.init(null_term_query_str);
-var parser = ziqlParser.init(&toker, &db_engine.file_engine, &db_engine.schema_engine);
-try parser.parse();
-}
-
+for (to_test) |users_count| {
 // Populate with random dummy value
+{
 std.debug.print("\n=====================================\n\n", .{});
-std.debug.print("Populating with {d} users.\n", .{10_000});
+std.debug.print("Populating with {d} users.\n", .{users_count});

 var gpa = std.rand.DefaultPrng.init(0);
 const populate_start_time = std.time.nanoTimestamp();
-for (10_000) |_| {
+for (users_count) |_| {
 const name = names[gpa.random().uintAtMost(usize, names.len - 1)];
 const email = emails[gpa.random().uintAtMost(usize, emails.len - 1)];
 const age = gpa.random().uintAtMost(usize, 100);
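Note on the populate loop above: the PRNG is seeded with a constant (`std.rand.DefaultPrng.init(0)`), so every run draws the same pseudo-random users for a given `users_count`, which keeps runs comparable, and `uintAtMost` is inclusive of its bound. A minimal, hedged sketch of the picking pattern with placeholder data (the real benchmark file defines its own `names` and `emails` arrays):

    const std = @import("std");

    // Placeholder data; the benchmark file has its own `names`/`emails` arrays.
    const names = [_][]const u8{ "Alice", "Bob", "Charlie" };

    fn pickDemo() void {
        var gpa = std.rand.DefaultPrng.init(0); // fixed seed: reproducible runs
        const name = names[gpa.random().uintAtMost(usize, names.len - 1)]; // index in [0, len - 1]
        const age = gpa.random().uintAtMost(usize, 100); // value in [0, 100], bound inclusive
        std.debug.print("{s}, {d}\n", .{ name, age });
    }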
@@ -71,8 +66,10 @@ pub fn main() !void {
 try db_engine.file_engine.writeDbMetrics(&buffer);
 std.debug.print("{s}\n", .{buffer.items});
 std.debug.print("--------------------------------------\n\n", .{});
+}

 // Define your benchmark queries
+{
 const queries = [_][]const u8{
 "GRAB User {}",
 "GRAB User [1] {}",
@@ -100,3 +97,5 @@ pub fn main() !void {

 std.debug.print("=====================================\n\n", .{});
 }
+}
+}
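Read together, the three hunks above turn the benchmark from one fixed run over 10_000 users into a loop over several dataset sizes: the one-off "Reset the database" block is removed, `to_test` is introduced, the populate and query sections are each wrapped in their own block inside `for (to_test) |users_count|`, and the two `}` added at the end close that block and the loop. A rough sketch of the resulting shape of `main`, reconstructed from the hunks; the indentation and everything shown as a comment are assumptions, since the middle of the function is not part of the capture:

    const std = @import("std");
    // DBEngine and the query machinery come from the ZipponDB sources, as in the original file.

    pub fn main() !void {
        const to_test = [_]usize{ 1, 10, 100, 1_000, 10_000, 100_000, 1_000_000 };
        var db_engine = DBEngine.init("benchmark", "schema/example");
        defer db_engine.deinit();

        for (to_test) |users_count| {
            std.debug.print("Populating with {d} users.\n", .{users_count});
            {
                // populate `users_count` random users and print timing/metrics
            }
            {
                // run the benchmark queries ("GRAB User {}", "GRAB User [1] {}", ...) and time them
            }
        }
    }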
@@ -8,6 +8,7 @@ pub const TEST_DATA_DIR = "data";
 // Debug
 pub const PRINT_STATE = false;
 pub const DONT_SEND = true;
+pub const DONT_SEND_ERROR = false;
 pub const RESET_LOG_AT_RESTART = false; // If true, will reset the log file at the start of the db, otherwise just keep adding to it

 // Help message
@@ -872,7 +872,7 @@ pub const FileEngine = struct {
 _ = sync_context.completeThread();
 }

-/// Will delete all entity based on the filter. Will also write a JSON format list of all UUID deleted into the buffer
+/// Delete all entity based on the filter. Will also write a JSON format list of all UUID deleted into the buffer
 pub fn deleteEntities(
 self: *FileEngine,
 struct_name: []const u8,
@@ -927,6 +927,7 @@ pub const FileEngine = struct {
 writer.writeByte(']') catch return FileEngineError.WriteError;
 }

+/// TODO: Delete the file if it is not 0 and is empty at the end
 fn deleteEntitiesOneFile(
 sstruct: SchemaStruct,
 filter: ?Filter,
@@ -940,7 +941,7 @@ pub const FileEngine = struct {
 defer fa.reset();
 const allocator = fa.allocator();

-const path = std.fmt.bufPrint(&path_buffer, "{d}.zid", .{file_index}) catch |err| {
+const path = std.fmt.allocPrint(allocator, "{d}.zid", .{file_index}) catch |err| {
 sync_context.logError("Error creating file path", err);
 return;
 };
@@ -951,8 +952,7 @@ pub const FileEngine = struct {
 };
 defer iter.deinit();

-var new_path_buffer: [128]u8 = undefined;
-const new_path = std.fmt.bufPrint(&new_path_buffer, "{d}.zid.new", .{file_index}) catch |err| {
+const new_path = std.fmt.allocPrint(allocator, "{d}.zid.new", .{file_index}) catch |err| {
 sync_context.logError("Error creating file path", err);
 return;
 };
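The two hunks above drop the fixed stack buffers (`path_buffer`, `new_path_buffer`) and format both paths through the function's allocator instead (`const allocator = fa.allocator()` with the `defer fa.reset()` shown in context, so the memory is reclaimed when the call finishes). A minimal, generic sketch of the before/after pattern, assuming a fixed-buffer allocator like the `fa` suggested by the surrounding code:

    const std = @import("std");

    fn pathDemo(file_index: usize) !void {
        // Before: format into a caller-owned stack buffer.
        var path_buffer: [128]u8 = undefined;
        const old_style = try std.fmt.bufPrint(&path_buffer, "{d}.zid", .{file_index});

        // After: format through an allocator; a FixedBufferAllocator reset at
        // the end of the call mirrors the `defer fa.reset()` in the diff.
        var backing: [1024]u8 = undefined;
        var fa = std.heap.FixedBufferAllocator.init(&backing);
        defer fa.reset();
        const allocator = fa.allocator();
        const new_style = try std.fmt.allocPrint(allocator, "{d}.zid.new", .{file_index});

        std.debug.print("{s} {s}\n", .{ old_style, new_style });
    }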
@@ -968,19 +968,20 @@ pub const FileEngine = struct {
 };
 defer new_writer.deinit();

+var finish_writing = false;
 while (iter.next() catch |err| {
 sync_context.logError("Error during iter", err);
 return;
 }) |row| {
-if (sync_context.checkStructLimit()) break;
-if (filter == null or filter.?.evaluate(row)) {
+if (!finish_writing and (filter == null or filter.?.evaluate(row))) {
 writer.print("{{\"{s}\"}},", .{UUID.format_bytes(row[0].UUID)}) catch |err| {
 sync_context.logError("Error writting", err);
 return;
 };

-if (sync_context.incrementAndCheckStructLimit()) break;
+finish_writing = sync_context.incrementAndCheckStructLimit();
 } else {
+std.debug.print("Oups", .{});
 new_writer.write(row) catch |err| {
 sync_context.logError("Error writing unchanged data", err);
 return;
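The substantive change in this hunk: previously the loop broke out as soon as the per-struct limit was hit (`checkStructLimit` / `incrementAndCheckStructLimit`), which also stopped copying the remaining rows into the new file; now a `finish_writing` flag is set instead, so once the limit is reached every further row falls through to the `else` branch and is rewritten unchanged by `new_writer`. A hedged stand-in sketch of that control flow (rows are plain integers and the "filter" keeps even values; only the `finish_writing` flag and the limit check mirror the diff):

    const std = @import("std");

    fn deleteSketch(allocator: std.mem.Allocator, rows: []const u32, limit: usize) !void {
        var deleted = std.ArrayList(u32).init(allocator); // stands in for the JSON UUID buffer
        defer deleted.deinit();
        var kept = std.ArrayList(u32).init(allocator); // stands in for new_writer
        defer kept.deinit();

        var finish_writing = false; // set once the limit is reached, instead of breaking

        for (rows) |row| {
            if (!finish_writing and row % 2 == 0) {
                try deleted.append(row); // diff: write the UUID into the JSON buffer
                finish_writing = deleted.items.len >= limit; // diff: incrementAndCheckStructLimit()
            } else {
                try kept.append(row); // diff: new_writer.write(row); rows past the limit land here too
            }
        }
        std.debug.print("deleted {d}, kept {d}\n", .{ deleted.items.len, kept.items.len });
    }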
@@ -58,7 +58,7 @@ pub fn send(comptime format: []const u8, args: anytype) void {

 /// Print an error and send it to the user pointing to the token
 pub fn printError(message: []const u8, err: ZipponError, query: ?[]const u8, start: ?usize, end: ?usize) ZipponError {
-if (config.DONT_SEND) return err;
+if (config.DONT_SEND_ERROR) return err;
 fa.reset();

 var buffer = std.ArrayList(u8).init(allocator);
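Together with the config hunk earlier (`DONT_SEND = true`, new `DONT_SEND_ERROR = false`), this means error messages from `printError` are no longer suppressed along with regular responses: `send` stays gated by `DONT_SEND`, while error output now has its own switch. A tiny assumed illustration of the split, using the values from the config hunk:

    const std = @import("std");

    const DONT_SEND = true; // still gates regular send()
    const DONT_SEND_ERROR = false; // now gates printError() instead

    fn gatingDemo() void {
        if (!DONT_SEND) std.debug.print("regular response would be sent\n", .{});
        if (!DONT_SEND_ERROR) std.debug.print("error response would be sent\n", .{});
    }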