Cleaning session
Removed the loop for the CLI; now it just takes some arguments using the binary. May come back to the while loop if I need to keep the file engine between sessions, e.g. to run queries in parallel. Moved printError and send to utils and removed duplicates of them. Organized FileEngine better. Put the functions that convert strings to values in a separate file. Fixed some syntax and made it smaller. Removed unused functions too.
This commit is contained in:
parent
29af5a7ac0
commit
2b1deca452
@ -6,7 +6,7 @@ pub fn build(b: *std.Build) void {
|
||||
const optimize = b.standardOptimizeOption(.{ .preferred_optimize_mode = .ReleaseFast });
|
||||
const exe = b.addExecutable(.{
|
||||
.name = "zippon",
|
||||
.root_source_file = b.path("src/cli.zig"),
|
||||
.root_source_file = b.path("src/main.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
});
|
||||
|
326
src/cli.zig
326
src/cli.zig
@ -1,326 +0,0 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const FileEngine = @import("fileEngine.zig").FileEngine;
|
||||
const cliTokenizer = @import("tokenizers/cli.zig").Tokenizer;
|
||||
const cliToken = @import("tokenizers/cli.zig").Token;
|
||||
const ziqlTokenizer = @import("tokenizers/ziql.zig").Tokenizer;
|
||||
const ziqlToken = @import("tokenizers/ziql.zig").Token;
|
||||
const ziqlParser = @import("ziqlParser.zig").Parser;
|
||||
const utils = @import("utils.zig");
|
||||
|
||||
const stdout = std.io.getStdOut().writer();
|
||||
|
||||
// TODO: Use some global var like that
|
||||
const version = "0.0.9";
|
||||
|
||||
/// Print `format` to stdout, then terminate the message with an ETX
/// (`\x03`) marker so the reading side knows the message is complete.
/// On a failed payload print the error is logged; the ETX terminator is
/// still sent — but only once (the original emitted it twice on the
/// error path: once inside the catch and once after it).
fn send(comptime format: []const u8, args: anytype) void {
    stdout.print(format, args) catch |err| {
        std.log.err("Can't send: {any}", .{err});
    };

    // Always emit the end-of-message marker, even if the payload failed.
    stdout.print("\x03\n", .{}) catch {};
}
|
||||
|
||||
/// States of the CLI command parser; the REPL consumes one token per
/// transition (see the state machine in `main`).
const State = enum {
    expect_main_command, // start of a line: run / db / schema / help / quit
    expect_query, // after `run`: expect a quoted ZiQL query
    expect_schema_command, // after `schema`: describe / init / help
    expect_path_to_schema, // after `schema init`: path to a schema file
    expect_db_command, // after `db`: new / use / metrics / help
    expect_path_to_new_db, // after `db new`: path for the new database folder
    expect_path_to_db, // after `db use`: path to an existing database folder
    quit, // leave the REPL entirely
    end, // finished processing the current line; prompt again
};
|
||||
|
||||
// TODO: Check if the argument --query or --q is in the command and just run the query instead
/// Entry point of the ZipponDB CLI (REPL).
/// Selects the database folder from the ZIPPONDB_PATH environment variable
/// when present, then loops reading commands from stdin until `quit`.
pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    // Report (debug log) any leak detected when the GPA is torn down.
    defer switch (gpa.deinit()) {
        .ok => {},
        .leak => std.log.debug("We fucked it up bro...\n", .{}),
    };

    // NOTE(review): db_path is never read or written after this point —
    // looks like a dead variable; confirm and remove.
    var db_path = std.ArrayList(u8).init(allocator);
    defer db_path.deinit();

    const path_env_variable = utils.getEnvVariables(allocator, "ZIPPONDB_PATH");
    // file_engine is assigned on every branch below before first use, so the
    // `undefined` never escapes; the deinit defer is therefore safe.
    var file_engine: FileEngine = undefined;
    defer file_engine.deinit();

    if (path_env_variable) |path| {
        // NOTE(review): message says "ZIPONDB_PATH" but the variable read
        // above is "ZIPPONDB_PATH" — typo in the user-facing text.
        std.debug.print("ZIPONDB_PATH environment variable found: {s}\n", .{path});

        var already_init = false;
        // Probe that the directory exists; on failure fall back to an empty,
        // unusable engine and tell the user to pick a database manually.
        // NOTE(review): on success the opened Dir is discarded without
        // close() — leaks one file descriptor. Confirm and fix.
        _ = std.fs.cwd().openDir(path, .{}) catch {
            std.debug.print("Error opening ZipponDB path using environment variable, please select the database using 'db use' or create a new one with 'db new'\n", .{});
            file_engine = FileEngine.init(allocator, try allocator.dupe(u8, ""));
            already_init = true;
        };
        if (!already_init) {
            try checkAndCreateDirectories(path, allocator);
            file_engine = FileEngine.init(allocator, path_env_variable.?);
        }
    } else {
        file_engine = FileEngine.init(allocator, try allocator.dupe(u8, ""));
        std.debug.print("No ZIPONDB_PATH environment variable found, please use the command:\n db use path/to/db \nor\n db new /path/to/dir\n", .{});
    }

    // 50KB buffer for one line of user input.
    const line_buf = try allocator.alloc(u8, 1024 * 50);
    defer allocator.free(line_buf);

    var state: State = .expect_main_command;

    // REPL: one outer iteration per line of input.
    while (true) {
        std.debug.print("> ", .{});
        const line = try std.io.getStdIn().reader().readUntilDelimiterOrEof(line_buf, '\n');

        if (line) |line_str| {
            // The tokenizer requires a null-terminated copy of the line.
            const null_term_line_str = try allocator.dupeZ(u8, line_str[0..line_str.len]);
            defer allocator.free(null_term_line_str);

            var cliToker = cliTokenizer.init(null_term_line_str);
            var token = cliToker.next();
            state = .expect_main_command;

            // Token-driven state machine; one token is consumed per iteration
            // via the `: (token = cliToker.next())` continue expression.
            while ((state != .end) and (state != .quit)) : (token = cliToker.next()) switch (state) {
                .expect_main_command => switch (token.tag) {
                    .keyword_run => {
                        if (!file_engine.usable) {
                            send("Error: No database selected. Please use db new or db use.", .{});
                            state = .end;
                            continue;
                        }
                        state = .expect_query;
                    },
                    .keyword_db => state = .expect_db_command,
                    .keyword_schema => {
                        if (!file_engine.usable) {
                            send("Error: No database selected. Please use db new or db use.", .{});
                            state = .end;
                            continue;
                        }
                        state = .expect_schema_command;
                    },
                    .keyword_help => {
                        send("{s}", .{
                            \\Welcome to ZipponDB v0.1!
                            \\
                            \\Available commands:
                            \\run To run a query.
                            \\db Create or chose a database.
                            \\schema Initialize the database schema.
                            \\quit Stop the CLI with memory safety.
                            \\
                            \\ For more informations: https://github.com/MrBounty/ZipponDB
                            \\
                        });
                        state = .end;
                    },
                    .keyword_quit => state = .quit,
                    .eof => state = .end,
                    else => {
                        send("Command need to start with a keyword, including: run, db, schema, help and quit", .{});
                        state = .end;
                    },
                },

                .expect_db_command => switch (token.tag) {
                    .keyword_new => state = .expect_path_to_new_db,
                    .keyword_use => state = .expect_path_to_db,
                    .keyword_metrics => {
                        if (!file_engine.usable) {
                            send("Error: No database selected. Please use db new or db use.", .{});
                            state = .end;
                            continue;
                        }

                        var buffer = std.ArrayList(u8).init(allocator);
                        defer buffer.deinit();

                        try file_engine.writeDbMetrics(&buffer);
                        send("{s}", .{buffer.items});
                        state = .end;
                    },
                    .keyword_help => {
                        send("{s}", .{
                            \\Available commands:
                            \\new Create a new database using a path to a sub folder.
                            \\use Select another ZipponDB folder to use as database.
                            \\metrics Print some metrics of the current database.
                            \\
                            \\ For more informations: https://github.com/MrBounty/ZipponDB
                            \\
                        });
                        state = .end;
                    },
                    else => {
                        // NOTE(review): this lists `swap`, which is not handled
                        // above, and omits `use`, which is — stale message?
                        send("Error: db commands available: new, metrics, swap & help", .{});
                        state = .end;
                    },
                },

                .expect_path_to_db => switch (token.tag) {
                    .identifier => {
                        // Tear down the previous engine before binding a new path.
                        file_engine.deinit();
                        file_engine = FileEngine.init(allocator, try allocator.dupe(u8, cliToker.getTokenSlice(token)));
                        send("Successfully started using the database!", .{});
                        state = .end;
                    },
                    else => {
                        send("Error Expect a path to a ZipponDB folder.", .{});
                        state = .end;
                    },
                },

                .expect_path_to_new_db => switch (token.tag) {
                    .identifier => {
                        checkAndCreateDirectories(cliToker.getTokenSlice(token), allocator) catch |err| {
                            send("Error: Coulnt create database directories: {any}", .{err});
                            state = .end;
                            continue;
                        };
                        file_engine.deinit();
                        file_engine = FileEngine.init(allocator, try allocator.dupe(u8, cliToker.getTokenSlice(token)));
                        send("Successfully initialized the database!", .{});
                        state = .end;
                    },
                    else => {
                        send("Error Expect a path to a folder.", .{});
                        state = .end;
                    },
                },

                .expect_query => switch (token.tag) {
                    .string_literal => {
                        // Strip the surrounding quotes (start + 1 .. end - 1).
                        const null_term_query_str = try allocator.dupeZ(u8, line_str[token.loc.start + 1 .. token.loc.end - 1]);
                        defer allocator.free(null_term_query_str);
                        runQuery(null_term_query_str, &file_engine);
                        state = .end;
                    },
                    .keyword_help => {
                        send("The run command take a ZiQL query between \" and run it. eg: run \"GRAB User\"", .{});
                        state = .end;
                    },
                    else => {
                        send("Error: After command run, need a query, eg: \"GRAB User\"", .{});
                        state = .end;
                    },
                },

                .expect_schema_command => switch (token.tag) {
                    .keyword_describe => {
                        // NOTE(review): "db bew" below is a typo for "db new".
                        if (std.mem.eql(u8, file_engine.path_to_ZipponDB_dir, "")) send("Error: No database selected. Please use db bew or db use.", .{});

                        if (file_engine.null_terminated_schema_buff.len == 0) {
                            send("Need to init the schema first. Please use the schema init path/to/schema command to start.", .{});
                        } else {
                            send("Schema:\n {s}", .{file_engine.null_terminated_schema_buff});
                        }
                        state = .end;
                    },
                    .keyword_init => state = .expect_path_to_schema,
                    .keyword_help => {
                        send("{s}", .{
                            \\Available commands:
                            \\describe Print the schema use by the currently selected database.
                            \\init Take the path to a schema file and initialize the database.
                            \\
                            \\ For more informations: https://github.com/MrBounty/ZipponDB
                            \\
                        });
                        state = .end;
                    },
                    else => {
                        send("Error: schema commands available: describe, init & help", .{});
                        state = .end;
                    },
                },

                .expect_path_to_schema => switch (token.tag) {
                    .identifier => {
                        // NOTE(review): the catch arms only set state; control
                        // then falls through to the success message below, so
                        // "Successfully initialized" is printed even when
                        // initDataFolder failed. Add a `continue` in each arm.
                        file_engine.initDataFolder(cliToker.getTokenSlice(token)) catch |err| switch (err) {
                            error.SchemaFileNotFound => {
                                send("Coulnt find the schema file at {s}", .{cliToker.getTokenSlice(token)});
                                state = .end;
                            },
                            else => {
                                send("Error initializing the schema", .{});
                                state = .end;
                            },
                        };
                        send("Successfully initialized the database schema!", .{});
                        state = .end;
                    },
                    else => {
                        send("Error: Expect path to schema file.", .{});
                        state = .end;
                    },
                },

                .quit, .end => break,
            };
        }
        if (state == .quit) break;
    }
}
|
||||
|
||||
/// Tokenize and parse a single null-terminated ZiQL query string against
/// `file_engine`. Uses its own GeneralPurposeAllocator so each query's
/// memory is tracked (and leak-checked) independently of the CLI allocator.
pub fn runQuery(null_term_query_str: [:0]const u8, file_engine: *FileEngine) void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();

    var toker = ziqlTokenizer.init(null_term_query_str);

    var parser = ziqlParser.init(allocator, &toker, file_engine);
    defer {
        parser.deinit();
        switch (gpa.deinit()) {
            .ok => {},
            .leak => std.log.debug("We fucked it up bro...\n", .{}),
        }
    }

    parser.parse() catch |err| switch (err) {
        // Syntax errors are already reported to the user by the parser.
        error.SynthaxError => {},
        // Don't swallow unexpected errors silently (the original did);
        // at minimum record what went wrong.
        else => std.log.err("Query failed: {s}", .{@errorName(err)}),
    };
}
|
||||
|
||||
// TODO: Put that in the FileEngine
/// Create the database root folder plus its DATA, BACKUP and LOG
/// sub-folders. A directory that already exists is not an error;
/// any other makeDir failure is propagated.
fn checkAndCreateDirectories(sub_path: []const u8, allocator: Allocator) !void {
    const cwd = std.fs.cwd();

    // Allocate and free the path inside the loop. The original freed
    // `path_buff` manually and then re-assigned it via allocPrint under a
    // single `defer free`; if a later allocPrint failed, the defer ran on
    // the already-freed old pointer (double free on the OOM path).
    const sub_names = [_][]const u8{ "", "/DATA", "/BACKUP", "/LOG" };
    for (sub_names) |name| {
        const path_buff = try std.fmt.allocPrint(allocator, "{s}{s}", .{ sub_path, name });
        defer allocator.free(path_buff);

        cwd.makeDir(path_buff) catch |err| switch (err) {
            error.PathAlreadyExists => {},
            else => return err,
        };
    }
}
|
@ -2,13 +2,14 @@ const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const UUID = @import("types/uuid.zig").UUID;
|
||||
const DataType = @import("types/dataType.zig").DataType;
|
||||
const s2t = @import("types/stringToType.zig");
|
||||
const FileTokenizer = @import("tokenizers/file.zig").Tokenizer;
|
||||
const FileToken = @import("tokenizers/file.zig").Token;
|
||||
const SchemaStruct = @import("schemaParser.zig").Parser.SchemaStruct;
|
||||
const SchemaParser = @import("schemaParser.zig").Parser;
|
||||
const SchemaTokenizer = @import("tokenizers/schema.zig").Tokenizer;
|
||||
const SchemaToken = @import("tokenizers/schema.zig").Token;
|
||||
const AdditionalData = @import("parsing-tools/additionalData.zig").AdditionalData;
|
||||
const AdditionalData = @import("stuffs/additionalData.zig").AdditionalData;
|
||||
|
||||
// TODO: Use those errors everywhere in this file
|
||||
const FileEngineError = error{
|
||||
@ -30,14 +31,23 @@ pub const FileEngine = struct {
|
||||
null_terminated_schema_buff: [:0]u8,
|
||||
struct_array: std.ArrayList(SchemaStruct),
|
||||
|
||||
pub fn init(allocator: Allocator, path: []const u8) FileEngine {
|
||||
pub fn init(allocator: Allocator, path: []const u8) !FileEngine {
|
||||
const path_to_ZipponDB_dir = path;
|
||||
|
||||
var schema_buf = allocator.alloc(u8, 1024 * 50) catch @panic("Cant allocate the schema buffer");
|
||||
defer allocator.free(schema_buf);
|
||||
var schema_buf = std.ArrayList(u8).init(allocator);
|
||||
defer schema_buf.deinit();
|
||||
|
||||
const len: usize = FileEngine.readSchemaFile(allocator, path_to_ZipponDB_dir, schema_buf) catch 0;
|
||||
const null_terminated_schema_buff = allocator.dupeZ(u8, schema_buf[0..len]) catch @panic("Cant allocate null term buffer for the schema");
|
||||
const schema_path = try std.fmt.allocPrint(allocator, "{s}/schema.zipponschema", .{path_to_ZipponDB_dir});
|
||||
defer allocator.free(schema_path);
|
||||
|
||||
const file = try std.fs.cwd().openFile(schema_path, .{});
|
||||
defer file.close();
|
||||
|
||||
const stat = try file.stat();
|
||||
const buff = try file.readToEndAlloc(allocator, stat.size);
|
||||
|
||||
const null_terminated_schema_buff = try allocator.dupeZ(u8, buff);
|
||||
allocator.free(buff);
|
||||
|
||||
var toker = SchemaTokenizer.init(null_terminated_schema_buff);
|
||||
var parser = SchemaParser.init(&toker, allocator);
|
||||
@ -89,6 +99,126 @@ pub const FileEngine = struct {
|
||||
}
|
||||
};
|
||||
|
||||
// --------------------Other--------------------
|
||||
|
||||
/// Write human-readable size metrics of the database into `buffer`:
/// total size, then LOG, BACKUP and DATA, then one line per struct
/// sub-folder inside DATA. Sizes are reported in Mb (1e6 bytes).
pub fn writeDbMetrics(self: *FileEngine, buffer: *std.ArrayList(u8)) !void {
    const path = try std.fmt.allocPrint(self.allocator, "{s}", .{self.path_to_ZipponDB_dir});
    defer self.allocator.free(path);

    // Open with .iterate so getDirTotalSize can walk the entries, and close
    // every handle again — the original leaked all five directory handles,
    // so repeated `db metrics` calls leaked file descriptors.
    var main_dir = try std.fs.cwd().openDir(path, .{ .iterate = true });
    defer main_dir.close();

    const writer = buffer.writer();
    try writer.print("Database path: {s}\n", .{path});
    const main_size = try self.getDirTotalSize(main_dir);
    try writer.print("Total size: {d:.2}Mb\n", .{@as(f64, @floatFromInt(main_size)) / 1e6});

    var log_dir = try main_dir.openDir("LOG", .{ .iterate = true });
    defer log_dir.close();
    const log_size = try self.getDirTotalSize(log_dir);
    try writer.print("LOG: {d:.2}Mb\n", .{@as(f64, @floatFromInt(log_size)) / 1e6});

    var backup_dir = try main_dir.openDir("BACKUP", .{ .iterate = true });
    defer backup_dir.close();
    const backup_size = try self.getDirTotalSize(backup_dir);
    try writer.print("BACKUP: {d:.2}Mb\n", .{@as(f64, @floatFromInt(backup_size)) / 1e6});

    var data_dir = try main_dir.openDir("DATA", .{ .iterate = true });
    defer data_dir.close();
    const data_size = try self.getDirTotalSize(data_dir);
    try writer.print("DATA: {d:.2}Mb\n", .{@as(f64, @floatFromInt(data_size)) / 1e6});

    var iter = data_dir.iterate();
    while (try iter.next()) |entry| {
        if (entry.kind != .directory) continue;
        var sub_dir = try data_dir.openDir(entry.name, .{ .iterate = true });
        defer sub_dir.close(); // runs at the end of each iteration
        const size = try self.getDirTotalSize(sub_dir);
        try writer.print("  {s}: {d:.}Mb\n", .{ entry.name, @as(f64, @floatFromInt(size)) / 1e6 });
    }
}
|
||||
|
||||
// --------------------Init folder and files--------------------
|
||||
|
||||
/// Create the main folder. Including DATA, LOG and BACKUP
/// A directory that already exists is not an error; any other makeDir
/// failure is propagated to the caller.
pub fn checkAndCreateDirectories(sub_path: []const u8, allocator: Allocator) !void {
    const cwd = std.fs.cwd();

    // Allocate and free the path inside the loop. The original freed
    // `path_buff` manually and then re-assigned it via allocPrint under a
    // single `defer free`; if a later allocPrint failed, the defer ran on
    // the already-freed old pointer (double free on the OOM path).
    const sub_names = [_][]const u8{ "", "/DATA", "/BACKUP", "/LOG" };
    for (sub_names) |name| {
        const path_buff = try std.fmt.allocPrint(allocator, "{s}{s}", .{ sub_path, name });
        defer allocator.free(path_buff);

        cwd.makeDir(path_buff) catch |err| switch (err) {
            error.PathAlreadyExists => {},
            else => return err,
        };
    }
}
|
||||
|
||||
/// Request a path to a schema file and then create the struct folder
/// Reads the schema file, re-parses it into `self.struct_array`, creates one
/// folder per struct under DATA with an empty `0.zippondata` file, and
/// finally persists the schema via writeSchemaFile.
/// TODO: Check if some data already exist and if so ask if the user want to delete it and make a backup
pub fn initDataFolder(self: *FileEngine, path_to_schema_file: []const u8) FileEngineError!void {
    // Fixed 50KB read buffer; a schema file larger than this is silently
    // truncated by readAll below.
    var schema_buf = self.allocator.alloc(u8, 1024 * 50) catch @panic("Cant allocate the schema buffer");
    defer self.allocator.free(schema_buf);

    const file = std.fs.cwd().openFile(path_to_schema_file, .{}) catch return FileEngineError.SchemaFileNotFound;
    defer file.close();

    const len = file.readAll(schema_buf) catch @panic("Can't read schema file");

    // Replace the cached null-terminated schema text with the new contents.
    self.allocator.free(self.null_terminated_schema_buff);
    self.null_terminated_schema_buff = self.allocator.dupeZ(u8, schema_buf[0..len]) catch @panic("Cant allocate null term buffer for the schema");

    var toker = SchemaTokenizer.init(self.null_terminated_schema_buff);
    var parser = SchemaParser.init(&toker, self.allocator);

    // Deinit the struct array before creating a new one
    for (self.struct_array.items) |*elem| elem.deinit();
    for (0..self.struct_array.items.len) |_| _ = self.struct_array.pop();

    parser.parse(&self.struct_array) catch return error.SchemaNotConform;

    const path = std.fmt.allocPrint(self.allocator, "{s}/DATA", .{self.path_to_ZipponDB_dir}) catch @panic("Cant allocate path");
    defer self.allocator.free(path);

    var data_dir = std.fs.cwd().openDir(path, .{}) catch return FileEngineError.DATAFolderNotFound;
    defer data_dir.close();

    for (self.struct_array.items) |struct_item| {
        data_dir.makeDir(self.locToSlice(struct_item.name)) catch |err| switch (err) {
            error.PathAlreadyExists => {},
            else => return FileEngineError.CantMakeDir,
        };
        // NOTE(review): struct_dir is never closed, and the File returned by
        // createFile below is discarded without close() — both leak a file
        // descriptor per struct. Confirm and fix.
        const struct_dir = data_dir.openDir(self.locToSlice(struct_item.name), .{}) catch return FileEngineError.StructFolderNotFound;

        _ = struct_dir.createFile("0.zippondata", .{}) catch |err| switch (err) {
            error.PathAlreadyExists => {},
            else => return FileEngineError.CantMakeFile,
        };
    }

    self.writeSchemaFile();
}
|
||||
|
||||
// --------------------Read and parse files--------------------
|
||||
|
||||
/// Take a list of UUID and, a buffer array and the additional data to write into the buffer the JSON to send
|
||||
/// TODO: Optimize
|
||||
/// FIXME: Array of string are not working
|
||||
@ -301,16 +431,16 @@ pub const FileEngine = struct {
|
||||
|
||||
var compare_value: ComparisonValue = undefined;
|
||||
switch (condition.data_type) {
|
||||
.int => compare_value = ComparisonValue{ .int = parseInt(condition.value) },
|
||||
.int => compare_value = ComparisonValue{ .int = s2t.parseInt(condition.value) },
|
||||
.str => compare_value = ComparisonValue{ .str = condition.value },
|
||||
.float => compare_value = ComparisonValue{ .float = parseFloat(condition.value) },
|
||||
.bool => compare_value = ComparisonValue{ .bool_ = parseBool(condition.value) },
|
||||
.float => compare_value = ComparisonValue{ .float = s2t.parseFloat(condition.value) },
|
||||
.bool => compare_value = ComparisonValue{ .bool_ = s2t.parseBool(condition.value) },
|
||||
.id => compare_value = ComparisonValue{ .id = try UUID.parse(condition.value) },
|
||||
.int_array => compare_value = ComparisonValue{ .int_array = parseArrayInt(self.allocator, condition.value) },
|
||||
.str_array => compare_value = ComparisonValue{ .str_array = parseArrayStr(self.allocator, condition.value) },
|
||||
.float_array => compare_value = ComparisonValue{ .float_array = parseArrayFloat(self.allocator, condition.value) },
|
||||
.bool_array => compare_value = ComparisonValue{ .bool_array = parseArrayBool(self.allocator, condition.value) },
|
||||
.id_array => compare_value = ComparisonValue{ .id_array = parseArrayUUID(self.allocator, condition.value) },
|
||||
.int_array => compare_value = ComparisonValue{ .int_array = s2t.parseArrayInt(self.allocator, condition.value) },
|
||||
.str_array => compare_value = ComparisonValue{ .str_array = s2t.parseArrayStr(self.allocator, condition.value) },
|
||||
.float_array => compare_value = ComparisonValue{ .float_array = s2t.parseArrayFloat(self.allocator, condition.value) },
|
||||
.bool_array => compare_value = ComparisonValue{ .bool_array = s2t.parseArrayBool(self.allocator, condition.value) },
|
||||
.id_array => compare_value = ComparisonValue{ .id_array = s2t.parseArrayUUID(self.allocator, condition.value) },
|
||||
}
|
||||
defer {
|
||||
switch (condition.data_type) {
|
||||
@ -324,7 +454,6 @@ pub const FileEngine = struct {
|
||||
}
|
||||
|
||||
var token: FileToken = undefined;
|
||||
const column_index = self.columnIndexOfMember(condition.struct_name, condition.member_name);
|
||||
|
||||
while (true) {
|
||||
output_fbs.reset();
|
||||
@ -366,52 +495,52 @@ pub const FileEngine = struct {
|
||||
const uuid = try UUID.parse(output_fbs.getWritten()[0..36]);
|
||||
|
||||
// Skip unwanted token
|
||||
for (0..column_index.?) |_| {
|
||||
for (self.structName2structMembers(condition.struct_name)) |mn| {
|
||||
if (std.mem.eql(u8, self.locToSlice(mn), condition.member_name)) break;
|
||||
_ = data_toker.next();
|
||||
}
|
||||
|
||||
token = data_toker.next();
|
||||
|
||||
// TODO: Make sure in amount that the rest is unreachable by sending an error for wrong condition like superior between 2 string or array
|
||||
switch (condition.operation) {
|
||||
.equal => switch (condition.data_type) {
|
||||
.int => if (compare_value.int == parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float == parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.int => if (compare_value.int == s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float == s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.str => if (std.mem.eql(u8, compare_value.str, data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.bool => if (compare_value.bool_ == parseBool(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.bool => if (compare_value.bool_ == s2t.parseBool(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.id => if (compare_value.id.compare(uuid)) try uuid_array.append(uuid),
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.different => switch (condition.data_type) {
|
||||
.int => if (compare_value.int != parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float != parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.int => if (compare_value.int != s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float != s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.str => if (!std.mem.eql(u8, compare_value.str, data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.bool => if (compare_value.bool_ != parseBool(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.bool => if (compare_value.bool_ != s2t.parseBool(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.superior_or_equal => switch (condition.data_type) {
|
||||
.int => if (compare_value.int <= parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float <= parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.int => if (compare_value.int <= s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float <= s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.superior => switch (condition.data_type) {
|
||||
.int => if (compare_value.int < parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float < parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.int => if (compare_value.int < s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float < s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.inferior_or_equal => switch (condition.data_type) {
|
||||
.int => if (compare_value.int >= parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float >= parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.int => if (compare_value.int >= s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float >= s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.inferior => switch (condition.data_type) {
|
||||
.int => if (compare_value.int > parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float > parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.int => if (compare_value.int > s2t.parseInt(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
.float => if (compare_value.float > s2t.parseFloat(data_toker.getTokenSlice(token))) try uuid_array.append(uuid),
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
@ -428,6 +557,8 @@ pub const FileEngine = struct {
|
||||
}
|
||||
}
|
||||
|
||||
// --------------------Change existing files--------------------
|
||||
|
||||
// Do I need a map here ? Cant I use something else ?
|
||||
pub fn writeEntity(self: *FileEngine, struct_name: []const u8, data_map: std.StringHashMap([]const u8)) !UUID {
|
||||
const uuid = UUID.init();
|
||||
@ -730,14 +861,9 @@ pub const FileEngine = struct {
|
||||
return deleted_count;
|
||||
}
|
||||
|
||||
/// Extract the numeric index from a data file name, e.g. "1.zippondata" -> 1.
/// The extension length is derived from the literal itself, so this stays in
/// sync if the extension string is ever changed in one place.
fn fileName2Index(_: FileEngine, file_name: []const u8) usize {
    const ext_len = ".zippondata".len;
    const stem = file_name[0 .. file_name.len - ext_len];
    return std.fmt.parseInt(usize, stem, 10) catch @panic("Couln't parse the int of a zippondata file.");
}
|
||||
// --------------------Schema utils--------------------
|
||||
|
||||
/// Use the map of file stat to find the first file with under the bytes limit.
|
||||
/// return the name of the file. If none is found, return null.
|
||||
/// Get the index of the first file that is bellow the size limit. If not found, return null
|
||||
fn getFirstUsableIndexFile(self: FileEngine, struct_name: []const u8) !?usize {
|
||||
const path = try std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}", .{ self.path_to_ZipponDB_dir, struct_name });
|
||||
defer self.allocator.free(path);
|
||||
@ -748,13 +874,13 @@ pub const FileEngine = struct {
|
||||
var iter = member_dir.iterate();
|
||||
while (try iter.next()) |entry| {
|
||||
const file_stat = try member_dir.statFile(entry.name);
|
||||
if (file_stat.size < self.max_file_size) return self.fileName2Index(entry.name);
|
||||
if (file_stat.size < self.max_file_size) return try std.fmt.parseInt(usize, entry.name[0..(entry.name.len - 11)], 10);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Iter over all file and get the max name and return the value of it as usize
|
||||
/// So for example if there is 1.zippondata and 2.zippondata it return 2.
|
||||
/// Iterate over all file of a struct and return the index of the last file.
|
||||
/// E.g. a struct with 0.csv and 1.csv it return 1.
|
||||
fn maxFileIndex(self: FileEngine, struct_name: []const u8) !usize {
|
||||
const path = try std.fmt.allocPrint(self.allocator, "{s}/DATA/{s}", .{ self.path_to_ZipponDB_dir, struct_name });
|
||||
defer self.allocator.free(path);
|
||||
@ -770,103 +896,10 @@ pub const FileEngine = struct {
|
||||
return count - 1;
|
||||
}
|
||||
|
||||
/// Write human-readable size metrics of the database into `buffer`:
/// total size, then LOG, BACKUP and DATA, then one line per struct
/// sub-folder inside DATA. Sizes are reported in Mb (1e6 bytes).
pub fn writeDbMetrics(self: *FileEngine, buffer: *std.ArrayList(u8)) !void {
    const path = try std.fmt.allocPrint(self.allocator, "{s}", .{self.path_to_ZipponDB_dir});
    defer self.allocator.free(path);

    // Open with .iterate so getDirTotalSize can walk the entries, and close
    // every handle again — the original leaked all five directory handles,
    // so repeated `db metrics` calls leaked file descriptors.
    var main_dir = try std.fs.cwd().openDir(path, .{ .iterate = true });
    defer main_dir.close();

    const writer = buffer.writer();
    try writer.print("Database path: {s}\n", .{path});
    const main_size = try self.getDirTotalSize(main_dir);
    try writer.print("Total size: {d:.2}Mb\n", .{@as(f64, @floatFromInt(main_size)) / 1e6});

    var log_dir = try main_dir.openDir("LOG", .{ .iterate = true });
    defer log_dir.close();
    const log_size = try self.getDirTotalSize(log_dir);
    try writer.print("LOG: {d:.2}Mb\n", .{@as(f64, @floatFromInt(log_size)) / 1e6});

    var backup_dir = try main_dir.openDir("BACKUP", .{ .iterate = true });
    defer backup_dir.close();
    const backup_size = try self.getDirTotalSize(backup_dir);
    try writer.print("BACKUP: {d:.2}Mb\n", .{@as(f64, @floatFromInt(backup_size)) / 1e6});

    var data_dir = try main_dir.openDir("DATA", .{ .iterate = true });
    defer data_dir.close();
    const data_size = try self.getDirTotalSize(data_dir);
    try writer.print("DATA: {d:.2}Mb\n", .{@as(f64, @floatFromInt(data_size)) / 1e6});

    var iter = data_dir.iterate();
    while (try iter.next()) |entry| {
        if (entry.kind != .directory) continue;
        var sub_dir = try data_dir.openDir(entry.name, .{ .iterate = true });
        defer sub_dir.close(); // runs at the end of each iteration
        const size = try self.getDirTotalSize(sub_dir);
        try writer.print("  {s}: {d:.}Mb\n", .{ entry.name, @as(f64, @floatFromInt(size)) / 1e6 });
    }
}
|
||||
|
||||
/// Request a path to a schema file and then create the struct folder
/// Reads the schema file, re-parses it into `self.struct_array`, creates one
/// folder per struct under DATA with an empty `0.zippondata` file, and
/// finally persists the schema via writeSchemaFile.
/// TODO: Check if some data already exist and if so ask if the user want to delete it and make a backup
pub fn initDataFolder(self: *FileEngine, path_to_schema_file: []const u8) FileEngineError!void {
    // Fixed 50KB read buffer; a schema file larger than this is silently
    // truncated by readAll below.
    var schema_buf = self.allocator.alloc(u8, 1024 * 50) catch @panic("Cant allocate the schema buffer");
    defer self.allocator.free(schema_buf);

    const file = std.fs.cwd().openFile(path_to_schema_file, .{}) catch return FileEngineError.SchemaFileNotFound;
    defer file.close();

    const len = file.readAll(schema_buf) catch @panic("Can't read schema file");

    // Replace the cached null-terminated schema text with the new contents.
    self.allocator.free(self.null_terminated_schema_buff);
    self.null_terminated_schema_buff = self.allocator.dupeZ(u8, schema_buf[0..len]) catch @panic("Cant allocate null term buffer for the schema");

    var toker = SchemaTokenizer.init(self.null_terminated_schema_buff);
    var parser = SchemaParser.init(&toker, self.allocator);

    // Deinit the struct array before creating a new one
    for (self.struct_array.items) |*elem| elem.deinit();
    for (0..self.struct_array.items.len) |_| _ = self.struct_array.pop();

    parser.parse(&self.struct_array) catch return error.SchemaNotConform;

    const path = std.fmt.allocPrint(self.allocator, "{s}/DATA", .{self.path_to_ZipponDB_dir}) catch @panic("Cant allocate path");
    defer self.allocator.free(path);

    var data_dir = std.fs.cwd().openDir(path, .{}) catch return FileEngineError.DATAFolderNotFound;
    defer data_dir.close();

    for (self.struct_array.items) |struct_item| {
        data_dir.makeDir(self.locToSlice(struct_item.name)) catch |err| switch (err) {
            error.PathAlreadyExists => {},
            else => return FileEngineError.CantMakeDir,
        };
        // NOTE(review): struct_dir is never closed, and the File returned by
        // createFile below is discarded without close() — both leak a file
        // descriptor per struct. Confirm and fix.
        const struct_dir = data_dir.openDir(self.locToSlice(struct_item.name), .{}) catch return FileEngineError.StructFolderNotFound;

        _ = struct_dir.createFile("0.zippondata", .{}) catch |err| switch (err) {
            error.PathAlreadyExists => {},
            else => return FileEngineError.CantMakeFile,
        };
    }

    self.writeSchemaFile();
}
|
||||
|
||||
// Stuff for schema
|
||||
// TODO: Check all those functions and remove if not use
|
||||
|
||||
/// Read `<sub_path>/schema.zipponschema` into `buffer` and return the number
/// of bytes read. `allocator` is only used for the temporary path string.
pub fn readSchemaFile(allocator: Allocator, sub_path: []const u8, buffer: []u8) !usize {
    const schema_path = try std.fmt.allocPrint(allocator, "{s}/schema.zipponschema", .{sub_path});
    defer allocator.free(schema_path);

    const schema_file = try std.fs.cwd().openFile(schema_path, .{});
    defer schema_file.close();

    return try schema_file.readAll(buffer);
}
|
||||
|
||||
pub fn writeSchemaFile(self: *FileEngine) void {
|
||||
// Delete the current schema file
|
||||
// Create a new one
|
||||
// Dumpe the buffer inside
|
||||
var zippon_dir = std.fs.cwd().openDir(self.path_to_ZipponDB_dir, .{}) catch @panic("Cant open main folder!");
|
||||
defer zippon_dir.close();
|
||||
|
||||
zippon_dir.deleteFile("schema.zipponschema") catch |err| switch (err) {
|
||||
error.FileNotFound => {},
|
||||
else => @panic("Error other than file not found when writing the schema."),
|
||||
@ -881,17 +914,6 @@ pub const FileEngine = struct {
|
||||
return self.null_terminated_schema_buff[loc.start..loc.end];
|
||||
}
|
||||
|
||||
/// Return the zero-based column index of `member_name` inside `struct_name`,
/// or null when no member with that name exists.
pub fn columnIndexOfMember(self: *FileEngine, struct_name: []const u8, member_name: []const u8) ?usize {
    for (self.structName2structMembers(struct_name), 0..) |member_loc, index| {
        if (std.mem.eql(u8, self.locToSlice(member_loc), member_name)) return index;
    }
    return null;
}
|
||||
|
||||
/// Get the type of the member
|
||||
pub fn memberName2DataType(self: *FileEngine, struct_name: []const u8, member_name: []const u8) ?DataType {
|
||||
var i: u16 = 0;
|
||||
@ -940,16 +962,6 @@ pub const FileEngine = struct {
|
||||
return false;
|
||||
}
|
||||
|
||||
/// Check if a string is a name of a struct in the currently used engine.
pub fn isStructInSchema(self: *FileEngine, struct_name_to_check: []const u8) bool {
    for (self.struct_array.items) |schema_entry| {
        if (std.mem.eql(u8, struct_name_to_check, schema_entry.name)) return true;
    }
    return false;
}
|
||||
|
||||
// Return true if the map have all the member name as key and not more
|
||||
pub fn checkIfAllMemberInMap(self: *FileEngine, struct_name: []const u8, map: *std.StringHashMap([]const u8)) bool {
|
||||
const all_struct_member = self.structName2structMembers(struct_name);
|
||||
@ -967,7 +979,7 @@ test "Get list of UUID using condition" {
|
||||
const allocator = std.testing.allocator;
|
||||
|
||||
const path = try allocator.dupe(u8, "ZipponDB");
|
||||
var file_engine = FileEngine.init(allocator, path);
|
||||
var file_engine = try FileEngine.init(allocator, path);
|
||||
defer file_engine.deinit();
|
||||
|
||||
var uuid_array = std.ArrayList(UUID).init(allocator);
|
||||
@ -976,127 +988,3 @@ test "Get list of UUID using condition" {
|
||||
const condition = FileEngine.Condition{ .struct_name = "User", .member_name = "email", .value = "adrien@mail.com", .operation = .equal, .data_type = .str };
|
||||
try file_engine.getUUIDListUsingCondition(condition, &uuid_array);
|
||||
}
|
||||
|
||||
// Series of functions to use just before creating an entity.
|
||||
// Will transform the string of data into data of the right type./
|
||||
|
||||
// TODO: Put those functions somewhere else
|
||||
/// Parse a base-10 signed integer; any invalid input yields 0.
pub fn parseInt(value_str: []const u8) i64 {
    return std.fmt.parseInt(i64, value_str, 10) catch 0;
}
|
||||
|
||||
/// Parse a space-separated int array literal like "[1 2 3]".
/// Unparsable entries become 0. Caller owns the returned list.
pub fn parseArrayInt(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(i64) {
    var out = std.ArrayList(i64).init(allocator);

    // Strip the surrounding brackets, then split on spaces.
    var tokens = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (tokens.next()) |tok| out.append(parseInt(tok)) catch {};

    return out;
}
|
||||
|
||||
/// Parse a float; any invalid input yields 0.
pub fn parseFloat(value_str: []const u8) f64 {
    return std.fmt.parseFloat(f64, value_str) catch 0;
}
|
||||
|
||||
/// Parse a space-separated float array literal like "[1.5 2.0]".
/// Unparsable entries become 0. Caller owns the returned list.
pub fn parseArrayFloat(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(f64) {
    var out = std.ArrayList(f64).init(allocator);

    var tokens = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (tokens.next()) |tok| out.append(parseFloat(tok)) catch {};

    return out;
}
|
||||
|
||||
/// Interpret a stored boolean: "0" (or an empty string) is false, anything
/// else is true. The original indexed [0] unconditionally and crashed on "".
pub fn parseBool(value_str: []const u8) bool {
    if (value_str.len == 0) return false;
    return value_str[0] != '0';
}
|
||||
|
||||
/// Parse a space-separated bool array literal like "[1 0 1]".
/// Each entry goes through `parseBool`. Caller owns the returned list.
pub fn parseArrayBool(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(bool) {
    var out = std.ArrayList(bool).init(allocator);

    var tokens = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (tokens.next()) |tok| out.append(parseBool(tok)) catch {};

    return out;
}
|
||||
|
||||
/// Parse a space-separated array of UUID strings; entries that fail to parse
/// are silently skipped. Caller owns the returned list.
pub fn parseArrayUUID(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(UUID) {
    var out = std.ArrayList(UUID).init(allocator);

    var tokens = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (tokens.next()) |tok| {
        const parsed = UUID.parse(tok) catch continue;
        out.append(parsed) catch continue;
    }

    return out;
}
|
||||
|
||||
/// Parse a string-array literal like "['a' 'b']" by splitting on single
/// quotes. Each kept piece is duplicated; caller owns the list and its items.
/// Note: the separators between quoted items come back as extra pieces — only
/// a piece that is exactly " " is dropped.
// FIXME: I think it will not work if there is a ' inside the string
pub fn parseArrayStr(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList([]const u8) {
    var out = std.ArrayList([]const u8).init(allocator);

    var pieces = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], "'");
    while (pieces.next()) |piece| {
        if (std.mem.eql(u8, " ", piece)) continue;
        // FIXME: I think I need to add the '' on each side again
        const owned = allocator.dupe(u8, piece) catch @panic("=(");
        out.append(owned) catch {};
    }

    return out;
}
|
||||
|
||||
// Unit tests for the string -> typed-value parsers above.
test "Data parsing" {
    const allocator = std.testing.allocator;

    // Int: invalid input ("Hello") falls back to 0
    const in1: [3][]const u8 = .{ "1", "42", "Hello" };
    const expected_out1: [3]i64 = .{ 1, 42, 0 };
    for (in1, 0..) |value, i| {
        try std.testing.expect(parseInt(value) == expected_out1[i]);
    }

    // Int array: bracketed, space-separated; bad entries become 0
    const in2 = "[1 14 44 42 hello]";
    const out2 = parseArrayInt(allocator, in2);
    defer out2.deinit();
    const expected_out2: [5]i64 = .{ 1, 14, 44, 42, 0 };
    try std.testing.expect(std.mem.eql(i64, out2.items, &expected_out2));

    // Float: invalid input falls back to 0
    const in3: [3][]const u8 = .{ "1.3", "65.991", "Hello" };
    const expected_out3: [3]f64 = .{ 1.3, 65.991, 0 };
    for (in3, 0..) |value, i| {
        try std.testing.expect(parseFloat(value) == expected_out3[i]);
    }

    // Float array
    const in4 = "[1.5 14.3 44.9999 42 hello]";
    const out4 = parseArrayFloat(allocator, in4);
    defer out4.deinit();
    const expected_out4: [5]f64 = .{ 1.5, 14.3, 44.9999, 42, 0 };
    try std.testing.expect(std.mem.eql(f64, out4.items, &expected_out4));

    // Bool: anything that does not start with '0' is true
    const in5: [3][]const u8 = .{ "1", "Hello", "0" };
    const expected_out5: [3]bool = .{ true, true, false };
    for (in5, 0..) |value, i| {
        try std.testing.expect(parseBool(value) == expected_out5[i]);
    }

    // Bool array
    const in6 = "[1 0 0 1 1]";
    const out6 = parseArrayBool(allocator, in6);
    defer out6.deinit();
    const expected_out6: [5]bool = .{ true, false, false, true, true };
    try std.testing.expect(std.mem.eql(bool, out6.items, &expected_out6));

    // TODO: Test the string array
}
|
||||
|
263
src/main.zig
Normal file
263
src/main.zig
Normal file
@ -0,0 +1,263 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const FileEngine = @import("fileEngine.zig").FileEngine;
|
||||
const cliTokenizer = @import("tokenizers/cli.zig").Tokenizer;
|
||||
const cliToken = @import("tokenizers/cli.zig").Token;
|
||||
const ziqlTokenizer = @import("tokenizers/ziql.zig").Tokenizer;
|
||||
const ziqlToken = @import("tokenizers/ziql.zig").Token;
|
||||
const ziqlParser = @import("ziqlParser.zig").Parser;
|
||||
const utils = @import("utils.zig");
|
||||
const send = @import("stuffs/utils.zig").send;
|
||||
|
||||
// States of the CLI argument parser: which kind of token is expected next.
const State = enum {
    expect_main_command,
    expect_query,
    expect_schema_command,
    expect_path_to_schema,
    expect_db_command,
    expect_path_to_new_db,
    expect_path_to_db,
    quit, // user asked to stop
    end, // command fully handled
};
|
||||
|
||||
/// CLI entry point. Reads one command from argv, dispatches it through a
/// small state machine (run / db / schema / help / quit) and exits.
pub fn main() !void {
    var state: State = .expect_main_command;

    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    defer switch (gpa.deinit()) {
        .ok => {},
        .leak => std.log.debug("We fucked it up bro...\n", .{}),
    };

    const path_env_variable = utils.getEnvVariables(allocator, "ZIPPONDB_PATH");
    var file_engine: FileEngine = undefined;
    defer file_engine.deinit();

    if (path_env_variable) |path| {
        std.debug.print("ZIPONDB_PATH environment variable found: {s}\n", .{path});

        var to_init = true;
        if (std.fs.cwd().openDir(path, .{})) |dir_check| {
            // Only probing that the directory exists; close the handle
            // (the original discarded it and leaked the fd).
            var dir = dir_check;
            dir.close();
        } else |_| {
            std.debug.print("Error opening ZipponDB path using environment variable, please select the database using 'db use' or create a new one with 'db new'\n", .{});
            file_engine = try FileEngine.init(allocator, try allocator.dupe(u8, ""));
            // Bug fix: this was `to_init = true`, so the engine just created
            // above was immediately overwritten (and leaked) by the init below.
            to_init = false;
            // The env path is not handed to any engine in this branch; free it.
            allocator.free(path);
        }
        if (to_init) {
            file_engine = try FileEngine.init(allocator, path_env_variable.?);
            try file_engine.checkAndCreateDirectories(path, allocator);
        }
    } else {
        file_engine = try FileEngine.init(allocator, try allocator.dupe(u8, ""));
        std.debug.print("No ZIPONDB_PATH environment variable found, please use the command:\n db use path/to/db \nor\n db new /path/to/dir\n", .{});
    }

    const args_buffer = utils.getArgsString(allocator);
    defer args_buffer.deinit();

    var toker = cliTokenizer.init(args_buffer.items);
    var token = toker.next();
    state = .expect_main_command;

    while ((state != .end) and (state != .quit)) : (token = toker.next()) switch (state) {
        .expect_main_command => switch (token.tag) {
            .keyword_run => {
                if (!file_engine.usable) {
                    send("Error: No database selected. Please use db new or db use.", .{});
                    state = .end;
                    continue;
                }
                state = .expect_query;
            },
            .keyword_db => state = .expect_db_command,
            .keyword_schema => {
                if (!file_engine.usable) {
                    send("Error: No database selected. Please use db new or db use.", .{});
                    state = .end;
                    continue;
                }
                state = .expect_schema_command;
            },
            .keyword_help => {
                send("{s}", .{
                    \\Welcome to ZipponDB v0.1.1!
                    \\
                    \\Available commands:
                    \\run To run a query.
                    \\db Create or chose a database.
                    \\schema Initialize the database schema.
                    \\quit Stop the CLI with memory safety.
                    \\
                    \\ For more informations: https://github.com/MrBounty/ZipponDB
                    \\
                });
                state = .end;
            },
            .keyword_quit => state = .quit,
            .eof => state = .end,
            else => {
                send("Command need to start with a keyword, including: run, db, schema, help and quit", .{});
                state = .end;
            },
        },

        .expect_db_command => switch (token.tag) {
            .keyword_new => state = .expect_path_to_new_db,
            .keyword_use => state = .expect_path_to_db,
            .keyword_metrics => {
                if (!file_engine.usable) {
                    send("Error: No database selected. Please use db new or db use.", .{});
                    state = .end;
                    continue;
                }

                var buffer = std.ArrayList(u8).init(allocator);
                defer buffer.deinit();

                try file_engine.writeDbMetrics(&buffer);
                send("{s}", .{buffer.items});
                state = .end;
            },
            .keyword_help => {
                send("{s}", .{
                    \\Available commands:
                    \\new Create a new database using a path to a sub folder.
                    \\use Select another ZipponDB folder to use as database.
                    \\metrics Print some metrics of the current database.
                    \\
                    \\ For more informations: https://github.com/MrBounty/ZipponDB
                    \\
                });
                state = .end;
            },
            else => {
                send("Error: db commands available: new, metrics, swap & help", .{});
                state = .end;
            },
        },

        .expect_path_to_db => switch (token.tag) {
            .identifier => {
                file_engine.deinit();
                file_engine = try FileEngine.init(allocator, try allocator.dupe(u8, toker.getTokenSlice(token)));
                send("Successfully started using the database!", .{});
                state = .end;
            },
            else => {
                send("Error Expect a path to a ZipponDB folder.", .{});
                state = .end;
            },
        },

        .expect_path_to_new_db => switch (token.tag) {
            .identifier => {
                file_engine.checkAndCreateDirectories(toker.getTokenSlice(token), allocator) catch |err| {
                    send("Error: Coulnt create database directories: {any}", .{err});
                    state = .end;
                    continue;
                };
                file_engine.deinit();
                // Bug fix: `try` was missing here even though FileEngine.init
                // returns an error union at every other call site.
                file_engine = try FileEngine.init(allocator, try allocator.dupe(u8, toker.getTokenSlice(token)));
                send("Successfully initialized the database!", .{});
                state = .end;
            },
            else => {
                send("Error Expect a path to a folder.", .{});
                state = .end;
            },
        },

        .expect_query => switch (token.tag) {
            .string_literal => {
                // Strip the surrounding quotes and hand a null-terminated copy
                // to the query runner.
                const null_term_query_str = try allocator.dupeZ(u8, toker.buffer[token.loc.start + 1 .. token.loc.end - 1]);
                defer allocator.free(null_term_query_str);
                runQuery(null_term_query_str, &file_engine);
                state = .end;
            },
            .keyword_help => {
                send("The run command take a ZiQL query between \" and run it. eg: run \"GRAB User\"", .{});
                state = .end;
            },
            else => {
                send("Error: After command run, need a query, eg: \"GRAB User\"", .{});
                state = .end;
            },
        },

        .expect_schema_command => switch (token.tag) {
            .keyword_describe => {
                // Message typo fixed: was "db bew".
                if (std.mem.eql(u8, file_engine.path_to_ZipponDB_dir, "")) send("Error: No database selected. Please use db new or db use.", .{});

                if (file_engine.null_terminated_schema_buff.len == 0) {
                    send("Need to init the schema first. Please use the schema init path/to/schema command to start.", .{});
                } else {
                    send("Schema:\n {s}", .{file_engine.null_terminated_schema_buff});
                }
                state = .end;
            },
            .keyword_init => state = .expect_path_to_schema,
            .keyword_help => {
                send("{s}", .{
                    \\Available commands:
                    \\describe Print the schema use by the currently selected database.
                    \\init Take the path to a schema file and initialize the database.
                    \\
                    \\ For more informations: https://github.com/MrBounty/ZipponDB
                    \\
                });
                state = .end;
            },
            else => {
                send("Error: schema commands available: describe, init & help", .{});
                state = .end;
            },
        },

        .expect_path_to_schema => switch (token.tag) {
            .identifier => {
                // Bug fix: on error the original fell through and still printed
                // the success message; `continue` now skips it.
                file_engine.initDataFolder(toker.getTokenSlice(token)) catch |err| {
                    switch (err) {
                        error.SchemaFileNotFound => send("Coulnt find the schema file at {s}", .{toker.getTokenSlice(token)}),
                        else => send("Error initializing the schema", .{}),
                    }
                    state = .end;
                    continue;
                };
                send("Successfully initialized the database schema!", .{});
                state = .end;
            },
            else => {
                send("Error: Expect path to schema file.", .{});
                state = .end;
            },
        },

        .quit, .end => break,
    };
}
|
||||
|
||||
/// Tokenize and parse a single ZiQL query against `file_engine`.
/// Uses its own GPA so each query's allocations are leak-checked independently.
pub fn runQuery(null_term_query_str: [:0]const u8, file_engine: *FileEngine) void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();

    var toker = ziqlTokenizer.init(null_term_query_str);

    var parser = ziqlParser.init(allocator, &toker, file_engine);
    defer {
        parser.deinit();
        switch (gpa.deinit()) {
            .ok => {},
            .leak => std.log.debug("We fucked it up bro...\n", .{}),
        }
    }

    parser.parse() catch |err| switch (err) {
        // Syntax errors are already reported to the user by the parser.
        error.SynthaxError => {},
        // Bug fix: unexpected errors were silently swallowed; log them instead.
        else => std.log.err("Query failed: {any}", .{err}),
    };
}
|
||||
|
||||
// TODO: Put that in the FileEngine
|
@ -1,31 +0,0 @@
|
||||
const std = @import("std");
|
||||
|
||||
/// Return a caller-owned copy of the environment variable `variable`,
/// or null when it is unset or on any allocation failure.
pub fn getEnvVariables(allocator: std.mem.Allocator, variable: []const u8) ?[]const u8 {
    var env_map = std.process.getEnvMap(allocator) catch return null;
    defer env_map.deinit();

    // Direct lookup instead of scanning the whole iterator.
    const value = env_map.get(variable) orelse return null;
    return allocator.dupe(u8, value) catch return null;
}
|
||||
|
||||
/// Recursively sum the size in bytes of every regular file under `dir`.
pub fn getDirTotalSize(dir: std.fs.Dir) !u64 {
    var total: u64 = 0;
    var iter = dir.iterate();
    while (try iter.next()) |entry| {
        if (entry.kind == .directory) {
            // Close the sub-directory handle — the original leaked one fd per
            // directory visited.
            var sub_dir = try dir.openDir(entry.name, .{ .iterate = true });
            defer sub_dir.close();
            total += try getDirTotalSize(sub_dir);
        }
        if (entry.kind != .file) continue;
        const stat = try dir.statFile(entry.name);
        total += stat.size;
    }
    return total;
}
|
@ -3,18 +3,26 @@ const Allocator = std.mem.Allocator;
|
||||
const DataType = @import("types/dataType.zig").DataType;
|
||||
const Toker = @import("tokenizers/schema.zig").Tokenizer;
|
||||
const Token = @import("tokenizers/schema.zig").Token;
|
||||
const send = @import("stuffs/utils.zig").send;
|
||||
const printError = @import("stuffs/utils.zig").printError;
|
||||
|
||||
const stdout = std.io.getStdOut().writer();
|
||||
const SchemaParserError = error{
|
||||
SynthaxError,
|
||||
FeatureMissing,
|
||||
};
|
||||
|
||||
// Fuse this with the same function in the ZiQL parser
/// Print a formatted message to stdout followed by an ETX (0x03) end-of-message
/// marker. Print failures are logged, never propagated.
/// NOTE(review): on a failed print the ETX marker is written twice (once in the
/// catch, once unconditionally) — confirm this is intended.
fn send(comptime format: []const u8, args: anytype) void {
    stdout.print(format, args) catch |err| {
        std.log.err("Can't send: {any}", .{err});
        stdout.print("\x03\n", .{}) catch {};
    };

    stdout.print("\x03\n", .{}) catch {};
}
|
||||
// States of the schema-file parser: which token the parser expects next while
// walking "StructName ( member: type, ... )" declarations.
const State = enum {
    end,
    invalid,
    expect_struct_name_OR_end,
    expect_member_name,
    expect_l_paren,
    expect_member_name_OR_r_paren,
    expect_value_type,
    expext_array_type, // element type after "[]" (name kept as-is, typo included)
    expect_two_dot,
    expect_comma,
};
|
||||
|
||||
pub const Parser = struct {
|
||||
toker: *Toker,
|
||||
@ -47,24 +55,21 @@ pub const Parser = struct {
|
||||
}
|
||||
};
|
||||
|
||||
const State = enum {
|
||||
end,
|
||||
invalid,
|
||||
expect_struct_name_OR_end,
|
||||
expect_member_name,
|
||||
expect_l_paren,
|
||||
expect_member_name_OR_r_paren,
|
||||
expect_value_type,
|
||||
expext_array_type,
|
||||
expect_two_dot,
|
||||
expect_comma,
|
||||
};
|
||||
|
||||
pub fn parse(self: *Parser, struct_array: *std.ArrayList(SchemaStruct)) !void {
|
||||
var state: State = .expect_struct_name_OR_end;
|
||||
var index: usize = 0;
|
||||
var keep_next = false;
|
||||
|
||||
errdefer {
|
||||
for (0..struct_array.items.len) |i| {
|
||||
struct_array.items[i].deinit();
|
||||
}
|
||||
|
||||
for (0..struct_array.items.len) |_| {
|
||||
_ = struct_array.pop();
|
||||
}
|
||||
}
|
||||
|
||||
var token = self.toker.next();
|
||||
while ((state != .end) and (state != .invalid)) : ({
|
||||
token = if (!keep_next) self.toker.next() else token;
|
||||
@ -76,18 +81,12 @@ pub const Parser = struct {
|
||||
struct_array.append(SchemaStruct.init(self.allocator, token.loc)) catch @panic("Error appending a struct name.");
|
||||
},
|
||||
.eof => state = .end,
|
||||
else => {
|
||||
self.printError("Error parsing schema: Expected a struct name", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
else => return printError("Error parsing schema: Expected a struct name", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_l_paren => switch (token.tag) {
|
||||
.l_paren => state = .expect_member_name,
|
||||
else => {
|
||||
self.printError("Error parsing schema: Expected (", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
else => return printError("Error parsing schema: Expected (", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_member_name_OR_r_paren => switch (token.tag) {
|
||||
@ -99,10 +98,7 @@ pub const Parser = struct {
|
||||
state = .expect_struct_name_OR_end;
|
||||
index += 1;
|
||||
},
|
||||
else => {
|
||||
self.printError("Error parsing schema: Expected member name or )", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
else => return printError("Error parsing schema: Expected member name or )", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_member_name => {
|
||||
@ -112,10 +108,7 @@ pub const Parser = struct {
|
||||
|
||||
.expect_two_dot => switch (token.tag) {
|
||||
.two_dot => state = .expect_value_type,
|
||||
else => {
|
||||
self.printError("Error parsing schema: Expected :", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
else => return printError("Error parsing schema: Expected :", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_value_type => switch (token.tag) {
|
||||
@ -138,10 +131,7 @@ pub const Parser = struct {
|
||||
.type_date => @panic("Date not yet implemented"),
|
||||
.identifier => @panic("Link not yet implemented"),
|
||||
.lr_bracket => state = .expext_array_type,
|
||||
else => {
|
||||
self.printError("Error parsing schema: Expected data type", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
else => return printError("Error parsing schema: Expected data type", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expext_array_type => switch (token.tag) {
|
||||
@ -161,69 +151,18 @@ pub const Parser = struct {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.bool_array) catch @panic("Error appending a type.");
|
||||
},
|
||||
.type_date => {
|
||||
self.printError("Error parsing schema: Data not yet implemented", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
.identifier => {
|
||||
self.printError("Error parsing schema: Relationship not yet implemented", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
else => {
|
||||
self.printError("Error parsing schema: Expected data type", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
.type_date => return printError("Error parsing schema: Data not yet implemented", SchemaParserError.FeatureMissing, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
.identifier => return printError("Error parsing schema: Relationship not yet implemented", SchemaParserError.FeatureMissing, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
else => return printError("Error parsing schema: Expected data type", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_comma => switch (token.tag) {
|
||||
.comma => state = .expect_member_name_OR_r_paren,
|
||||
else => {
|
||||
self.printError("Error parsing schema: Expected ,", &token);
|
||||
state = .invalid;
|
||||
},
|
||||
else => return printError("Error parsing schema: Expected ,", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
// if invalid, empty the list
|
||||
if (state == .invalid) {
|
||||
for (0..struct_array.items.len) |i| {
|
||||
struct_array.items[i].deinit();
|
||||
}
|
||||
|
||||
for (0..struct_array.items.len) |_| {
|
||||
_ = struct_array.pop();
|
||||
}
|
||||
return error.SchemaNotConform;
|
||||
}
|
||||
}
|
||||
|
||||
/// Print the whole (newline-flattened) source buffer to stdout, underline the
/// token's span with '^', then print `message` and flush with an empty `send`.
fn printError(self: *Parser, message: []const u8, token: *Token) void {
    stdout.print("\n", .{}) catch {};

    // Work on a copy so the tokenizer's buffer is left untouched.
    const output = self.allocator.dupe(u8, self.toker.buffer) catch @panic("Cant allocator memory when print error");
    defer self.allocator.free(output);

    // Flatten newlines so the caret line below lines up with the source text.
    std.mem.replaceScalar(u8, output, '\n', ' ');
    stdout.print("{s}\n", .{output}) catch {};

    // Calculate the number of spaces needed to reach the start position.
    var spaces: usize = 0;
    while (spaces < token.loc.start) : (spaces += 1) {
        stdout.print(" ", .{}) catch {};
    }

    // Print the '^' characters for the error span.
    var i: usize = token.loc.start;
    while (i < token.loc.end) : (i += 1) {
        stdout.print("^", .{}) catch {};
    }
    stdout.print(" \n", .{}) catch {}; // Align with the message

    stdout.print("{s}\n", .{message}) catch {};

    send("", .{});
}
|
||||
};
|
||||
|
||||
|
95
src/stuffs/utils.zig
Normal file
95
src/stuffs/utils.zig
Normal file
@ -0,0 +1,95 @@
|
||||
const std = @import("std");
|
||||
|
||||
const stdout = std.io.getStdOut().writer();
|
||||
|
||||
/// Return a caller-owned copy of the environment variable `variable`,
/// or null when it is unset or on any allocation failure.
pub fn getEnvVariables(allocator: std.mem.Allocator, variable: []const u8) ?[]const u8 {
    var env_map = std.process.getEnvMap(allocator) catch return null;
    defer env_map.deinit();

    // Direct lookup instead of scanning the whole iterator.
    const value = env_map.get(variable) orelse return null;
    return allocator.dupe(u8, value) catch return null;
}
|
||||
|
||||
/// Recursively sum the size in bytes of every regular file under `dir`.
pub fn getDirTotalSize(dir: std.fs.Dir) !u64 {
    var total: u64 = 0;
    var iter = dir.iterate();
    while (try iter.next()) |entry| {
        if (entry.kind == .directory) {
            // Close the sub-directory handle — the original leaked one fd per
            // directory visited.
            var sub_dir = try dir.openDir(entry.name, .{ .iterate = true });
            defer sub_dir.close();
            total += try getDirTotalSize(sub_dir);
        }
        if (entry.kind != .file) continue;
        const stat = try dir.statFile(entry.name);
        total += stat.size;
    }
    return total;
}
|
||||
|
||||
// Maybe create a struct for that
/// Print a formatted message to stdout followed by an ETX (0x03) end-of-message
/// marker so clients can detect message boundaries. Print failures are logged,
/// never propagated.
/// NOTE(review): on a failed print the ETX marker is written twice (once in the
/// catch, once unconditionally) — confirm this is intended.
pub fn send(comptime format: []const u8, args: anytype) void {
    stdout.print(format, args) catch |err| {
        std.log.err("Can't send: {any}", .{err});
        stdout.print("\x03\n", .{}) catch {};
    };

    stdout.print("\x03\n", .{}) catch {};
}
|
||||
|
||||
/// Print an error and send it to the user pointing to the token.
/// Builds the whole report in a temporary buffer, then emits it through `send`
/// and returns `err` unchanged so callers can `return printError(...)`.
pub fn printError(message: []const u8, err: anyerror, query: ?[]const u8, start: ?usize, end: ?usize) anyerror {
    const allocator = std.heap.page_allocator;
    var buffer = std.ArrayList(u8).init(allocator);
    defer buffer.deinit();

    var writer = buffer.writer();

    writer.print("\n", .{}) catch {};
    writer.print("{s}\n", .{message}) catch {};

    // Only underline the token when we know both the query text and the span.
    if ((query != null) and (start != null) and (end != null)) {
        const flattened = try allocator.dupe(u8, query.?);
        defer allocator.free(flattened);

        // Flatten newlines so the caret line aligns with the query text.
        std.mem.replaceScalar(u8, flattened, '\n', ' ');
        writer.print("{s}\n", .{flattened}) catch {};

        // Pad up to the token start, then underline its span with '^'.
        writer.writeByteNTimes(' ', start.?) catch {};
        if (end.? > start.?) writer.writeByteNTimes('^', end.? - start.?) catch {};
        writer.print(" \n", .{}) catch {}; // Align with the message
    }

    send("{s}", .{buffer.items});
    return err;
}
|
||||
|
||||
/// Join all process arguments (argv[0] included) into a single space-separated,
/// null-terminated buffer. Caller owns the returned list.
/// Bug fix: the original used `try` and discarded error unions inside a
/// function whose return type has no error set, which cannot compile; failures
/// are now handled explicitly while keeping the same signature.
pub fn getArgsString(allocator: std.mem.Allocator) std.ArrayList(u8) {
    const args = std.process.argsAlloc(allocator) catch @panic("Cant allocate args");
    defer std.process.argsFree(allocator, args);

    var buffer = std.ArrayList(u8).init(allocator);
    var writer = buffer.writer();

    for (args) |arg| {
        writer.print("{s} ", .{arg}) catch @panic("Cant write arg");
    }

    buffer.append(0) catch @panic("Cant append null terminator");

    return buffer;
}
|
123
src/types/stringToType.zig
Normal file
123
src/types/stringToType.zig
Normal file
@ -0,0 +1,123 @@
|
||||
const std = @import("std");
|
||||
const UUID = @import("uuid.zig").UUID;
|
||||
|
||||
// TODO: Put those functions somewhere else
|
||||
/// Parse a base-10 signed integer; any invalid input yields 0.
pub fn parseInt(value_str: []const u8) i64 {
    return std.fmt.parseInt(i64, value_str, 10) catch 0;
}
|
||||
|
||||
/// Parse a space-separated int array literal like "[1 2 3]".
/// Unparsable entries become 0. Caller owns the returned list.
pub fn parseArrayInt(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(i64) {
    var out = std.ArrayList(i64).init(allocator);

    // Strip the surrounding brackets, then split on spaces.
    var tokens = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (tokens.next()) |tok| out.append(parseInt(tok)) catch {};

    return out;
}
|
||||
|
||||
/// Parse a float; any invalid input yields 0.
pub fn parseFloat(value_str: []const u8) f64 {
    return std.fmt.parseFloat(f64, value_str) catch 0;
}
|
||||
|
||||
/// Parse a space-separated float array literal like "[1.5 2.0]".
/// Unparsable entries become 0. Caller owns the returned list.
pub fn parseArrayFloat(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(f64) {
    var out = std.ArrayList(f64).init(allocator);

    var tokens = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (tokens.next()) |tok| out.append(parseFloat(tok)) catch {};

    return out;
}
|
||||
|
||||
pub fn parseBool(value_str: []const u8) bool {
|
||||
return (value_str[0] != '0');
|
||||
}
|
||||
|
||||
pub fn parseArrayBool(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(bool) {
|
||||
var array = std.ArrayList(bool).init(allocator);
|
||||
|
||||
var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
|
||||
while (it.next()) |x| {
|
||||
array.append(parseBool(x)) catch {};
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
|
||||
pub fn parseArrayUUID(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(UUID) {
|
||||
var array = std.ArrayList(UUID).init(allocator);
|
||||
|
||||
var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
|
||||
while (it.next()) |x| {
|
||||
const uuid = UUID.parse(x) catch continue;
|
||||
array.append(uuid) catch continue;
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
|
||||
// FIXME: I think it will not work if there is a ' inside the string
|
||||
pub fn parseArrayStr(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList([]const u8) {
|
||||
var array = std.ArrayList([]const u8).init(allocator);
|
||||
|
||||
var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], "'");
|
||||
while (it.next()) |x| {
|
||||
if (std.mem.eql(u8, " ", x)) continue;
|
||||
const x_copy = allocator.dupe(u8, x) catch @panic("=(");
|
||||
// FIXME: I think I need to add the '' on each side again
|
||||
array.append(x_copy) catch {};
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
|
||||
test "Data parsing" {
|
||||
const allocator = std.testing.allocator;
|
||||
|
||||
// Int
|
||||
const in1: [3][]const u8 = .{ "1", "42", "Hello" };
|
||||
const expected_out1: [3]i64 = .{ 1, 42, 0 };
|
||||
for (in1, 0..) |value, i| {
|
||||
try std.testing.expect(parseInt(value) == expected_out1[i]);
|
||||
}
|
||||
|
||||
// Int array
|
||||
const in2 = "[1 14 44 42 hello]";
|
||||
const out2 = parseArrayInt(allocator, in2);
|
||||
defer out2.deinit();
|
||||
const expected_out2: [5]i64 = .{ 1, 14, 44, 42, 0 };
|
||||
try std.testing.expect(std.mem.eql(i64, out2.items, &expected_out2));
|
||||
|
||||
// Float
|
||||
const in3: [3][]const u8 = .{ "1.3", "65.991", "Hello" };
|
||||
const expected_out3: [3]f64 = .{ 1.3, 65.991, 0 };
|
||||
for (in3, 0..) |value, i| {
|
||||
try std.testing.expect(parseFloat(value) == expected_out3[i]);
|
||||
}
|
||||
|
||||
// Float array
|
||||
const in4 = "[1.5 14.3 44.9999 42 hello]";
|
||||
const out4 = parseArrayFloat(allocator, in4);
|
||||
defer out4.deinit();
|
||||
const expected_out4: [5]f64 = .{ 1.5, 14.3, 44.9999, 42, 0 };
|
||||
try std.testing.expect(std.mem.eql(f64, out4.items, &expected_out4));
|
||||
|
||||
// Bool
|
||||
const in5: [3][]const u8 = .{ "1", "Hello", "0" };
|
||||
const expected_out5: [3]bool = .{ true, true, false };
|
||||
for (in5, 0..) |value, i| {
|
||||
try std.testing.expect(parseBool(value) == expected_out5[i]);
|
||||
}
|
||||
|
||||
// Bool array
|
||||
const in6 = "[1 0 0 1 1]";
|
||||
const out6 = parseArrayBool(allocator, in6);
|
||||
defer out6.deinit();
|
||||
const expected_out6: [5]bool = .{ true, false, false, true, true };
|
||||
try std.testing.expect(std.mem.eql(bool, out6.items, &expected_out6));
|
||||
|
||||
// TODO: Test the string array
|
||||
}
|
@ -1,4 +1,5 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const FileEngine = @import("fileEngine.zig").FileEngine;
|
||||
const Condition = @import("fileEngine.zig").FileEngine.Condition;
|
||||
const Tokenizer = @import("tokenizers/ziql.zig").Tokenizer;
|
||||
@ -6,11 +7,10 @@ const Token = @import("tokenizers/ziql.zig").Token;
|
||||
const UUID = @import("types/uuid.zig").UUID;
|
||||
const AND = @import("types/uuid.zig").AND;
|
||||
const OR = @import("types/uuid.zig").OR;
|
||||
const AdditionalData = @import("parsing-tools/additionalData.zig").AdditionalData;
|
||||
const AdditionalDataMember = @import("parsing-tools/additionalData.zig").AdditionalDataMember;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const stdout = std.io.getStdOut().writer();
|
||||
const AdditionalData = @import("stuffs/additionalData.zig").AdditionalData;
|
||||
const AdditionalDataMember = @import("stuffs/additionalData.zig").AdditionalDataMember;
|
||||
const send = @import("stuffs/utils.zig").send;
|
||||
const printError = @import("stuffs/utils.zig").printError;
|
||||
|
||||
const ZiQlParserError = error{
|
||||
SynthaxError,
|
||||
@ -63,15 +63,6 @@ const State = enum {
|
||||
add_array_to_map,
|
||||
};
|
||||
|
||||
fn send(comptime format: []const u8, args: anytype) void {
|
||||
stdout.print(format, args) catch |err| {
|
||||
std.log.err("Can't send: {any}", .{err});
|
||||
stdout.print("\x03\n", .{}) catch {};
|
||||
};
|
||||
|
||||
stdout.print("\x03\n", .{}) catch {};
|
||||
}
|
||||
|
||||
pub const Parser = struct {
|
||||
allocator: Allocator,
|
||||
state: State,
|
||||
@ -141,14 +132,14 @@ pub const Parser = struct {
|
||||
self.action = .DELETE;
|
||||
self.state = .expect_struct_name;
|
||||
},
|
||||
else => return self.printError("Error: Expected action keyword. Available: GRAB ADD DELETE UPDATE", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected action keyword. Available: GRAB ADD DELETE UPDATE", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_struct_name => {
|
||||
// Check if the struct name is in the schema
|
||||
self.struct_name = try self.allocator.dupe(u8, self.toker.getTokenSlice(token));
|
||||
if (token.tag != .identifier) return self.printError("Error: Missing struct name", &token, ZiQlParserError.StructNotFound);
|
||||
if (!self.file_engine.isStructNameExists(self.struct_name)) return self.printError("Error: struct name not found in schema.", &token, ZiQlParserError.StructNotFound);
|
||||
if (token.tag != .identifier) return printError("Error: Missing struct name", ZiQlParserError.StructNotFound, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
if (!self.file_engine.isStructNameExists(self.struct_name)) return printError("Error: struct name not found in schema.", ZiQlParserError.StructNotFound, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
switch (self.action) {
|
||||
.ADD => self.state = .expect_new_data,
|
||||
else => self.state = .expect_filter_or_additional_data,
|
||||
@ -166,7 +157,7 @@ pub const Parser = struct {
|
||||
else => unreachable,
|
||||
},
|
||||
.eof => self.state = .filter_and_send,
|
||||
else => return self.printError("Error: Expect [ for additional data or { for a filter", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expect [ for additional data or { for a filter", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
},
|
||||
|
||||
@ -197,7 +188,7 @@ pub const Parser = struct {
|
||||
self.sendEntity(&array);
|
||||
self.state = .end;
|
||||
},
|
||||
else => return self.printError("Error: Expected filter.", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected filter.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
// TODO: Optimize so it doesnt use parseFilter but just parse the file and directly check the condition. Here I end up parsing 2 times.
|
||||
@ -207,10 +198,10 @@ pub const Parser = struct {
|
||||
defer array.deinit();
|
||||
token = try self.parseFilter(&array, self.struct_name, true);
|
||||
|
||||
if (token.tag != .keyword_to) return self.printError("Error: Expected TO", &token, ZiQlParserError.SynthaxError);
|
||||
if (token.tag != .keyword_to) return printError("Error: Expected TO", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
|
||||
token = self.toker.next();
|
||||
if (token.tag != .l_paren) return self.printError("Error: Expected (", &token, ZiQlParserError.SynthaxError);
|
||||
if (token.tag != .l_paren) return printError("Error: Expected (", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
|
||||
var data_map = std.StringHashMap([]const u8).init(self.allocator);
|
||||
defer data_map.deinit();
|
||||
@ -225,7 +216,7 @@ pub const Parser = struct {
|
||||
try self.file_engine.getAllUUIDList(self.struct_name, &array);
|
||||
|
||||
token = self.toker.next();
|
||||
if (token.tag != .l_paren) return self.printError("Error: Expected (", &token, ZiQlParserError.SynthaxError);
|
||||
if (token.tag != .l_paren) return printError("Error: Expected (", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
|
||||
var data_map = std.StringHashMap([]const u8).init(self.allocator);
|
||||
defer data_map.deinit();
|
||||
@ -234,7 +225,7 @@ pub const Parser = struct {
|
||||
try self.file_engine.updateEntities(self.struct_name, array.items, data_map);
|
||||
self.state = .end;
|
||||
},
|
||||
else => return self.printError("Error: Expected filter or TO.", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected filter or TO.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.filter_and_delete => switch (token.tag) {
|
||||
@ -256,7 +247,7 @@ pub const Parser = struct {
|
||||
std.debug.print("Successfully deleted all {d} {s}\n", .{ deleted_count, self.struct_name });
|
||||
self.state = .end;
|
||||
},
|
||||
else => return self.printError("Error: Expected filter.", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected filter.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_new_data => switch (token.tag) {
|
||||
@ -264,7 +255,7 @@ pub const Parser = struct {
|
||||
keep_next = true;
|
||||
self.state = .parse_new_data_and_add_data;
|
||||
},
|
||||
else => return self.printError("Error: Expected new data starting with (", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected new data starting with (", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.parse_new_data_and_add_data => {
|
||||
@ -273,7 +264,7 @@ pub const Parser = struct {
|
||||
try self.parseNewData(&data_map);
|
||||
|
||||
// TODO: Print the entire list of missing
|
||||
if (!self.file_engine.checkIfAllMemberInMap(self.struct_name, &data_map)) return self.printError("Error: Missing member", &token, ZiQlParserError.MemberMissing);
|
||||
if (!self.file_engine.checkIfAllMemberInMap(self.struct_name, &data_map)) return printError("Error: Missing member", ZiQlParserError.MemberMissing, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
const uuid = self.file_engine.writeEntity(self.struct_name, data_map) catch {
|
||||
send("ZipponDB error: Couln't write new data to file", .{});
|
||||
continue;
|
||||
@ -318,12 +309,12 @@ pub const Parser = struct {
|
||||
.r_brace => if (main) {
|
||||
self.state = .end;
|
||||
} else {
|
||||
return self.printError("Error: Expected } to end main condition or AND/OR to continue it", &token, ZiQlParserError.SynthaxError);
|
||||
return printError("Error: Expected } to end main condition or AND/OR to continue it", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
},
|
||||
.r_paren => if (!main) {
|
||||
self.state = .end;
|
||||
} else {
|
||||
return self.printError("Error: Expected ) to end inside condition or AND/OR to continue it", &token, ZiQlParserError.SynthaxError);
|
||||
return printError("Error: Expected ) to end inside condition or AND/OR to continue it", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
},
|
||||
.keyword_and => {
|
||||
curent_operation = .and_;
|
||||
@ -333,7 +324,7 @@ pub const Parser = struct {
|
||||
curent_operation = .or_;
|
||||
self.state = .expect_right_uuid_array;
|
||||
},
|
||||
else => return self.printError("Error: Expected a condition including AND OR or the end of the filter with } or )", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected a condition including AND OR or the end of the filter with } or )", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_right_uuid_array => {
|
||||
@ -349,7 +340,7 @@ pub const Parser = struct {
|
||||
keep_next = true;
|
||||
try self.file_engine.getUUIDListUsingCondition(right_condition, &right_array);
|
||||
}, // Create a new condition and compare it
|
||||
else => return self.printError("Error: Expected ( or member name.", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected ( or member name.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
|
||||
switch (curent_operation) {
|
||||
@ -383,13 +374,13 @@ pub const Parser = struct {
|
||||
.expect_member => switch (token.tag) {
|
||||
.identifier => {
|
||||
if (!self.file_engine.isMemberNameInStruct(condition.struct_name, self.toker.getTokenSlice(token))) {
|
||||
return self.printError("Error: Member not part of struct.", &token, ZiQlParserError.MemberNotFound);
|
||||
return printError("Error: Member not part of struct.", ZiQlParserError.MemberNotFound, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
}
|
||||
condition.data_type = self.file_engine.memberName2DataType(condition.struct_name, self.toker.getTokenSlice(token)) orelse @panic("Couldn't find the struct and member");
|
||||
condition.member_name = self.toker.getTokenSlice(token);
|
||||
self.state = State.expect_operation;
|
||||
},
|
||||
else => return self.printError("Error: Expected member name.", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected member name.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_operation => {
|
||||
@ -400,7 +391,7 @@ pub const Parser = struct {
|
||||
.angle_bracket_left_equal => condition.operation = .inferior_or_equal, // <=
|
||||
.angle_bracket_right_equal => condition.operation = .superior_or_equal, // >=
|
||||
.bang_equal => condition.operation = .different, // !=
|
||||
else => return self.printError("Error: Expected condition. Including < > <= >= = !=", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected condition. Including < > <= >= = !=", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
self.state = State.expect_value;
|
||||
},
|
||||
@ -410,25 +401,25 @@ pub const Parser = struct {
|
||||
.int => {
|
||||
switch (token.tag) {
|
||||
.int_literal => condition.value = self.toker.getTokenSlice(token),
|
||||
else => return self.printError("Error: Expected int", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected int", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
},
|
||||
.float => {
|
||||
switch (token.tag) {
|
||||
.float_literal => condition.value = self.toker.getTokenSlice(token),
|
||||
else => return self.printError("Error: Expected float", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected float", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
},
|
||||
.str, .id => {
|
||||
switch (token.tag) {
|
||||
.string_literal => condition.value = self.toker.getTokenSlice(token),
|
||||
else => return self.printError("Error: Expected string", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected string", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
},
|
||||
.bool => {
|
||||
switch (token.tag) {
|
||||
.bool_literal_true, .bool_literal_false => condition.value = self.toker.getTokenSlice(token),
|
||||
else => return self.printError("Error: Expected bool", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected bool", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
},
|
||||
.int_array => {
|
||||
@ -437,7 +428,7 @@ pub const Parser = struct {
|
||||
while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.int_literal => continue,
|
||||
else => return self.printError("Error: Expected int or ].", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected int or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
}
|
||||
condition.value = self.toker.buffer[start_index..token.loc.end];
|
||||
@ -448,7 +439,7 @@ pub const Parser = struct {
|
||||
while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.float_literal => continue,
|
||||
else => return self.printError("Error: Expected float or ].", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected float or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
}
|
||||
condition.value = self.toker.buffer[start_index..token.loc.end];
|
||||
@ -459,7 +450,7 @@ pub const Parser = struct {
|
||||
while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.string_literal => continue,
|
||||
else => return self.printError("Error: Expected string or ].", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected string or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
}
|
||||
condition.value = self.toker.buffer[start_index..token.loc.end];
|
||||
@ -470,7 +461,7 @@ pub const Parser = struct {
|
||||
while (token.tag != Token.Tag.r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.bool_literal_false, .bool_literal_true => continue,
|
||||
else => return self.printError("Error: Expected bool or ].", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected bool or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
}
|
||||
condition.value = self.toker.buffer[start_index..token.loc.end];
|
||||
@ -487,32 +478,32 @@ pub const Parser = struct {
|
||||
switch (condition.operation) {
|
||||
.equal => switch (condition.data_type) {
|
||||
.int, .float, .str, .bool, .id => {},
|
||||
else => return self.printError("Error: Only int, float, str, bool can be compare with =.", &token, ZiQlParserError.ConditionError),
|
||||
else => return printError("Error: Only int, float, str, bool can be compare with =.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.different => switch (condition.data_type) {
|
||||
.int, .float, .str, .bool, .id => {},
|
||||
else => return self.printError("Error: Only int, float, str, bool can be compare with !=.", &token, ZiQlParserError.ConditionError),
|
||||
else => return printError("Error: Only int, float, str, bool can be compare with !=.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.superior_or_equal => switch (condition.data_type) {
|
||||
.int, .float => {},
|
||||
else => return self.printError("Error: Only int, float can be compare with <=.", &token, ZiQlParserError.ConditionError),
|
||||
else => return printError("Error: Only int, float can be compare with <=.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.superior => switch (condition.data_type) {
|
||||
.int, .float => {},
|
||||
else => return self.printError("Error: Only int, float can be compare with <.", &token, ZiQlParserError.ConditionError),
|
||||
else => return printError("Error: Only int, float can be compare with <.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.inferior_or_equal => switch (condition.data_type) {
|
||||
.int, .float => {},
|
||||
else => return self.printError("Error: Only int, float can be compare with >=.", &token, ZiQlParserError.ConditionError),
|
||||
else => return printError("Error: Only int, float can be compare with >=.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.inferior => switch (condition.data_type) {
|
||||
.int, .float => {},
|
||||
else => return self.printError("Error: Only int, float can be compare with >.", &token, ZiQlParserError.ConditionError),
|
||||
else => return printError("Error: Only int, float can be compare with >.", ZiQlParserError.ConditionError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
// TODO: Do it for IN and other stuff to
|
||||
@ -536,7 +527,7 @@ pub const Parser = struct {
|
||||
.expect_count_of_entity_to_find => switch (token.tag) {
|
||||
.int_literal => {
|
||||
const count = std.fmt.parseInt(usize, self.toker.getTokenSlice(token), 10) catch {
|
||||
return self.printError("Error while transforming this into a integer.", &token, ZiQlParserError.ParsingValueError);
|
||||
return printError("Error while transforming this into a integer.", ZiQlParserError.ParsingValueError, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
};
|
||||
additional_data.entity_count_to_find = count;
|
||||
self.state = .expect_semicolon_OR_right_bracket;
|
||||
@ -550,12 +541,12 @@ pub const Parser = struct {
|
||||
.expect_semicolon_OR_right_bracket => switch (token.tag) {
|
||||
.semicolon => self.state = .expect_member,
|
||||
.r_bracket => self.state = .end,
|
||||
else => return self.printError("Error: Expect ';' or ']'.", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expect ';' or ']'.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_member => switch (token.tag) {
|
||||
.identifier => {
|
||||
if (!self.file_engine.isMemberNameInStruct(self.struct_name, self.toker.getTokenSlice(token))) return self.printError("Member not found in struct.", &token, ZiQlParserError.SynthaxError);
|
||||
if (!self.file_engine.isMemberNameInStruct(self.struct_name, self.toker.getTokenSlice(token))) return printError("Member not found in struct.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
try additional_data.member_to_find.append(
|
||||
AdditionalDataMember.init(
|
||||
self.allocator,
|
||||
@ -565,7 +556,7 @@ pub const Parser = struct {
|
||||
|
||||
self.state = .expect_comma_OR_r_bracket_OR_l_bracket;
|
||||
},
|
||||
else => return self.printError("Error: Expected a member name.", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected a member name.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_comma_OR_r_bracket_OR_l_bracket => switch (token.tag) {
|
||||
@ -577,13 +568,13 @@ pub const Parser = struct {
|
||||
);
|
||||
self.state = .expect_comma_OR_r_bracket;
|
||||
},
|
||||
else => return self.printError("Error: Expected , or ] or [", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected , or ] or [", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_comma_OR_r_bracket => switch (token.tag) {
|
||||
.comma => self.state = .expect_member,
|
||||
.r_bracket => self.state = .end,
|
||||
else => return self.printError("Error: Expected , or ]", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected , or ]", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
@ -606,16 +597,16 @@ pub const Parser = struct {
|
||||
.expect_member => switch (token.tag) {
|
||||
.identifier => {
|
||||
member_name = self.toker.getTokenSlice(token);
|
||||
if (!self.file_engine.isMemberNameInStruct(self.struct_name, member_name)) return self.printError("Member not found in struct.", &token, ZiQlParserError.MemberNotFound);
|
||||
if (!self.file_engine.isMemberNameInStruct(self.struct_name, member_name)) return printError("Member not found in struct.", ZiQlParserError.MemberNotFound, self.toker.buffer, token.loc.start, token.loc.end);
|
||||
self.state = .expect_equal;
|
||||
},
|
||||
else => return self.printError("Error: Expected member name.", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected member name.", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_equal => switch (token.tag) {
|
||||
// TODO: Implement stuff to manipulate array like APPEND or REMOVE
|
||||
.equal => self.state = .expect_new_value,
|
||||
else => return self.printError("Error: Expected =", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected =", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
|
||||
.expect_new_value => {
|
||||
@ -626,14 +617,14 @@ pub const Parser = struct {
|
||||
member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => return self.printError("Error: Expected int", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected int", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
.float => switch (token.tag) {
|
||||
.float_literal, .keyword_null => {
|
||||
member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => return self.printError("Error: Expected float", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected float", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
.bool => switch (token.tag) {
|
||||
.bool_literal_true => {
|
||||
@ -648,14 +639,14 @@ pub const Parser = struct {
|
||||
member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => return self.printError("Error: Expected bool: true false", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected bool: true false", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
.str, .id => switch (token.tag) {
|
||||
.string_literal, .keyword_null => {
|
||||
member_map.put(member_name, self.toker.getTokenSlice(token)) catch @panic("Could not add member name and value to map in getMapOfMember");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => return self.printError("Error: Expected string between ''", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected string between ''", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
// TODO: Maybe upgrade that to use multiple state
|
||||
.int_array => switch (token.tag) {
|
||||
@ -665,14 +656,14 @@ pub const Parser = struct {
|
||||
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.int_literal => continue,
|
||||
else => return self.printError("Error: Expected int or ].", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected int or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
}
|
||||
// Maybe change that as it just recreate a string that is already in the buffer
|
||||
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couln't add string of array in data map");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => return self.printError("Error: Expected [ to start an array", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected [ to start an array", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
.float_array => switch (token.tag) {
|
||||
.l_bracket => {
|
||||
@ -681,14 +672,14 @@ pub const Parser = struct {
|
||||
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.float_literal => continue,
|
||||
else => return self.printError("Error: Expected float or ].", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected float or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
}
|
||||
// Maybe change that as it just recreate a string that is already in the buffer
|
||||
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couln't add string of array in data map");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => return self.printError("Error: Expected [ to start an array", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected [ to start an array", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
.bool_array => switch (token.tag) {
|
||||
.l_bracket => {
|
||||
@ -697,14 +688,14 @@ pub const Parser = struct {
|
||||
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.bool_literal_false, .bool_literal_true => continue,
|
||||
else => return self.printError("Error: Expected bool or ].", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected bool or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
}
|
||||
// Maybe change that as it just recreate a string that is already in the buffer
|
||||
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couln't add string of array in data map");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => return self.printError("Error: Expected [ to start an array", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected [ to start an array", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
.str_array, .id_array => switch (token.tag) {
|
||||
.l_bracket => {
|
||||
@ -713,14 +704,14 @@ pub const Parser = struct {
|
||||
while (token.tag != .r_bracket) : (token = self.toker.next()) {
|
||||
switch (token.tag) {
|
||||
.string_literal => continue,
|
||||
else => return self.printError("Error: Expected str or ].", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected str or ].", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
}
|
||||
// Maybe change that as it just recreate a string that is already in the buffer
|
||||
member_map.put(member_name, self.toker.buffer[start_index..token.loc.end]) catch @panic("Couln't add string of array in data map");
|
||||
self.state = .expect_comma_OR_end;
|
||||
},
|
||||
else => return self.printError("Error: Expected [ to start an array", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expected [ to start an array", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
},
|
||||
}
|
||||
},
|
||||
@ -729,37 +720,13 @@ pub const Parser = struct {
|
||||
switch (token.tag) {
|
||||
.r_paren => self.state = .end,
|
||||
.comma => self.state = .expect_member,
|
||||
else => return self.printError("Error: Expect , or )", &token, ZiQlParserError.SynthaxError),
|
||||
else => return printError("Error: Expect , or )", ZiQlParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
}
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
/// Print an error and send it to the user pointing to the token
|
||||
/// TODO: There is a duplicate of this somewhere, make it a single function
|
||||
fn printError(self: *Parser, message: []const u8, token: *Token, err: ZiQlParserError) ZiQlParserError {
|
||||
stdout.print("\n", .{}) catch {};
|
||||
stdout.print("{s}\n", .{message}) catch {};
|
||||
stdout.print("{s}\n", .{self.toker.buffer}) catch {};
|
||||
|
||||
// Calculate the number of spaces needed to reach the start position.
|
||||
var spaces: usize = 0;
|
||||
while (spaces < token.loc.start) : (spaces += 1) {
|
||||
stdout.print(" ", .{}) catch {};
|
||||
}
|
||||
|
||||
// Print the '^' characters for the error span.
|
||||
var i: usize = token.loc.start;
|
||||
while (i < token.loc.end) : (i += 1) {
|
||||
stdout.print("^", .{}) catch {};
|
||||
}
|
||||
stdout.print(" \n", .{}) catch {}; // Align with the message
|
||||
|
||||
send("", .{});
|
||||
return err;
|
||||
}
|
||||
};
|
||||
|
||||
test "ADD" {
|
||||
@ -816,7 +783,7 @@ fn testParsing(source: [:0]const u8) !void {
|
||||
const allocator = std.testing.allocator;
|
||||
|
||||
const path = try allocator.dupe(u8, "ZipponDB");
|
||||
var file_engine = FileEngine.init(allocator, path);
|
||||
var file_engine = try FileEngine.init(allocator, path);
|
||||
defer file_engine.deinit();
|
||||
|
||||
var tokenizer = Tokenizer.init(source);
|
||||
@ -830,7 +797,7 @@ fn expectParsingError(source: [:0]const u8, err: ZiQlParserError) !void {
|
||||
const allocator = std.testing.allocator;
|
||||
|
||||
const path = try allocator.dupe(u8, "ZipponDB");
|
||||
var file_engine = FileEngine.init(allocator, path);
|
||||
var file_engine = try FileEngine.init(allocator, path);
|
||||
defer file_engine.deinit();
|
||||
|
||||
var tokenizer = Tokenizer.init(source);
|
||||
|
Loading…
x
Reference in New Issue
Block a user