Added logging; all Tokens now use the same Loc
commit a6a3c092cc
parent ebb3eec4f4
@@ -12,6 +12,7 @@ const SchemaParser = @import("schemaParser.zig").Parser;
 const SchemaTokenizer = @import("tokenizers/schema.zig").Tokenizer;
 const SchemaToken = @import("tokenizers/schema.zig").Token;
 const AdditionalData = @import("stuffs/additionalData.zig").AdditionalData;
+const Loc = @import("tokenizers/shared/loc.zig").Loc;

 const FileEngineError = @import("stuffs/errors.zig").FileEngineError;

@@ -83,18 +84,61 @@ pub const FileEngine = struct {
     /// An Operation from equal, different, superior, superior_or_equal, ...
     /// The DataType from int, float and str
     /// TODO: Use token from the query for struct_name, member_name and value, to save memory
     /// TODO: Update to do multiple operation at the same time on a row
     pub const Condition = struct {
         struct_name: []const u8,
         member_name: []const u8 = undefined,
-        value: []const u8 = undefined, // Could be just one with data_type if using union(enum) or can use ComparisonValue directly
-        operation: enum { equal, different, superior, superior_or_equal, inferior, inferior_or_equal, in } = undefined, // Add more stuff like IN
+        value: []const u8 = undefined,
+        operation: enum { equal, different, superior, superior_or_equal, inferior, inferior_or_equal, in } = undefined,
         data_type: DataType = undefined,

-        pub fn init(struct_name: []const u8) Condition {
-            return Condition{ .struct_name = struct_name };
+        pub fn init(struct_loc: []const u8) Condition {
+            return Condition{ .struct_name = struct_loc };
         }
     };

+    // --------------------Logs--------------------
+
+    const Level = enum {
+        Debug,
+        Info,
+        Warning,
+        Error,
+        Critical,
+    };
+
+    pub fn resetLog(self: FileEngine, file_name: []const u8) void {
+        const path = std.fmt.allocPrint(self.allocator, "{s}/LOG/{s}.log", .{ self.path_to_ZipponDB_dir, file_name }) catch return;
+        defer self.allocator.free(path);
+
+        std.fs.cwd().deleteFile(path) catch {};
+        _ = std.fs.cwd().createFile(path, .{}) catch return;
+    }
+
+    pub fn log(self: FileEngine, file_name: []const u8, level: Level, comptime format: []const u8, args: anytype) void {
+        const path = std.fmt.allocPrint(self.allocator, "{s}/LOG/{s}.log", .{ self.path_to_ZipponDB_dir, file_name }) catch return;
+        defer self.allocator.free(path);
+
+        const file = std.fs.cwd().openFile(path, .{ .mode = .write_only }) catch return;
+        defer file.close();
+
+        file.seekFromEnd(0) catch return;
+
+        const writer = file.writer();
+        const now = DateTime.now();
+
+        writer.print("Time: {d}/{d}/{d}-{d}:{d}:{d}.{d} - ", .{ now.years, now.months, now.days, now.hours, now.minutes, now.seconds, now.ms }) catch return;
+        switch (level) {
+            .Debug => writer.print("Debug - ", .{}) catch return,
+            .Info => writer.print("Info - ", .{}) catch return,
+            .Warning => writer.print("Warning - ", .{}) catch return,
+            .Error => writer.print("Error - ", .{}) catch return,
+            .Critical => writer.print("Critical - ", .{}) catch return,
+        }
+        writer.print(format, args) catch return;
+        writer.writeByte('\n') catch return;
+    }
+
+    // --------------------Other--------------------
+
     pub fn readSchemaFile(allocator: Allocator, sub_path: []const u8, buffer: []u8) FileEngineError!usize {
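
The new logging helpers write one line per call to {path_to_ZipponDB_dir}/LOG/{file_name}.log and swallow every error, so logging can never take the engine down. A minimal usage sketch (assuming an initialized FileEngine value file_engine; "main" is the log name main.zig uses below, and db_path is hypothetical):

    file_engine.resetLog("main"); // delete and recreate LOG/main.log
    file_engine.log("main", .Info, "Database opened at {s}", .{db_path});
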
@@ -427,6 +471,7 @@ pub const FileEngine = struct {
     /// Take a condition and an array of UUID and fill the array with all UUID that match the condition
     /// TODO: Change the UUID function to be a B+Tree
     /// TODO: Optimize the shit out of this, it is way too slow rn. Here are some ideas:
     /// - Make multiple condition per row
     /// - Array can take a very long time to parse, maybe put them in a separate file. But string can be too...
     /// - Use the stream directly in the tokenizer
     /// - Use a fixed size and split into other file. Like one file for one member (Because very long, like an array of 1000 value) and another one for everything else
@@ -444,8 +489,6 @@ pub const FileEngine = struct {
     ) catch return FileEngineError.MemoryError;
     defer self.allocator.free(path_buff);

-    std.debug.print("{s}\n", .{path_buff});
-
     var file = std.fs.cwd().openFile(path_buff, .{}) catch return FileEngineError.CantOpenFile;
     defer file.close();

@@ -1036,7 +1079,7 @@ pub const FileEngine = struct {
     file.writeAll(self.null_terminated_schema_buff) catch return FileEngineError.WriteError;
 }

-pub fn locToSlice(self: *FileEngine, loc: SchemaToken.Loc) []const u8 {
+pub fn locToSlice(self: *FileEngine, loc: Loc) []const u8 {
     return self.null_terminated_schema_buff[loc.start..loc.end];
 }

@@ -1054,7 +1097,7 @@ pub const FileEngine = struct {
 }

 /// Get the list of all member name for a struct name
-pub fn structName2structMembers(self: *FileEngine, struct_name: []const u8) FileEngineError![]SchemaToken.Loc {
+pub fn structName2structMembers(self: *FileEngine, struct_name: []const u8) FileEngineError![]Loc {
     var i: u16 = 0;

     while (i < self.struct_array.items.len) : (i += 1) if (std.mem.eql(u8, self.locToSlice(self.struct_array.items[i].name), struct_name)) break;
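
With the shared Loc type, member locations returned by structName2structMembers resolve back to text through locToSlice. A hedged round-trip sketch ("User" is a hypothetical struct name):

    const member_locs = try file_engine.structName2structMembers("User");
    for (member_locs) |loc| {
        std.debug.print("member: {s}\n", .{file_engine.locToSlice(loc)});
    }
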
src/main.zig (14 changes)
@@ -39,21 +39,20 @@ pub fn main() !void {
     defer file_engine.deinit();

     if (path_env_variable) |path| {
-        std.debug.print("ZIPPONDB_PATH environment variable found: {s}\n", .{path});
-
         var to_init = true;
         _ = std.fs.cwd().openDir(path, .{}) catch {
             std.debug.print("Error opening ZipponDB path using environment variable, please select the database using 'db use' or create a new one with 'db new'\n", .{});
-            file_engine = FileEngine.init(allocator, try allocator.dupe(u8, ""));
+            file_engine = FileEngine.init(allocator, "");
             to_init = false;
         };
         if (to_init) {
             file_engine = FileEngine.init(allocator, path);
             try file_engine.checkAndCreateDirectories();
+            file_engine.resetLog("main");
+            file_engine.log("main", .Info, "Found environment variable ZIPPONDB_PATH: {s}", .{path});
         }
     } else {
-        file_engine = FileEngine.init(allocator, try allocator.dupe(u8, ""));
         std.debug.print("No ZIPPONDB_PATH environment variable found, please use the command:\n db use path/to/db \nor\n db new /path/to/dir\n", .{});
+        file_engine = FileEngine.init(allocator, "");
     }

     const line_buf = try allocator.alloc(u8, BUFFER_SIZE); // TODO: Remove the size limitation
@@ -64,6 +63,8 @@ pub fn main() !void {
     const line = try std.io.getStdIn().reader().readUntilDelimiterOrEof(line_buf, '\n');

     if (line) |line_str| {
+        file_engine.log("main", .Info, "Query received: {s}", .{line_str});
+
         const null_term_line_str = try allocator.dupeZ(u8, line_str[0..line_str.len]);
         defer allocator.free(null_term_line_str);

@@ -270,8 +271,7 @@ pub fn runQuery(null_term_query_str: [:0]const u8, file_engine: *FileEngine) void {
     }

     parser.parse() catch |err| {
-        std.debug.print("Error: {any}\n", .{err});
-        @panic("=9");
+        file_engine.log("main", .Error, "Error parsing: {any}", .{err});
     };
 }

@@ -3,6 +3,7 @@ const Allocator = std.mem.Allocator;
 const DataType = @import("types/dataType.zig").DataType;
 const Toker = @import("tokenizers/schema.zig").Tokenizer;
 const Token = @import("tokenizers/schema.zig").Token;
+const Loc = @import("tokenizers/shared/loc.zig").Loc;
 const send = @import("stuffs/utils.zig").send;
 const printError = @import("stuffs/utils.zig").printError;

@@ -34,15 +35,15 @@ pub const Parser = struct {

 pub const SchemaStruct = struct {
     allocator: Allocator,
-    name: Token.Loc,
-    members: std.ArrayList(Token.Loc),
+    name: Loc,
+    members: std.ArrayList(Loc),
     types: std.ArrayList(DataType),

-    pub fn init(allocator: Allocator, name: Token.Loc) SchemaStruct {
+    pub fn init(allocator: Allocator, name: Loc) SchemaStruct {
         return SchemaStruct{
             .allocator = allocator,
             .name = name,
-            .members = std.ArrayList(Token.Loc).init(allocator),
+            .members = std.ArrayList(Loc).init(allocator),
             .types = std.ArrayList(DataType).init(allocator),
         };
     }

@@ -1,8 +1,6 @@
 const std = @import("std");
 const ZipponError = @import("errors.zig").ZipponError;

-const stdout = std.io.getStdOut().writer();
-
 pub fn getEnvVariables(allocator: std.mem.Allocator, variable: []const u8) ?[]const u8 {
     var env_map = std.process.getEnvMap(allocator) catch return null;
     defer env_map.deinit();
@@ -33,6 +31,24 @@ pub fn getDirTotalSize(dir: std.fs.Dir) !u64 {
     return total;
 }

+pub fn getArgsString(allocator: std.mem.Allocator) std.ArrayList(u8) {
+    const args = try std.process.argsAlloc(allocator);
+    defer std.process.argsFree(allocator, args);
+
+    var buffer = std.ArrayList(u8).init(allocator);
+    var writer = buffer.writer();
+
+    for (args) |arg| {
+        writer.print("{s} ", .{arg});
+    }
+
+    buffer.append(0);
+
+    return buffer;
+}
+
+const stdout = std.io.getStdOut().writer();
+
 // Maybe create a struct for that
 pub fn send(comptime format: []const u8, args: anytype) void {
     stdout.print(format, args) catch |err| {
@@ -78,19 +94,3 @@ pub fn printError(message: []const u8, err: ZipponError, query: ?[]const u8, sta
     send("{s}", .{buffer.items});
     return err;
 }
-
-pub fn getArgsString(allocator: std.mem.Allocator) std.ArrayList(u8) {
-    const args = try std.process.argsAlloc(allocator);
-    defer std.process.argsFree(allocator, args);
-
-    var buffer = std.ArrayList(u8).init(allocator);
-    var writer = buffer.writer();
-
-    for (args) |arg| {
-        writer.print("{s} ", .{arg});
-    }
-
-    buffer.append(0);
-
-    return buffer;
-}
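
getArgsString joins argv into a single space-separated, zero-terminated buffer. As committed it uses try (and discards append's error) in a function that does not return an error union; a compilable sketch of the same idea, as a hypothetical variant rather than the committed code:

    pub fn getArgsString(allocator: std.mem.Allocator) !std.ArrayList(u8) {
        const args = try std.process.argsAlloc(allocator);
        defer std.process.argsFree(allocator, args);

        var buffer = std.ArrayList(u8).init(allocator);
        for (args) |arg| try buffer.writer().print("{s} ", .{arg});
        try buffer.append(0); // keep the zero terminator callers expect

        return buffer;
    }
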
@@ -1,15 +1,11 @@
 // From https://github.com/ziglang/zig/blob/master/lib/std/zig/tokenizer.zig
 const std = @import("std");
+const Loc = @import("shared/loc.zig").Loc;

 pub const Token = struct {
     tag: Tag,
     loc: Loc,

-    pub const Loc = struct {
-        start: usize,
-        end: usize,
-    };
-
     pub const keywords = std.StaticStringMap(Tag).initComptime(.{
         .{ "run", .keyword_run },
         .{ "help", .keyword_help },
@@ -1,14 +1,10 @@
 const std = @import("std");
+const Loc = @import("shared/loc.zig").Loc;

 pub const Token = struct {
     tag: Tag,
     loc: Loc,

-    pub const Loc = struct {
-        start: usize,
-        end: usize,
-    };
-
     pub const Tag = enum {
         string_literal,
         int_literal,
@@ -1,15 +1,11 @@
 // From https://github.com/ziglang/zig/blob/master/lib/std/zig/tokenizer.zig
 const std = @import("std");
+const Loc = @import("shared/loc.zig").Loc;

 pub const Token = struct {
     tag: Tag,
     loc: Loc,

-    pub const Loc = struct {
-        start: usize,
-        end: usize,
-    };
-
     pub const types = std.StaticStringMap(Tag).initComptime(.{
         .{ "int", .type_int },
         .{ "float", .type_float },
src/tokenizers/shared/loc.zig (new file, +4)
@@ -0,0 +1,4 @@
+pub const Loc = struct {
+    start: usize,
+    end: usize,
+};
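
All four tokenizers in this commit now import this single definition instead of each redeclaring a nested Token.Loc. A small sketch of the shared type in use (offsets hypothetical):

    const Loc = @import("tokenizers/shared/loc.zig").Loc;

    const name_loc = Loc{ .start = 0, .end = 4 }; // first four bytes of a source buffer
    // any buffer can be sliced the same way: buffer[name_loc.start..name_loc.end]
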
@@ -1,15 +1,11 @@
 // From https://github.com/ziglang/zig/blob/master/lib/std/zig/tokenizer.zig
 const std = @import("std");
+const Loc = @import("shared/loc.zig").Loc;

 pub const Token = struct {
     tag: Tag,
     loc: Loc,

-    pub const Loc = struct {
-        start: usize,
-        end: usize,
-    };
-
     pub const keywords = std.StaticStringMap(Tag).initComptime(.{
         .{ "GRAB", .keyword_grab },
         .{ "UPDATE", .keyword_update },
@@ -23,6 +23,10 @@ pub const DateTime = struct {
         return epoch_unix.addSecs(unix);
     }

+    pub fn now() Self {
+        return epoch_unix.addYears(1970).addMs(@as(u64, @intCast(std.time.milliTimestamp())));
+    }
+
     /// Caller asserts that this is > epoch
     pub fn init(year: u16, month: u16, day: u16, hr: u16, min: u16, sec: u16, ms: u16) Self {
         return epoch_unix
@@ -35,10 +39,6 @@ pub const DateTime = struct {
             .addMs(ms);
     }

-    pub fn now() Self {
-        return initUnixMs(@intCast(std.time.milliTimestamp()));
-    }
-
     pub const epoch_unix = Self{
         .ms = 0,
         .seconds = 0,
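
now() moves above init() and is rebuilt on top of epoch_unix: std.time.milliTimestamp() counts milliseconds since 1970-01-01, so, assuming epoch_unix sits at year 0 (its fields are zeroed below), the current instant comes out as:

    // year 0 + 1970 years = Unix epoch, then add the elapsed milliseconds
    const now = DateTime.epoch_unix
        .addYears(1970)
        .addMs(@as(u64, @intCast(std.time.milliTimestamp())));
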
@@ -79,7 +79,6 @@ pub const Parser = struct {

     pub fn deinit(self: *Parser) void {
         self.additional_data.deinit();
-        self.allocator.free(self.struct_name);
     }

     // TODO: Update to use ASC and DESC
@@ -152,7 +151,7 @@ pub const Parser = struct {

         .expect_struct_name => {
             // Check if the struct name is in the schema
-            self.struct_name = try self.allocator.dupe(u8, self.toker.getTokenSlice(token));
+            self.struct_name = self.toker.getTokenSlice(token);
             if (token.tag != .identifier) return printError(
                 "Error: Missing struct name",
                 ZiQlParserError.StructNotFound,
test_data/v0.1.2/LOG/main.log (new file, +3)
@@ -0,0 +1,3 @@
+Time: 2024/9/18-23:33:17.471 - Info - Found environment variable ZIPPONDB_PATH: test_data/v0.1.2
+Time: 2024/9/18-23:33:32.696 - Info - Query received: run "ASD"
+Time: 2024/9/18-23:33:32.696 - Error - Error parsing: error.SynthaxError