Created cli struct and dir

Adrien Bouvais 2025-01-12 12:37:07 +01:00
parent 1c8384dec6
commit 1f5da7a74b
7 changed files with 586 additions and 379 deletions

View File

@@ -1,6 +1,6 @@
const std = @import("std");
const dtype = @import("dtype");
const DBEngine = @import("src/main.zig").DBEngine;
const DBEngine = @import("src/cli/core.zig");
const ziqlTokenizer = @import("src/tokenizers/ziql.zig").Tokenizer;
const ziqlToken = @import("src/tokenizers/ziql.zig").Token;
const ziqlParser = @import("src/ziqlParser.zig").Parser;
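
Since src/cli/core.zig (shown in the next file) declares its fields at file scope, the import itself now evaluates to the DBEngine struct type, so the old `.DBEngine` field access is no longer needed. A minimal consumer-side sketch under that assumption (the main wrapper and query string are illustrative, not part of this commit):

    const DBEngine = @import("src/cli/core.zig");

    pub fn main() !void {
        // null paths fall back to the ZIPPONDB_PATH / ZIPPONDB_SCHEMA environment variables
        var engine = DBEngine.init(null, null);
        defer engine.deinit();
        engine.runQuery("GRAB User"); // runs one ZiQL query through the ziql tokenizer/parser
    }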

View File

@@ -0,0 +1,155 @@
const std = @import("std");
const config = @import("config");
const Allocator = std.mem.Allocator;
const FileEngine = @import("../file/core.zig");
const SchemaEngine = @import("../schema/core.zig");
const ThreadEngine = @import("../thread/engine.zig");
const ziqlTokenizer = @import("../tokenizers/ziql.zig").Tokenizer;
const ziqlParser = @import("../ziqlParser.zig").Parser;
const setLogPath = @import("../main.zig").setLogPath;
const log = std.log.scoped(.cli);
const DBEngineState = enum { MissingFileEngine, MissingSchemaEngine, Ok, Init };
pub const Self = @This();
var path_buffer: [1024]u8 = undefined;
var line_buffer: [config.BUFFER_SIZE]u8 = undefined;
var in_buffer: [config.BUFFER_SIZE]u8 = undefined;
var out_buffer: [config.BUFFER_SIZE]u8 = undefined;
var value_buffer: [1024]u8 = undefined;
var buffer: [1024 * 1024]u8 = undefined; // For env var
var fa = std.heap.FixedBufferAllocator.init(&buffer);
const allocator = fa.allocator();
usingnamespace @import("parser.zig");
state: DBEngineState = .Init,
file_engine: FileEngine = undefined,
schema_engine: SchemaEngine = undefined,
thread_engine: ThreadEngine = undefined,
pub fn init(potential_main_path: ?[]const u8, potential_schema_path: ?[]const u8) Self {
var self = Self{};
self.thread_engine = ThreadEngine.init();
const potential_main_path_or_environment_variable = potential_main_path orelse getEnvVariable("ZIPPONDB_PATH");
if (potential_main_path_or_environment_variable) |main_path| {
setLogPath(main_path);
log.info("Found ZIPPONDB_PATH: {s}.", .{main_path});
self.file_engine = FileEngine.init(main_path, self.thread_engine.thread_pool) catch {
log.err("Error when init FileEngine", .{});
self.state = .MissingFileEngine;
return self;
};
self.file_engine.createMainDirectories() catch {
log.err("Error when creating main directories", .{});
self.state = .MissingFileEngine;
return self;
};
self.state = .MissingSchemaEngine;
} else {
log.info("No ZIPPONDB_PATH found.", .{});
self.state = .MissingFileEngine;
return self;
}
if (self.file_engine.isSchemaFileInDir() and potential_schema_path == null) {
const schema_path = std.fmt.bufPrint(&path_buffer, "{s}/schema", .{self.file_engine.path_to_ZipponDB_dir}) catch {
self.state = .MissingSchemaEngine;
return self;
};
log.info("Schema founded in the database directory.", .{});
self.schema_engine = SchemaEngine.init(schema_path, &self.file_engine) catch |err| {
log.err("Error when init SchemaEngine: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
self.file_engine.createStructDirectories(self.schema_engine.struct_array) catch |err| {
log.err("Error when creating struct directories: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
log.debug("SchemaEngine created in DBEngine with {d} struct", .{self.schema_engine.struct_array.len});
self.file_engine.schema_engine = self.schema_engine;
self.state = .Ok;
return self;
}
log.info("Database don't have any schema yet, trying to add one.", .{});
const potential_schema_path_or_environment_variable = potential_schema_path orelse getEnvVariable("ZIPPONDB_SCHEMA");
if (potential_schema_path_or_environment_variable) |schema_path| {
log.info("Found schema path {s}.", .{schema_path});
self.schema_engine = SchemaEngine.init(schema_path, &self.file_engine) catch |err| {
log.err("Error when init SchemaEngine: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
self.file_engine.createStructDirectories(self.schema_engine.struct_array) catch |err| {
log.err("Error when creating struct directories: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
self.file_engine.schema_engine = self.schema_engine;
self.file_engine.writeSchemaFile(self.schema_engine.null_terminated) catch |err| {
log.err("Error saving schema file: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
self.state = .Ok;
} else {
log.info(config.HELP_MESSAGE.no_schema, .{self.file_engine.path_to_ZipponDB_dir});
}
return self;
}
pub fn start(self: *Self) !void {
while (true) {
std.debug.print("> ", .{}); // TODO: Find something better than just std.debug.print
const line = std.io.getStdIn().reader().readUntilDelimiterOrEof(&in_buffer, '\n') catch {
log.debug("Command too long for buffer", .{});
continue;
};
if (line) |line_str| {
log.debug("Query received: {s}", .{line_str});
const null_term_line_str = try std.fmt.bufPrintZ(&line_buffer, "{s}", .{line_str});
if (try self.parse(null_term_line_str)) break;
}
}
}
pub fn getEnvVariable(variable: []const u8) ?[]const u8 {
fa.reset();
var env_map = std.process.getEnvMap(allocator) catch return null;
var iter = env_map.iterator();
while (iter.next()) |entry| {
if (std.mem.eql(u8, entry.key_ptr.*, variable)) return std.fmt.bufPrint(&value_buffer, "{s}", .{entry.value_ptr.*}) catch return null;
}
return null;
}
pub fn runQuery(self: *Self, null_term_query_str: [:0]const u8) void {
var toker = ziqlTokenizer.init(null_term_query_str);
var parser = ziqlParser.init(&toker, &self.file_engine, &self.schema_engine);
parser.parse() catch |err| log.err("Error parsing: {any}", .{err});
}
pub fn deinit(self: *Self) void {
self.thread_engine.deinit();
self.schema_engine.deinit();
}
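
init() never returns an error; it records what went wrong in the state field instead. A minimal sketch of a hypothetical caller that inspects that field, assumed to live next to core.zig in src/cli (paths, query, and messages are illustrative):

    const std = @import("std");
    const DBEngine = @import("core.zig");

    fn openAndQuery() void {
        var engine = DBEngine.init("my_db", "my_schema.zipponschema"); // hypothetical paths
        defer engine.deinit();
        switch (engine.state) {
            .Ok => engine.runQuery("GRAB User"),
            .MissingFileEngine => std.debug.print("No database directory selected.\n", .{}),
            .MissingSchemaEngine => std.debug.print("Database has no schema yet.\n", .{}),
            .Init => {}, // not expected once init() has run
        }
    }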

View File

View File

@@ -0,0 +1,221 @@
const std = @import("std");
const cliTokenizer = @import("tokenizer.zig").Tokenizer;
const cliToken = @import("tokenizer.zig").Token;
const send = @import("../utils.zig").send;
const config = @import("config");
const log = std.log.scoped(.cli);
const State = enum {
expect_main_command,
expect_query,
expect_schema_command,
expect_path_to_schema,
expect_db_command,
expect_path_to_db,
expect_file_format,
expect_path_to_dump,
quit,
end,
};
const Self = @import("core.zig");
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
var allocator = arena.allocator();
pub fn parse(self: *Self, null_term_line_str: [:0]const u8) !bool {
var toker = cliTokenizer.init(null_term_line_str);
var token = toker.next();
var state = State.expect_main_command;
defer _ = arena.reset(.free_all);
var last_token: cliToken = undefined;
while ((state != .end) and (state != .quit)) : ({
last_token = token;
token = toker.next();
}) switch (state) {
.expect_main_command => switch (token.tag) {
.keyword_run => {
if (self.state == .MissingFileEngine) {
send("{s}", .{config.HELP_MESSAGE.no_engine});
state = .end;
continue;
}
if (self.state == .MissingSchemaEngine) {
send(config.HELP_MESSAGE.no_schema, .{self.file_engine.path_to_ZipponDB_dir});
state = .end;
continue;
}
state = .expect_query;
},
.keyword_db => state = .expect_db_command,
.keyword_schema => {
if (self.state == .MissingFileEngine) {
send("{s}", .{config.HELP_MESSAGE.no_engine});
state = .end;
continue;
}
state = .expect_schema_command;
},
.keyword_help => {
send("{s}", .{config.HELP_MESSAGE.main});
state = .end;
},
.keyword_quit => state = .quit,
.keyword_dump => {
if (self.state == .MissingFileEngine) {
send("{s}", .{config.HELP_MESSAGE.no_engine});
state = .end;
continue;
}
if (self.state == .MissingSchemaEngine) {
send(config.HELP_MESSAGE.no_schema, .{self.file_engine.path_to_ZipponDB_dir});
state = .end;
continue;
}
state = .expect_file_format;
},
.eof => state = .end,
else => {
send("Command need to start with a keyword, including: run, db, schema, help and quit", .{});
state = .end;
},
},
.expect_file_format => switch (token.tag) {
.keyword_csv => state = .expect_path_to_dump,
.keyword_json => state = .expect_path_to_dump,
.keyword_zid => state = .expect_path_to_dump,
else => {
send("Error: format available: csv, json, zid", .{});
state = .end;
},
},
.expect_db_command => switch (token.tag) {
.keyword_new, .keyword_use => state = .expect_path_to_db, // TODO: When new, create the dir; if use, don't create the dir
.keyword_metrics => {
if (self.state == .MissingFileEngine) {
send("{s}", .{config.HELP_MESSAGE.no_engine});
state = .end;
continue;
}
if (self.state == .MissingSchemaEngine) {
send(config.HELP_MESSAGE.no_schema, .{self.file_engine.path_to_ZipponDB_dir});
state = .end;
continue;
}
var buffer = std.ArrayList(u8).init(allocator);
defer buffer.deinit();
try self.file_engine.writeDbMetrics(&buffer);
send("{s}", .{buffer.items});
state = .end;
},
.keyword_state => {
send("{any}", .{self.state});
state = .end;
},
.keyword_help => {
send("{s}", .{config.HELP_MESSAGE.db});
state = .end;
},
else => {
send("Error: db commands available: new, metrics, swap & help", .{});
state = .end;
},
},
.expect_path_to_db => switch (token.tag) {
.identifier => {
self.deinit();
self.* = Self.init(toker.getTokenSlice(token), null);
state = .end;
},
else => {
send("Error Expect a path to a ZipponDB folder.", .{});
state = .end;
},
},
.expect_query => switch (token.tag) {
.string_literal => {
const null_term_query_str = try allocator.dupeZ(u8, toker.buffer[token.loc.start + 1 .. token.loc.end - 1]);
defer allocator.free(null_term_query_str);
self.runQuery(null_term_query_str); // TODO: This should return something and I should send from here, not from the parser
state = .end;
},
.keyword_help => {
send("The run command take a ZiQL query between \" and run it. eg: run \"GRAB User\"", .{});
state = .end;
},
else => {
send("Error: After command run, need a query, eg: \"GRAB User\"", .{});
state = .end;
},
},
.expect_schema_command => switch (token.tag) {
.keyword_describe => {
if (self.state == .MissingFileEngine) send("Error: No database selected. Please use 'db new' or 'db use'.", .{});
if (self.state == .MissingSchemaEngine) send("Error: No schema in database. Please use 'schema init'.", .{});
send("Schema:\n {s}", .{self.schema_engine.null_terminated});
state = .end;
},
.keyword_init => {
if (self.state == .MissingFileEngine) send("Error: No database selected. Please use 'db new' or 'db use'.", .{});
state = .expect_path_to_schema;
},
.keyword_help => {
send("{s}", .{config.HELP_MESSAGE.schema});
state = .end;
},
else => {
send("{s}", .{config.HELP_MESSAGE.schema});
state = .end;
},
},
.expect_path_to_schema => switch (token.tag) {
.identifier => {
const main_path = try allocator.dupe(u8, self.file_engine.path_to_ZipponDB_dir);
self.deinit();
self.* = Self.init(main_path, toker.getTokenSlice(token));
try self.file_engine.writeSchemaFile(self.schema_engine.null_terminated);
state = .end;
},
else => {
send("Error: Expect path to schema file.", .{});
state = .end;
},
},
.expect_path_to_dump => switch (token.tag) {
.identifier => {
try self.file_engine.dumpDb(allocator, toker.getTokenSlice(token), switch (last_token.tag) {
.keyword_csv => .csv,
.keyword_zid => .zid,
.keyword_json => .json,
else => unreachable,
});
state = .end;
},
else => {
send("Error: Expect path to dump dir.", .{});
state = .end;
},
},
.quit, .end => unreachable,
};
if (state == .quit) {
log.info("Bye bye\n", .{});
return true;
}
return false;
}
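
Putting the state machine together, one interactive session this parser would accept might look like the following (paths are illustrative; the "> " prompt comes from start() in core.zig):

    > db use path/to/ZipponDB
    > schema init path/to/schema.file
    > run "GRAB User"
    > db metrics
    > dump csv path/to/dump_dir
    > quit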

View File

@@ -0,0 +1,195 @@
// From https://github.com/ziglang/zig/blob/master/lib/std/zig/tokenizer.zig
const std = @import("std");
const Loc = @import("../dataStructure/loc.zig");
pub const Token = struct {
tag: Tag,
loc: Loc,
pub const keywords = std.StaticStringMap(Tag).initComptime(.{
.{ "run", .keyword_run },
.{ "help", .keyword_help },
.{ "describe", .keyword_describe },
.{ "init", .keyword_init },
.{ "schema", .keyword_schema },
.{ "quit", .keyword_quit },
.{ "db", .keyword_db },
.{ "new", .keyword_new },
.{ "metrics", .keyword_metrics },
.{ "use", .keyword_use },
.{ "state", .keyword_state },
.{ "dump", .keyword_dump },
.{ "csv", .keyword_csv },
.{ "json", .keyword_json },
.{ "zid", .keyword_zid },
});
pub fn getKeyword(bytes: []const u8) ?Tag {
return keywords.get(bytes);
}
pub const Tag = enum {
eof,
invalid,
keyword_run,
keyword_help,
keyword_describe,
keyword_schema,
keyword_init,
keyword_quit,
keyword_db,
keyword_new,
keyword_metrics,
keyword_use,
keyword_state,
keyword_dump,
keyword_csv,
keyword_json,
keyword_zid,
string_literal,
identifier,
};
};
pub const Tokenizer = struct {
buffer: [:0]const u8,
index: usize,
pub fn init(buffer: [:0]const u8) Tokenizer {
// Skip the UTF-8 BOM if present.
return .{
.buffer = buffer,
.index = if (std.mem.startsWith(u8, buffer, "\xEF\xBB\xBF")) 3 else 0,
};
}
const State = enum {
start,
invalid,
identifier,
string_literal,
string_literal_backslash,
};
pub fn getTokenSlice(self: *Tokenizer, token: Token) []const u8 {
return self.buffer[token.loc.start..token.loc.end];
}
pub fn next(self: *Tokenizer) Token {
var state: State = .start;
var result: Token = .{
.tag = undefined,
.loc = .{
.start = self.index,
.end = undefined,
},
};
while (true) : (self.index += 1) {
const c = self.buffer[self.index];
switch (state) {
.start => switch (c) {
0 => {
if (self.index == self.buffer.len) return .{
.tag = .eof,
.loc = .{
.start = self.index,
.end = self.index,
},
};
state = .invalid;
},
' ', '\n', '\t', '\r' => {
result.loc.start = self.index + 1;
},
'a'...'z', 'A'...'Z', '_' => {
state = .identifier;
result.tag = .identifier;
},
'"' => {
state = .string_literal;
result.tag = .string_literal;
},
else => {
state = .invalid;
},
},
.invalid => {
result.tag = .invalid;
break;
},
.identifier => switch (c) {
'a'...'z', 'A'...'Z', '_', '0'...'9', '.', '/' => continue,
else => {
if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |tag| {
result.tag = tag;
}
break;
},
},
.string_literal => switch (c) {
0 => {
if (self.index != self.buffer.len) {
state = .invalid;
continue;
}
result.tag = .invalid;
break;
},
'\n' => {
result.tag = .invalid;
break;
},
'\\' => {
state = .string_literal_backslash;
},
'"' => {
self.index += 1;
break;
},
0x01...0x09, 0x0b...0x1f, 0x7f => {
state = .invalid;
},
else => continue,
},
.string_literal_backslash => switch (c) {
0, '\n' => {
result.tag = .invalid;
break;
},
else => {
state = .string_literal;
},
},
}
}
result.loc.end = self.index;
return result;
}
};
test "Basics" {
try testTokenize("help", &.{.keyword_help});
try testTokenize("run \"Hello world\"", &.{ .keyword_run, .string_literal });
}
fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !void {
var tokenizer = Tokenizer.init(source);
for (expected_token_tags) |expected_token_tag| {
const token = tokenizer.next();
try std.testing.expectEqual(expected_token_tag, token.tag);
}
// Last token should always be eof, even when the last token was invalid,
// in which case the tokenizer is in an invalid state, which can only be
// recovered by opinionated means outside the scope of this implementation.
const last_token = tokenizer.next();
try std.testing.expectEqual(Token.Tag.eof, last_token.tag);
try std.testing.expectEqual(source.len, last_token.loc.start);
try std.testing.expectEqual(source.len, last_token.loc.end);
}
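
Because '.' and '/' are accepted as identifier characters, a whole file-system path comes back as a single identifier token, which is what expect_path_to_db and expect_path_to_schema rely on. A small extra test sketch (not part of this commit) illustrating that behaviour:

    test "Path as identifier" {
        // '/' and '.' continue an identifier, so the path below is one token
        try testTokenize("schema init path/to/schema.file", &.{ .keyword_schema, .keyword_init, .identifier });
    }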

View File

@@ -1,40 +1,16 @@
const std = @import("std");
const utils = @import("utils.zig");
const send = utils.send;
const Allocator = std.mem.Allocator;
const Pool = std.Thread.Pool;
const FileEngine = @import("file/core.zig");
const SchemaEngine = @import("schema/core.zig");
const ThreadEngine = @import("thread/engine.zig");
const cliTokenizer = @import("tokenizers/cli.zig").Tokenizer;
const cliToken = @import("tokenizers/cli.zig").Token;
const ziqlTokenizer = @import("tokenizers/ziql.zig").Tokenizer;
const ziqlToken = @import("tokenizers/ziql.zig").Token;
const ziqlParser = @import("ziqlParser.zig");
const config = @import("config");
const Cli = @import("cli/core.zig");
const ZipponError = @import("error").ZipponError;
const config = @import("config");
const BUFFER_SIZE = config.BUFFER_SIZE;
const CPU_CORE = config.CPU_CORE;
const HELP_MESSAGE = config.HELP_MESSAGE;
// End up using like 302kB of memory here
var log_buff: [1024]u8 = undefined;
var log_path: []const u8 = undefined;
var date_buffer: [64]u8 = undefined;
var date_fa = std.heap.FixedBufferAllocator.init(&date_buffer);
const date_allocator = date_fa.allocator();
var path_buffer: [1024]u8 = undefined;
var line_buffer: [BUFFER_SIZE]u8 = undefined;
var in_buffer: [BUFFER_SIZE]u8 = undefined;
var out_buffer: [BUFFER_SIZE]u8 = undefined;
const log = std.log.scoped(.cli);
pub const std_options = .{
.logFn = myLog,
};
@@ -45,20 +21,18 @@ pub fn myLog(
comptime format: []const u8,
args: anytype,
) void {
if (true) return;
const level_txt = comptime message_level.asText();
const prefix = if (scope == .default) " - " else "(" ++ @tagName(scope) ++ ") - ";
const potential_file: ?std.fs.File = std.fs.cwd().openFile(log_path, .{ .mode = .write_only }) catch null;
date_fa.reset();
const now = @import("dtype").DateTime.now();
var date_format_buffer = std.ArrayList(u8).init(date_allocator);
defer date_format_buffer.deinit();
now.format("YYYY/MM/DD-HH:mm:ss.SSSS", date_format_buffer.writer()) catch return;
if (potential_file) |file| {
date_fa.reset();
const now = @import("dtype").DateTime.now();
var date_format_buffer = std.ArrayList(u8).init(date_allocator);
defer date_format_buffer.deinit();
now.format("YYYY/MM/DD-HH:mm:ss.SSSS", date_format_buffer.writer()) catch return;
file.seekFromEnd(0) catch return;
const writer = file.writer();
@@ -66,340 +40,16 @@ pub fn myLog(
writer.print(format, args) catch return;
writer.writeByte('\n') catch return;
file.close();
} else {
//const writer = std.io.getStdErr().writer();
//writer.print("{s}{s}Time: {s} - ", .{ level_txt, prefix, date_format_buffer.items }) catch return;
//writer.print(format, args) catch return;
//writer.writeByte('\n') catch return;
}
}
const DBEngineState = enum { MissingFileEngine, MissingSchemaEngine, Ok, Init };
pub const DBEngine = struct {
state: DBEngineState = .Init,
file_engine: FileEngine = undefined,
schema_engine: SchemaEngine = undefined,
thread_engine: ThreadEngine = undefined,
pub fn init(potential_main_path: ?[]const u8, potential_schema_path: ?[]const u8) DBEngine {
var self = DBEngine{};
self.thread_engine = ThreadEngine.init();
const potential_main_path_or_environment_variable = potential_main_path orelse utils.getEnvVariable("ZIPPONDB_PATH");
if (potential_main_path_or_environment_variable) |main_path| {
log_path = std.fmt.bufPrint(&log_buff, "{s}/LOG/log", .{main_path}) catch "";
log.info("Found ZIPPONDB_PATH: {s}.", .{main_path});
self.file_engine = FileEngine.init(main_path, self.thread_engine.thread_pool) catch {
log.err("Error when init FileEngine", .{});
self.state = .MissingFileEngine;
return self;
};
self.file_engine.createMainDirectories() catch {
log.err("Error when creating main directories", .{});
self.state = .MissingFileEngine;
return self;
};
self.state = .MissingSchemaEngine;
} else {
log.info("No ZIPPONDB_PATH found.", .{});
self.state = .MissingFileEngine;
return self;
}
if (self.file_engine.isSchemaFileInDir() and potential_schema_path == null) {
const schema_path = std.fmt.bufPrint(&path_buffer, "{s}/schema", .{self.file_engine.path_to_ZipponDB_dir}) catch {
self.state = .MissingSchemaEngine;
return self;
};
log.info("Schema founded in the database directory.", .{});
self.schema_engine = SchemaEngine.init(schema_path, &self.file_engine) catch |err| {
log.err("Error when init SchemaEngine: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
self.file_engine.createStructDirectories(self.schema_engine.struct_array) catch |err| {
log.err("Error when creating struct directories: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
log.debug("SchemaEngine created in DBEngine with {d} struct", .{self.schema_engine.struct_array.len});
self.file_engine.schema_engine = self.schema_engine;
self.state = .Ok;
return self;
}
log.info("Database don't have any schema yet, trying to add one.", .{});
const potential_schema_path_or_environment_variable = potential_schema_path orelse utils.getEnvVariable("ZIPPONDB_SCHEMA");
if (potential_schema_path_or_environment_variable) |schema_path| {
log.info("Found schema path {s}.", .{schema_path});
self.schema_engine = SchemaEngine.init(schema_path, &self.file_engine) catch |err| {
log.err("Error when init SchemaEngine: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
self.file_engine.createStructDirectories(self.schema_engine.struct_array) catch |err| {
log.err("Error when creating struct directories: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
self.file_engine.schema_engine = self.schema_engine;
self.file_engine.writeSchemaFile(self.schema_engine.null_terminated) catch |err| {
log.err("Error saving schema file: {any}", .{err});
self.state = .MissingSchemaEngine;
return self;
};
self.state = .Ok;
} else {
log.info(HELP_MESSAGE.no_schema, .{self.file_engine.path_to_ZipponDB_dir});
}
return self;
}
pub fn runQuery(self: *DBEngine, null_term_query_str: [:0]const u8) void {
var toker = ziqlTokenizer.init(null_term_query_str);
var parser = ziqlParser.init(&toker, &self.file_engine, &self.schema_engine);
parser.parse() catch |err| log.err("Error parsing: {any}", .{err});
}
pub fn deinit(self: *DBEngine) void {
self.thread_engine.deinit();
self.schema_engine.deinit();
}
};
pub fn setLogPath(path: []const u8) void {
log_path = std.fmt.bufPrint(&log_buff, "{s}/LOG/log", .{path}) catch return;
}
pub fn main() !void {
const State = enum {
expect_main_command,
expect_query,
expect_schema_command,
expect_path_to_schema,
expect_db_command,
expect_path_to_db,
expect_file_format,
expect_path_to_dump,
quit,
end,
};
var cli = Cli.init(null, null);
defer cli.deinit();
var db_engine = DBEngine.init(null, null);
defer db_engine.deinit();
var fa = std.heap.FixedBufferAllocator.init(&out_buffer);
const allocator = fa.allocator();
while (true) {
fa.reset();
std.debug.print("> ", .{}); // TODO: Find something better than just std.debug.print
const line = std.io.getStdIn().reader().readUntilDelimiterOrEof(&in_buffer, '\n') catch {
log.debug("Command too long for buffer", .{});
continue;
};
if (line) |line_str| {
log.debug("Query received: {s}", .{line_str});
const null_term_line_str = try std.fmt.bufPrintZ(&line_buffer, "{s}", .{line_str});
var toker = cliTokenizer.init(null_term_line_str);
var token = toker.next();
var state = State.expect_main_command;
var last_token: cliToken = undefined;
while ((state != .end) and (state != .quit)) : ({
last_token = token;
token = toker.next();
}) switch (state) {
.expect_main_command => switch (token.tag) {
.keyword_run => {
if (db_engine.state == .MissingFileEngine) {
send("{s}", .{HELP_MESSAGE.no_engine});
state = .end;
continue;
}
if (db_engine.state == .MissingSchemaEngine) {
send(HELP_MESSAGE.no_schema, .{db_engine.file_engine.path_to_ZipponDB_dir});
state = .end;
continue;
}
state = .expect_query;
},
.keyword_db => state = .expect_db_command,
.keyword_schema => {
if (db_engine.state == .MissingFileEngine) {
send("{s}", .{HELP_MESSAGE.no_engine});
state = .end;
continue;
}
state = .expect_schema_command;
},
.keyword_help => {
send("{s}", .{HELP_MESSAGE.main});
state = .end;
},
.keyword_quit => state = .quit,
.keyword_dump => {
if (db_engine.state == .MissingFileEngine) {
send("{s}", .{HELP_MESSAGE.no_engine});
state = .end;
continue;
}
if (db_engine.state == .MissingSchemaEngine) {
send(HELP_MESSAGE.no_schema, .{db_engine.file_engine.path_to_ZipponDB_dir});
state = .end;
continue;
}
state = .expect_file_format;
},
.eof => state = .end,
else => {
send("Command need to start with a keyword, including: run, db, schema, help and quit", .{});
state = .end;
},
},
.expect_file_format => switch (token.tag) {
.keyword_csv => state = .expect_path_to_dump,
.keyword_json => state = .expect_path_to_dump,
.keyword_zid => state = .expect_path_to_dump,
else => {
send("Error: format available: csv, json, zid", .{});
state = .end;
},
},
.expect_db_command => switch (token.tag) {
.keyword_new, .keyword_use => state = .expect_path_to_db, // TODO: When new, create the dir; if use, don't create the dir
.keyword_metrics => {
if (db_engine.state == .MissingFileEngine) {
send("{s}", .{HELP_MESSAGE.no_engine});
state = .end;
continue;
}
if (db_engine.state == .MissingSchemaEngine) {
send(HELP_MESSAGE.no_schema, .{db_engine.file_engine.path_to_ZipponDB_dir});
state = .end;
continue;
}
var buffer = std.ArrayList(u8).init(allocator);
defer buffer.deinit();
try db_engine.file_engine.writeDbMetrics(&buffer);
send("{s}", .{buffer.items});
state = .end;
},
.keyword_state => {
send("{any}", .{db_engine.state});
state = .end;
},
.keyword_help => {
send("{s}", .{HELP_MESSAGE.db});
state = .end;
},
else => {
send("Error: db commands available: new, metrics, swap & help", .{});
state = .end;
},
},
.expect_path_to_db => switch (token.tag) {
.identifier => {
db_engine.deinit();
db_engine = DBEngine.init(toker.getTokenSlice(token), null);
state = .end;
},
else => {
send("Error Expect a path to a ZipponDB folder.", .{});
state = .end;
},
},
.expect_query => switch (token.tag) {
.string_literal => {
const null_term_query_str = try allocator.dupeZ(u8, toker.buffer[token.loc.start + 1 .. token.loc.end - 1]);
defer allocator.free(null_term_query_str);
db_engine.runQuery(null_term_query_str); // TODO: This should return something and I should send from here, not from the parser
state = .end;
},
.keyword_help => {
send("The run command take a ZiQL query between \" and run it. eg: run \"GRAB User\"", .{});
state = .end;
},
else => {
send("Error: After command run, need a query, eg: \"GRAB User\"", .{});
state = .end;
},
},
.expect_schema_command => switch (token.tag) {
.keyword_describe => {
if (db_engine.state == .MissingFileEngine) send("Error: No database selected. Please use 'db new' or 'db use'.", .{});
if (db_engine.state == .MissingSchemaEngine) send("Error: No schema in database. Please use 'schema init'.", .{});
send("Schema:\n {s}", .{db_engine.schema_engine.null_terminated});
state = .end;
},
.keyword_init => {
if (db_engine.state == .MissingFileEngine) send("Error: No database selected. Please use 'db new' or 'db use'.", .{});
state = .expect_path_to_schema;
},
.keyword_help => {
send("{s}", .{HELP_MESSAGE.schema});
state = .end;
},
else => {
send("{s}", .{HELP_MESSAGE.schema});
state = .end;
},
},
.expect_path_to_schema => switch (token.tag) {
.identifier => {
const main_path = try allocator.dupe(u8, db_engine.file_engine.path_to_ZipponDB_dir);
db_engine.deinit();
db_engine = DBEngine.init(main_path, toker.getTokenSlice(token));
try db_engine.file_engine.writeSchemaFile(db_engine.schema_engine.null_terminated);
state = .end;
},
else => {
send("Error: Expect path to schema file.", .{});
state = .end;
},
},
.expect_path_to_dump => switch (token.tag) {
.identifier => {
try db_engine.file_engine.dumpDb(allocator, toker.getTokenSlice(token), switch (last_token.tag) {
.keyword_csv => .csv,
.keyword_zid => .zid,
.keyword_json => .json,
else => unreachable,
});
state = .end;
},
else => {
send("Error: Expect path to dump dir.", .{});
state = .end;
},
},
.quit, .end => unreachable,
};
if (state == .quit) {
log.info("Bye bye\n", .{});
break;
}
}
}
try cli.start();
}
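
With the REPL loop moved into src/cli, the lines added to this file reduce main() to roughly the following (a reconstruction from the diff above, using the Cli import already added near the top; surrounding declarations such as std_options and myLog are unchanged):

    pub fn main() !void {
        var cli = Cli.init(null, null); // falls back to ZIPPONDB_PATH / ZIPPONDB_SCHEMA
        defer cli.deinit();
        try cli.start(); // blocks on the "> " prompt until `quit`
    }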

View File

@@ -6,25 +6,11 @@ const log = std.log.scoped(.utils);
// This uses 2MB / 2048KB of memory
var map_error_buffer: [1024 * 1024]u8 = undefined; // This is for map AND error, not map of error and whatever
var value_buffer: [1024]u8 = undefined;
var path_buffer: [1024 * 1024]u8 = undefined;
var fa = std.heap.FixedBufferAllocator.init(&map_error_buffer);
const allocator = fa.allocator();
pub fn getEnvVariable(variable: []const u8) ?[]const u8 {
fa.reset();
var env_map = std.process.getEnvMap(allocator) catch return null;
var iter = env_map.iterator();
while (iter.next()) |entry| {
if (std.mem.eql(u8, entry.key_ptr.*, variable)) return std.fmt.bufPrint(&value_buffer, "{s}", .{entry.value_ptr.*}) catch return null;
}
return null;
}
const stdout = std.io.getStdOut().writer();
// Maybe create a struct for that