Date, clean and errors
Added the datatypes date, time and datetime. Moved all custom errors to a single file. Cleaned code to be more readable and created some small utils. Other stuff that I don't recall. Basically preparing for the relationship implementation.
This commit is contained in:
parent
673148ba77
commit
6bcb369cbc
4
.gitignore
vendored
4
.gitignore
vendored
@ -1,8 +1,8 @@
|
||||
.zig-cache
|
||||
.venv
|
||||
ZipponDB
|
||||
data
|
||||
engine
|
||||
engine.o
|
||||
zig-out
|
||||
TODO v0.1.md
|
||||
generate_dummy_data.py
|
||||
TODO v0.2.md
|
||||
|
@ -154,11 +154,9 @@ There is 5 data types for the moment:
|
||||
- `bool`: Boolean, can be `true` or `false`
|
||||
- `string`: Character array between `''`
|
||||
- `UUID`: Id in the UUID format, used for relationships, etc. All structs have an id member.
|
||||
|
||||
Coming soon:
|
||||
- `date`: A date in yyyy/mm/dd
|
||||
- `datetime`: A date time in yyyy/mm/dd/hh/mm/ss
|
||||
- `time`: A time in hh/mm/ss
|
||||
- `time`: A time in hh:mm:ss.mmmm
|
||||
- `datetime`: A date time in yyyy/mm/dd-hh:mm:ss:mmmm
|
||||
|
||||
All data types can be an array of those types using `[]` in front of it. So `[]int` is an array of integer.
|
||||
|
||||
|
48
ZiQL.md
48
ZiQL.md
@ -27,6 +27,22 @@ GRAB User {name = 'Bob' AND {age > 10}}
|
||||
^
|
||||
```
|
||||
|
||||
# Data types
|
||||
|
||||
There are 8 data types for the moment:
|
||||
- `int`: 64 bit integer
|
||||
- `float`: 64 bit float. Need to have a dot, `1.` is a float `1` is an integer.
|
||||
- `bool`: Boolean, can be `true` or `false`
|
||||
- `string`: Character array between `''`
|
||||
- `UUID`: Id in the UUID format, used for relationships, etc. All structs have an id member.
|
||||
- `date`: A date in yyyy/mm/dd
|
||||
- `time`: A time in hh:mm:ss.mmmm
|
||||
- `datetime`: A date time in yyyy/mm/dd-hh:mm:ss:mmmm
|
||||
|
||||
All data types can be an array of those types using `[]` in front of it. So `[]int` is an array of integer.
|
||||
|
||||
All data types can also be `null`. Except arrays that can only be empty.
|
||||
|
||||
# Examples
|
||||
|
||||
## GRAB
|
||||
@ -248,3 +264,35 @@ UPDATE User {name='Bob'} TO (comments REMOVE { at < '2023/12/31'})
|
||||
```
|
||||
|
||||
I may include more options later.
|
||||
|
||||
# Date
|
||||
|
||||
***WIP***
|
||||
|
||||
## Date
|
||||
|
||||
To write a date, you use this format: `yyyy/mm/dd`.
|
||||
Like that: `2024/10/19`.
|
||||
|
||||
***Note: You can't use negative years***
|
||||
|
||||
## Time
|
||||
|
||||
To write a time, you use this format: `hh:mm:ss.mmmmm`.
|
||||
Like that: `12:45:00.0000`.
|
||||
|
||||
Milliseconds and seconds are optional, so these work too: `12:45:00` and `12:45`
|
||||
|
||||
|
||||
## Datetime
|
||||
|
||||
Mix of both, to write a datetime, you use this format: `yyyy/mm/dd-hh:mm:ss.mmmmm`.
|
||||
Like that: `2024/10/19-12:45:00.0000`.
|
||||
|
||||
Milliseconds and seconds are optional, so these work too: `2024/10/19-12:45:00` and `2024/10/19-12:45`
|
||||
|
||||
GRAB User {birthday > 2020/10/19 AND birthday < 2024/01/01 }
|
||||
|
||||
GRAB IOT {id = '0000-0000' AND .TemperatureSensor.Temperature.timestamp > 22-45-50.0000}
|
||||
GRAB IOT {.TemperatureSensor.TemperatureRecord IN TemperatureRecord{.timestamp > 22-45-50.0000 AND version = 3}}
|
||||
GRAB IOT {.TemperatureSensor.TemperatureRecord IN .{timestamp > 22-45-50.0000} AND .TemperatureSensor IN .{version = 3}}
|
||||
|
@ -2,6 +2,9 @@ User (
|
||||
name: str,
|
||||
age: int,
|
||||
email: str,
|
||||
bday: date,
|
||||
last_order: datetime,
|
||||
a_time: time,
|
||||
scores: []int,
|
||||
friends: []str,
|
||||
)
|
||||
|
@ -1 +1,6 @@
|
||||
pub const BUFFER_SIZE = 1024 * 50; // Line limit when parsing file
|
||||
pub const MAX_FILE_SIZE = 5e+4; // 50kb TODO: Put in config file
|
||||
|
||||
// Testing
|
||||
|
||||
pub const TEST_DATA_DIR = "test_data/v0.1.1"; // Maybe put that directly in the build
|
||||
|
File diff suppressed because it is too large
Load Diff
11
src/main.zig
11
src/main.zig
@ -23,6 +23,7 @@ const State = enum {
|
||||
end,
|
||||
};
|
||||
|
||||
// TODO: If an argument is given when starting the binary, it is the db path
|
||||
pub fn main() !void {
|
||||
var state: State = .expect_main_command;
|
||||
|
||||
@ -44,10 +45,10 @@ pub fn main() !void {
|
||||
_ = std.fs.cwd().openDir(path, .{}) catch {
|
||||
std.debug.print("Error opening ZipponDB path using environment variable, please select the database using 'db use' or create a new one with 'db new'\n", .{});
|
||||
file_engine = FileEngine.init(allocator, try allocator.dupe(u8, ""));
|
||||
to_init = true;
|
||||
to_init = false;
|
||||
};
|
||||
if (to_init) {
|
||||
file_engine = FileEngine.init(allocator, path_env_variable.?);
|
||||
file_engine = FileEngine.init(allocator, path);
|
||||
try file_engine.checkAndCreateDirectories();
|
||||
}
|
||||
} else {
|
||||
@ -73,7 +74,7 @@ pub fn main() !void {
|
||||
while ((state != .end) and (state != .quit)) : (token = toker.next()) switch (state) {
|
||||
.expect_main_command => switch (token.tag) {
|
||||
.keyword_run => {
|
||||
if (!file_engine.usable) {
|
||||
if (!file_engine.usable()) {
|
||||
send("Error: No database selected. Please use db new or db use.", .{});
|
||||
state = .end;
|
||||
continue;
|
||||
@ -82,7 +83,7 @@ pub fn main() !void {
|
||||
},
|
||||
.keyword_db => state = .expect_db_command,
|
||||
.keyword_schema => {
|
||||
if (!file_engine.usable) {
|
||||
if (!file_engine.usable()) {
|
||||
send("Error: No database selected. Please use db new or db use.", .{});
|
||||
state = .end;
|
||||
continue;
|
||||
@ -116,7 +117,7 @@ pub fn main() !void {
|
||||
.keyword_new => state = .expect_path_to_new_db,
|
||||
.keyword_use => state = .expect_path_to_db,
|
||||
.keyword_metrics => {
|
||||
if (!file_engine.usable) {
|
||||
if (!file_engine.usable()) {
|
||||
send("Error: No database selected. Please use db new or db use.", .{});
|
||||
state = .end;
|
||||
continue;
|
||||
|
@ -6,10 +6,7 @@ const Token = @import("tokenizers/schema.zig").Token;
|
||||
const send = @import("stuffs/utils.zig").send;
|
||||
const printError = @import("stuffs/utils.zig").printError;
|
||||
|
||||
const SchemaParserError = error{
|
||||
SynthaxError,
|
||||
FeatureMissing,
|
||||
};
|
||||
const SchemaParserError = @import("stuffs/errors.zig").SchemaParserError;
|
||||
|
||||
const State = enum {
|
||||
end,
|
||||
@ -35,10 +32,6 @@ pub const Parser = struct {
|
||||
};
|
||||
}
|
||||
|
||||
// Maybe I the name and member can be Loc, with a start and end, and use the buffer to get back the value
|
||||
// This is how Token works
|
||||
// From my understanding this is the same here. I put slices, that can just a len and a pointer, put I con't save the value itself.
|
||||
// Or maybe I do actually, and an array of pointer would be *[]u8
|
||||
pub const SchemaStruct = struct {
|
||||
allocator: Allocator,
|
||||
name: Token.Loc,
|
||||
@ -46,7 +39,12 @@ pub const Parser = struct {
|
||||
types: std.ArrayList(DataType),
|
||||
|
||||
pub fn init(allocator: Allocator, name: Token.Loc) SchemaStruct {
|
||||
return SchemaStruct{ .allocator = allocator, .name = name, .members = std.ArrayList(Token.Loc).init(allocator), .types = std.ArrayList(DataType).init(allocator) };
|
||||
return SchemaStruct{
|
||||
.allocator = allocator,
|
||||
.name = name,
|
||||
.members = std.ArrayList(Token.Loc).init(allocator),
|
||||
.types = std.ArrayList(DataType).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *SchemaStruct) void {
|
||||
@ -78,15 +76,27 @@ pub const Parser = struct {
|
||||
.expect_struct_name_OR_end => switch (token.tag) {
|
||||
.identifier => {
|
||||
state = .expect_l_paren;
|
||||
struct_array.append(SchemaStruct.init(self.allocator, token.loc)) catch @panic("Error appending a struct name.");
|
||||
struct_array.append(SchemaStruct.init(self.allocator, token.loc)) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.eof => state = .end,
|
||||
else => return printError("Error parsing schema: Expected a struct name", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
else => return printError(
|
||||
"Error parsing schema: Expected a struct name",
|
||||
SchemaParserError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_l_paren => switch (token.tag) {
|
||||
.l_paren => state = .expect_member_name,
|
||||
else => return printError("Error parsing schema: Expected (", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
else => return printError(
|
||||
"Error parsing schema: Expected (",
|
||||
SchemaParserError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_member_name_OR_r_paren => switch (token.tag) {
|
||||
@ -98,67 +108,125 @@ pub const Parser = struct {
|
||||
state = .expect_struct_name_OR_end;
|
||||
index += 1;
|
||||
},
|
||||
else => return printError("Error parsing schema: Expected member name or )", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
else => return printError(
|
||||
"Error parsing schema: Expected member name or )",
|
||||
SchemaParserError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_member_name => {
|
||||
state = .expect_two_dot;
|
||||
struct_array.items[index].members.append(token.loc) catch @panic("Error appending a member name.");
|
||||
struct_array.items[index].members.append(token.loc) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
|
||||
.expect_two_dot => switch (token.tag) {
|
||||
.two_dot => state = .expect_value_type,
|
||||
else => return printError("Error parsing schema: Expected :", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
else => return printError(
|
||||
"Error parsing schema: Expected :",
|
||||
SchemaParserError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_value_type => switch (token.tag) {
|
||||
.type_int => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.int) catch @panic("Error appending a type.");
|
||||
struct_array.items[index].types.append(DataType.int) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_str => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.str) catch @panic("Error appending a type.");
|
||||
struct_array.items[index].types.append(DataType.str) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_float => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.float) catch @panic("Error appending a type.");
|
||||
struct_array.items[index].types.append(DataType.float) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_bool => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.bool) catch @panic("Error appending a type.");
|
||||
struct_array.items[index].types.append(DataType.bool) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_date => @panic("Date not yet implemented"),
|
||||
.identifier => @panic("Link not yet implemented"),
|
||||
.type_date => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.date) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_time => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.time) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_datetime => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.datetime) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.identifier => return SchemaParserError.FeatureMissing,
|
||||
.lr_bracket => state = .expext_array_type,
|
||||
else => return printError("Error parsing schema: Expected data type", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
else => return printError(
|
||||
"Error parsing schema: Expected data type",
|
||||
SchemaParserError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expext_array_type => switch (token.tag) {
|
||||
.type_int => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.int_array) catch @panic("Error appending a type.");
|
||||
struct_array.items[index].types.append(DataType.int_array) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_str => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.str_array) catch @panic("Error appending a type.");
|
||||
struct_array.items[index].types.append(DataType.str_array) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_float => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.float_array) catch @panic("Error appending a type.");
|
||||
struct_array.items[index].types.append(DataType.float_array) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_bool => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.bool_array) catch @panic("Error appending a type.");
|
||||
struct_array.items[index].types.append(DataType.bool_array) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_date => return printError("Error parsing schema: Data not yet implemented", SchemaParserError.FeatureMissing, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
.identifier => return printError("Error parsing schema: Relationship not yet implemented", SchemaParserError.FeatureMissing, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
else => return printError("Error parsing schema: Expected data type", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
.type_date => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.date_array) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_time => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.time_array) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.type_datetime => {
|
||||
state = .expect_comma;
|
||||
struct_array.items[index].types.append(DataType.datetime_array) catch return SchemaParserError.MemoryError;
|
||||
},
|
||||
.identifier => return printError(
|
||||
"Error parsing schema: Relationship not yet implemented",
|
||||
SchemaParserError.FeatureMissing,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
else => return printError(
|
||||
"Error parsing schema: Expected data type",
|
||||
SchemaParserError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
.expect_comma => switch (token.tag) {
|
||||
.comma => state = .expect_member_name_OR_r_paren,
|
||||
else => return printError("Error parsing schema: Expected ,", SchemaParserError.SynthaxError, self.toker.buffer, token.loc.start, token.loc.end),
|
||||
else => return printError(
|
||||
"Error parsing schema: Expected ,",
|
||||
SchemaParserError.SynthaxError,
|
||||
self.toker.buffer,
|
||||
token.loc.start,
|
||||
token.loc.end,
|
||||
),
|
||||
},
|
||||
|
||||
else => unreachable,
|
||||
|
42
src/stuffs/errors.zig
Normal file
42
src/stuffs/errors.zig
Normal file
@ -0,0 +1,42 @@
|
||||
/// Errors raised while parsing a ZiQL query.
/// NOTE(review): `SynthaxError` is misspelled but is part of the public
/// interface (callers switch on it), so the name is kept as-is.
pub const ZiQlParserError = error{
    SynthaxError,
    MemberNotFound,
    MemberMissing,
    StructNotFound,
    FeatureMissing,
    ParsingValueError,
    ConditionError,
};

/// Errors raised while parsing the schema file.
pub const SchemaParserError = error{
    SynthaxError,
    FeatureMissing,
    ValueParsingError,
    MemoryError,
};

/// Errors raised by the file engine (on-disk storage layer).
pub const FileEngineError = error{
    SchemaFileNotFound,
    SchemaNotConform,
    DATAFolderNotFound,
    StructFolderNotFound,
    CantMakeDir,
    CantMakeFile,
    CantOpenDir,
    CantOpenFile,
    MemoryError,
    StreamError,
    ReadError, // TODO: Only use stream
    InvalidUUID,
    InvalidDate,
    InvalidFileIndex,
    DirIterError,
    WriteError,
    FileStatError,
    DeleteFileError,
    RenameFileError,
    StructNotFound,
    MemberNotFound,
};

/// Union of every error set in the project; use in signatures that can
/// propagate errors from any layer.
pub const ZipponError = ZiQlParserError || FileEngineError || SchemaParserError;
|
@ -1,4 +1,5 @@
|
||||
const std = @import("std");
|
||||
const ZipponError = @import("errors.zig").ZipponError;
|
||||
|
||||
const stdout = std.io.getStdOut().writer();
|
||||
|
||||
@ -43,7 +44,7 @@ pub fn send(comptime format: []const u8, args: anytype) void {
|
||||
}
|
||||
|
||||
/// Print an error and send it to the user pointing to the token
|
||||
pub fn printError(message: []const u8, err: anyerror, query: ?[]const u8, start: ?usize, end: ?usize) anyerror {
|
||||
pub fn printError(message: []const u8, err: ZipponError, query: ?[]const u8, start: ?usize, end: ?usize) ZipponError {
|
||||
const allocator = std.heap.page_allocator;
|
||||
var buffer = std.ArrayList(u8).init(allocator);
|
||||
defer buffer.deinit();
|
||||
@ -54,7 +55,7 @@ pub fn printError(message: []const u8, err: anyerror, query: ?[]const u8, start:
|
||||
writer.print("{s}\n", .{message}) catch {};
|
||||
|
||||
if ((start != null) and (end != null) and (query != null)) {
|
||||
const buffer_query = try allocator.dupe(u8, query.?);
|
||||
const buffer_query = allocator.dupe(u8, query.?) catch return ZipponError.MemoryError;
|
||||
defer allocator.free(buffer_query);
|
||||
|
||||
std.mem.replaceScalar(u8, buffer_query, '\n', ' ');
|
||||
|
@ -116,7 +116,7 @@ pub const Tokenizer = struct {
|
||||
},
|
||||
|
||||
.identifier => switch (c) {
|
||||
'a'...'z', 'A'...'Z', '_', '0'...'9', '.' => continue,
|
||||
'a'...'z', 'A'...'Z', '_', '0'...'9', '.', '/' => continue,
|
||||
else => {
|
||||
if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |tag| {
|
||||
result.tag = tag;
|
||||
|
@ -13,6 +13,10 @@ pub const Token = struct {
|
||||
string_literal,
|
||||
int_literal,
|
||||
float_literal,
|
||||
uuid_literal,
|
||||
date_literal,
|
||||
time_literal,
|
||||
datetime_literal,
|
||||
l_bracket, // [
|
||||
r_bracket, // ]
|
||||
};
|
||||
@ -36,6 +40,9 @@ pub const Tokenizer = struct {
|
||||
string_literal,
|
||||
float,
|
||||
int,
|
||||
uuid_literal,
|
||||
date_literal,
|
||||
time_literal,
|
||||
};
|
||||
|
||||
pub fn getTokenSlice(self: *Tokenizer, token: Token) []const u8 {
|
||||
@ -65,6 +72,11 @@ pub const Tokenizer = struct {
|
||||
state = .string_literal;
|
||||
result.tag = .string_literal;
|
||||
},
|
||||
'a'...'z' => {
|
||||
state = .uuid_literal;
|
||||
result.tag = .uuid_literal;
|
||||
},
|
||||
|
||||
'0'...'9', '-' => {
|
||||
state = .int;
|
||||
result.tag = .int_literal;
|
||||
@ -95,9 +107,22 @@ pub const Tokenizer = struct {
|
||||
state = .float;
|
||||
result.tag = .float_literal;
|
||||
},
|
||||
'0'...'9' => continue,
|
||||
'a'...'z', '-' => {
|
||||
state = .uuid_literal;
|
||||
result.tag = .uuid_literal;
|
||||
},
|
||||
'/' => {
|
||||
state = .date_literal;
|
||||
result.tag = .date_literal;
|
||||
},
|
||||
':' => {
|
||||
state = .time_literal;
|
||||
result.tag = .time_literal;
|
||||
},
|
||||
'_', '0'...'9' => continue,
|
||||
else => break,
|
||||
},
|
||||
|
||||
.float => switch (c) {
|
||||
'0'...'9' => {
|
||||
continue;
|
||||
@ -106,6 +131,25 @@ pub const Tokenizer = struct {
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.date_literal => switch (c) {
|
||||
'-' => {
|
||||
state = .time_literal;
|
||||
result.tag = .datetime_literal;
|
||||
},
|
||||
'0'...'9', '/' => continue,
|
||||
else => break,
|
||||
},
|
||||
|
||||
.time_literal => switch (c) {
|
||||
'0'...'9', ':', '.' => continue,
|
||||
else => break,
|
||||
},
|
||||
|
||||
.uuid_literal => switch (c) {
|
||||
'0'...'9', 'a'...'z', '-' => continue,
|
||||
else => break,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -95,7 +95,6 @@ pub const Tokenizer = struct {
|
||||
string_literal,
|
||||
date_literal,
|
||||
time_literal,
|
||||
datetime_literal,
|
||||
uuid_literal,
|
||||
identifier,
|
||||
equal,
|
||||
@ -103,9 +102,7 @@ pub const Tokenizer = struct {
|
||||
angle_bracket_left,
|
||||
angle_bracket_right,
|
||||
string_literal_backslash,
|
||||
int_exponent,
|
||||
float,
|
||||
float_exponent,
|
||||
int,
|
||||
};
|
||||
|
||||
@ -204,7 +201,7 @@ pub const Tokenizer = struct {
|
||||
state = .float;
|
||||
result.tag = .float_literal;
|
||||
},
|
||||
'0'...'9' => {
|
||||
'0'...'9', '-' => {
|
||||
state = .int;
|
||||
result.tag = .int_literal;
|
||||
},
|
||||
@ -324,7 +321,7 @@ pub const Tokenizer = struct {
|
||||
state = .float;
|
||||
result.tag = .float_literal;
|
||||
},
|
||||
'a'...'d', 'f'...'z' => {
|
||||
'a'...'z', '-' => {
|
||||
state = .uuid_literal;
|
||||
result.tag = .uuid_literal;
|
||||
},
|
||||
@ -332,35 +329,15 @@ pub const Tokenizer = struct {
|
||||
state = .date_literal;
|
||||
result.tag = .date_literal;
|
||||
},
|
||||
'-' => {
|
||||
if ((self.index - result.loc.start) == 2) {
|
||||
state = .time_literal;
|
||||
result.tag = .time_literal;
|
||||
} else { // Just in case a uuid have only number as fist part of its UUID
|
||||
state = .uuid_literal;
|
||||
result.tag = .uuid_literal;
|
||||
}
|
||||
},
|
||||
'e', 'E' => {
|
||||
state = .int_exponent;
|
||||
result.tag = .float_literal;
|
||||
':' => {
|
||||
state = .time_literal;
|
||||
result.tag = .time_literal;
|
||||
},
|
||||
'_', '0'...'9' => continue,
|
||||
else => break,
|
||||
},
|
||||
.int_exponent => switch (c) {
|
||||
'+', '-', '0'...'9' => {
|
||||
state = .float;
|
||||
},
|
||||
else => {
|
||||
self.index -= 1;
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.float => switch (c) {
|
||||
'e', 'E' => {
|
||||
state = .float_exponent;
|
||||
},
|
||||
'_', '0'...'9' => {
|
||||
continue;
|
||||
},
|
||||
@ -368,31 +345,21 @@ pub const Tokenizer = struct {
|
||||
break;
|
||||
},
|
||||
},
|
||||
.float_exponent => switch (c) {
|
||||
'+', '-', '0'...'9' => {
|
||||
continue;
|
||||
},
|
||||
else => {
|
||||
self.index -= 1;
|
||||
break;
|
||||
},
|
||||
},
|
||||
|
||||
.date_literal => switch (c) {
|
||||
'|' => {
|
||||
state = .datetime_literal;
|
||||
'-' => {
|
||||
state = .time_literal;
|
||||
result.tag = .datetime_literal;
|
||||
},
|
||||
'0'...'9', '/' => continue,
|
||||
else => break,
|
||||
},
|
||||
|
||||
.time_literal => switch (c) {
|
||||
'0'...'9', '-', '.' => continue,
|
||||
else => break,
|
||||
},
|
||||
.datetime_literal => switch (c) {
|
||||
'0'...'9', '-', '.' => continue,
|
||||
'0'...'9', ':', '.' => continue,
|
||||
else => break,
|
||||
},
|
||||
|
||||
.uuid_literal => switch (c) {
|
||||
'0'...'9', 'a'...'z', '-' => continue,
|
||||
else => break,
|
||||
@ -421,9 +388,9 @@ test "basic query" {
|
||||
|
||||
test "basic date" {
|
||||
try testTokenize("1a5527af-88fb-48c1-8d5c-49c9b73c2379", &.{.uuid_literal});
|
||||
try testTokenize("21/01/1998", &.{.date_literal});
|
||||
try testTokenize("17-55-31.0000", &.{.time_literal});
|
||||
try testTokenize("21/01/1998|17-55-31.0000", &.{.datetime_literal});
|
||||
try testTokenize("1998/01/21", &.{.date_literal});
|
||||
try testTokenize("17:55:31.0000", &.{.time_literal});
|
||||
try testTokenize("1998/01/21-17:55:31.0000", &.{.datetime_literal});
|
||||
}
|
||||
|
||||
fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !void {
|
||||
|
@ -1,13 +1,20 @@
|
||||
/// Supported data types for the DB.
/// Maybe start using a tagged union (`union(enum)`) later.
pub const DataType = enum {
    int,
    float,
    str,
    bool,
    id,
    date,
    time,
    datetime,
    int_array,
    float_array,
    str_array,
    bool_array,
    id_array,
    date_array,
    time_array,
    datetime_array,
};
|
||||
|
@ -2,7 +2,6 @@
|
||||
|
||||
const std = @import("std");
|
||||
const string = []const u8;
|
||||
const extras = @import("extras");
|
||||
const time = @This();
|
||||
|
||||
pub const DateTime = struct {
|
||||
@ -13,9 +12,8 @@ pub const DateTime = struct {
|
||||
days: u8,
|
||||
months: u8,
|
||||
years: u16,
|
||||
timezone: TimeZone,
|
||||
|
||||
const Self = @This();
|
||||
const Self = @This(); // Pas mal ca
|
||||
|
||||
pub fn initUnixMs(unix: u64) Self {
|
||||
return epoch_unix.addMs(unix);
|
||||
@ -26,14 +24,15 @@ pub const DateTime = struct {
|
||||
}
|
||||
|
||||
/// Caller asserts that this is > epoch
|
||||
pub fn init(year: u16, month: u16, day: u16, hr: u16, min: u16, sec: u16) Self {
|
||||
pub fn init(year: u16, month: u16, day: u16, hr: u16, min: u16, sec: u16, ms: u16) Self {
|
||||
return epoch_unix
|
||||
.addYears(year - epoch_unix.years)
|
||||
.addMonths(month)
|
||||
.addDays(day)
|
||||
.addHours(hr)
|
||||
.addMins(min)
|
||||
.addSecs(sec);
|
||||
.addSecs(sec)
|
||||
.addMs(ms);
|
||||
}
|
||||
|
||||
pub fn now() Self {
|
||||
@ -47,8 +46,7 @@ pub const DateTime = struct {
|
||||
.hours = 0,
|
||||
.days = 0,
|
||||
.months = 0,
|
||||
.years = 1970, // Why ?
|
||||
.timezone = .UTC,
|
||||
.years = 0,
|
||||
};
|
||||
|
||||
pub fn eql(self: Self, other: Self) bool {
|
||||
@ -58,9 +56,30 @@ pub const DateTime = struct {
|
||||
self.hours == other.hours and
|
||||
self.days == other.days and
|
||||
self.months == other.months and
|
||||
self.years == other.years and
|
||||
self.timezone == other.timezone and
|
||||
self.weekday == other.weekday;
|
||||
self.years == other.years;
|
||||
}
|
||||
|
||||
pub fn compareDate(self: Self, other: Self) bool {
|
||||
return self.days == other.days and
|
||||
self.months == other.months and
|
||||
self.years == other.years;
|
||||
}
|
||||
|
||||
pub fn compareTime(self: Self, other: Self) bool {
|
||||
return self.ms == other.ms and
|
||||
self.seconds == other.seconds and
|
||||
self.minutes == other.minutes and
|
||||
self.hours == other.hours;
|
||||
}
|
||||
|
||||
pub fn compareDatetime(self: Self, other: Self) bool {
|
||||
return self.ms == other.ms and
|
||||
self.seconds == other.seconds and
|
||||
self.minutes == other.minutes and
|
||||
self.hours == other.hours and
|
||||
self.days == other.days and
|
||||
self.months == other.months and
|
||||
self.years == other.years;
|
||||
}
|
||||
|
||||
// So as long as the count / unit, it continue adding the next unit, smart
|
||||
@ -390,20 +409,6 @@ pub const DateTime = struct {
|
||||
.ms = self.toUnixMilli() - other_in_the_past.toUnixMilli(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn era(self: Self) Era {
|
||||
if (self.years >= 0) return .AD;
|
||||
@compileError("TODO");
|
||||
}
|
||||
|
||||
pub fn weekday(self: Self) WeekDay {
|
||||
var i = self.daysSinceEpoch() % 7;
|
||||
var result = WeekDay.Thu; // weekday of epoch_unix
|
||||
while (i > 0) : (i -= 1) {
|
||||
result = result.next();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
pub const format = struct {
|
||||
@ -419,43 +424,6 @@ pub const format = struct {
|
||||
pub const llll = "ddd, " ++ lll;
|
||||
};
|
||||
|
||||
pub const TimeZone = enum {
|
||||
UTC,
|
||||
|
||||
usingnamespace extras.TagNameJsonStringifyMixin(@This());
|
||||
};
|
||||
|
||||
pub const WeekDay = enum {
|
||||
Sun,
|
||||
Mon,
|
||||
Tue,
|
||||
Wed,
|
||||
Thu,
|
||||
Fri,
|
||||
Sat,
|
||||
|
||||
pub fn next(self: WeekDay) WeekDay {
|
||||
return switch (self) {
|
||||
.Sun => .Mon,
|
||||
.Mon => .Tue,
|
||||
.Tue => .Wed,
|
||||
.Wed => .Thu,
|
||||
.Thu => .Fri,
|
||||
.Fri => .Sat,
|
||||
.Sat => .Sun,
|
||||
};
|
||||
}
|
||||
|
||||
usingnamespace extras.TagNameJsonStringifyMixin(@This());
|
||||
};
|
||||
|
||||
pub const Era = enum {
|
||||
// BC,
|
||||
AD,
|
||||
|
||||
usingnamespace extras.TagNameJsonStringifyMixin(@This());
|
||||
};
|
||||
|
||||
pub fn isLeapYear(year: u16) bool {
|
||||
var ret = false;
|
||||
if (year % 4 == 0) ret = true;
|
||||
|
@ -1,5 +1,6 @@
|
||||
const std = @import("std");
|
||||
const UUID = @import("uuid.zig").UUID;
|
||||
const DateTime = @import("date.zig").DateTime;
|
||||
|
||||
// TODO: Put those functions somewhere else
|
||||
pub fn parseInt(value_str: []const u8) i64 {
|
||||
@ -36,6 +37,69 @@ pub fn parseBool(value_str: []const u8) bool {
|
||||
return (value_str[0] != '0');
|
||||
}
|
||||
|
||||
// TODO: Optimize all date parsing

/// Parse a `yyyy/mm/dd` date string into a DateTime; the time part is zeroed.
/// Invalid numeric fields fall back to 0 rather than erroring.
/// NOTE(review): assumes the tokenizer guarantees at least 10 characters — confirm.
pub fn parseDate(value_str: []const u8) DateTime {
    const year: u16 = std.fmt.parseInt(u16, value_str[0..4], 10) catch 0;
    const month: u16 = std.fmt.parseInt(u16, value_str[5..7], 10) catch 0;
    const day: u16 = std.fmt.parseInt(u16, value_str[8..10], 10) catch 0;

    return DateTime.init(year, month, day, 0, 0, 0, 0);
}

/// Parse an array literal like `[2024/10/19 2024/10/20]` into a list of DateTime.
/// Caller owns the returned list and must call `deinit`.
pub fn parseArrayDate(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(DateTime) {
    var array = std.ArrayList(DateTime).init(allocator);

    var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (it.next()) |x| {
        array.append(parseDate(x)) catch {};
    }

    return array;
}

/// Parse a `hh:mm[:ss[.mmmm]]` time string into a DateTime; the date part is zeroed.
/// Seconds and milliseconds are optional.
pub fn parseTime(value_str: []const u8) DateTime {
    const hours: u16 = std.fmt.parseInt(u16, value_str[0..2], 10) catch 0;
    const minutes: u16 = std.fmt.parseInt(u16, value_str[3..5], 10) catch 0;
    // FIX: the previous guards (`len > 6` / `len > 9`) permitted out-of-bounds
    // slices for truncated inputs like "12:45:0" or "12:45:00.00"; require the
    // whole field to be present before slicing it.
    const seconds: u16 = if (value_str.len >= 8) std.fmt.parseInt(u16, value_str[6..8], 10) catch 0 else 0;
    const milliseconds: u16 = if (value_str.len >= 13) std.fmt.parseInt(u16, value_str[9..13], 10) catch 0 else 0;

    return DateTime.init(0, 0, 0, hours, minutes, seconds, milliseconds);
}

/// Parse an array literal of times into a list of DateTime.
/// Caller owns the returned list and must call `deinit`.
pub fn parseArrayTime(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(DateTime) {
    var array = std.ArrayList(DateTime).init(allocator);

    var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (it.next()) |x| {
        array.append(parseTime(x)) catch {};
    }

    return array;
}

/// Parse a `yyyy/mm/dd-hh:mm[:ss[.mmmm]]` datetime string into a DateTime.
/// Seconds and milliseconds are optional.
pub fn parseDatetime(value_str: []const u8) DateTime {
    const year: u16 = std.fmt.parseInt(u16, value_str[0..4], 10) catch 0;
    const month: u16 = std.fmt.parseInt(u16, value_str[5..7], 10) catch 0;
    const day: u16 = std.fmt.parseInt(u16, value_str[8..10], 10) catch 0;
    const hours: u16 = std.fmt.parseInt(u16, value_str[11..13], 10) catch 0;
    const minutes: u16 = std.fmt.parseInt(u16, value_str[14..16], 10) catch 0;
    // FIX: same out-of-bounds hazard as parseTime — `len > 17` allowed slicing
    // [17..19] on an 18-char input, and `len > 20` allowed [20..24] on 21-23
    // chars. Require the full field before reading it.
    const seconds: u16 = if (value_str.len >= 19) std.fmt.parseInt(u16, value_str[17..19], 10) catch 0 else 0;
    const milliseconds: u16 = if (value_str.len >= 24) std.fmt.parseInt(u16, value_str[20..24], 10) catch 0 else 0;

    return DateTime.init(year, month, day, hours, minutes, seconds, milliseconds);
}

/// Parse an array literal of datetimes into a list of DateTime.
/// Caller owns the returned list and must call `deinit`.
pub fn parseArrayDatetime(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(DateTime) {
    var array = std.ArrayList(DateTime).init(allocator);

    var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], " ");
    while (it.next()) |x| {
        array.append(parseDatetime(x)) catch {};
    }

    return array;
}
|
||||
|
||||
pub fn parseArrayBool(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList(bool) {
|
||||
var array = std.ArrayList(bool).init(allocator);
|
||||
|
||||
@ -59,65 +123,176 @@ pub fn parseArrayUUID(allocator: std.mem.Allocator, array_str: []const u8) std.A
|
||||
return array;
|
||||
}
|
||||
|
||||
// FIXME: Does not handle a ' inside a string, not even an escaped \'.
/// Parse a bracketed list of single-quoted strings (e.g. `['Hello' 'World']`)
/// into a list of newly-allocated strings, each re-wrapped in single quotes.
/// Caller owns the returned list and every item in it.
pub fn parseArrayStr(allocator: std.mem.Allocator, array_str: []const u8) std.ArrayList([]const u8) {
    var array = std.ArrayList([]const u8).init(allocator);

    var it = std.mem.splitAny(u8, array_str[1 .. array_str.len - 1], "'");
    _ = it.next(); // Skip first token that is empty
    while (it.next()) |x| {
        // Tokens between two quoted strings are the separating spaces; skip them.
        if (std.mem.eql(u8, " ", x)) continue;
        // Re-add the '' on each side so the stored value keeps its quotes.
        const x_copy = std.fmt.allocPrint(allocator, "'{s}'", .{x}) catch @panic("=(");
        array.append(x_copy) catch {};
    }

    // The split also yields an empty trailing token (mirror of the skipped
    // leading one). FIX: only pop when something was appended — the old
    // unconditional `pop()` crashed on an empty array literal like `[]`.
    if (array.items.len > 0) {
        allocator.free(array.pop());
    }

    return array;
}
|
||||
|
||||
test "Value parsing: Int" {
    const allocator = std.testing.allocator;

    // Scalar parsing: unparsable input falls back to 0.
    const inputs: [3][]const u8 = .{ "1", "42", "Hello" };
    const expected: [3]i64 = .{ 1, 42, 0 };
    for (inputs, expected) |input, want| {
        try std.testing.expect(parseInt(input) == want);
    }

    // Array parsing: the same fallback applies element-wise.
    const parsed = parseArrayInt(allocator, "[1 14 44 42 hello]");
    defer parsed.deinit();
    try std.testing.expect(std.mem.eql(i64, parsed.items, &[_]i64{ 1, 14, 44, 42, 0 }));
}
|
||||
|
||||
test "Value parsing: Float" {
    const allocator = std.testing.allocator;

    // Scalar parsing: unparsable input falls back to 0.
    const inputs: [3][]const u8 = .{ "1.3", "65.991", "Hello" };
    const expected: [3]f64 = .{ 1.3, 65.991, 0 };
    for (inputs, expected) |input, want| {
        try std.testing.expect(parseFloat(input) == want);
    }

    // Array parsing: integers like `42` are accepted as floats too.
    const parsed = parseArrayFloat(allocator, "[1.5 14.3 44.9999 42 hello]");
    defer parsed.deinit();
    try std.testing.expect(std.mem.eql(f64, parsed.items, &[_]f64{ 1.5, 14.3, 44.9999, 42, 0 }));
}
|
||||
|
||||
// Bool
|
||||
const in5: [3][]const u8 = .{ "1", "Hello", "0" };
|
||||
const expected_out5: [3]bool = .{ true, true, false };
|
||||
for (in5, 0..) |value, i| {
|
||||
try std.testing.expect(parseBool(value) == expected_out5[i]);
|
||||
test "Value parsing: String" {
    // Note: scalar strings need no parsing — a string is stored as-is —
    // so only the array form is exercised here.
    const allocator = std.testing.allocator;

    const parsed = parseArrayStr(allocator, "['Hello' 'How are you doing ?' '']");
    defer {
        for (parsed.items) |item| allocator.free(item);
        parsed.deinit();
    }

    // Items come back re-wrapped in their single quotes; '' stays empty.
    const expected: [3][]const u8 = .{ "'Hello'", "'How are you doing ?'", "''" };
    for (parsed.items, expected) |item, want| {
        try std.testing.expect(std.mem.eql(u8, item, want));
    }
}
|
||||
|
||||
test "Value parsing: Bool array" {
    const allocator = std.testing.allocator;

    // Scalar parsing; note the expectation that "Hello" parses as true.
    const inputs: [3][]const u8 = .{ "1", "Hello", "0" };
    const expected: [3]bool = .{ true, true, false };
    for (inputs, expected) |input, want| {
        try std.testing.expect(parseBool(input) == want);
    }

    // Array parsing.
    const parsed = parseArrayBool(allocator, "[1 0 0 1 1]");
    defer parsed.deinit();
    try std.testing.expect(std.mem.eql(bool, parsed.items, &[_]bool{ true, false, false, true, true }));
}
|
||||
|
||||
test "Value parsing: Date" {
    const allocator = std.testing.allocator;

    // Shared expectations for both the scalar and the array form.
    const inputs: [3][]const u8 = .{ "1920/01/01", "1998/01/21", "2024/12/31" };
    const expected: [3]DateTime = .{
        DateTime.init(1920, 1, 1, 0, 0, 0, 0),
        DateTime.init(1998, 1, 21, 0, 0, 0, 0),
        DateTime.init(2024, 12, 31, 0, 0, 0, 0),
    };

    // Scalar parsing.
    for (inputs, expected) |input, want| {
        try std.testing.expect(want.compareDate(parseDate(input)));
    }

    // Array parsing.
    const parsed = parseArrayDate(allocator, "[1920/01/01 1998/01/21 2024/12/31]");
    defer parsed.deinit();
    for (parsed.items, expected) |item, want| {
        try std.testing.expect(want.compareDate(item));
    }
}
|
||||
|
||||
test "Value parsing: Time" {
    const allocator = std.testing.allocator;

    // Shared expectations: seconds and milliseconds are optional in the input.
    const inputs: [4][]const u8 = .{ "12:45:00.0000", "18:12:53.7491", "02:30:10", "12:30" };
    const expected: [4]DateTime = .{
        DateTime.init(0, 0, 0, 12, 45, 0, 0),
        DateTime.init(0, 0, 0, 18, 12, 53, 7491),
        DateTime.init(0, 0, 0, 2, 30, 10, 0),
        DateTime.init(0, 0, 0, 12, 30, 0, 0),
    };

    // Scalar parsing.
    for (inputs, expected) |input, want| {
        try std.testing.expect(want.compareTime(parseTime(input)));
    }

    // Array parsing.
    const parsed = parseArrayTime(allocator, "[12:45:00.0000 18:12:53.7491 02:30:10 12:30]");
    defer parsed.deinit();
    for (parsed.items, expected) |item, want| {
        try std.testing.expect(want.compareTime(item));
    }
}
|
||||
|
||||
test "Value parsing: Datetime" {
    const allocator = std.testing.allocator;

    // Shared expectations: seconds and milliseconds are optional in the input.
    const inputs: [4][]const u8 = .{ "1920/01/01-12:45:00.0000", "1920/01/01-18:12:53.7491", "1920/01/01-02:30:10", "1920/01/01-12:30" };
    const expected: [4]DateTime = .{
        DateTime.init(1920, 1, 1, 12, 45, 0, 0),
        DateTime.init(1920, 1, 1, 18, 12, 53, 7491),
        DateTime.init(1920, 1, 1, 2, 30, 10, 0),
        DateTime.init(1920, 1, 1, 12, 30, 0, 0),
    };

    // Scalar parsing.
    for (inputs, expected) |input, want| {
        try std.testing.expect(want.compareDatetime(parseDatetime(input)));
    }

    // Datetime array parsing.
    const parsed = parseArrayDatetime(allocator, "[1920/01/01-12:45:00.0000 1920/01/01-18:12:53.7491 1920/01/01-02:30:10 1920/01/01-12:30]");
    defer parsed.deinit();
    for (parsed.items, expected) |item, want| {
        try std.testing.expect(want.compareDatetime(item));
    }
}
|
||||
|
File diff suppressed because it is too large
Load Diff
95
test_data/v0.1.1/DATA/User/0.zippondata
Normal file
95
test_data/v0.1.1/DATA/User/0.zippondata
Normal file
@ -0,0 +1,95 @@
|
||||
6f15128c-f6fc-4017-b047-618da3dd59ec 'Nicole Wright' 89 'icarter@example.org' 2006/01/17 1995/02/03-12:41:35.347682 00:34:40.853006 [9 81 75 41 66 39 69 28 2] []
|
||||
c55ee39e-d519-4801-a817-4905f1014686 'Andrea Salazar' 31 'jeremymiller@example.net' 1982/11/19 2000/01/27-11:05:27.447913 11:09:30.575356 [43 100 7 26 30 56] []
|
||||
62740397-5a9b-47ef-bfb8-3c5e61b79c8c 'Devin Burns' 44 'coliver@example.net' 2003/02/27 1975/07/24-15:05:53.420305 13:20:58.874282 [84 35 62 84 25 69 99 30] []
|
||||
4f31323f-3bf4-4091-a8cd-d4bc620dd9ff 'Lawrence Craig' 34 'dannyfisher@example.org' 1998/07/12 2002/04/10-09:39:48.189347 14:42:58.739696 [34 61 2 14 16] []
|
||||
7d6a71c3-b3d0-4165-a660-bf2085333b77 'Katherine King' 83 'mirandawatkins@example.org' 2018/01/15 2000/09/27-12:08:16.778628 02:58:57.603440 [96 56 9 8 11 78 81 92 33] []
|
||||
69bb8ec8-1dfb-430f-a921-502441bb80ce 'Holly Griffin' 56 'troyarias@example.net' 1988/12/14 1987/07/07-00:22:27.255196 13:54:28.097706 [67 12 38 41 73 84 27 30 27] []
|
||||
02d063ca-bf05-4d8f-a26a-2c425053ed5f 'Karen Ford' 25 'brewerjulie@example.org' 2021/06/05 2013/12/23-09:52:40.159762 13:58:02.129021 [32 38] []
|
||||
79f470f3-daa0-4e3c-b7ce-49c56ed71efa 'Heather Walter' 45 'nathan09@example.org' 2005/02/15 2011/04/08-12:46:07.231369 20:25:35.561851 [78 76] []
|
||||
b4ca1d16-9844-434c-b51e-f5f954631e5b 'Timothy Gilbert' 9 'prattangela@example.org' 1971/03/22 1995/11/30-19:55:29.967903 06:59:02.893410 [64 30 47 7 88 46 32 100 69] []
|
||||
6a194c32-1971-4544-bf0c-1f044095cc68 'Jose Becker' 79 'ymartinez@example.com' 2008/08/14 1984/10/03-17:12:17.833573 17:47:37.450798 [93 60 53 76 17 14 6 55] []
|
||||
2a7bfc0f-6026-4112-a7b2-665c7429a0b6 'Tony Contreras' 62 'lbrown@example.org' 2014/07/07 1991/08/19-17:41:13.825644 06:00:52.599448 [35] []
|
||||
ef5f2f67-8c26-400f-865d-b5c512938b5c 'Karen Gonzalez' 25 'klong@example.org' 1989/08/04 1970/02/27-07:08:05.564124 10:13:57.631108 [95 55 80 78 42 77 90 37 86] []
|
||||
b25015b6-f78b-4ec4-a2a4-67e12b59d20c 'Nicole Woodard' 70 'rhernandez@example.com' 2005/05/14 2000/06/30-17:04:05.448483 05:20:12.524938 [75 98 100 91 82 68] []
|
||||
6c7855e6-05b4-4f57-b306-db46fb05b973 'Monique Lee' 12 'cruzmatthew@example.com' 1989/05/20 2000/10/30-03:22:34.691532 00:58:43.618501 [8 51 50 24 29 53 42 65] []
|
||||
fd8d9292-609b-4e9f-b4bc-3f10a58b0b14 'Donald Bush' 42 'ucontreras@example.net' 2003/08/04 1974/09/14-07:30:16.101479 23:14:47.929777 [64] []
|
||||
4071a5a2-6350-422b-b2aa-935b128ebe87 'Michael Young' 17 'timothy68@example.net' 1970/05/16 1981/04/03-13:41:01.404102 13:05:47.903322 [70] []
|
||||
d2d3e163-b914-4fdb-b5a4-2c0082bd7f86 'Scott Johnson' 57 'amanda44@example.org' 2012/05/04 1990/10/31-16:30:09.218291 00:20:15.601723 [10 71] []
|
||||
fe49b76b-21d5-423a-b139-35e7c8f49d83 'Harry Frank' 69 'kayleeshaw@example.org' 1970/01/18 2013/01/16-17:05:34.108621 12:17:46.996227 [93 7 97 20 43] []
|
||||
c01a286d-936f-4d3f-adde-abe900b403c6 'Steven Jones' 67 'bonniewalls@example.com' 1988/07/03 1997/01/11-09:52:57.654011 01:37:24.771031 [] []
|
||||
7ca08195-c5ff-4143-a243-0ad1bac72f1e 'Gregory Thomas' 81 'tjordan@example.org' 1988/07/08 1997/03/01-23:06:34.417278 18:32:20.997120 [60 38 95 33 70 18 62 40] []
|
||||
0ab42d73-5c9e-4269-b03d-fd722333a1e1 'Jason Taylor' 2 'katherine20@example.net' 1977/10/10 2005/01/13-16:03:48.152308 01:10:42.486388 [13 41 52] []
|
||||
34732e47-061c-4a36-bbf6-452c62258c07 'Courtney Buckley' 35 'pwatkins@example.net' 2001/10/27 2012/03/30-13:12:25.647147 06:10:25.862129 [25 50 98 54 19 21 99 22 26 6] []
|
||||
a115e144-6adc-4948-a13d-0c6a98240cf7 'John Young' 11 'ygarcia@example.com' 2001/06/11 1980/08/10-10:26:05.493420 13:46:04.978166 [11 80 70 25 17] []
|
||||
a8983cf9-7f9b-4009-9e43-a30cd783e538 'Lindsay Perry' 30 'dominique55@example.org' 2013/01/21 2003/05/26-05:31:54.921319 13:01:07.743739 [73 41 4 2 84 97 92 84] []
|
||||
e4ef6974-58c5-4233-88ec-fcc0409f656f 'Matthew Silva' 51 'colecarlos@example.org' 2001/12/27 1984/05/29-16:23:04.249520 03:48:15.293856 [94 2 3 81] []
|
||||
6f72bdcc-d9c0-4852-969e-e81a0af2f0a7 'Kathryn Thomas' 96 'sloankelly@example.org' 2023/07/22 2000/01/28-06:45:08.683359 23:16:37.477658 [1] []
|
||||
3f459011-74d2-465b-94a1-2f23f19615e0 'Bethany Walker' 66 'brooke05@example.com' 2002/11/26 1988/03/26-14:03:37.178294 10:57:40.636124 [75 97 89] []
|
||||
1aab1aa1-6148-4e71-8109-f1421d7a0256 'Nancy Ayala' 64 'oreed@example.com' 1985/01/14 1985/09/24-03:37:28.419038 12:58:02.017099 [26] []
|
||||
98dfba72-24c4-4780-b489-13ebd78c4006 'Andrea Carter' 91 'ylopez@example.org' 2006/11/13 1983/12/06-02:36:18.776250 11:44:44.393381 [81 5 94 100 36 67 70] []
|
||||
e8641c86-1e63-44d9-b610-8ad30b0b00dc 'Courtney Ramirez' 87 'byrdhenry@example.net' 1998/05/03 2014/05/05-15:03:02.703651 12:37:45.835634 [93] []
|
||||
87e7ee8f-7424-44a0-8ea2-354e80f13a67 'Jesse Diaz' 43 'rmclaughlin@example.com' 1992/02/19 1988/12/02-03:30:13.802912 13:43:02.317048 [78 63 41] []
|
||||
699ba43d-8a20-4cfe-aa96-305372110f9e 'Sarah Williams' 49 'donaldsimpson@example.net' 1989/10/08 2011/05/18-09:22:54.189722 11:07:05.033493 [52 71 84 82 21 66 99 49 91 57] []
|
||||
5eecc879-1acb-461b-8b81-2a0b6d8b0045 'Jesus Martinez' 17 'oedwards@example.org' 2008/12/08 2022/03/09-08:59:07.297182 21:52:17.891999 [17 35 4 54 88 12 64 20 31] []
|
||||
f1c6fd43-5d99-4025-a1c0-02264144d7de 'Stephen Lyons' 59 'diazkayla@example.org' 2013/10/22 2011/08/11-22:38:36.345329 07:49:48.479873 [32 56 38 56 71 3 52 64 20] []
|
||||
ff3f6544-1060-4c94-ad52-4ef2bb04f639 'Sheena Vaughn' 22 'danielhowell@example.com' 1974/07/21 2022/08/19-00:41:18.729117 03:22:58.505303 [100 81 15 7 25] []
|
||||
b672f982-9fff-4c3c-889d-b2d71a876a2e 'Nicholas Stewart' 29 'cwinters@example.com' 1984/02/10 1990/01/13-05:57:18.864281 21:27:16.731434 [] []
|
||||
16d18dcb-0c4e-4176-83ae-dde38194da9e 'Jacqueline Harris' 27 'hernandezmatthew@example.net' 1975/09/13 1976/11/05-10:43:58.720147 01:59:09.360304 [88 74 71 68 13 24 92] []
|
||||
e7b8f128-264a-48c4-ad7b-fe223a48942e 'Randall Roberts' 32 'amanda35@example.net' 2023/12/17 2021/04/23-16:33:49.938777 17:21:05.318298 [] []
|
||||
6c6bd267-3b42-4b12-b24a-c8ed6b47e373 'Kim Marshall' 54 'ramirezmary@example.org' 1972/05/28 1989/05/07-09:48:35.231589 06:57:10.569574 [33 22 59 34 7] []
|
||||
69b9ebcc-9e5c-4a9e-99b3-e97b79cd4a7c 'Kenneth Taylor' 76 'sarahgarrett@example.com' 1982/09/13 1974/07/20-18:24:31.972235 15:13:37.414090 [23] []
|
||||
4c682774-7b58-4a4a-a388-5b0bca50408e 'Melissa Singh' 40 'walkerkyle@example.net' 1996/06/02 1972/10/10-08:09:15.367654 18:47:13.558160 [19 13 86 2 17 35 48] []
|
||||
8a554cbe-90cf-441e-8313-6a6b5874ca7c 'Monique Taylor' 30 'bmiller@example.com' 2011/09/12 1999/09/04-04:11:54.860487 03:11:25.453971 [20 89] []
|
||||
b6d5db38-6708-4057-8d41-bda61e00de19 'Crystal Morse PhD' 37 'troyclark@example.com' 1978/08/29 1983/07/29-12:15:00.474676 12:25:42.213966 [] []
|
||||
29687dea-ed28-4e7d-868b-fe95a1bd0ae1 'Jorge Camacho' 64 'ncalhoun@example.org' 2005/06/03 1987/11/11-04:25:06.339660 03:38:53.874215 [26 97 78 94 79 82 16 73] []
|
||||
badd9d70-15fd-4d92-9c63-1e516fd94bf5 'Ronald Bowman' 6 'jessicaroberts@example.org' 1989/12/10 1997/04/04-14:33:17.025758 22:23:38.684410 [49 41 72 91 56 22 89 5 66] []
|
||||
991f06d6-5c41-48f5-a6cf-bb0e8f4d0a2b 'Alex Mcintyre' 21 'omurillo@example.net' 2018/07/07 1980/12/30-07:08:33.241262 22:38:12.623654 [81 44 88 61 91 20 44 12 11 51] []
|
||||
7a8dfc49-3603-436d-88fb-f425fcb341f5 'Jennifer Tate' 100 'kleinashley@example.com' 1979/07/20 1989/11/03-09:09:07.664687 03:30:19.034533 [74 80 56] []
|
||||
4105ced1-f986-406c-92ce-b771c4475b92 'Lindsay Russell' 47 'jeremy04@example.org' 1975/04/25 2006/11/17-23:05:10.042363 11:30:19.238273 [87] []
|
||||
1fed0010-5a9f-44fe-a518-60935961b521 'William Carter' 47 'ryan63@example.org' 1983/12/29 1972/02/01-21:05:07.185256 15:58:24.650869 [31 73 95 36 50 11 47 71] []
|
||||
9eb2de04-c20b-4a8c-8726-4e1ebe849d4f 'Eric Brewer' 61 'allison33@example.com' 2010/04/17 1978/12/12-15:20:08.485655 06:39:24.067146 [] []
|
||||
c6d56661-2e08-4099-b98b-a0df14fda563 'Thomas Nguyen' 86 'robinsonmichael@example.net' 1989/06/03 1999/12/13-05:51:32.860085 07:41:36.279351 [] []
|
||||
ef6c7b92-d0f9-4541-b37c-1b19ec9c3d76 'Gregory Hawkins' 68 'christinabrown@example.net' 2020/12/05 1989/11/22-01:40:05.282515 04:25:25.661361 [41 92 0] []
|
||||
e92c2a5e-9886-4bc6-b041-03426fdda9b4 'Allison Hansen DDS' 79 'mooneydanielle@example.org' 2024/06/07 1983/07/15-16:53:03.604798 15:56:43.407431 [51 5 37 41] []
|
||||
1569cf7a-7c5a-489a-a7fc-17c868a4d955 'Nicole Walker' 69 'ysmith@example.com' 2023/04/16 1989/08/13-05:22:33.896806 05:29:40.942315 [77 98 34] []
|
||||
3a43b066-40ab-4aeb-84e1-33d8b0b38393 'Angela Nelson' 12 'yrobinson@example.org' 1973/06/10 2000/01/29-10:37:36.189531 12:02:18.070502 [] []
|
||||
ddf229ad-07f5-42a0-abcb-962286246665 'Michael Garza' 9 'cruzrodney@example.org' 1996/03/29 2018/09/18-20:28:23.590399 06:02:48.237654 [] []
|
||||
01870e4b-8ab8-478c-b422-dfd5cf0d5194 'Amy Jennings' 38 'luiscarey@example.com' 2011/09/10 1993/03/14-19:26:31.568862 22:12:11.540630 [58] []
|
||||
0fa38607-208c-4e0f-9007-ec8160ecc881 'Emily Wade' 59 'barajassusan@example.net' 1982/04/02 1983/11/15-03:15:31.100487 16:16:22.748024 [37 28 7 59 22 22 82] []
|
||||
6d5c1287-d51b-45c8-8d66-bd4abfd5ca75 'Mrs. Julie Oconnor MD' 43 'rwallace@example.net' 1983/10/24 1977/03/01-20:04:48.664772 19:51:54.960776 [66 0 1] []
|
||||
dfbc47e3-b74a-42ea-8e47-efa5b5b1eb34 'Jessica Aguilar' 29 'cjones@example.org' 1979/03/19 1993/08/27-19:45:10.648767 07:44:34.802907 [] []
|
||||
d39fc239-aa4b-44a7-8eed-cec344972ac9 'Kenneth Ramos' 21 'mcdonaldmichael@example.org' 1973/02/08 1987/01/18-07:15:40.134471 02:00:26.886714 [22 38 69 33 9 86 50 62 14] []
|
||||
275d0acb-2ddd-4af9-997f-2a863dd8bdcd 'Jonathan Mccoy' 44 'graywanda@example.net' 1981/12/18 1973/06/24-10:23:26.893595 19:25:38.105128 [79 78] []
|
||||
7bbe5d3f-41c3-447f-adc8-17b001ab57bf 'Mark Lamb' 74 'millertimothy@example.com' 2020/07/28 2019/08/24-05:29:49.061194 00:50:20.659030 [92 7 74 27 44 69 9 85] []
|
||||
192f07a6-5e51-4955-84c5-9a1e935ceb0c 'Jose Hahn' 88 'larrybrewer@example.net' 1996/10/11 1990/09/17-05:09:53.913448 11:05:19.668141 [21 63 63 13 63 38 68] []
|
||||
a1479495-4520-46b5-b8df-2a2e7a7caf12 'Christopher Moore' 35 'milleralbert@example.org' 1985/10/10 2003/11/26-05:20:51.267371 10:31:31.669712 [66] []
|
||||
2accecd9-8bdb-4be0-85a1-1294c4b421c1 'Joshua Wilson' 67 'chad03@example.net' 2011/12/16 2013/06/23-23:46:45.533988 17:53:55.824275 [71 50 50 34 63 42 41 83 87 46] []
|
||||
0632a861-fd7e-40e6-a3db-c37f9174bf72 'Craig Wells' 39 'blevinsjessica@example.org' 2024/06/17 2016/06/08-08:43:33.145082 17:38:33.801481 [62 47 11 67 19] []
|
||||
57f94d12-41b5-45a0-b509-286893029a68 'Catherine Nelson' 12 'apowell@example.net' 2011/08/26 1971/04/21-06:18:44.227037 16:34:12.817227 [25 90 24] []
|
||||
917dae19-9906-4082-a015-7e5601be33c0 'Louis Mcknight' 97 'whitneytyler@example.org' 1989/08/07 2004/04/22-06:09:24.295995 05:43:30.569809 [49] []
|
||||
36924d24-ad7a-4ef8-af65-17f37b073913 'Dustin Frost' 86 'carmen43@example.net' 1980/05/22 1980/01/12-00:38:56.239871 05:14:53.584038 [86 33 12] []
|
||||
4b9a27a1-6b55-43ec-9f97-64689550fe0f 'Amy Gill' 69 'peckwilliam@example.net' 1975/06/07 1970/05/12-05:29:50.339568 15:26:41.551220 [48 19 91 58] []
|
||||
70bc00b9-6d53-4d75-b77a-a5880726e9db 'Tony Edwards' 30 'simmonscharles@example.net' 1987/03/08 2000/09/12-23:37:21.850780 15:03:04.343320 [41 16 38 1 92 6 14 65 8] []
|
||||
9fe3cc77-e4b0-48d4-90f8-d02e51f65b7e 'Christopher Hampton' 21 'dkim@example.org' 2019/05/20 1982/05/26-19:48:52.386687 11:44:55.421839 [] []
|
||||
6530eceb-15cf-4b6e-a96c-9dc377652fde 'Samantha Cline' 75 'sallyguzman@example.org' 2022/08/20 2019/11/17-06:59:59.793431 13:34:09.266364 [89] []
|
||||
85d4c7ad-7d87-421d-836a-452e9fcb9444 'Lindsay Rodriguez' 85 'lrush@example.net' 2010/03/10 2013/03/25-05:13:23.089586 18:28:41.293076 [77 7 9] []
|
||||
88a030c0-0fb5-4f02-9943-6f1fc773a12b 'Sabrina Larsen DDS' 33 'fwilliams@example.net' 2007/04/05 2004/05/21-04:21:36.580782 07:08:17.851384 [38 2 18 82 35 55 78 53] []
|
||||
87389248-a10b-41a3-b11e-f0d831db5510 'Nancy Edwards' 67 'ycasey@example.net' 2020/10/25 1973/05/10-02:33:22.938409 06:10:05.932522 [72 39 10 62 38 91 48 92 2] []
|
||||
788bbcc3-20a1-43d1-8a76-a54699840cbf 'Nicole Wolf' 35 'wardshelby@example.org' 2024/05/04 2018/07/22-06:23:11.213613 09:40:51.342992 [77 80 44 50 55 1 30 25 50] []
|
||||
dc9d8f21-fd2b-404e-a003-70199d889569 'Steven Brown' 19 'scottlindsay@example.com' 1994/05/26 2005/07/27-04:34:06.373227 02:28:56.761866 [66] []
|
||||
3d06d0f8-fb81-48bb-bc21-3fb8527fa6ce 'Kimberly Graham' 9 'mmcmahon@example.org' 2009/12/12 1978/09/09-14:12:39.019120 14:24:54.144847 [63 11 12 51 82] []
|
||||
8baf6bcd-41c5-4639-a6ab-ba66dec4d9a8 'Brianna Kennedy' 46 'david56@example.net' 1992/07/24 2012/11/28-02:02:17.460570 01:17:41.532201 [84 34] []
|
||||
256a0165-3d27-43a4-95e9-c7daaab38797 'Mr. Kevin Cole' 47 'luisjohnson@example.org' 1975/12/27 2003/10/06-08:10:59.441154 01:43:16.135264 [94 43 29 61] []
|
||||
5ca72f62-efd9-48fc-b505-7c3aa10e2799 'Ronald Luna' 100 'frances66@example.org' 2000/04/29 1973/06/25-08:14:51.197299 22:14:59.321156 [14 42] []
|
||||
e9752afe-8f2f-45f2-8e25-6b4a0d1cff54 'Michelle Phillips' 27 'robert68@example.net' 2015/01/05 1970/03/12-07:51:11.411035 09:00:10.096165 [75 96 12 35 69 27 77 47 36 28] []
|
||||
47302274-1d43-4702-b272-7481d64f43e5 'Amanda Perez' 47 'cyoder@example.org' 2017/03/29 2023/11/06-22:14:01.256222 16:58:27.758300 [47 94 16 15 43 16 27 73] []
|
||||
7d463829-d6e8-4142-9779-cbabc609619b 'Adam Tucker' 29 'djefferson@example.net' 1974/10/16 1983/05/12-23:43:49.811048 01:38:16.354009 [20 82 14 11 38] []
|
||||
7caabde4-0c04-4204-9c19-61fe6d3880e5 'Sarah Martin' 93 'robertbowman@example.org' 1985/10/12 1971/03/26-09:04:14.956300 16:13:12.309921 [] []
|
||||
0ff2434c-9aab-4dac-8b7e-bfe3d9efbe24 'Tammy Wilson' 26 'millsbilly@example.com' 1998/03/20 1998/12/01-13:05:07.237752 13:07:56.172858 [36 3 60 41 95 76 4 10 16] []
|
||||
d7264837-69eb-4d72-a8da-ae45b083d30c 'Kenneth Day' 63 'coryvasquez@example.org' 1991/01/24 2010/02/11-06:24:35.603560 15:02:52.501315 [66 36 64 45 61 25 60 42 78] []
|
||||
3cf85fa9-7bf6-4bc9-a876-9f3fbc6cfab4 'Diane Kelley' 84 'kelly23@example.com' 1976/01/02 2007/06/29-05:48:26.768312 22:37:56.820676 [93 41 91 94 50 43 85 57 8] []
|
||||
f1068ced-afea-47f0-a341-b40b6fab8dfc 'Jeffrey Stein' 6 'petersonveronica@example.org' 2008/05/03 2015/04/24-20:53:18.082149 05:06:01.654974 [52 54 55 30 10] []
|
||||
3093f1b7-ff0e-4e07-b2e6-bdd6743cf256 'William Vaughan' 97 'zhill@example.com' 2024/07/02 1970/02/20-02:11:22.624148 14:25:26.208268 [96 21 95 19 96 39 84 39] []
|
||||
0264849d-20d1-46c5-8525-c722f399714e 'Sandra Smith' 79 'bclayton@example.net' 1971/10/14 1997/08/03-11:49:06.366284 17:56:01.969774 [10 83 23 49 77 60 70 51] []
|
||||
5ca111fe-02d0-4a29-b630-8fbcd7a0c8f9 'Jill Snyder' 82 'hunter72@example.net' 1996/08/10 2018/11/18-18:18:10.149410 20:23:18.321137 [] []
|
||||
ebd12579-694f-4ba4-96a0-71621711a95f 'Calvin Fernandez' 1 'andrew92@example.com' 1994/11/17 2001/02/06-20:30:11.550466 04:41:12.634956 [21 49 94 57] []
|
55
test_data/v0.1.1/generate_dummy_data.py
Normal file
55
test_data/v0.1.1/generate_dummy_data.py
Normal file
@ -0,0 +1,55 @@
|
||||
import subprocess
|
||||
from faker import Faker
|
||||
import random
|
||||
fake = Faker()
|
||||
|
||||
# Start the Zig binary process
|
||||
|
||||
|
||||
def random_array():
    """Return a ZiQL int-array literal of random scores, e.g. "[9 81 75]".

    The length is drawn from randint(-1, 10); -1 and 0 both produce "[]".
    Individual scores range over randint(-1, 100).
    """
    count = random.randint(-1, 10)
    values = [random.randint(-1, 100) for _ in range(count)]
    return f"[{' '.join(map(str, values))}]"
|
||||
|
||||
def run(process, command):
    """Send a ZiQL command to the Zig process and return its stripped reply.

    Writes `run "<command>"` to the process's stdin, then reads stdout one
    character at a time until end-of-stream or an ETX character (0x03),
    which marks the end of a reply.
    """
    process.stdin.write('run "' + command + '"\n')
    process.stdin.flush()

    chunks = []
    while True:
        char = process.stdout.read(1)
        if not char or char == "\x03":  # EOF or ETX terminator
            break
        chunks.append(char)

    return "".join(chunks).strip()
|
||||
|
||||
from tqdm import tqdm

# Generate 100 random users, one fresh zippon process per ADD query.
for _ in tqdm(range(100)):
    process = subprocess.Popen(
        ["zig-out/bin/zippon"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,  # For easier string handling
    )

    query = (
        "ADD User ("
        f"name = '{fake.name()}',"
        f"age = {random.randint(0, 100)},"
        f"email = '{fake.email()}',"
        f"scores={random_array()},"
        "friends = [],"
        f"bday={fake.date(pattern='%Y/%m/%d')},"
        # 6-digit microseconds on purpose: the parser shouldn't error when the
        # fraction is longer than the 4 digits it stores.
        f"last_order={fake.date_time().strftime('%Y/%m/%d-%H:%M:%S.%f')},"
        f"a_time={fake.date_time().strftime('%H:%M:%S.%f')}"
        ")"
    )

    print(run(process, query))
    process.terminate()
|
||||
|
10
test_data/v0.1.1/schema.zipponschema
Normal file
10
test_data/v0.1.1/schema.zipponschema
Normal file
@ -0,0 +1,10 @@
|
||||
User (
|
||||
name: str,
|
||||
age: int,
|
||||
email: str,
|
||||
bday: date,
|
||||
last_order: datetime,
|
||||
a_time: time,
|
||||
scores: []int,
|
||||
friends: []str,
|
||||
)
|
Loading…
x
Reference in New Issue
Block a user