Write every 1000 maps
Now, when running a big ADD query, entities are written every 1000 maps to keep the maps array from growing too large and going OOM.
This commit is contained in:
parent
5300f932d5
commit
7d012b527f
@@ -352,6 +352,12 @@ pub const Parser = struct {
             maps.append(data_map.clone() catch return ZipponError.MemoryError) catch return ZipponError.MemoryError;

+            if (maps.items.len >= 1_000) {
+                self.file_engine.addEntity(struct_name, maps.items, &buff.writer()) catch return ZipponError.CantWriteEntity;
+                for (maps.items) |*map| map.deinit();
+                maps.clearRetainingCapacity();
+            }
+
             token = self.toker.last_token;
             if (token.tag == .l_paren) continue;
             break;
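For context, a minimal self-contained sketch of the same batched-flush pattern, assuming the managed std.ArrayList API of Zig 0.13-era releases; flush, batch_size, and the u32 items are illustrative stand-ins, not ZipponDB code:

const std = @import("std");

// Hypothetical batch size, mirroring the 1_000 threshold in the diff above.
const batch_size = 1_000;

// Stand-in for the real write step (file_engine.addEntity in the commit):
// here it only counts flushed items so the example stays self-contained.
fn flush(items: []const u32, written: *usize) void {
    written.* += items.len;
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    var batch = std.ArrayList(u32).init(allocator);
    defer batch.deinit();

    var written: usize = 0;

    // Simulate a big ADD query producing many entities.
    var i: u32 = 0;
    while (i < 2_500) : (i += 1) {
        try batch.append(i);

        // Flush every batch_size items so the in-memory batch stays bounded.
        if (batch.items.len >= batch_size) {
            flush(batch.items, &written);
            batch.clearRetainingCapacity(); // keep the allocation, drop the items
        }
    }

    // Flush whatever is left after the loop.
    if (batch.items.len > 0) {
        flush(batch.items, &written);
        batch.clearRetainingCapacity();
    }

    std.debug.print("wrote {d} entities\n", .{written});
}

clearRetainingCapacity keeps the backing allocation across batches so repeated flushes do not re-allocate; it is the same call the commit uses after each write.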