Write every 1000 maps

Now if I run a big ADD query, entities are written in batches of 1000 to
prevent the maps array from growing too large and causing an OOM
This commit is contained in:
Adrien Bouvais 2025-01-11 13:50:32 +01:00
parent 5300f932d5
commit 7d012b527f

View File

@ -352,6 +352,12 @@ pub const Parser = struct {
maps.append(data_map.clone() catch return ZipponError.MemoryError) catch return ZipponError.MemoryError;
if (maps.items.len >= 1_000) {
self.file_engine.addEntity(struct_name, maps.items, &buff.writer()) catch return ZipponError.CantWriteEntity;
for (maps.items) |*map| map.deinit();
maps.clearRetainingCapacity();
}
token = self.toker.last_token;
if (token.tag == .l_paren) continue;
break;