std.compress.zstandard: update for multi-for-loop change
This commit is contained in:
parent 2766b704c1
commit a74f800dd7
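This diff mechanically converts counting `while` loops to the ranged and multi-object `for` loops introduced during the Zig 0.11 development cycle. As a minimal sketch of the before/after shapes used throughout the hunks below (the function names here are illustrative, not from the commit):

    const std = @import("std");

    // Before: manual counter plus a while loop with a continue expression.
    fn fillOld(dest: []u8, byte: u8) void {
        var i: usize = 0;
        while (i < dest.len) : (i += 1) {
            dest[i] = byte;
        }
    }

    // After: ranged for loop over 0..n (when the index is unused, the
    // capture is written |_| instead).
    fn fillNew(dest: []u8, byte: u8) void {
        for (0..dest.len) |i| {
            dest[i] = byte;
        }
    }

    // Multi-object form: a slice and a range iterated in lockstep, replacing
    // the removed `for (slice) |item, i|` index-capture syntax.
    fn sum(values: []const u16) u32 {
        var total: u32 = 0;
        for (values, 0..) |value, i| {
            std.debug.assert(values[i] == value); // i counts 0, 1, 2, ...
            total += value;
        }
        return total;
    }
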
@@ -214,9 +214,8 @@ pub fn ZstandardStream(
             }
 
             const size = @min(self.buffer.len(), buffer.len);
-            var count: usize = 0;
-            while (count < size) : (count += 1) {
-                buffer[count] = self.buffer.read().?;
+            for (0..size) |i| {
+                buffer[i] = self.buffer.read().?;
             }
             if (self.state == .LastBlock and self.buffer.len() == 0) {
                 self.state = .NewFrame;
@@ -227,7 +226,7 @@ pub fn ZstandardStream(
                 self.allocator.free(self.sequence_buffer);
                 self.buffer.deinit(self.allocator);
             }
-            return count;
+            return size;
         }
     };
 }

@@ -453,8 +453,7 @@ pub const DecodeState = struct {
                 self.written_count += len;
             },
             .rle => {
-                var i: usize = 0;
-                while (i < len) : (i += 1) {
+                for (0..len) |i| {
                     dest[i] = self.literal_streams.one[0];
                 }
                 self.literal_written_count += len;
@@ -471,8 +470,7 @@ pub const DecodeState = struct {
                 var bits_read: u4 = 0;
                 var huffman_tree_index: usize = huffman_tree.symbol_count_minus_one;
                 var bit_count_to_read: u4 = starting_bit_count;
-                var i: usize = 0;
-                while (i < len) : (i += 1) {
+                for (0..len) |i| {
                     var prefix: u16 = 0;
                     while (true) {
                         const new_bits = self.readLiteralsBits(bit_count_to_read) catch |err| {
@@ -528,8 +526,7 @@ pub const DecodeState = struct {
                 self.written_count += len;
             },
             .rle => {
-                var i: usize = 0;
-                while (i < len) : (i += 1) {
+                for (0..len) |_| {
                     dest.writeAssumeCapacity(self.literal_streams.one[0]);
                 }
                 self.literal_written_count += len;
@@ -546,8 +543,7 @@ pub const DecodeState = struct {
                 var bits_read: u4 = 0;
                 var huffman_tree_index: usize = huffman_tree.symbol_count_minus_one;
                 var bit_count_to_read: u4 = starting_bit_count;
-                var i: usize = 0;
-                while (i < len) : (i += 1) {
+                for (0..len) |_| {
                     var prefix: u16 = 0;
                     while (true) {
                         const new_bits = try self.readLiteralsBits(bit_count_to_read);
@@ -630,8 +626,7 @@ pub fn decodeBlock(
         .rle => {
             if (src.len < 1) return error.MalformedRleBlock;
             if (dest[written_count..].len < block_size) return error.DestTooSmall;
-            var write_pos: usize = written_count;
-            while (write_pos < block_size + written_count) : (write_pos += 1) {
+            for (written_count..block_size + written_count) |write_pos| {
                 dest[write_pos] = src[0];
             }
             consumed_count.* += 1;
@@ -664,8 +659,7 @@ pub fn decodeBlock(
                 return error.MalformedCompressedBlock;
 
             var sequence_size_limit = block_size_max;
-            var i: usize = 0;
-            while (i < sequences_header.sequence_count) : (i += 1) {
+            for (0..sequences_header.sequence_count) |i| {
                 const write_pos = written_count + bytes_written;
                 const decompressed_size = decode_state.decodeSequenceSlice(
                     dest,
@@ -734,8 +728,7 @@ pub fn decodeBlockRingBuffer(
         },
         .rle => {
             if (src.len < 1) return error.MalformedRleBlock;
-            var write_pos: usize = 0;
-            while (write_pos < block_size) : (write_pos += 1) {
+            for (0..block_size) |_| {
                 dest.writeAssumeCapacity(src[0]);
             }
             consumed_count.* += 1;
@@ -768,8 +761,7 @@ pub fn decodeBlockRingBuffer(
                 return error.MalformedCompressedBlock;
 
             var sequence_size_limit = block_size_max;
-            var i: usize = 0;
-            while (i < sequences_header.sequence_count) : (i += 1) {
+            for (0..sequences_header.sequence_count) |i| {
                 const decompressed_size = decode_state.decodeSequenceRingBuffer(
                     dest,
                     &bit_stream,
@@ -840,8 +832,7 @@ pub fn decodeBlockReader(
         },
         .rle => {
             const byte = try source.readByte();
-            var i: usize = 0;
-            while (i < block_size) : (i += 1) {
+            for (0..block_size) |_| {
                 dest.writeAssumeCapacity(byte);
             }
             decode_state.written_count += block_size;
@@ -866,8 +857,7 @@ pub fn decodeBlockReader(
                 return error.MalformedCompressedBlock;
 
             var sequence_size_limit = block_size_max;
-            var i: usize = 0;
-            while (i < sequences_header.sequence_count) : (i += 1) {
+            for (0..sequences_header.sequence_count) |i| {
                 const decompressed_size = decode_state.decodeSequenceRingBuffer(
                     dest,
                     &bit_stream,
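Note in the decodeBlock `.rle` hunk above that the range need not start at zero: `for (start..end) |x|` walks `x` over `[start, end)`. A standalone sketch of that shape (a hypothetical helper, not from the commit):

    // Mirror of the RLE fill: write one byte into dest[start .. start + count],
    // with write_pos beginning at `start` rather than 0.
    fn fillFrom(dest: []u8, start: usize, count: usize, byte: u8) void {
        for (start..start + count) |write_pos| {
            dest[write_pos] = byte;
        }
    }
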
@@ -47,8 +47,7 @@ pub fn decodeFseTable(
         while (true) {
             const repeat_flag = try bit_reader.readBitsNoEof(u2, 2);
             if (repeat_flag + value_count > 256) return error.MalformedFseTable;
-            var i: usize = 0;
-            while (i < repeat_flag) : (i += 1) {
+            for (0..repeat_flag) |_| {
                 values[value_count] = 1;
                 value_count += 1;
             }
@@ -75,7 +74,7 @@ fn buildFseTable(values: []const u16, entries: []Table.Fse) !void {
     assert(total_probability <= 1 << 9);
 
     var less_than_one_count: usize = 0;
-    for (values) |value, i| {
+    for (values, 0..) |value, i| {
         if (value == 0) {
             entries[entries.len - 1 - less_than_one_count] = Table.Fse{
                 .symbol = @intCast(u8, i),
@@ -88,7 +87,7 @@ fn buildFseTable(values: []const u16, entries: []Table.Fse) !void {
 
     var position: usize = 0;
     var temp_states: [1 << 9]u16 = undefined;
-    for (values) |value, symbol| {
+    for (values, 0..) |value, symbol| {
         if (value == 0 or value == 1) continue;
         const probability = value - 1;
 
@@ -99,8 +98,7 @@ fn buildFseTable(values: []const u16, entries: []Table.Fse) !void {
         const single_state_count = probability - double_state_count;
         const share_size_log = std.math.log2_int(u16, share_size);
 
-        var i: u16 = 0;
-        while (i < probability) : (i += 1) {
+        for (0..probability) |i| {
             temp_states[i] = @intCast(u16, position);
             position += (entries.len >> 1) + (entries.len >> 3) + 3;
             position &= entries.len - 1;
@@ -110,16 +108,15 @@ fn buildFseTable(values: []const u16, entries: []Table.Fse) !void {
             }
         }
         std.sort.sort(u16, temp_states[0..probability], {}, std.sort.asc(u16));
-        i = 0;
-        while (i < probability) : (i += 1) {
+        for (0..probability) |i| {
             entries[temp_states[i]] = if (i < double_state_count) Table.Fse{
                 .symbol = @intCast(u8, symbol),
                 .bits = share_size_log + 1,
-                .baseline = single_state_count * share_size + i * 2 * share_size,
+                .baseline = single_state_count * share_size + @intCast(u16, i) * 2 * share_size,
             } else Table.Fse{
                 .symbol = @intCast(u8, symbol),
                 .bits = share_size_log,
-                .baseline = (i - double_state_count) * share_size,
+                .baseline = (@intCast(u16, i) - double_state_count) * share_size,
             };
         }
     }
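The buildFseTable hunks also show a consequence of the new syntax: a ranged `for` index capture is a `usize`, whereas the old code declared `var i: u16`, so the two `.baseline` computations gain an explicit cast. A small sketch of that pattern (the helper is hypothetical; it uses the two-argument `@intCast` builtin that this commit's code itself uses):

    // The loop index arrives as usize, so narrow it explicitly before
    // doing u16 arithmetic on it.
    fn baselines(out: []u16, share_size: u16) void {
        for (0..out.len) |i| {
            out[i] = @intCast(u16, i) * 2 * share_size;
        }
    }
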
@@ -95,8 +95,7 @@ fn assignWeights(huff_bits: *readers.ReverseBitReader, accuracy_log: usize, entr
 
 fn decodeDirectHuffmanTree(source: anytype, encoded_symbol_count: usize, weights: *[256]u4) !usize {
     const weights_byte_count = (encoded_symbol_count + 1) / 2;
-    var i: usize = 0;
-    while (i < weights_byte_count) : (i += 1) {
+    for (0..weights_byte_count) |i| {
         const byte = try source.readByte();
         weights[2 * i] = @intCast(u4, byte >> 4);
         weights[2 * i + 1] = @intCast(u4, byte & 0xF);
@@ -105,7 +104,7 @@ fn decodeDirectHuffmanTree(source: anytype, encoded_symbol_count: usize, weights
 }
 
 fn assignSymbols(weight_sorted_prefixed_symbols: []LiteralsSection.HuffmanTree.PrefixedSymbol, weights: [256]u4) usize {
-    for (weight_sorted_prefixed_symbols) |_, i| {
+    for (0..weight_sorted_prefixed_symbols.len) |i| {
         weight_sorted_prefixed_symbols[i] = .{
             .symbol = @intCast(u8, i),
             .weight = undefined,