resinator: Complete the update to the new Reader/Writer
commit 46b60dc069
parent 9b47dd2028
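The hunks below migrate resinator from `anytype` reader/writer parameters to the concrete `*std.Io.Reader` / `*std.Io.Writer` interfaces. As a minimal sketch of the Reader side (not part of the commit; a hypothetical helper that only reuses calls appearing in the first hunk below):

```zig
const std = @import("std");

// Hypothetical helper mirroring the ani.zig changes: a concrete
// *std.Io.Reader parameter replaces `anytype`, and the old
// readBytesNoEof/readInt calls become takeArray/takeInt.
fn readRiffSize(reader: *std.Io.Reader) !u32 {
    // takeArray returns a pointer into the reader's buffer, so no `&` is
    // needed when comparing it as a slice.
    const riff_header = try reader.takeArray(4);
    if (!std.mem.eql(u8, riff_header, "RIFF")) return error.InvalidFormat;
    return try reader.takeInt(u32, .little);
}

test readRiffSize {
    var data = "RIFF\x10\x00\x00\x00".*;
    var reader: std.Io.Reader = .fixed(&data);
    try std.testing.expectEqual(@as(u32, 0x10), try readRiffSize(&reader));
}
```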
@@ -16,31 +16,31 @@ const std = @import("std");
 
 const AF_ICON: u32 = 1;
 
-pub fn isAnimatedIcon(reader: anytype) bool {
+pub fn isAnimatedIcon(reader: *std.Io.Reader) bool {
     const flags = getAniheaderFlags(reader) catch return false;
     return flags & AF_ICON == AF_ICON;
 }
 
-fn getAniheaderFlags(reader: anytype) !u32 {
-    const riff_header = try reader.readBytesNoEof(4);
-    if (!std.mem.eql(u8, &riff_header, "RIFF")) return error.InvalidFormat;
+fn getAniheaderFlags(reader: *std.Io.Reader) !u32 {
+    const riff_header = try reader.takeArray(4);
+    if (!std.mem.eql(u8, riff_header, "RIFF")) return error.InvalidFormat;
 
-    _ = try reader.readInt(u32, .little); // size of RIFF chunk
+    _ = try reader.takeInt(u32, .little); // size of RIFF chunk
 
-    const form_type = try reader.readBytesNoEof(4);
-    if (!std.mem.eql(u8, &form_type, "ACON")) return error.InvalidFormat;
+    const form_type = try reader.takeArray(4);
+    if (!std.mem.eql(u8, form_type, "ACON")) return error.InvalidFormat;
 
     while (true) {
-        const chunk_id = try reader.readBytesNoEof(4);
-        const chunk_len = try reader.readInt(u32, .little);
-        if (!std.mem.eql(u8, &chunk_id, "anih")) {
+        const chunk_id = try reader.takeArray(4);
+        const chunk_len = try reader.takeInt(u32, .little);
+        if (!std.mem.eql(u8, chunk_id, "anih")) {
             // TODO: Move file cursor instead of skipBytes
-            try reader.skipBytes(chunk_len, .{});
+            try reader.discardAll(chunk_len);
             continue;
         }
 
-        const aniheader = try reader.readStruct(ANIHEADER);
-        return std.mem.nativeToLittle(u32, aniheader.flags);
+        const aniheader = try reader.takeStruct(ANIHEADER, .little);
+        return aniheader.flags;
     }
 }
 
@@ -22,13 +22,13 @@ pub const Tree = struct {
         return @alignCast(@fieldParentPtr("base", self.node));
     }
 
-    pub fn dump(self: *Tree, writer: anytype) @TypeOf(writer).Error!void {
+    pub fn dump(self: *Tree, writer: *std.io.Writer) !void {
         try self.node.dump(self, writer, 0);
     }
 };
 
 pub const CodePageLookup = struct {
-    lookup: std.ArrayListUnmanaged(SupportedCodePage) = .empty,
+    lookup: std.ArrayList(SupportedCodePage) = .empty,
     allocator: Allocator,
     default_code_page: SupportedCodePage,
 
@@ -726,10 +726,10 @@ pub const Node = struct {
     pub fn dump(
         node: *const Node,
         tree: *const Tree,
-        writer: anytype,
+        writer: *std.io.Writer,
         indent: usize,
-    ) @TypeOf(writer).Error!void {
-        try writer.writeByteNTimes(' ', indent);
+    ) std.io.Writer.Error!void {
+        try writer.splatByteAll(' ', indent);
         try writer.writeAll(@tagName(node.id));
         switch (node.id) {
             .root => {
@@ -768,11 +768,11 @@ pub const Node = struct {
             .grouped_expression => {
                 const grouped: *const Node.GroupedExpression = @alignCast(@fieldParentPtr("base", node));
                 try writer.writeAll("\n");
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(grouped.open_token.slice(tree.source));
                 try writer.writeAll("\n");
                 try grouped.expression.dump(tree, writer, indent + 1);
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(grouped.close_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -790,13 +790,13 @@ pub const Node = struct {
                 for (accelerators.optional_statements) |statement| {
                     try statement.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(accelerators.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (accelerators.accelerators) |accelerator| {
                     try accelerator.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(accelerators.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -815,25 +815,25 @@ pub const Node = struct {
                 const dialog: *const Node.Dialog = @alignCast(@fieldParentPtr("base", node));
                 try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ dialog.id.slice(tree.source), dialog.type.slice(tree.source), dialog.common_resource_attributes.len });
                 inline for (.{ "x", "y", "width", "height" }) |arg| {
-                    try writer.writeByteNTimes(' ', indent + 1);
+                    try writer.splatByteAll(' ', indent + 1);
                     try writer.writeAll(arg ++ ":\n");
                     try @field(dialog, arg).dump(tree, writer, indent + 2);
                 }
                 if (dialog.help_id) |help_id| {
-                    try writer.writeByteNTimes(' ', indent + 1);
+                    try writer.splatByteAll(' ', indent + 1);
                     try writer.writeAll("help_id:\n");
                     try help_id.dump(tree, writer, indent + 2);
                 }
                 for (dialog.optional_statements) |statement| {
                     try statement.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(dialog.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (dialog.controls) |control| {
                     try control.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(dialog.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -845,30 +845,30 @@ pub const Node = struct {
                 }
                 try writer.writeByte('\n');
                 if (control.class) |class| {
-                    try writer.writeByteNTimes(' ', indent + 1);
+                    try writer.splatByteAll(' ', indent + 1);
                     try writer.writeAll("class:\n");
                     try class.dump(tree, writer, indent + 2);
                 }
                 inline for (.{ "id", "x", "y", "width", "height" }) |arg| {
-                    try writer.writeByteNTimes(' ', indent + 1);
+                    try writer.splatByteAll(' ', indent + 1);
                     try writer.writeAll(arg ++ ":\n");
                     try @field(control, arg).dump(tree, writer, indent + 2);
                 }
                 inline for (.{ "style", "exstyle", "help_id" }) |arg| {
                     if (@field(control, arg)) |val_node| {
-                        try writer.writeByteNTimes(' ', indent + 1);
+                        try writer.splatByteAll(' ', indent + 1);
                         try writer.writeAll(arg ++ ":\n");
                         try val_node.dump(tree, writer, indent + 2);
                     }
                 }
                 if (control.extra_data_begin != null) {
-                    try writer.writeByteNTimes(' ', indent);
+                    try writer.splatByteAll(' ', indent);
                     try writer.writeAll(control.extra_data_begin.?.slice(tree.source));
                     try writer.writeAll("\n");
                     for (control.extra_data) |data_node| {
                         try data_node.dump(tree, writer, indent + 1);
                     }
-                    try writer.writeByteNTimes(' ', indent);
+                    try writer.splatByteAll(' ', indent);
                     try writer.writeAll(control.extra_data_end.?.slice(tree.source));
                     try writer.writeAll("\n");
                 }
@@ -877,17 +877,17 @@ pub const Node = struct {
                 const toolbar: *const Node.Toolbar = @alignCast(@fieldParentPtr("base", node));
                 try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ toolbar.id.slice(tree.source), toolbar.type.slice(tree.source), toolbar.common_resource_attributes.len });
                 inline for (.{ "button_width", "button_height" }) |arg| {
-                    try writer.writeByteNTimes(' ', indent + 1);
+                    try writer.splatByteAll(' ', indent + 1);
                     try writer.writeAll(arg ++ ":\n");
                     try @field(toolbar, arg).dump(tree, writer, indent + 2);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(toolbar.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (toolbar.buttons) |button_or_sep| {
                     try button_or_sep.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(toolbar.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -898,17 +898,17 @@ pub const Node = struct {
                     try statement.dump(tree, writer, indent + 1);
                 }
                 if (menu.help_id) |help_id| {
-                    try writer.writeByteNTimes(' ', indent + 1);
+                    try writer.splatByteAll(' ', indent + 1);
                     try writer.writeAll("help_id:\n");
                     try help_id.dump(tree, writer, indent + 2);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(menu.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (menu.items) |item| {
                     try item.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(menu.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -926,7 +926,7 @@ pub const Node = struct {
                 try writer.print(" {s} {s}\n", .{ menu_item.menuitem.slice(tree.source), menu_item.text.slice(tree.source) });
                 inline for (.{ "id", "type", "state" }) |arg| {
                     if (@field(menu_item, arg)) |val_node| {
-                        try writer.writeByteNTimes(' ', indent + 1);
+                        try writer.splatByteAll(' ', indent + 1);
                         try writer.writeAll(arg ++ ":\n");
                         try val_node.dump(tree, writer, indent + 2);
                     }
@@ -935,13 +935,13 @@ pub const Node = struct {
             .popup => {
                 const popup: *const Node.Popup = @alignCast(@fieldParentPtr("base", node));
                 try writer.print(" {s} {s} [{d} options]\n", .{ popup.popup.slice(tree.source), popup.text.slice(tree.source), popup.option_list.len });
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(popup.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (popup.items) |item| {
                     try item.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(popup.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -950,18 +950,18 @@ pub const Node = struct {
                 try writer.print(" {s} {s}\n", .{ popup.popup.slice(tree.source), popup.text.slice(tree.source) });
                 inline for (.{ "id", "type", "state", "help_id" }) |arg| {
                     if (@field(popup, arg)) |val_node| {
-                        try writer.writeByteNTimes(' ', indent + 1);
+                        try writer.splatByteAll(' ', indent + 1);
                         try writer.writeAll(arg ++ ":\n");
                         try val_node.dump(tree, writer, indent + 2);
                     }
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(popup.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (popup.items) |item| {
                     try item.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(popup.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -971,13 +971,13 @@ pub const Node = struct {
                 for (version_info.fixed_info) |fixed_info| {
                     try fixed_info.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(version_info.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (version_info.block_statements) |block| {
                     try block.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(version_info.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -994,13 +994,13 @@ pub const Node = struct {
                 for (block.values) |value| {
                     try value.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(block.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (block.children) |child| {
                     try child.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(block.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -1025,13 +1025,13 @@ pub const Node = struct {
                 for (string_table.optional_statements) |statement| {
                     try statement.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(string_table.begin_token.slice(tree.source));
                 try writer.writeAll("\n");
                 for (string_table.strings) |string| {
                     try string.dump(tree, writer, indent + 1);
                 }
-                try writer.writeByteNTimes(' ', indent);
+                try writer.splatByteAll(' ', indent);
                 try writer.writeAll(string_table.end_token.slice(tree.source));
                 try writer.writeAll("\n");
             },
@@ -1039,7 +1039,7 @@ pub const Node = struct {
                 try writer.writeAll("\n");
                 const string: *const Node.StringTableString = @alignCast(@fieldParentPtr("base", node));
                 try string.id.dump(tree, writer, indent + 1);
-                try writer.writeByteNTimes(' ', indent + 1);
+                try writer.splatByteAll(' ', indent + 1);
                 try writer.print("{s}\n", .{string.string.slice(tree.source)});
             },
             .language_statement => {
@@ -1051,12 +1051,12 @@ pub const Node = struct {
             .font_statement => {
                 const font: *const Node.FontStatement = @alignCast(@fieldParentPtr("base", node));
                 try writer.print(" {s} typeface: {s}\n", .{ font.identifier.slice(tree.source), font.typeface.slice(tree.source) });
-                try writer.writeByteNTimes(' ', indent + 1);
+                try writer.splatByteAll(' ', indent + 1);
                 try writer.writeAll("point_size:\n");
                 try font.point_size.dump(tree, writer, indent + 2);
                 inline for (.{ "weight", "italic", "char_set" }) |arg| {
                     if (@field(font, arg)) |arg_node| {
-                        try writer.writeByteNTimes(' ', indent + 1);
+                        try writer.splatByteAll(' ', indent + 1);
                         try writer.writeAll(arg ++ ":\n");
                         try arg_node.dump(tree, writer, indent + 2);
                     }
@@ -1071,7 +1071,7 @@ pub const Node = struct {
                 const invalid: *const Node.Invalid = @alignCast(@fieldParentPtr("base", node));
                 try writer.print(" context.len: {}\n", .{invalid.context.len});
                 for (invalid.context) |context_token| {
-                    try writer.writeByteNTimes(' ', indent + 1);
+                    try writer.splatByteAll(' ', indent + 1);
                     try writer.print("{s}:{s}", .{ @tagName(context_token.id), context_token.slice(tree.source) });
                     try writer.writeByte('\n');
                 }
 
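On the Writer side, the `dump` functions above swap `writeByteNTimes` for `splatByteAll`. A small self-contained sketch (an assumed test, not from the commit), using the same `std.Io.Writer.Allocating` in-memory sink that the compiler hunks further below rely on:

```zig
const std = @import("std");

test "splatByteAll writes indentation" {
    var allocating: std.Io.Writer.Allocating = .init(std.testing.allocator);
    defer allocating.deinit();

    const writer = &allocating.writer;
    try writer.splatByteAll(' ', 4); // was: writer.writeByteNTimes(' ', 4)
    try writer.writeAll("node\n");

    try std.testing.expectEqualStrings("    node\n", allocating.written());
}
```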
@@ -27,6 +27,7 @@ pub const windows_format_id = std.mem.readInt(u16, "BM", native_endian);
 pub const file_header_len = 14;
 
 pub const ReadError = error{
+    ReadFailed,
     UnexpectedEOF,
     InvalidFileHeader,
     ImpossiblePixelDataOffset,
@@ -94,9 +95,12 @@ pub const BitmapInfo = struct {
     }
 };
 
-pub fn read(reader: anytype, max_size: u64) ReadError!BitmapInfo {
+pub fn read(reader: *std.Io.Reader, max_size: u64) ReadError!BitmapInfo {
     var bitmap_info: BitmapInfo = undefined;
-    const file_header = reader.readBytesNoEof(file_header_len) catch return error.UnexpectedEOF;
+    const file_header = reader.takeArray(file_header_len) catch |err| switch (err) {
+        error.EndOfStream => return error.UnexpectedEOF,
+        else => |e| return e,
+    };
 
     const id = std.mem.readInt(u16, file_header[0..2], native_endian);
     if (id != windows_format_id) return error.InvalidFileHeader;
@@ -104,14 +108,17 @@ pub fn read(reader: anytype, max_size: u64) ReadError!BitmapInfo {
     bitmap_info.pixel_data_offset = std.mem.readInt(u32, file_header[10..14], .little);
     if (bitmap_info.pixel_data_offset > max_size) return error.ImpossiblePixelDataOffset;
 
-    bitmap_info.dib_header_size = reader.readInt(u32, .little) catch return error.UnexpectedEOF;
+    bitmap_info.dib_header_size = reader.takeInt(u32, .little) catch return error.UnexpectedEOF;
     if (bitmap_info.pixel_data_offset < file_header_len + bitmap_info.dib_header_size) return error.ImpossiblePixelDataOffset;
     const dib_version = BitmapHeader.Version.get(bitmap_info.dib_header_size);
     switch (dib_version) {
         .@"nt3.1", .@"nt4.0", .@"nt5.0" => {
             var dib_header_buf: [@sizeOf(BITMAPINFOHEADER)]u8 align(@alignOf(BITMAPINFOHEADER)) = undefined;
             std.mem.writeInt(u32, dib_header_buf[0..4], bitmap_info.dib_header_size, .little);
-            reader.readNoEof(dib_header_buf[4..]) catch return error.UnexpectedEOF;
+            reader.readSliceAll(dib_header_buf[4..]) catch |err| switch (err) {
+                error.EndOfStream => return error.UnexpectedEOF,
+                error.ReadFailed => |e| return e,
+            };
             var dib_header: *BITMAPINFOHEADER = @ptrCast(&dib_header_buf);
             structFieldsLittleToNative(BITMAPINFOHEADER, dib_header);
 
@@ -126,7 +133,10 @@ pub fn read(reader: anytype, max_size: u64) ReadError!BitmapInfo {
         .@"win2.0" => {
             var dib_header_buf: [@sizeOf(BITMAPCOREHEADER)]u8 align(@alignOf(BITMAPCOREHEADER)) = undefined;
             std.mem.writeInt(u32, dib_header_buf[0..4], bitmap_info.dib_header_size, .little);
-            reader.readNoEof(dib_header_buf[4..]) catch return error.UnexpectedEOF;
+            reader.readSliceAll(dib_header_buf[4..]) catch |err| switch (err) {
+                error.EndOfStream => return error.UnexpectedEOF,
+                error.ReadFailed => |e| return e,
+            };
             const dib_header: *BITMAPCOREHEADER = @ptrCast(&dib_header_buf);
             structFieldsLittleToNative(BITMAPCOREHEADER, dib_header);
 
@@ -238,26 +248,26 @@ fn structFieldsLittleToNative(comptime T: type, x: *T) void {
 
 test "read" {
     var bmp_data = "BM<\x00\x00\x00\x00\x00\x00\x006\x00\x00\x00(\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x10\x00\x00\x00\x00\x00\x06\x00\x00\x00\x12\x0b\x00\x00\x12\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x7f\x00\x00\x00\x00".*;
-    var fbs = std.io.fixedBufferStream(&bmp_data);
+    var fbs: std.Io.Reader = .fixed(&bmp_data);
 
     {
-        const bitmap = try read(fbs.reader(), bmp_data.len);
+        const bitmap = try read(&fbs, bmp_data.len);
         try std.testing.expectEqual(@as(u32, BitmapHeader.Version.@"nt3.1".len()), bitmap.dib_header_size);
     }
 
     {
-        fbs.reset();
+        fbs.seek = 0;
         bmp_data[file_header_len] = 11;
-        try std.testing.expectError(error.UnknownBitmapVersion, read(fbs.reader(), bmp_data.len));
+        try std.testing.expectError(error.UnknownBitmapVersion, read(&fbs, bmp_data.len));
 
         // restore
         bmp_data[file_header_len] = BitmapHeader.Version.@"nt3.1".len();
     }
 
     {
-        fbs.reset();
+        fbs.seek = 0;
         bmp_data[0] = 'b';
-        try std.testing.expectError(error.InvalidFileHeader, read(fbs.reader(), bmp_data.len));
+        try std.testing.expectError(error.InvalidFileHeader, read(&fbs, bmp_data.len));
 
         // restore
         bmp_data[0] = 'B';
@@ -265,13 +275,13 @@ test "read" {
 
     {
         const cutoff_len = file_header_len + BitmapHeader.Version.@"nt3.1".len() - 1;
-        var dib_cutoff_fbs = std.io.fixedBufferStream(bmp_data[0..cutoff_len]);
-        try std.testing.expectError(error.UnexpectedEOF, read(dib_cutoff_fbs.reader(), bmp_data.len));
+        var dib_cutoff_fbs: std.Io.Reader = .fixed(bmp_data[0..cutoff_len]);
+        try std.testing.expectError(error.UnexpectedEOF, read(&dib_cutoff_fbs, bmp_data.len));
     }
 
     {
        const cutoff_len = file_header_len - 1;
-        var bmp_cutoff_fbs = std.io.fixedBufferStream(bmp_data[0..cutoff_len]);
-        try std.testing.expectError(error.UnexpectedEOF, read(bmp_cutoff_fbs.reader(), bmp_data.len));
+        var bmp_cutoff_fbs: std.Io.Reader = .fixed(bmp_data[0..cutoff_len]);
+        try std.testing.expectError(error.UnexpectedEOF, read(&bmp_cutoff_fbs, bmp_data.len));
     }
 }
 
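The `read` changes above also show the error-mapping idiom for the new Reader API: `takeArray`/`readSliceAll` fail with `error.EndOfStream` or `error.ReadFailed`, which get translated into this module's `ReadError` set. A sketch of the same idiom in isolation (hypothetical helper, not from the commit; the return type is an assumption):

```zig
const std = @import("std");

const ReadError = error{ ReadFailed, UnexpectedEOF };

// Maps the Reader's EndOfStream onto a module-specific UnexpectedEOF,
// while letting ReadFailed propagate, as bmp.read does above.
fn takeTag(reader: *std.Io.Reader) ReadError!*const [2]u8 {
    return reader.takeArray(2) catch |err| switch (err) {
        error.EndOfStream => return error.UnexpectedEOF,
        else => |e| return e,
    };
}

test takeTag {
    var data = "B".*; // one byte short of a 2-byte tag
    var reader: std.Io.Reader = .fixed(&data);
    try std.testing.expectError(error.UnexpectedEOF, takeTag(&reader));
}
```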
@@ -80,20 +80,20 @@ pub const usage_string_after_command_name =
     \\
 ;
 
-pub fn writeUsage(writer: anytype, command_name: []const u8) !void {
+pub fn writeUsage(writer: *std.Io.Writer, command_name: []const u8) !void {
     try writer.writeAll("Usage: ");
     try writer.writeAll(command_name);
     try writer.writeAll(usage_string_after_command_name);
 }
 
 pub const Diagnostics = struct {
-    errors: std.ArrayListUnmanaged(ErrorDetails) = .empty,
+    errors: std.ArrayList(ErrorDetails) = .empty,
     allocator: Allocator,
 
     pub const ErrorDetails = struct {
         arg_index: usize,
         arg_span: ArgSpan = .{},
-        msg: std.ArrayListUnmanaged(u8) = .empty,
+        msg: std.ArrayList(u8) = .empty,
         type: Type = .err,
         print_args: bool = true,
 
@@ -148,7 +148,7 @@ pub const Options = struct {
     allocator: Allocator,
     input_source: IoSource = .{ .filename = &[_]u8{} },
     output_source: IoSource = .{ .filename = &[_]u8{} },
-    extra_include_paths: std.ArrayListUnmanaged([]const u8) = .empty,
+    extra_include_paths: std.ArrayList([]const u8) = .empty,
     ignore_include_env_var: bool = false,
     preprocess: Preprocess = .yes,
     default_language_id: ?u16 = null,
@@ -295,7 +295,7 @@ pub const Options = struct {
         }
     }
 
-    pub fn dumpVerbose(self: *const Options, writer: anytype) !void {
+    pub fn dumpVerbose(self: *const Options, writer: *std.Io.Writer) !void {
         const input_source_name = switch (self.input_source) {
             .stdio => "<stdin>",
             .filename => |filename| filename,
@@ -1230,19 +1230,19 @@ pub fn parse(allocator: Allocator, args: []const []const u8, diagnostics: *Diagn
 }
 
 pub fn filepathWithExtension(allocator: Allocator, path: []const u8, ext: []const u8) ![]const u8 {
-    var buf = std.array_list.Managed(u8).init(allocator);
-    errdefer buf.deinit();
+    var buf: std.ArrayList(u8) = .empty;
+    errdefer buf.deinit(allocator);
     if (std.fs.path.dirname(path)) |dirname| {
         var end_pos = dirname.len;
         // We want to ensure that we write a path separator at the end, so if the dirname
         // doesn't end with a path sep then include the char after the dirname
         // which must be a path sep.
         if (!std.fs.path.isSep(dirname[dirname.len - 1])) end_pos += 1;
-        try buf.appendSlice(path[0..end_pos]);
+        try buf.appendSlice(allocator, path[0..end_pos]);
     }
-    try buf.appendSlice(std.fs.path.stem(path));
-    try buf.appendSlice(ext);
-    return try buf.toOwnedSlice();
+    try buf.appendSlice(allocator, std.fs.path.stem(path));
+    try buf.appendSlice(allocator, ext);
+    return try buf.toOwnedSlice(allocator);
 }
 
 pub fn isSupportedInputExtension(ext: []const u8) bool {
@@ -1476,7 +1476,7 @@ fn testParseOutput(args: []const []const u8, expected_output: []const u8) !?Opti
     var options = parse(std.testing.allocator, args, &diagnostics) catch |err| switch (err) {
         error.ParseError => {
             try diagnostics.renderToWriter(args, &output.writer, .no_color);
-            try std.testing.expectEqualStrings(expected_output, output.getWritten());
+            try std.testing.expectEqualStrings(expected_output, output.written());
             return null;
         },
         else => |e| return e,
@@ -1484,7 +1484,7 @@ fn testParseOutput(args: []const []const u8, expected_output: []const u8) !?Opti
     errdefer options.deinit();
 
     try diagnostics.renderToWriter(args, &output.writer, .no_color);
-    try std.testing.expectEqualStrings(expected_output, output.getWritten());
+    try std.testing.expectEqualStrings(expected_output, output.written());
     return options;
 }
 
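The `filepathWithExtension` rewrite above is one instance of the commit-wide move from the managed `std.array_list.Managed` to the unmanaged `std.ArrayList`: the list no longer stores its allocator, so every mutating call and the final `toOwnedSlice` take it explicitly. A minimal sketch of the pattern (hypothetical function, not from the commit):

```zig
const std = @import("std");

fn joinWithDot(allocator: std.mem.Allocator, a: []const u8, b: []const u8) ![]const u8 {
    // Unmanaged ArrayList: starts as .empty, allocator passed per call.
    var buf: std.ArrayList(u8) = .empty;
    errdefer buf.deinit(allocator);
    try buf.appendSlice(allocator, a);
    try buf.append(allocator, '.');
    try buf.appendSlice(allocator, b);
    return try buf.toOwnedSlice(allocator);
}

test joinWithDot {
    const joined = try joinWithDot(std.testing.allocator, "file", "rc");
    defer std.testing.allocator.free(joined);
    try std.testing.expectEqualStrings("file.rc", joined);
}
```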
@@ -35,10 +35,7 @@ pub const CompileOptions = struct {
     diagnostics: *Diagnostics,
     source_mappings: ?*SourceMappings = null,
     /// List of paths (absolute or relative to `cwd`) for every file that the resources within the .rc file depend on.
-    /// Items within the list will be allocated using the allocator of the ArrayList and must be
-    /// freed by the caller.
-    /// TODO: Maybe a dedicated struct for this purpose so that it's a bit nicer to work with.
-    dependencies_list: ?*std.array_list.Managed([]const u8) = null,
+    dependencies: ?*Dependencies = null,
     default_code_page: SupportedCodePage = .windows1252,
     /// If true, the first #pragma code_page directive only sets the input code page, but not the output code page.
     /// This check must be done before comments are removed from the file.
@@ -61,6 +58,25 @@ pub const CompileOptions = struct {
     warn_instead_of_error_on_invalid_code_page: bool = false,
 };
 
+pub const Dependencies = struct {
+    list: std.ArrayList([]const u8),
+    allocator: Allocator,
+
+    pub fn init(allocator: Allocator) Dependencies {
+        return .{
+            .list = .empty,
+            .allocator = allocator,
+        };
+    }
+
+    pub fn deinit(self: *Dependencies) void {
+        for (self.list.items) |item| {
+            self.allocator.free(item);
+        }
+        self.list.deinit(self.allocator);
+    }
+};
+
 pub fn compile(allocator: Allocator, source: []const u8, writer: *std.Io.Writer, options: CompileOptions) !void {
     var lexer = lex.Lexer.init(source, .{
         .default_code_page = options.default_code_page,
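A short caller-side sketch of the new `Dependencies` API added above (assumed usage, not shown in the commit): the struct owns its items, so a single `deinit` frees every recorded path.

```zig
// Hypothetical caller: collect dependency paths during compilation.
var deps = Dependencies.init(allocator);
defer deps.deinit();

// The compiler appends duplicated paths using the struct's own allocator:
try deps.list.append(deps.allocator, try deps.allocator.dupe(u8, "res/icon.ico"));
```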
@@ -74,12 +90,12 @@ pub fn compile(allocator: Allocator, source: []const u8, writer: *std.Io.Writer,
     var tree = try parser.parse(allocator, options.diagnostics);
     defer tree.deinit();
 
-    var search_dirs = std.array_list.Managed(SearchDir).init(allocator);
+    var search_dirs: std.ArrayList(SearchDir) = .empty;
     defer {
         for (search_dirs.items) |*search_dir| {
             search_dir.deinit(allocator);
         }
-        search_dirs.deinit();
+        search_dirs.deinit(allocator);
     }
 
     if (options.source_mappings) |source_mappings| {
@@ -89,7 +105,7 @@ pub fn compile(allocator: Allocator, source: []const u8, writer: *std.Io.Writer,
         if (std.fs.path.dirname(root_path)) |root_dir_path| {
             var root_dir = try options.cwd.openDir(root_dir_path, .{});
             errdefer root_dir.close();
-            try search_dirs.append(.{ .dir = root_dir, .path = try allocator.dupe(u8, root_dir_path) });
+            try search_dirs.append(allocator, .{ .dir = root_dir, .path = try allocator.dupe(u8, root_dir_path) });
         }
     }
     // Re-open the passed in cwd since we want to be able to close it (std.fs.cwd() shouldn't be closed)
@@ -111,14 +127,14 @@ pub fn compile(allocator: Allocator, source: []const u8, writer: *std.Io.Writer,
        });
        return error.CompileError;
    };
-    try search_dirs.append(.{ .dir = cwd_dir, .path = null });
+    try search_dirs.append(allocator, .{ .dir = cwd_dir, .path = null });
    for (options.extra_include_paths) |extra_include_path| {
        var dir = openSearchPathDir(options.cwd, extra_include_path) catch {
            // TODO: maybe a warning that the search path is skipped?
            continue;
        };
        errdefer dir.close();
-        try search_dirs.append(.{ .dir = dir, .path = try allocator.dupe(u8, extra_include_path) });
+        try search_dirs.append(allocator, .{ .dir = dir, .path = try allocator.dupe(u8, extra_include_path) });
    }
    for (options.system_include_paths) |system_include_path| {
        var dir = openSearchPathDir(options.cwd, system_include_path) catch {
@@ -126,7 +142,7 @@ pub fn compile(allocator: Allocator, source: []const u8, writer: *std.Io.Writer,
            continue;
        };
        errdefer dir.close();
-        try search_dirs.append(.{ .dir = dir, .path = try allocator.dupe(u8, system_include_path) });
+        try search_dirs.append(allocator, .{ .dir = dir, .path = try allocator.dupe(u8, system_include_path) });
    }
    if (!options.ignore_include_env_var) {
        const INCLUDE = std.process.getEnvVarOwned(allocator, "INCLUDE") catch "";
@@ -142,7 +158,7 @@ pub fn compile(allocator: Allocator, source: []const u8, writer: *std.Io.Writer,
        while (it.next()) |search_path| {
            var dir = openSearchPathDir(options.cwd, search_path) catch continue;
            errdefer dir.close();
-            try search_dirs.append(.{ .dir = dir, .path = try allocator.dupe(u8, search_path) });
+            try search_dirs.append(allocator, .{ .dir = dir, .path = try allocator.dupe(u8, search_path) });
        }
    }
 
@@ -156,7 +172,7 @@ pub fn compile(allocator: Allocator, source: []const u8, writer: *std.Io.Writer,
        .allocator = allocator,
        .cwd = options.cwd,
        .diagnostics = options.diagnostics,
-        .dependencies_list = options.dependencies_list,
+        .dependencies = options.dependencies,
        .input_code_pages = &tree.input_code_pages,
        .output_code_pages = &tree.output_code_pages,
        // This is only safe because we know search_dirs won't be modified past this point
@@ -178,7 +194,7 @@ pub const Compiler = struct {
     cwd: std.fs.Dir,
     state: State = .{},
     diagnostics: *Diagnostics,
-    dependencies_list: ?*std.array_list.Managed([]const u8),
+    dependencies: ?*Dependencies,
     input_code_pages: *const CodePageLookup,
     output_code_pages: *const CodePageLookup,
     search_dirs: []SearchDir,
@@ -279,32 +295,32 @@ pub const Compiler = struct {
             .literal, .number => {
                 const slice = literal_node.token.slice(self.source);
                 const code_page = self.input_code_pages.getForToken(literal_node.token);
-                var buf = try std.array_list.Managed(u8).initCapacity(self.allocator, slice.len);
-                errdefer buf.deinit();
+                var buf = try std.ArrayList(u8).initCapacity(self.allocator, slice.len);
+                errdefer buf.deinit(self.allocator);
 
                 var index: usize = 0;
                 while (code_page.codepointAt(index, slice)) |codepoint| : (index += codepoint.byte_len) {
                     const c = codepoint.value;
                     if (c == code_pages.Codepoint.invalid) {
-                        try buf.appendSlice("�");
+                        try buf.appendSlice(self.allocator, "�");
                     } else {
                         // Anything that is not returned as an invalid codepoint must be encodable as UTF-8.
                         const utf8_len = std.unicode.utf8CodepointSequenceLength(c) catch unreachable;
-                        try buf.ensureUnusedCapacity(utf8_len);
+                        try buf.ensureUnusedCapacity(self.allocator, utf8_len);
                         _ = std.unicode.utf8Encode(c, buf.unusedCapacitySlice()) catch unreachable;
                         buf.items.len += utf8_len;
                     }
                 }
 
-                return buf.toOwnedSlice();
+                return buf.toOwnedSlice(self.allocator);
             },
             .quoted_ascii_string, .quoted_wide_string => {
                 const slice = literal_node.token.slice(self.source);
                 const column = literal_node.token.calculateColumn(self.source, 8, null);
                 const bytes = SourceBytes{ .slice = slice, .code_page = self.input_code_pages.getForToken(literal_node.token) };
 
-                var buf = std.array_list.Managed(u8).init(self.allocator);
-                errdefer buf.deinit();
+                var buf: std.ArrayList(u8) = .empty;
+                errdefer buf.deinit(self.allocator);
 
                 // Filenames are sort-of parsed as if they were wide strings, but the max escape width of
                 // hex/octal escapes is still determined by the L prefix. Since we want to end up with
@@ -320,19 +336,19 @@ pub const Compiler = struct {
                 while (try parser.nextUnchecked()) |parsed| {
                     const c = parsed.codepoint;
                     if (c == code_pages.Codepoint.invalid) {
-                        try buf.appendSlice("�");
+                        try buf.appendSlice(self.allocator, "�");
                     } else {
                         var codepoint_buf: [4]u8 = undefined;
                         // If the codepoint cannot be encoded, we fall back to �
                         if (std.unicode.utf8Encode(c, &codepoint_buf)) |len| {
-                            try buf.appendSlice(codepoint_buf[0..len]);
+                            try buf.appendSlice(self.allocator, codepoint_buf[0..len]);
                         } else |_| {
-                            try buf.appendSlice("�");
+                            try buf.appendSlice(self.allocator, "�");
                         }
                     }
                 }
 
-                return buf.toOwnedSlice();
+                return buf.toOwnedSlice(self.allocator);
             },
             else => unreachable, // no other token types should be in a filename literal node
         }
@@ -386,10 +402,10 @@ pub const Compiler = struct {
             const file = try utils.openFileNotDir(std.fs.cwd(), path, .{});
             errdefer file.close();
 
-            if (self.dependencies_list) |dependencies_list| {
-                const duped_path = try dependencies_list.allocator.dupe(u8, path);
-                errdefer dependencies_list.allocator.free(duped_path);
-                try dependencies_list.append(duped_path);
+            if (self.dependencies) |dependencies| {
+                const duped_path = try dependencies.allocator.dupe(u8, path);
+                errdefer dependencies.allocator.free(duped_path);
+                try dependencies.list.append(dependencies.allocator, duped_path);
             }
         }
 
@@ -398,12 +414,12 @@ pub const Compiler = struct {
         if (utils.openFileNotDir(search_dir.dir, path, .{})) |file| {
             errdefer file.close();
 
-            if (self.dependencies_list) |dependencies_list| {
-                const searched_file_path = try std.fs.path.join(dependencies_list.allocator, &.{
+            if (self.dependencies) |dependencies| {
+                const searched_file_path = try std.fs.path.join(dependencies.allocator, &.{
                     search_dir.path orelse "", path,
                 });
-                errdefer dependencies_list.allocator.free(searched_file_path);
-                try dependencies_list.append(searched_file_path);
+                errdefer dependencies.allocator.free(searched_file_path);
+                try dependencies.list.append(dependencies.allocator, searched_file_path);
             }
 
             return file;
@@ -421,8 +437,8 @@ pub const Compiler = struct {
         const bytes = self.sourceBytesForToken(token);
         const output_code_page = self.output_code_pages.getForToken(token);
 
-        var buf = try std.array_list.Managed(u8).initCapacity(self.allocator, bytes.slice.len);
-        errdefer buf.deinit();
+        var buf = try std.ArrayList(u8).initCapacity(self.allocator, bytes.slice.len);
+        errdefer buf.deinit(self.allocator);
 
         var iterative_parser = literals.IterativeStringParser.init(bytes, .{
             .start_column = token.calculateColumn(self.source, 8, null),
@@ -444,11 +460,11 @@ pub const Compiler = struct {
             switch (iterative_parser.declared_string_type) {
                 .wide => {
                     if (windows1252.bestFitFromCodepoint(c)) |best_fit| {
-                        try buf.append(best_fit);
+                        try buf.append(self.allocator, best_fit);
                     } else if (c < 0x10000 or c == code_pages.Codepoint.invalid or parsed.escaped_surrogate_pair) {
-                        try buf.append('?');
+                        try buf.append(self.allocator, '?');
                     } else {
-                        try buf.appendSlice("??");
+                        try buf.appendSlice(self.allocator, "??");
                     }
                 },
                 .ascii => {
@@ -456,27 +472,27 @@ pub const Compiler = struct {
                     const truncated: u8 = @truncate(c);
                     switch (output_code_page) {
                         .utf8 => switch (truncated) {
-                            0...0x7F => try buf.append(truncated),
-                            else => try buf.append('?'),
+                            0...0x7F => try buf.append(self.allocator, truncated),
+                            else => try buf.append(self.allocator, '?'),
                         },
                         .windows1252 => {
-                            try buf.append(truncated);
+                            try buf.append(self.allocator, truncated);
                         },
                     }
                 } else {
                     if (windows1252.bestFitFromCodepoint(c)) |best_fit| {
-                        try buf.append(best_fit);
+                        try buf.append(self.allocator, best_fit);
                     } else if (c < 0x10000 or c == code_pages.Codepoint.invalid) {
-                        try buf.append('?');
+                        try buf.append(self.allocator, '?');
                    } else {
-                        try buf.appendSlice("??");
+                        try buf.appendSlice(self.allocator, "??");
                    }
                }
            },
        }
    }
 
-        return buf.toOwnedSlice();
+        return buf.toOwnedSlice(self.allocator);
    }
 
    pub fn writeResourceExternal(self: *Compiler, node: *Node.ResourceExternal, writer: *std.Io.Writer) !void {
@@ -572,7 +588,7 @@ pub const Compiler = struct {
        switch (predefined_type) {
            .GROUP_ICON, .GROUP_CURSOR => {
                // Check for animated icon first
-                if (ani.isAnimatedIcon(file_reader.interface.adaptToOldInterface())) {
+                if (ani.isAnimatedIcon(&file_reader.interface)) {
                    // Animated icons are just put into the resource unmodified,
                    // and the resource type changes to ANIICON/ANICURSOR
 
@@ -584,7 +600,12 @@ pub const Compiler = struct {
                    header.type_value.ordinal = @intFromEnum(new_predefined_type);
                    header.memory_flags = MemoryFlags.defaults(new_predefined_type);
                    header.applyMemoryFlags(node.common_resource_attributes, self.source);
-                    header.data_size = @intCast(try file_reader.getSize());
+                    header.data_size = std.math.cast(u32, try file_reader.getSize()) orelse {
+                        return self.addErrorDetailsAndFail(.{
+                            .err = .resource_data_size_exceeds_max,
+                            .token = node.id,
+                        });
+                    };
 
                    try header.write(writer, self.errContext(node.id));
                    try file_reader.seekTo(0);
@@ -595,7 +616,7 @@ pub const Compiler = struct {
                // isAnimatedIcon moved the file cursor so reset to the start
                try file_reader.seekTo(0);
 
-                const icon_dir = ico.read(self.allocator, file_reader.interface.adaptToOldInterface(), try file_reader.getSize()) catch |err| switch (err) {
+                const icon_dir = ico.read(self.allocator, &file_reader.interface, try file_reader.getSize()) catch |err| switch (err) {
                    error.OutOfMemory => |e| return e,
                    else => |e| {
                        return self.iconReadError(
@@ -861,7 +882,7 @@ pub const Compiler = struct {
            header.applyMemoryFlags(node.common_resource_attributes, self.source);
            const file_size = try file_reader.getSize();
 
-            const bitmap_info = bmp.read(file_reader.interface.adaptToOldInterface(), file_size) catch |err| {
+            const bitmap_info = bmp.read(&file_reader.interface, file_size) catch |err| {
                const filename_string_index = try self.diagnostics.putString(filename_utf8);
                return self.addErrorDetailsAndFail(.{
                    .err = .bmp_read_error,
@@ -969,13 +990,19 @@ pub const Compiler = struct {
            header.data_size = @intCast(file_size);
            try header.write(writer, self.errContext(node.id));
 
-            var header_slurping_reader = headerSlurpingReader(148, file_reader.interface.adaptToOldInterface());
-            var adapter = header_slurping_reader.reader().adaptToNewApi(&.{});
-            try writeResourceData(writer, &adapter.new_interface, header.data_size);
+            // Slurp the first 148 bytes separately so we can store them in the FontDir
+            var font_dir_header_buf: [148]u8 = @splat(0);
+            const populated_len: u32 = @intCast(try file_reader.interface.readSliceShort(&font_dir_header_buf));
+
+            // Write only the populated bytes slurped from the header
+            try writer.writeAll(font_dir_header_buf[0..populated_len]);
+            // Then write the rest of the bytes and the padding
+            try writeResourceDataNoPadding(writer, &file_reader.interface, header.data_size - populated_len);
+            try writeDataPadding(writer, header.data_size);
 
            try self.state.font_dir.add(self.arena, FontDir.Font{
                .id = header.name_value.ordinal,
-                .header_bytes = header_slurping_reader.slurped_header,
+                .header_bytes = font_dir_header_buf,
            }, node.id);
            return;
        },
@@ -1053,7 +1080,7 @@ pub const Compiler = struct {
        }
    }
 
-    pub fn write(self: Data, writer: anytype) !void {
+    pub fn write(self: Data, writer: *std.Io.Writer) !void {
        switch (self) {
            .number => |number| switch (number.is_long) {
                false => try writer.writeInt(WORD, number.asWord(), .little),
@@ -1225,36 +1252,30 @@ pub const Compiler = struct {
        }
    }
 
-    pub fn writeResourceRawData(self: *Compiler, node: *Node.ResourceRawData, writer: anytype) !void {
+    pub fn writeResourceRawData(self: *Compiler, node: *Node.ResourceRawData, writer: *std.Io.Writer) !void {
        var data_buffer: std.Io.Writer.Allocating = .init(self.allocator);
        defer data_buffer.deinit();
-        // The header's data length field is a u32 so limit the resource's data size so that
-        // we know we can always specify the real size.
-        const data_writer = &data_buffer.writer;
 
        for (node.raw_data) |expression| {
            const data = try self.evaluateDataExpression(expression);
            defer data.deinit(self.allocator);
-            data.write(data_writer) catch |err| switch (err) {
-                error.WriteFailed => {
-                    return self.addErrorDetailsAndFail(.{
-                        .err = .resource_data_size_exceeds_max,
-                        .token = node.id,
-                    });
-                },
-            };
+            try data.write(&data_buffer.writer);
        }
 
-        // This intCast can't fail because the limitedWriter above guarantees that
-        // we will never write more than maxInt(u32) bytes.
-        const data_len: u32 = @intCast(data_buffer.written().len);
+        // TODO: Limit data_buffer in some way to error when writing more than u32 max bytes
+        const data_len: u32 = std.math.cast(u32, data_buffer.written().len) orelse {
+            return self.addErrorDetailsAndFail(.{
+                .err = .resource_data_size_exceeds_max,
+                .token = node.id,
+            });
+        };
        try self.writeResourceHeader(writer, node.id, node.type, data_len, node.common_resource_attributes, self.state.language);
 
        var data_fbs: std.Io.Reader = .fixed(data_buffer.written());
        try writeResourceData(writer, &data_fbs, data_len);
    }
 
-    pub fn writeResourceHeader(self: *Compiler, writer: anytype, id_token: Token, type_token: Token, data_size: u32, common_resource_attributes: []Token, language: res.Language) !void {
+    pub fn writeResourceHeader(self: *Compiler, writer: *std.Io.Writer, id_token: Token, type_token: Token, data_size: u32, common_resource_attributes: []Token, language: res.Language) !void {
        var header = try self.resourceHeader(id_token, type_token, .{
            .language = language,
            .data_size = data_size,
@@ -1270,7 +1291,7 @@ pub const Compiler = struct {
        try data_reader.streamExact(writer, data_size);
    }
 
-    pub fn writeResourceData(writer: anytype, data_reader: *std.Io.Reader, data_size: u32) !void {
+    pub fn writeResourceData(writer: *std.Io.Writer, data_reader: *std.Io.Reader, data_size: u32) !void {
        try writeResourceDataNoPadding(writer, data_reader, data_size);
        try writeDataPadding(writer, data_size);
    }
@@ -1303,27 +1324,19 @@ pub const Compiler = struct {
        }
    }
 
-    pub fn writeAccelerators(self: *Compiler, node: *Node.Accelerators, writer: anytype) !void {
+    pub fn writeAccelerators(self: *Compiler, node: *Node.Accelerators, writer: *std.Io.Writer) !void {
        var data_buffer: std.Io.Writer.Allocating = .init(self.allocator);
        defer data_buffer.deinit();
 
-        // The header's data length field is a u32 so limit the resource's data size so that
-        // we know we can always specify the real size.
-        const data_writer = &data_buffer.writer;
+        try self.writeAcceleratorsData(node, &data_buffer.writer);
 
-        self.writeAcceleratorsData(node, data_writer) catch |err| switch (err) {
-            error.WriteFailed => {
-                return self.addErrorDetailsAndFail(.{
-                    .err = .resource_data_size_exceeds_max,
-                    .token = node.id,
-                });
-            },
-            else => |e| return e,
-        };
+        // TODO: Limit data_buffer in some way to error when writing more than u32 max bytes
+        const data_size: u32 = std.math.cast(u32, data_buffer.written().len) orelse {
+            return self.addErrorDetailsAndFail(.{
+                .err = .resource_data_size_exceeds_max,
+                .token = node.id,
+            });
+        };
 
-        // This intCast can't fail because the limitedWriter above guarantees that
-        // we will never write more than maxInt(u32) bytes.
-        const data_size: u32 = @intCast(data_buffer.written().len);
        var header = try self.resourceHeader(node.id, node.type, .{
            .data_size = data_size,
        });
@@ -1340,7 +1353,7 @@ pub const Compiler = struct {
 
    /// Expects `data_writer` to be a LimitedWriter limited to u32, meaning all writes to
    /// the writer within this function could return error.NoSpaceLeft
-    pub fn writeAcceleratorsData(self: *Compiler, node: *Node.Accelerators, data_writer: anytype) !void {
+    pub fn writeAcceleratorsData(self: *Compiler, node: *Node.Accelerators, data_writer: *std.Io.Writer) !void {
        for (node.accelerators, 0..) |accel_node, i| {
            const accelerator: *Node.Accelerator = @alignCast(@fieldParentPtr("base", accel_node));
            var modifiers = res.AcceleratorModifiers{};
@@ -1401,12 +1414,9 @@ pub const Compiler = struct {
        caption: ?Token = null,
    };
 
-    pub fn writeDialog(self: *Compiler, node: *Node.Dialog, writer: anytype) !void {
+    pub fn writeDialog(self: *Compiler, node: *Node.Dialog, writer: *std.Io.Writer) !void {
        var data_buffer: std.Io.Writer.Allocating = .init(self.allocator);
        defer data_buffer.deinit();
-        // The header's data length field is a u32 so limit the resource's data size so that
-        // we know we can always specify the real size.
-        const data_writer = &data_buffer.writer;
 
        const resource = ResourceType.fromString(.{
            .slice = node.type.slice(self.source),
@@ -1667,21 +1677,18 @@ pub const Compiler = struct {
            optional_statement_values.style |= res.WS.CAPTION;
        }
 
-        self.writeDialogHeaderAndStrings(
+        // NOTE: Dialog header and menu/class/title strings can never exceed u32 bytes
+        // on their own.
+        try self.writeDialogHeaderAndStrings(
            node,
-            data_writer,
+            &data_buffer.writer,
            resource,
            &optional_statement_values,
            x,
            y,
            width,
            height,
-        ) catch |err| switch (err) {
-            // Dialog header and menu/class/title strings can never exceed u32 bytes
-            // on their own, so this error is unreachable.
-            error.WriteFailed => unreachable,
-            else => |e| return e,
-        };
+        );
 
        var controls_by_id = std.AutoHashMap(u32, *const Node.ControlStatement).init(self.allocator);
        // Number of controls are guaranteed by the parser to be within maxInt(u16).
@@ -1691,27 +1698,26 @@ pub const Compiler = struct {
        for (node.controls) |control_node| {
            const control: *Node.ControlStatement = @alignCast(@fieldParentPtr("base", control_node));
 
-            self.writeDialogControl(
+            try self.writeDialogControl(
                control,
-                data_writer,
+                &data_buffer.writer,
                resource,
                // We know the data_buffer len is limited to u32 max.
                @intCast(data_buffer.written().len),
                &controls_by_id,
-            ) catch |err| switch (err) {
-                error.WriteFailed => {
-                    try self.addErrorDetails(.{
-                        .err = .resource_data_size_exceeds_max,
-                        .token = node.id,
-                    });
-                    return self.addErrorDetailsAndFail(.{
-                        .err = .resource_data_size_exceeds_max,
-                        .type = .note,
-                        .token = control.type,
-                    });
-                },
-                else => |e| return e,
-            };
+            );
+
+            if (data_buffer.written().len > std.math.maxInt(u32)) {
+                try self.addErrorDetails(.{
+                    .err = .resource_data_size_exceeds_max,
+                    .token = node.id,
+                });
+                return self.addErrorDetailsAndFail(.{
+                    .err = .resource_data_size_exceeds_max,
+                    .type = .note,
+                    .token = control.type,
+                });
+            }
        }
 
        // We know the data_buffer len is limited to u32 max.
@@ -1733,7 +1739,7 @@ pub const Compiler = struct {
    fn writeDialogHeaderAndStrings(
        self: *Compiler,
        node: *Node.Dialog,
-        data_writer: anytype,
+        data_writer: *std.Io.Writer,
        resource: ResourceType,
        optional_statement_values: *const DialogOptionalStatementValues,
        x: Number,
@@ -1793,7 +1799,7 @@ pub const Compiler = struct {
    fn writeDialogControl(
        self: *Compiler,
        control: *Node.ControlStatement,
-        data_writer: anytype,
+        data_writer: *std.Io.Writer,
        resource: ResourceType,
        bytes_written_so_far: u32,
        controls_by_id: *std.AutoHashMap(u32, *const Node.ControlStatement),
@@ -1969,28 +1975,26 @@ pub const Compiler = struct {
            try NameOrOrdinal.writeEmpty(data_writer);
        }
 
-        // The extra data byte length must be able to fit within a u16.
        var extra_data_buf: std.Io.Writer.Allocating = .init(self.allocator);
        defer extra_data_buf.deinit();
+        // The extra data byte length must be able to fit within a u16.
        const extra_data_writer = &extra_data_buf.writer;
        for (control.extra_data) |data_expression| {
            const data = try self.evaluateDataExpression(data_expression);
            defer data.deinit(self.allocator);
-            data.write(extra_data_writer) catch |err| switch (err) {
-                error.WriteFailed => {
-                    try self.addErrorDetails(.{
-                        .err = .control_extra_data_size_exceeds_max,
-                        .token = control.type,
-                    });
-                    return self.addErrorDetailsAndFail(.{
-                        .err = .control_extra_data_size_exceeds_max,
-                        .type = .note,
-                        .token = data_expression.getFirstToken(),
-                        .token_span_end = data_expression.getLastToken(),
-                    });
-                },
-                else => |e| return e,
-            };
+            try data.write(&extra_data_buf.writer);
+
+            if (extra_data_buf.written().len > std.math.maxInt(u16)) {
+                try self.addErrorDetails(.{
+                    .err = .control_extra_data_size_exceeds_max,
+                    .token = control.type,
+                });
+                return self.addErrorDetailsAndFail(.{
+                    .err = .control_extra_data_size_exceeds_max,
+                    .type = .note,
+                    .token = data_expression.getFirstToken(),
+                    .token_span_end = data_expression.getLastToken(),
+                });
+            }
        }
        // We know the extra_data_buf size fits within a u16.
        const extra_data_size: u16 = @intCast(extra_data_buf.written().len);
@@ -1998,7 +2002,7 @@ pub const Compiler = struct {
        try data_writer.writeAll(extra_data_buf.written());
    }
 
-    pub fn writeToolbar(self: *Compiler, node: *Node.Toolbar, writer: anytype) !void {
+    pub fn writeToolbar(self: *Compiler, node: *Node.Toolbar, writer: *std.Io.Writer) !void {
        var data_buffer: std.Io.Writer.Allocating = .init(self.allocator);
        defer data_buffer.deinit();
        const data_writer = &data_buffer.writer;
@@ -2051,7 +2055,7 @@ pub const Compiler = struct {
        node: *Node.FontStatement,
    };
 
-    pub fn writeDialogFont(self: *Compiler, resource: ResourceType, values: FontStatementValues, writer: anytype) !void {
+    pub fn writeDialogFont(self: *Compiler, resource: ResourceType, values: FontStatementValues, writer: *std.Io.Writer) !void {
        const node = values.node;
        const point_size = evaluateNumberExpression(node.point_size, self.source, self.input_code_pages);
        try writer.writeInt(u16, point_size.asWord(), .little);
@@ -2076,12 +2080,9 @@ pub const Compiler = struct {
        try writer.writeAll(std.mem.sliceAsBytes(typeface[0 .. typeface.len + 1]));
    }
 
-    pub fn writeMenu(self: *Compiler, node: *Node.Menu, writer: anytype) !void {
+    pub fn writeMenu(self: *Compiler, node: *Node.Menu, writer: *std.Io.Writer) !void {
        var data_buffer: std.Io.Writer.Allocating = .init(self.allocator);
        defer data_buffer.deinit();
-        // The header's data length field is a u32 so limit the resource's data size so that
-        // we know we can always specify the real size.
-        const data_writer = &data_buffer.writer;
 
        const type_bytes = SourceBytes{
            .slice = node.type.slice(self.source),
@@ -2090,19 +2091,15 @@ pub const Compiler = struct {
        const resource = ResourceType.fromString(type_bytes);
        std.debug.assert(resource == .menu or resource == .menuex);
 
-        self.writeMenuData(node, data_writer, resource) catch |err| switch (err) {
-            error.WriteFailed => {
-                return self.addErrorDetailsAndFail(.{
-                    .err = .resource_data_size_exceeds_max,
-                    .token = node.id,
-                });
-            },
-            else => |e| return e,
-        };
+        try self.writeMenuData(node, &data_buffer.writer, resource);
 
-        // This intCast can't fail because the limitedWriter above guarantees that
-        // we will never write more than maxInt(u32) bytes.
-        const data_size: u32 = @intCast(data_buffer.written().len);
+        // TODO: Limit data_buffer in some way to error when writing more than u32 max bytes
+        const data_size: u32 = std.math.cast(u32, data_buffer.written().len) orelse {
+            return self.addErrorDetailsAndFail(.{
+                .err = .resource_data_size_exceeds_max,
+                .token = node.id,
+            });
+        };
        var header = try self.resourceHeader(node.id, node.type, .{
            .data_size = data_size,
        });
@@ -2256,11 +2253,10 @@ pub const Compiler = struct {
        }
    }
 
-    pub fn writeVersionInfo(self: *Compiler, node: *Node.VersionInfo, writer: anytype) !void {
+    pub fn writeVersionInfo(self: *Compiler, node: *Node.VersionInfo, writer: *std.Io.Writer) !void {
+        // NOTE: The node's length field (which is inclusive of the length of all of its children) is a u16
        var data_buffer: std.Io.Writer.Allocating = .init(self.allocator);
        defer data_buffer.deinit();
-        // The node's length field (which is inclusive of the length of all of its children) is a u16
-        // so limit the node's data size so that we know we can always specify the real size.
        const data_writer = &data_buffer.writer;
 
        try data_writer.writeInt(u16, 0, .little); // placeholder size
@@ -2345,25 +2341,29 @@ pub const Compiler = struct {
        try fixed_file_info.write(data_writer);
 
        for (node.block_statements) |statement| {
-            self.writeVersionNode(statement, data_writer, &data_buffer) catch |err| switch (err) {
-                error.WriteFailed => {
-                    try self.addErrorDetails(.{
-                        .err = .version_node_size_exceeds_max,
-                        .token = node.id,
-                    });
-                    return self.addErrorDetailsAndFail(.{
-                        .err = .version_node_size_exceeds_max,
-                        .type = .note,
-                        .token = statement.getFirstToken(),
-                        .token_span_end = statement.getLastToken(),
-                    });
+            var overflow = false;
+            self.writeVersionNode(statement, data_writer) catch |err| switch (err) {
+                error.NoSpaceLeft => {
+                    overflow = true;
                },
                else => |e| return e,
            };
+            if (overflow or data_buffer.written().len > std.math.maxInt(u16)) {
+                try self.addErrorDetails(.{
+                    .err = .version_node_size_exceeds_max,
+                    .token = node.id,
+                });
+                return self.addErrorDetailsAndFail(.{
+                    .err = .version_node_size_exceeds_max,
+                    .type = .note,
+                    .token = statement.getFirstToken(),
+                    .token_span_end = statement.getLastToken(),
+                });
+            }
        }
 
-        // We know that data_buffer.items.len is within the limits of a u16, since we
-        // limited the writer to maxInt(u16)
+        // We know that data_buffer len is within the limits of a u16, since we check in the block
+        // statements loop above which is the only place it can overflow.
        const data_size: u16 = @intCast(data_buffer.written().len);
        // And now that we know the full size of this node (including its children), set its size
        std.mem.writeInt(u16, data_buffer.written()[0..2], data_size, .little);
@ -2381,18 +2381,17 @@ pub const Compiler = struct {
|
||||
try writeResourceData(writer, &data_fbs, data_size);
|
||||
}
|
||||
|
||||
/// Expects writer to be a LimitedWriter limited to u16, meaning all writes to
|
||||
/// the writer within this function could return error.NoSpaceLeft, and that buf.items.len
|
||||
/// will never be able to exceed maxInt(u16).
|
||||
pub fn writeVersionNode(self: *Compiler, node: *Node, writer: *std.Io.Writer, buf: *std.Io.Writer.Allocating) !void {
|
||||
/// Assumes that writer is Writer.Allocating (specifically, that buffered() gets the entire data)
|
||||
/// TODO: This function could be nicer if writer was guaranteed to fail if it wrote more than u16 max bytes
|
||||
pub fn writeVersionNode(self: *Compiler, node: *Node, writer: *std.Io.Writer) !void {
|
||||
// We can assume that buf.items.len will never be able to exceed the limits of a u16
|
||||
try writeDataPadding(writer, @as(u16, @intCast(buf.written().len)));
|
||||
try writeDataPadding(writer, std.math.cast(u16, writer.buffered().len) orelse return error.NoSpaceLeft);
|
||||
|
||||
const node_and_children_size_offset = buf.written().len;
|
||||
const node_and_children_size_offset = writer.buffered().len;
|
||||
try writer.writeInt(u16, 0, .little); // placeholder for size
|
||||
const data_size_offset = buf.written().len;
|
||||
const data_size_offset = writer.buffered().len;
|
||||
try writer.writeInt(u16, 0, .little); // placeholder for data size
|
||||
const data_type_offset = buf.written().len;
|
||||
const data_type_offset = writer.buffered().len;
|
||||
// Data type is string unless the node contains values that are numbers.
|
||||
try writer.writeInt(u16, res.VersionNode.type_string, .little);
|
||||
|
||||
@ -2422,7 +2421,7 @@ pub const Compiler = struct {
|
||||
// during parsing, so we can just do the correct thing here.
|
||||
var values_size: usize = 0;
|
||||
|
||||
try writeDataPadding(writer, @intCast(buf.written().len));
|
||||
try writeDataPadding(writer, std.math.cast(u16, writer.buffered().len) orelse return error.NoSpaceLeft);
|
||||
|
||||
for (block_or_value.values, 0..) |value_value_node_uncasted, i| {
|
||||
const value_value_node = value_value_node_uncasted.cast(.block_value_value).?;
|
||||
@ -2461,26 +2460,26 @@ pub const Compiler = struct {
|
||||
}
|
||||
}
|
||||
}
|
||||
var data_size_slice = buf.written()[data_size_offset..];
|
||||
var data_size_slice = writer.buffered()[data_size_offset..];
|
||||
std.mem.writeInt(u16, data_size_slice[0..@sizeOf(u16)], @as(u16, @intCast(values_size)), .little);
|
||||
|
||||
if (has_number_value) {
|
||||
const data_type_slice = buf.written()[data_type_offset..];
|
||||
const data_type_slice = writer.buffered()[data_type_offset..];
|
||||
std.mem.writeInt(u16, data_type_slice[0..@sizeOf(u16)], res.VersionNode.type_binary, .little);
|
||||
}
|
||||
|
||||
if (node_type == .block) {
|
||||
const block = block_or_value;
|
||||
for (block.children) |child| {
|
||||
try self.writeVersionNode(child, writer, buf);
|
||||
try self.writeVersionNode(child, writer);
|
||||
}
|
||||
}
|
||||
},
|
||||
else => unreachable,
|
||||
}
|
||||
|
||||
const node_and_children_size = buf.written().len - node_and_children_size_offset;
|
||||
const node_and_children_size_slice = buf.written()[node_and_children_size_offset..];
|
||||
const node_and_children_size = writer.buffered().len - node_and_children_size_offset;
|
||||
const node_and_children_size_slice = writer.buffered()[node_and_children_size_offset..];
|
||||
std.mem.writeInt(u16, node_and_children_size_slice[0..@sizeOf(u16)], @as(u16, @intCast(node_and_children_size)), .little);
|
||||
}
|
||||
|
||||
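
// --- Editor's sketch, not part of this commit: the back-patching scheme
// writeVersionNode relies on above — remember an offset into buffered(),
// write a placeholder, then patch it once the final size is known. Assumes,
// as the code above does, that buffered() exposes all written data.
const std = @import("std");

test "patch a u16 size placeholder through buffered()" {
    var aw: std.Io.Writer.Allocating = .init(std.testing.allocator);
    defer aw.deinit();
    const w = &aw.writer;

    const size_offset = w.buffered().len;
    try w.writeInt(u16, 0, .little); // placeholder for size
    try w.writeAll("payload");

    const total_size = w.buffered().len - size_offset;
    const size_slice = w.buffered()[size_offset..];
    std.mem.writeInt(u16, size_slice[0..@sizeOf(u16)], @as(u16, @intCast(total_size)), .little);
    try std.testing.expectEqual(@as(u16, 9), std.mem.readInt(u16, w.buffered()[0..2], .little));
}
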
@ -2673,11 +2672,11 @@ pub const Compiler = struct {
return .{ .bytes = header_size, .padding_after_name = padding_after_name };
}

pub fn writeAssertNoOverflow(self: ResourceHeader, writer: anytype) !void {
pub fn writeAssertNoOverflow(self: ResourceHeader, writer: *std.Io.Writer) !void {
return self.writeSizeInfo(writer, self.calcSize() catch unreachable);
}

pub fn write(self: ResourceHeader, writer: anytype, err_ctx: errors.DiagnosticsContext) !void {
pub fn write(self: ResourceHeader, writer: *std.Io.Writer, err_ctx: errors.DiagnosticsContext) !void {
const size_info = self.calcSize() catch {
try err_ctx.diagnostics.append(.{
.err = .resource_data_size_exceeds_max,
@ -2815,7 +2814,7 @@ pub const Compiler = struct {
return null;
}

pub fn writeEmptyResource(writer: anytype) !void {
pub fn writeEmptyResource(writer: *std.Io.Writer) !void {
const header = ResourceHeader{
.name_value = .{ .ordinal = 0 },
.type_value = .{ .ordinal = 0 },
@ -2932,39 +2931,8 @@ pub const SearchDir = struct {
}
};

/// Slurps the first `size` bytes read into `slurped_header`
pub fn HeaderSlurpingReader(comptime size: usize, comptime ReaderType: anytype) type {
return struct {
child_reader: ReaderType,
bytes_read: usize = 0,
slurped_header: [size]u8 = [_]u8{0x00} ** size,

pub const Error = ReaderType.Error;
pub const Reader = std.io.GenericReader(*@This(), Error, read);

pub fn read(self: *@This(), buf: []u8) Error!usize {
const amt = try self.child_reader.read(buf);
if (self.bytes_read < size) {
const bytes_to_add = @min(amt, size - self.bytes_read);
const end_index = self.bytes_read + bytes_to_add;
@memcpy(self.slurped_header[self.bytes_read..end_index], buf[0..bytes_to_add]);
}
self.bytes_read +|= amt;
return amt;
}

pub fn reader(self: *@This()) Reader {
return .{ .context = self };
}
};
}

pub fn headerSlurpingReader(comptime size: usize, reader: anytype) HeaderSlurpingReader(size, @TypeOf(reader)) {
return .{ .child_reader = reader };
}

pub const FontDir = struct {
fonts: std.ArrayListUnmanaged(Font) = .empty,
fonts: std.ArrayList(Font) = .empty,
/// To keep track of which ids are set and where they were set from
ids: std.AutoHashMapUnmanaged(u16, Token) = .empty,

@ -2982,7 +2950,7 @@ pub const FontDir = struct {
try self.fonts.append(allocator, font);
}

pub fn writeResData(self: *FontDir, compiler: *Compiler, writer: anytype) !void {
pub fn writeResData(self: *FontDir, compiler: *Compiler, writer: *std.Io.Writer) !void {
if (self.fonts.items.len == 0) return;

// We know the number of fonts is limited to maxInt(u16) because fonts
@ -3106,7 +3074,7 @@ pub const StringTable = struct {
blocks: std.AutoArrayHashMapUnmanaged(u16, Block) = .empty,

pub const Block = struct {
strings: std.ArrayListUnmanaged(Token) = .empty,
strings: std.ArrayList(Token) = .empty,
set_indexes: std.bit_set.IntegerBitSet(16) = .{ .mask = 0 },
memory_flags: MemoryFlags = MemoryFlags.defaults(res.RT.STRING),
characteristics: u32,
@ -3187,7 +3155,7 @@ pub const StringTable = struct {
try std.testing.expectEqualStrings("a", trimToDoubleNUL(u8, "a\x00\x00b"));
}

pub fn writeResData(self: *Block, compiler: *Compiler, language: res.Language, block_id: u16, writer: anytype) !void {
pub fn writeResData(self: *Block, compiler: *Compiler, language: res.Language, block_id: u16, writer: *std.Io.Writer) !void {
var data_buffer: std.Io.Writer.Allocating = .init(compiler.allocator);
defer data_buffer.deinit();
const data_writer = &data_buffer.writer;

@ -43,7 +43,7 @@ pub const Resource = struct {
};

pub const ParsedResources = struct {
list: std.ArrayListUnmanaged(Resource) = .empty,
list: std.ArrayList(Resource) = .empty,
allocator: Allocator,

pub fn init(allocator: Allocator) ParsedResources {
@ -157,7 +157,7 @@ pub fn parseNameOrOrdinal(allocator: Allocator, reader: *std.Io.Reader) !NameOrO
const ordinal_value = try reader.takeInt(u16, .little);
return .{ .ordinal = ordinal_value };
}
var name_buf = try std.ArrayListUnmanaged(u16).initCapacity(allocator, 16);
var name_buf = try std.ArrayList(u16).initCapacity(allocator, 16);
errdefer name_buf.deinit(allocator);
var code_unit = first_code_unit;
while (code_unit != 0) {
@ -373,7 +373,7 @@ pub fn writeCoff(allocator: Allocator, writer: *std.Io.Writer, resources: []cons
try writer.writeAll(string_table.bytes.items);
}

fn writeSymbol(writer: anytype, symbol: std.coff.Symbol) !void {
fn writeSymbol(writer: *std.Io.Writer, symbol: std.coff.Symbol) !void {
try writer.writeAll(&symbol.name);
try writer.writeInt(u32, symbol.value, .little);
try writer.writeInt(u16, @intFromEnum(symbol.section_number), .little);
@ -383,7 +383,7 @@ fn writeSymbol(writer: anytype, symbol: std.coff.Symbol) !void {
try writer.writeInt(u8, symbol.number_of_aux_symbols, .little);
}

fn writeSectionDefinition(writer: anytype, def: std.coff.SectionDefinition) !void {
fn writeSectionDefinition(writer: *std.Io.Writer, def: std.coff.SectionDefinition) !void {
try writer.writeInt(u32, def.length, .little);
try writer.writeInt(u16, def.number_of_relocations, .little);
try writer.writeInt(u16, def.number_of_linenumbers, .little);
@ -417,7 +417,7 @@ pub const ResourceDirectoryEntry = extern struct {
to_subdirectory: bool,
},

pub fn writeCoff(self: ResourceDirectoryEntry, writer: anytype) !void {
pub fn writeCoff(self: ResourceDirectoryEntry, writer: *std.Io.Writer) !void {
try writer.writeInt(u32, @bitCast(self.entry), .little);
try writer.writeInt(u32, @bitCast(self.offset), .little);
}
@ -435,7 +435,7 @@ const ResourceTree = struct {
type_to_name_map: std.ArrayHashMapUnmanaged(NameOrOrdinal, NameToLanguageMap, NameOrOrdinalHashContext, true),
rsrc_string_table: std.ArrayHashMapUnmanaged(NameOrOrdinal, void, NameOrOrdinalHashContext, true),
deduplicated_data: std.StringArrayHashMapUnmanaged(u32),
data_offsets: std.ArrayListUnmanaged(u32),
data_offsets: std.ArrayList(u32),
rsrc02_len: u32,
coff_options: CoffOptions,
allocator: Allocator,
@ -675,13 +675,13 @@ const ResourceTree = struct {
return &.{};
}

var level2_list: std.ArrayListUnmanaged(*const NameToLanguageMap) = .empty;
var level2_list: std.ArrayList(*const NameToLanguageMap) = .empty;
defer level2_list.deinit(allocator);

var level3_list: std.ArrayListUnmanaged(*const LanguageToResourceMap) = .empty;
var level3_list: std.ArrayList(*const LanguageToResourceMap) = .empty;
defer level3_list.deinit(allocator);

var resources_list: std.ArrayListUnmanaged(*const RelocatableResource) = .empty;
var resources_list: std.ArrayList(*const RelocatableResource) = .empty;
defer resources_list.deinit(allocator);

var relocations = Relocations.init(allocator);
@ -896,7 +896,7 @@ const ResourceTree = struct {
return symbols;
}

fn writeRelocation(writer: anytype, relocation: std.coff.Relocation) !void {
fn writeRelocation(writer: *std.Io.Writer, relocation: std.coff.Relocation) !void {
try writer.writeInt(u32, relocation.virtual_address, .little);
try writer.writeInt(u32, relocation.symbol_table_index, .little);
try writer.writeInt(u16, relocation.type, .little);
@ -928,7 +928,7 @@ const Relocation = struct {

const Relocations = struct {
allocator: Allocator,
list: std.ArrayListUnmanaged(Relocation) = .empty,
list: std.ArrayList(Relocation) = .empty,
cur_symbol_index: u32 = 5,

pub fn init(allocator: Allocator) Relocations {
@ -952,7 +952,7 @@ const Relocations = struct {
/// Does not do deduplication (only because there's no chance of duplicate strings in this
/// instance).
const StringTable = struct {
bytes: std.ArrayListUnmanaged(u8) = .empty,
bytes: std.ArrayList(u8) = .empty,

pub fn deinit(self: *StringTable, allocator: Allocator) void {
self.bytes.deinit(allocator);

@ -15,10 +15,10 @@ const builtin = @import("builtin");
const native_endian = builtin.cpu.arch.endian();

pub const Diagnostics = struct {
errors: std.ArrayListUnmanaged(ErrorDetails) = .empty,
errors: std.ArrayList(ErrorDetails) = .empty,
/// Append-only, cannot handle removing strings.
/// Expects to own all strings within the list.
strings: std.ArrayListUnmanaged([]const u8) = .empty,
strings: std.ArrayList([]const u8) = .empty,
allocator: std.mem.Allocator,

pub fn init(allocator: std.mem.Allocator) Diagnostics {
@ -256,7 +256,7 @@ pub const ErrorDetails = struct {
.{ "literal", "unquoted literal" },
});

pub fn writeCommaSeparated(self: ExpectedTypes, writer: anytype) !void {
pub fn writeCommaSeparated(self: ExpectedTypes, writer: *std.Io.Writer) !void {
const struct_info = @typeInfo(ExpectedTypes).@"struct";
const num_real_fields = struct_info.fields.len - 1;
const num_padding_bits = @bitSizeOf(ExpectedTypes) - num_real_fields;
@ -441,7 +441,7 @@ pub const ErrorDetails = struct {
} };
}

pub fn render(self: ErrorDetails, writer: anytype, source: []const u8, strings: []const []const u8) !void {
pub fn render(self: ErrorDetails, writer: *std.Io.Writer, source: []const u8, strings: []const []const u8) !void {
switch (self.err) {
.unfinished_string_literal => {
return writer.print("unfinished string literal at '{f}', expected closing '\"'", .{self.fmtToken(source)});
@ -987,12 +987,14 @@ pub fn renderErrorMessage(writer: *std.io.Writer, tty_config: std.io.tty.Config,
if (corresponding_span != null and corresponding_file != null) {
var worth_printing_lines: bool = true;
var initial_lines_err: ?anyerror = null;
var file_reader_buf: [max_source_line_bytes * 2]u8 = undefined;
var corresponding_lines: ?CorrespondingLines = CorrespondingLines.init(
cwd,
err_details,
source_line_for_display.line,
corresponding_span.?,
corresponding_file.?,
&file_reader_buf,
) catch |err| switch (err) {
error.NotWorthPrintingLines => blk: {
worth_printing_lines = false;
@ -1078,10 +1080,17 @@ const CorrespondingLines = struct {
at_eof: bool = false,
span: SourceMappings.CorrespondingSpan,
file: std.fs.File,
buffered_reader: std.fs.File.Reader,
file_reader: std.fs.File.Reader,
code_page: SupportedCodePage,

pub fn init(cwd: std.fs.Dir, err_details: ErrorDetails, line_for_comparison: []const u8, corresponding_span: SourceMappings.CorrespondingSpan, corresponding_file: []const u8) !CorrespondingLines {
pub fn init(
cwd: std.fs.Dir,
err_details: ErrorDetails,
line_for_comparison: []const u8,
corresponding_span: SourceMappings.CorrespondingSpan,
corresponding_file: []const u8,
file_reader_buf: []u8,
) !CorrespondingLines {
// We don't do line comparison for this error, so don't print the note if the line
// number is different
if (err_details.err == .string_literal_too_long and err_details.token.line_number != corresponding_span.start_line) {
@ -1096,17 +1105,14 @@ const CorrespondingLines = struct {
var corresponding_lines = CorrespondingLines{
.span = corresponding_span,
.file = try utils.openFileNotDir(cwd, corresponding_file, .{}),
.buffered_reader = undefined,
.code_page = err_details.code_page,
.file_reader = undefined,
};
corresponding_lines.buffered_reader = corresponding_lines.file.reader(&.{});
corresponding_lines.file_reader = corresponding_lines.file.reader(file_reader_buf);
errdefer corresponding_lines.deinit();

var writer: std.Io.Writer = .fixed(&corresponding_lines.line_buf);

try corresponding_lines.writeLineFromStreamVerbatim(
&writer,
corresponding_lines.buffered_reader.interface.adaptToOldInterface(),
&corresponding_lines.file_reader.interface,
corresponding_span.start_line,
);

@ -1144,11 +1150,8 @@ const CorrespondingLines = struct {
self.line_len = 0;
self.visual_line_len = 0;

var writer: std.Io.Writer = .fixed(&self.line_buf);

try self.writeLineFromStreamVerbatim(
&writer,
self.buffered_reader.interface.adaptToOldInterface(),
&self.file_reader.interface,
self.line_num,
);

@ -1162,7 +1165,7 @@ const CorrespondingLines = struct {
return visual_line;
}

fn writeLineFromStreamVerbatim(self: *CorrespondingLines, writer: *std.Io.Writer, input: anytype, line_num: usize) !void {
fn writeLineFromStreamVerbatim(self: *CorrespondingLines, input: *std.Io.Reader, line_num: usize) !void {
while (try readByteOrEof(input)) |byte| {
switch (byte) {
'\n', '\r' => {
@ -1182,13 +1185,9 @@ const CorrespondingLines = struct {
}
},
else => {
if (self.line_num == line_num) {
if (writer.writeByte(byte)) {
self.line_len += 1;
} else |err| switch (err) {
error.WriteFailed => {},
else => |e| return e,
}
if (self.line_num == line_num and self.line_len < self.line_buf.len) {
self.line_buf[self.line_len] = byte;
self.line_len += 1;
}
},
}
@ -1199,8 +1198,8 @@ const CorrespondingLines = struct {
self.line_num += 1;
}

fn readByteOrEof(reader: anytype) !?u8 {
return reader.readByte() catch |err| switch (err) {
fn readByteOrEof(reader: *std.Io.Reader) !?u8 {
return reader.takeByte() catch |err| switch (err) {
error.EndOfStream => return null,
else => |e| return e,
};

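
// --- Editor's sketch, not part of this commit: the std.fs.File.Reader setup
// used by CorrespondingLines above — the caller owns the buffer, and the
// generic *std.Io.Reader is reached through the .interface field. The helper
// name is hypothetical.
const std = @import("std");

fn firstByteOrNull(file: std.fs.File, buf: []u8) !?u8 {
    var file_reader: std.fs.File.Reader = file.reader(buf);
    const reader: *std.Io.Reader = &file_reader.interface;
    return reader.takeByte() catch |err| switch (err) {
        error.EndOfStream => return null,
        else => |e| return e,
    };
}
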
@ -8,80 +8,66 @@ const std = @import("std");
const builtin = @import("builtin");
const native_endian = builtin.cpu.arch.endian();

pub const ReadError = std.mem.Allocator.Error || error{ InvalidHeader, InvalidImageType, ImpossibleDataSize, UnexpectedEOF, ReadError };
pub const ReadError = std.mem.Allocator.Error || error{ InvalidHeader, InvalidImageType, ImpossibleDataSize, UnexpectedEOF, ReadFailed };

pub fn read(allocator: std.mem.Allocator, reader: anytype, max_size: u64) ReadError!IconDir {
// Some Reader implementations have an empty ReadError error set which would
// cause 'unreachable else' if we tried to use an else in the switch, so we
// need to detect this case and not try to translate to ReadError
const anyerror_reader_errorset = @TypeOf(reader).Error == anyerror;
const empty_reader_errorset = @typeInfo(@TypeOf(reader).Error).error_set == null or @typeInfo(@TypeOf(reader).Error).error_set.?.len == 0;
if (empty_reader_errorset and !anyerror_reader_errorset) {
return readAnyError(allocator, reader, max_size) catch |err| switch (err) {
error.EndOfStream => error.UnexpectedEOF,
else => |e| return e,
};
} else {
return readAnyError(allocator, reader, max_size) catch |err| switch (err) {
error.OutOfMemory,
error.InvalidHeader,
error.InvalidImageType,
error.ImpossibleDataSize,
=> |e| return e,
error.EndOfStream => error.UnexpectedEOF,
// The remaining errors are dependent on the `reader`, so
// we just translate them all to generic ReadError
else => error.ReadError,
};
}
pub fn read(allocator: std.mem.Allocator, reader: *std.Io.Reader, max_size: u64) ReadError!IconDir {
return readInner(allocator, reader, max_size) catch |err| switch (err) {
error.OutOfMemory,
error.InvalidHeader,
error.InvalidImageType,
error.ImpossibleDataSize,
error.ReadFailed,
=> |e| return e,
error.EndOfStream => error.UnexpectedEOF,
};
}

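
// --- Editor's sketch, not part of this commit: the fixed-buffer Reader and
// take* calls that readInner below (and the tests further down) use in place
// of fixedBufferStream and read*.
const std = @import("std");

test "fixed Reader and the take* API" {
    const data = "\x01\x00\xff";
    var reader: std.Io.Reader = .fixed(data);
    try std.testing.expectEqual(@as(u16, 1), try reader.takeInt(u16, .little));
    try std.testing.expectEqual(@as(u8, 0xff), try reader.takeByte());
    try std.testing.expectError(error.EndOfStream, reader.takeByte());
}
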
// TODO: This seems like a somewhat strange pattern, could be a better way
// to do this. Maybe it makes more sense to handle the translation
// at the call site instead of having a helper function here.
pub fn readAnyError(allocator: std.mem.Allocator, reader: anytype, max_size: u64) !IconDir {
const reserved = try reader.readInt(u16, .little);
fn readInner(allocator: std.mem.Allocator, reader: *std.Io.Reader, max_size: u64) !IconDir {
const reserved = try reader.takeInt(u16, .little);
if (reserved != 0) {
return error.InvalidHeader;
}

const image_type = reader.readEnum(ImageType, .little) catch |err| switch (err) {
error.InvalidValue => return error.InvalidImageType,
const image_type = reader.takeEnum(ImageType, .little) catch |err| switch (err) {
error.InvalidEnumTag => return error.InvalidImageType,
else => |e| return e,
};

const num_images = try reader.readInt(u16, .little);
const num_images = try reader.takeInt(u16, .little);

// To avoid over-allocation in the case of a file that says it has way more
// entries than it actually does, we use an ArrayList with a conservatively
// limited initial capacity instead of allocating the entire slice at once.
const initial_capacity = @min(num_images, 8);
var entries = try std.array_list.Managed(Entry).initCapacity(allocator, initial_capacity);
errdefer entries.deinit();
var entries = try std.ArrayList(Entry).initCapacity(allocator, initial_capacity);
errdefer entries.deinit(allocator);

var i: usize = 0;
while (i < num_images) : (i += 1) {
var entry: Entry = undefined;
entry.width = try reader.readByte();
entry.height = try reader.readByte();
entry.num_colors = try reader.readByte();
entry.reserved = try reader.readByte();
entry.width = try reader.takeByte();
entry.height = try reader.takeByte();
entry.num_colors = try reader.takeByte();
entry.reserved = try reader.takeByte();
switch (image_type) {
.icon => {
entry.type_specific_data = .{ .icon = .{
.color_planes = try reader.readInt(u16, .little),
.bits_per_pixel = try reader.readInt(u16, .little),
.color_planes = try reader.takeInt(u16, .little),
.bits_per_pixel = try reader.takeInt(u16, .little),
} };
},
.cursor => {
entry.type_specific_data = .{ .cursor = .{
.hotspot_x = try reader.readInt(u16, .little),
.hotspot_y = try reader.readInt(u16, .little),
.hotspot_x = try reader.takeInt(u16, .little),
.hotspot_y = try reader.takeInt(u16, .little),
} };
},
}
entry.data_size_in_bytes = try reader.readInt(u32, .little);
entry.data_offset_from_start_of_file = try reader.readInt(u32, .little);
entry.data_size_in_bytes = try reader.takeInt(u32, .little);
entry.data_offset_from_start_of_file = try reader.takeInt(u32, .little);
// Validate that the offset/data size is feasible
if (@as(u64, entry.data_offset_from_start_of_file) + entry.data_size_in_bytes > max_size) {
return error.ImpossibleDataSize;
@ -101,12 +87,12 @@ pub fn readAnyError(allocator: std.mem.Allocator, reader: anytype, max_size: u64
if (entry.data_size_in_bytes < 16) {
return error.ImpossibleDataSize;
}
try entries.append(entry);
try entries.append(allocator, entry);
}

return .{
.image_type = image_type,
.entries = try entries.toOwnedSlice(),
.entries = try entries.toOwnedSlice(allocator),
.allocator = allocator,
};
}
@ -135,7 +121,7 @@ pub const IconDir = struct {
return @intCast(IconDir.res_header_byte_len + self.entries.len * Entry.res_byte_len);
}

pub fn writeResData(self: IconDir, writer: anytype, first_image_id: u16) !void {
pub fn writeResData(self: IconDir, writer: *std.Io.Writer, first_image_id: u16) !void {
try writer.writeInt(u16, 0, .little);
try writer.writeInt(u16, @intFromEnum(self.image_type), .little);
// We know that entries.len must fit into a u16
@ -173,7 +159,7 @@ pub const Entry = struct {

pub const res_byte_len = 14;

pub fn writeResData(self: Entry, writer: anytype, id: u16) !void {
pub fn writeResData(self: Entry, writer: *std.Io.Writer, id: u16) !void {
switch (self.type_specific_data) {
.icon => |icon_data| {
try writer.writeInt(u8, @as(u8, @truncate(self.width)), .little);
@ -198,8 +184,8 @@ pub const Entry = struct {

test "icon" {
const data = "\x00\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x10\x00\x10\x00\x00\x00\x16\x00\x00\x00" ++ [_]u8{0} ** 16;
var fbs = std.io.fixedBufferStream(data);
const icon = try read(std.testing.allocator, fbs.reader(), data.len);
var fbs: std.Io.Reader = .fixed(data);
const icon = try read(std.testing.allocator, &fbs, data.len);
defer icon.deinit();

try std.testing.expectEqual(ImageType.icon, icon.image_type);
@ -211,26 +197,26 @@ test "icon too many images" {
// it's not possible to hit EOF when looking for more RESDIR structures, since they are
// themselves 16 bytes long, so we'll always hit ImpossibleDataSize instead.
const data = "\x00\x00\x01\x00\x02\x00\x10\x10\x00\x00\x01\x00\x10\x00\x10\x00\x00\x00\x16\x00\x00\x00" ++ [_]u8{0} ** 16;
var fbs = std.io.fixedBufferStream(data);
try std.testing.expectError(error.ImpossibleDataSize, read(std.testing.allocator, fbs.reader(), data.len));
var fbs: std.Io.Reader = .fixed(data);
try std.testing.expectError(error.ImpossibleDataSize, read(std.testing.allocator, &fbs, data.len));
}

test "icon data size past EOF" {
const data = "\x00\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x10\x00\x10\x01\x00\x00\x16\x00\x00\x00" ++ [_]u8{0} ** 16;
var fbs = std.io.fixedBufferStream(data);
try std.testing.expectError(error.ImpossibleDataSize, read(std.testing.allocator, fbs.reader(), data.len));
var fbs: std.Io.Reader = .fixed(data);
try std.testing.expectError(error.ImpossibleDataSize, read(std.testing.allocator, &fbs, data.len));
}

test "icon data offset past EOF" {
const data = "\x00\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x10\x00\x10\x00\x00\x00\x17\x00\x00\x00" ++ [_]u8{0} ** 16;
var fbs = std.io.fixedBufferStream(data);
try std.testing.expectError(error.ImpossibleDataSize, read(std.testing.allocator, fbs.reader(), data.len));
var fbs: std.Io.Reader = .fixed(data);
try std.testing.expectError(error.ImpossibleDataSize, read(std.testing.allocator, &fbs, data.len));
}

test "icon data size too small" {
const data = "\x00\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x10\x00\x0F\x00\x00\x00\x16\x00\x00\x00";
var fbs = std.io.fixedBufferStream(data);
try std.testing.expectError(error.ImpossibleDataSize, read(std.testing.allocator, fbs.reader(), data.len));
var fbs: std.Io.Reader = .fixed(data);
try std.testing.expectError(error.ImpossibleDataSize, read(std.testing.allocator, &fbs, data.len));
}

pub const ImageFormat = enum(u2) {

@ -119,6 +119,7 @@ test tagToId {
}

test "exhaustive tagToId" {
@setEvalBranchQuota(2000);
inline for (@typeInfo(LanguageId).@"enum".fields) |field| {
const id = tagToId(field.name) catch |err| {
std.debug.print("tag: {s}\n", .{field.name});
@ -131,8 +132,8 @@ test "exhaustive tagToId" {
}
var buf: [32]u8 = undefined;
inline for (valid_alternate_sorts) |parsed_sort| {
var fbs = std.io.fixedBufferStream(&buf);
const writer = fbs.writer();
var fbs: std.Io.Writer = .fixed(&buf);
const writer = &fbs;
writer.writeAll(parsed_sort.language_code) catch unreachable;
writer.writeAll("-") catch unreachable;
writer.writeAll(parsed_sort.country_code.?) catch unreachable;
@ -146,12 +147,12 @@ test "exhaustive tagToId" {
break :field name_buf;
};
const expected = @field(LanguageId, &expected_field_name);
const id = tagToId(fbs.getWritten()) catch |err| {
std.debug.print("tag: {s}\n", .{fbs.getWritten()});
const id = tagToId(fbs.buffered()) catch |err| {
std.debug.print("tag: {s}\n", .{fbs.buffered()});
return err;
};
try std.testing.expectEqual(expected, id orelse {
std.debug.print("tag: {s}, expected: {}, got null\n", .{ fbs.getWritten(), expected });
std.debug.print("tag: {s}, expected: {}, got null\n", .{ fbs.buffered(), expected });
return error.TestExpectedEqual;
});
}

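
// --- Editor's sketch, not part of this commit: the fixed-buffer Writer that
// replaces fixedBufferStream in the test above; buffered() takes over the
// role of getWritten().
const std = @import("std");

test "fixed Writer and buffered()" {
    var buf: [32]u8 = undefined;
    var writer: std.Io.Writer = .fixed(&buf);
    try writer.writeAll("en");
    try writer.writeAll("-US");
    try std.testing.expectEqualStrings("en-US", writer.buffered());
}
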
@ -469,8 +469,8 @@ pub fn parseQuotedString(
const T = if (literal_type == .ascii) u8 else u16;
std.debug.assert(bytes.slice.len >= 2); // must at least have 2 double quote chars

var buf = try std.array_list.Managed(T).initCapacity(allocator, bytes.slice.len);
errdefer buf.deinit();
var buf = try std.ArrayList(T).initCapacity(allocator, bytes.slice.len);
errdefer buf.deinit(allocator);

var iterative_parser = IterativeStringParser.init(bytes, options);

@ -480,13 +480,13 @@ pub fn parseQuotedString(
.ascii => switch (options.output_code_page) {
.windows1252 => {
if (parsed.from_escaped_integer) {
try buf.append(@truncate(c));
try buf.append(allocator, @truncate(c));
} else if (windows1252.bestFitFromCodepoint(c)) |best_fit| {
try buf.append(best_fit);
try buf.append(allocator, best_fit);
} else if (c < 0x10000 or c == code_pages.Codepoint.invalid) {
try buf.append('?');
try buf.append(allocator, '?');
} else {
try buf.appendSlice("??");
try buf.appendSlice(allocator, "??");
}
},
.utf8 => {
@ -500,35 +500,35 @@ pub fn parseQuotedString(
}
var utf8_buf: [4]u8 = undefined;
const utf8_len = std.unicode.utf8Encode(codepoint_to_encode, &utf8_buf) catch unreachable;
try buf.appendSlice(utf8_buf[0..utf8_len]);
try buf.appendSlice(allocator, utf8_buf[0..utf8_len]);
},
},
.wide => {
// Parsing any string type as a wide string is handled separately, see parseQuotedStringAsWideString
std.debug.assert(iterative_parser.declared_string_type == .wide);
if (parsed.from_escaped_integer) {
try buf.append(std.mem.nativeToLittle(u16, @truncate(c)));
try buf.append(allocator, std.mem.nativeToLittle(u16, @truncate(c)));
} else if (c == code_pages.Codepoint.invalid) {
try buf.append(std.mem.nativeToLittle(u16, '�'));
try buf.append(allocator, std.mem.nativeToLittle(u16, '�'));
} else if (c < 0x10000) {
const short: u16 = @intCast(c);
try buf.append(std.mem.nativeToLittle(u16, short));
try buf.append(allocator, std.mem.nativeToLittle(u16, short));
} else {
if (!parsed.escaped_surrogate_pair) {
const high = @as(u16, @intCast((c - 0x10000) >> 10)) + 0xD800;
try buf.append(std.mem.nativeToLittle(u16, high));
try buf.append(allocator, std.mem.nativeToLittle(u16, high));
}
const low = @as(u16, @intCast(c & 0x3FF)) + 0xDC00;
try buf.append(std.mem.nativeToLittle(u16, low));
try buf.append(allocator, std.mem.nativeToLittle(u16, low));
}
},
}
}

if (literal_type == .wide) {
return buf.toOwnedSliceSentinel(0);
return buf.toOwnedSliceSentinel(allocator, 0);
} else {
return buf.toOwnedSlice();
return buf.toOwnedSlice(allocator);
}
}

@ -564,8 +564,8 @@ pub fn parseQuotedStringAsWideString(allocator: std.mem.Allocator, bytes: Source
// Note: We're only handling the case of parsing an ASCII string into a wide string from here on out.
// TODO: The logic below is similar to that in AcceleratorKeyCodepointTranslator, might be worth merging the two

var buf = try std.array_list.Managed(u16).initCapacity(allocator, bytes.slice.len);
errdefer buf.deinit();
var buf = try std.ArrayList(u16).initCapacity(allocator, bytes.slice.len);
errdefer buf.deinit(allocator);

var iterative_parser = IterativeStringParser.init(bytes, options);

@ -578,23 +578,23 @@ pub fn parseQuotedStringAsWideString(allocator: std.mem.Allocator, bytes: Source
.windows1252 => windows1252.toCodepoint(byte_to_interpret),
.utf8 => if (byte_to_interpret > 0x7F) '�' else byte_to_interpret,
};
try buf.append(std.mem.nativeToLittle(u16, code_unit_to_encode));
try buf.append(allocator, std.mem.nativeToLittle(u16, code_unit_to_encode));
} else if (c == code_pages.Codepoint.invalid) {
try buf.append(std.mem.nativeToLittle(u16, '�'));
try buf.append(allocator, std.mem.nativeToLittle(u16, '�'));
} else if (c < 0x10000) {
const short: u16 = @intCast(c);
try buf.append(std.mem.nativeToLittle(u16, short));
try buf.append(allocator, std.mem.nativeToLittle(u16, short));
} else {
if (!parsed.escaped_surrogate_pair) {
const high = @as(u16, @intCast((c - 0x10000) >> 10)) + 0xD800;
try buf.append(std.mem.nativeToLittle(u16, high));
try buf.append(allocator, std.mem.nativeToLittle(u16, high));
}
const low = @as(u16, @intCast(c & 0x3FF)) + 0xDC00;
try buf.append(std.mem.nativeToLittle(u16, low));
try buf.append(allocator, std.mem.nativeToLittle(u16, low));
}
}

return buf.toOwnedSliceSentinel(0);
return buf.toOwnedSliceSentinel(allocator, 0);
}

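
// --- Editor's sketch, not part of this commit: the allocator-passing
// std.ArrayList usage this commit migrates to — `.empty` to initialize, and
// every mutating call plus deinit/toOwnedSlice takes the allocator explicitly.
const std = @import("std");

test "unmanaged ArrayList pattern" {
    const allocator = std.testing.allocator;
    var buf: std.ArrayList(u16) = .empty;
    errdefer buf.deinit(allocator);

    try buf.append(allocator, 'a');
    try buf.appendSlice(allocator, &.{ 'b', 'c' });

    const owned = try buf.toOwnedSlice(allocator);
    defer allocator.free(owned);
    try std.testing.expectEqualSlices(u16, &.{ 'a', 'b', 'c' }, owned);
}
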
test "parse quoted ascii string" {
|
||||
|
||||
@ -3,6 +3,7 @@ const builtin = @import("builtin");
|
||||
const removeComments = @import("comments.zig").removeComments;
|
||||
const parseAndRemoveLineCommands = @import("source_mapping.zig").parseAndRemoveLineCommands;
|
||||
const compile = @import("compile.zig").compile;
|
||||
const Dependencies = @import("compile.zig").Dependencies;
|
||||
const Diagnostics = @import("errors.zig").Diagnostics;
|
||||
const cli = @import("cli.zig");
|
||||
const preprocess = @import("preprocess.zig");
|
||||
@ -13,8 +14,6 @@ const hasDisjointCodePage = @import("disjoint_code_page.zig").hasDisjointCodePag
|
||||
const fmtResourceType = @import("res.zig").NameOrOrdinal.fmtResourceType;
|
||||
const aro = @import("aro");
|
||||
|
||||
var stdout_buffer: [1024]u8 = undefined;
|
||||
|
||||
pub fn main() !void {
|
||||
var gpa: std.heap.GeneralPurposeAllocator(.{}) = .init;
|
||||
defer std.debug.assert(gpa.deinit() == .ok);
|
||||
@ -43,11 +42,13 @@ pub fn main() !void {
|
||||
cli_args = args[3..];
|
||||
}
|
||||
|
||||
var stdout_buffer: [1024]u8 = undefined;
|
||||
var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
|
||||
const stdout = &stdout_writer.interface;
|
||||
var error_handler: ErrorHandler = switch (zig_integration) {
|
||||
true => .{
|
||||
.server = .{
|
||||
.out = &stdout_writer.interface,
|
||||
.out = stdout,
|
||||
.in = undefined, // won't be receiving messages
|
||||
},
|
||||
},
|
||||
@ -83,8 +84,8 @@ pub fn main() !void {
|
||||
defer options.deinit();
|
||||
|
||||
if (options.print_help_and_exit) {
|
||||
try cli.writeUsage(&stdout_writer.interface, "zig rc");
|
||||
try stdout_writer.interface.flush();
|
||||
try cli.writeUsage(stdout, "zig rc");
|
||||
try stdout.flush();
|
||||
return;
|
||||
}
|
||||
|
||||
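
// --- Editor's sketch, not part of this commit: the buffered stdout setup
// main() now uses — File.stdout().writer takes the buffer, .interface is the
// generic *std.Io.Writer, and nothing reaches the terminal until flush().
const std = @import("std");

pub fn main() !void {
    var stdout_buffer: [1024]u8 = undefined;
    var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
    const stdout = &stdout_writer.interface;
    try stdout.writeAll("hello from the new Writer\n");
    try stdout.flush();
}
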
@ -92,19 +93,14 @@ pub fn main() !void {
options.verbose = false;

if (options.verbose) {
try options.dumpVerbose(&stdout_writer.interface);
try stdout_writer.interface.writeByte('\n');
try stdout_writer.interface.flush();
try options.dumpVerbose(stdout);
try stdout.writeByte('\n');
try stdout.flush();
}

var dependencies_list = std.array_list.Managed([]const u8).init(allocator);
defer {
for (dependencies_list.items) |item| {
allocator.free(item);
}
dependencies_list.deinit();
}
const maybe_dependencies_list: ?*std.array_list.Managed([]const u8) = if (options.depfile_path != null) &dependencies_list else null;
var dependencies = Dependencies.init(allocator);
defer dependencies.deinit();
const maybe_dependencies: ?*Dependencies = if (options.depfile_path != null) &dependencies else null;

var include_paths = LazyIncludePaths{
.arena = arena,
@ -127,27 +123,27 @@ pub fn main() !void {
var comp = aro.Compilation.init(aro_arena, std.fs.cwd());
defer comp.deinit();

var argv = std.array_list.Managed([]const u8).init(comp.gpa);
defer argv.deinit();
var argv: std.ArrayList([]const u8) = .empty;
defer argv.deinit(aro_arena);

try argv.append("arocc"); // dummy command name
try argv.append(aro_arena, "arocc"); // dummy command name
const resolved_include_paths = try include_paths.get(&error_handler);
try preprocess.appendAroArgs(aro_arena, &argv, options, resolved_include_paths);
try argv.append(switch (options.input_source) {
try argv.append(aro_arena, switch (options.input_source) {
.stdio => "-",
.filename => |filename| filename,
});

if (options.verbose) {
try stdout_writer.interface.writeAll("Preprocessor: arocc (built-in)\n");
try stdout.writeAll("Preprocessor: arocc (built-in)\n");
for (argv.items[0 .. argv.items.len - 1]) |arg| {
try stdout_writer.interface.print("{s} ", .{arg});
try stdout.print("{s} ", .{arg});
}
try stdout_writer.interface.print("{s}\n\n", .{argv.items[argv.items.len - 1]});
try stdout_writer.interface.flush();
try stdout.print("{s}\n\n", .{argv.items[argv.items.len - 1]});
try stdout.flush();
}

preprocess.preprocess(&comp, &preprocessed_buf.writer, argv.items, maybe_dependencies_list) catch |err| switch (err) {
preprocess.preprocess(&comp, &preprocessed_buf.writer, argv.items, maybe_dependencies) catch |err| switch (err) {
error.GeneratedSourceError => {
try error_handler.emitAroDiagnostics(allocator, "failed during preprocessor setup (this is always a bug):", &comp);
std.process.exit(1);
@ -258,7 +254,7 @@ pub fn main() !void {
.cwd = std.fs.cwd(),
.diagnostics = &diagnostics,
.source_mappings = &mapping_results.mappings,
.dependencies_list = maybe_dependencies_list,
.dependencies = maybe_dependencies,
.ignore_include_env_var = options.ignore_include_env_var,
.extra_include_paths = options.extra_include_paths.items,
.system_include_paths = try include_paths.get(&error_handler),
@ -305,7 +301,7 @@ pub fn main() !void {
};

try write_stream.beginArray();
for (dependencies_list.items) |dep_path| {
for (dependencies.list.items) |dep_path| {
try write_stream.write(dep_path);
}
try write_stream.endArray();

@ -82,8 +82,8 @@ pub const Parser = struct {
}

fn parseRoot(self: *Self) Error!*Node {
var statements = std.array_list.Managed(*Node).init(self.state.allocator);
defer statements.deinit();
var statements: std.ArrayList(*Node) = .empty;
defer statements.deinit(self.state.allocator);

try self.parseStatements(&statements);
try self.check(.eof);
@ -95,7 +95,7 @@ pub const Parser = struct {
return &node.base;
}

fn parseStatements(self: *Self, statements: *std.array_list.Managed(*Node)) Error!void {
fn parseStatements(self: *Self, statements: *std.ArrayList(*Node)) Error!void {
while (true) {
try self.nextToken(.whitespace_delimiter_only);
if (self.state.token.id == .eof) break;
@ -105,7 +105,7 @@ pub const Parser = struct {
// (usually it will end up with bogus things like 'file
// not found: {')
const statement = try self.parseStatement();
try statements.append(statement);
try statements.append(self.state.allocator, statement);
}
}

@ -115,7 +115,7 @@ pub const Parser = struct {
/// current token is unchanged.
/// The returned slice is allocated by the parser's arena
fn parseCommonResourceAttributes(self: *Self) ![]Token {
var common_resource_attributes: std.ArrayListUnmanaged(Token) = .empty;
var common_resource_attributes: std.ArrayList(Token) = .empty;
while (true) {
const maybe_common_resource_attribute = try self.lookaheadToken(.normal);
if (maybe_common_resource_attribute.id == .literal and rc.CommonResourceAttributes.map.has(maybe_common_resource_attribute.slice(self.lexer.buffer))) {
@ -135,7 +135,7 @@ pub const Parser = struct {
/// current token is unchanged.
/// The returned slice is allocated by the parser's arena
fn parseOptionalStatements(self: *Self, resource: ResourceType) ![]*Node {
var optional_statements: std.ArrayListUnmanaged(*Node) = .empty;
var optional_statements: std.ArrayList(*Node) = .empty;

const num_statement_types = @typeInfo(rc.OptionalStatements).@"enum".fields.len;
var statement_type_has_duplicates = [_]bool{false} ** num_statement_types;
@ -355,8 +355,8 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var strings = std.array_list.Managed(*Node).init(self.state.allocator);
defer strings.deinit();
var strings: std.ArrayList(*Node) = .empty;
defer strings.deinit(self.state.allocator);
while (true) {
const maybe_end_token = try self.lookaheadToken(.normal);
switch (maybe_end_token.id) {
@ -392,7 +392,7 @@ pub const Parser = struct {
.maybe_comma = comma_token,
.string = self.state.token,
};
try strings.append(&string_node.base);
try strings.append(self.state.allocator, &string_node.base);
}

if (strings.items.len == 0) {
@ -501,7 +501,7 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var accelerators: std.ArrayListUnmanaged(*Node) = .empty;
var accelerators: std.ArrayList(*Node) = .empty;

while (true) {
const lookahead = try self.lookaheadToken(.normal);
@ -519,7 +519,7 @@ pub const Parser = struct {

const idvalue = try self.parseExpression(.{ .allowed_types = .{ .number = true } });

var type_and_options: std.ArrayListUnmanaged(Token) = .empty;
var type_and_options: std.ArrayList(Token) = .empty;
while (true) {
if (!(try self.parseOptionalToken(.comma))) break;

@ -584,7 +584,7 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var controls: std.ArrayListUnmanaged(*Node) = .empty;
var controls: std.ArrayList(*Node) = .empty;
defer controls.deinit(self.state.allocator);
while (try self.parseControlStatement(resource)) |control_node| {
// The number of controls must fit in a u16 in order for it to
@ -643,7 +643,7 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var buttons: std.ArrayListUnmanaged(*Node) = .empty;
var buttons: std.ArrayList(*Node) = .empty;
defer buttons.deinit(self.state.allocator);
while (try self.parseToolbarButtonStatement()) |button_node| {
// The number of buttons must fit in a u16 in order for it to
@ -701,7 +701,7 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var items: std.ArrayListUnmanaged(*Node) = .empty;
var items: std.ArrayList(*Node) = .empty;
defer items.deinit(self.state.allocator);
while (try self.parseMenuItemStatement(resource, id_token, 1)) |item_node| {
try items.append(self.state.allocator, item_node);
@ -735,7 +735,7 @@ pub const Parser = struct {
// common resource attributes must all be contiguous and come before optional-statements
const common_resource_attributes = try self.parseCommonResourceAttributes();

var fixed_info: std.ArrayListUnmanaged(*Node) = .empty;
var fixed_info: std.ArrayList(*Node) = .empty;
while (try self.parseVersionStatement()) |version_statement| {
try fixed_info.append(self.state.arena, version_statement);
}
@ -744,7 +744,7 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var block_statements: std.ArrayListUnmanaged(*Node) = .empty;
var block_statements: std.ArrayList(*Node) = .empty;
while (try self.parseVersionBlockOrValue(id_token, 1)) |block_node| {
try block_statements.append(self.state.arena, block_node);
}
@ -852,8 +852,8 @@ pub const Parser = struct {
/// Expects the current token to be a begin token.
/// After return, the current token will be the end token.
fn parseRawDataBlock(self: *Self) Error![]*Node {
var raw_data = std.array_list.Managed(*Node).init(self.state.allocator);
defer raw_data.deinit();
var raw_data: std.ArrayList(*Node) = .empty;
defer raw_data.deinit(self.state.allocator);
while (true) {
const maybe_end_token = try self.lookaheadToken(.normal);
switch (maybe_end_token.id) {
@ -888,7 +888,7 @@ pub const Parser = struct {
else => {},
}
const expression = try self.parseExpression(.{ .allowed_types = .{ .number = true, .string = true } });
try raw_data.append(expression);
try raw_data.append(self.state.allocator, expression);

if (expression.isNumberExpression()) {
const maybe_close_paren = try self.lookaheadToken(.normal);
@ -1125,7 +1125,7 @@ pub const Parser = struct {

_ = try self.parseOptionalToken(.comma);

var options: std.ArrayListUnmanaged(Token) = .empty;
var options: std.ArrayList(Token) = .empty;
while (true) {
const option_token = try self.lookaheadToken(.normal);
if (!rc.MenuItem.Option.map.has(option_token.slice(self.lexer.buffer))) {
@ -1160,7 +1160,7 @@ pub const Parser = struct {
}
try self.skipAnyCommas();

var options: std.ArrayListUnmanaged(Token) = .empty;
var options: std.ArrayList(Token) = .empty;
while (true) {
const option_token = try self.lookaheadToken(.normal);
if (!rc.MenuItem.Option.map.has(option_token.slice(self.lexer.buffer))) {
@ -1175,7 +1175,7 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var items: std.ArrayListUnmanaged(*Node) = .empty;
var items: std.ArrayList(*Node) = .empty;
while (try self.parseMenuItemStatement(resource, top_level_menu_id_token, nesting_level + 1)) |item_node| {
try items.append(self.state.arena, item_node);
}
@ -1245,7 +1245,7 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var items: std.ArrayListUnmanaged(*Node) = .empty;
var items: std.ArrayList(*Node) = .empty;
while (try self.parseMenuItemStatement(resource, top_level_menu_id_token, nesting_level + 1)) |item_node| {
try items.append(self.state.arena, item_node);
}
@ -1322,7 +1322,7 @@ pub const Parser = struct {
switch (statement_type) {
.file_version, .product_version => {
var parts_buffer: [4]*Node = undefined;
var parts = std.ArrayListUnmanaged(*Node).initBuffer(&parts_buffer);
var parts = std.ArrayList(*Node).initBuffer(&parts_buffer);

while (true) {
const value = try self.parseExpression(.{ .allowed_types = .{ .number = true } });
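
// --- Editor's sketch, not part of this commit: initBuffer, used above for the
// four version-number parts, backs an ArrayList with a fixed buffer so appends
// never allocate; appendAssumeCapacity here is an assumption about the
// available non-allocating API.
const std = @import("std");

test "ArrayList.initBuffer appends without allocating" {
    var parts_buffer: [4]u32 = undefined;
    var parts = std.ArrayList(u32).initBuffer(&parts_buffer);
    parts.appendAssumeCapacity(1);
    parts.appendAssumeCapacity(2);
    try std.testing.expectEqualSlices(u32, &.{ 1, 2 }, parts.items);
}
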
@ -1402,7 +1402,7 @@ pub const Parser = struct {
const begin_token = self.state.token;
try self.check(.begin);

var children: std.ArrayListUnmanaged(*Node) = .empty;
var children: std.ArrayList(*Node) = .empty;
while (try self.parseVersionBlockOrValue(top_level_version_id_token, nesting_level + 1)) |value_node| {
try children.append(self.state.arena, value_node);
}
@ -1435,7 +1435,7 @@ pub const Parser = struct {
}

fn parseBlockValuesList(self: *Self, had_comma_before_first_value: bool) Error![]*Node {
var values: std.ArrayListUnmanaged(*Node) = .empty;
var values: std.ArrayList(*Node) = .empty;
var seen_number: bool = false;
var first_string_value: ?*Node = null;
while (true) {

@ -2,16 +2,17 @@ const std = @import("std");
const builtin = @import("builtin");
const Allocator = std.mem.Allocator;
const cli = @import("cli.zig");
const Dependencies = @import("compile.zig").Dependencies;
const aro = @import("aro");

const PreprocessError = error{ ArgError, GeneratedSourceError, PreprocessError, StreamTooLong, OutOfMemory };

pub fn preprocess(
comp: *aro.Compilation,
writer: anytype,
writer: *std.Io.Writer,
/// Expects argv[0] to be the command name
argv: []const []const u8,
maybe_dependencies_list: ?*std.array_list.Managed([]const u8),
maybe_dependencies: ?*Dependencies,
) PreprocessError!void {
try comp.addDefaultPragmaHandlers();

@ -66,13 +67,13 @@ pub fn preprocess(
error.WriteFailed => return error.OutOfMemory,
};

if (maybe_dependencies_list) |dependencies_list| {
if (maybe_dependencies) |dependencies| {
for (comp.sources.values()) |comp_source| {
if (comp_source.id == builtin_macros.id or comp_source.id == user_macros.id) continue;
if (comp_source.id == .unused or comp_source.id == .generated) continue;
const duped_path = try dependencies_list.allocator.dupe(u8, comp_source.path);
errdefer dependencies_list.allocator.free(duped_path);
try dependencies_list.append(duped_path);
const duped_path = try dependencies.allocator.dupe(u8, comp_source.path);
errdefer dependencies.allocator.free(duped_path);
try dependencies.list.append(dependencies.allocator, duped_path);
}
}
}
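
// --- Editor's sketch, not part of this commit: how the new Dependencies
// container (imported from compile.zig above) is fed. Its full definition is
// not shown in this diff; assume it carries an allocator and a list of duped
// paths, as the calls above imply. The helper name is hypothetical.
const Dependencies = @import("compile.zig").Dependencies;

fn recordDependency(dependencies: *Dependencies, path: []const u8) !void {
    const duped_path = try dependencies.allocator.dupe(u8, path);
    errdefer dependencies.allocator.free(duped_path);
    try dependencies.list.append(dependencies.allocator, duped_path);
}
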
@ -92,8 +93,8 @@ fn hasAnyErrors(comp: *aro.Compilation) bool {

/// `arena` is used for temporary -D argument strings and the INCLUDE environment variable.
/// The arena should be kept alive at least as long as `argv`.
pub fn appendAroArgs(arena: Allocator, argv: *std.array_list.Managed([]const u8), options: cli.Options, system_include_paths: []const []const u8) !void {
try argv.appendSlice(&.{
pub fn appendAroArgs(arena: Allocator, argv: *std.ArrayList([]const u8), options: cli.Options, system_include_paths: []const []const u8) !void {
try argv.appendSlice(arena, &.{
"-E",
"--comments",
"-fuse-line-directives",
@ -104,13 +105,13 @@ pub fn appendAroArgs(arena: Allocator, argv: *std.array_list.Managed([]const u8)
"-D_WIN32", // undocumented, but defined by default
});
for (options.extra_include_paths.items) |extra_include_path| {
try argv.append("-I");
try argv.append(extra_include_path);
try argv.append(arena, "-I");
try argv.append(arena, extra_include_path);
}

for (system_include_paths) |include_path| {
try argv.append("-isystem");
try argv.append(include_path);
try argv.append(arena, "-isystem");
try argv.append(arena, include_path);
}

if (!options.ignore_include_env_var) {
@ -124,8 +125,8 @@ pub fn appendAroArgs(arena: Allocator, argv: *std.array_list.Managed([]const u8)
};
var it = std.mem.tokenizeScalar(u8, INCLUDE, delimiter);
while (it.next()) |include_path| {
try argv.append("-isystem");
try argv.append(include_path);
try argv.append(arena, "-isystem");
try argv.append(arena, include_path);
}
}

@ -133,13 +134,13 @@ pub fn appendAroArgs(arena: Allocator, argv: *std.array_list.Managed([]const u8)
while (symbol_it.next()) |entry| {
switch (entry.value_ptr.*) {
.define => |value| {
try argv.append("-D");
try argv.append(arena, "-D");
const define_arg = try std.fmt.allocPrint(arena, "{s}={s}", .{ entry.key_ptr.*, value });
try argv.append(define_arg);
try argv.append(arena, define_arg);
},
.undefine => {
try argv.append("-U");
try argv.append(entry.key_ptr.*);
try argv.append(arena, "-U");
try argv.append(arena, entry.key_ptr.*);
},
}
}

@ -258,7 +258,7 @@ pub const NameOrOrdinal = union(enum) {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn write(self: NameOrOrdinal, writer: anytype) !void {
|
||||
pub fn write(self: NameOrOrdinal, writer: *std.Io.Writer) !void {
|
||||
switch (self) {
|
||||
.name => |name| {
|
||||
try writer.writeAll(std.mem.sliceAsBytes(name[0 .. name.len + 1]));
|
||||
@ -270,7 +270,7 @@ pub const NameOrOrdinal = union(enum) {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn writeEmpty(writer: anytype) !void {
|
||||
pub fn writeEmpty(writer: *std.Io.Writer) !void {
|
||||
try writer.writeInt(u16, 0, .little);
|
||||
}
|
||||
|
||||
@ -283,8 +283,8 @@ pub const NameOrOrdinal = union(enum) {

    pub fn nameFromString(allocator: Allocator, bytes: SourceBytes) !NameOrOrdinal {
        // Names have a limit of 256 UTF-16 code units + null terminator
        var buf = try std.array_list.Managed(u16).initCapacity(allocator, @min(257, bytes.slice.len));
        errdefer buf.deinit();
        var buf = try std.ArrayList(u16).initCapacity(allocator, @min(257, bytes.slice.len));
        errdefer buf.deinit(allocator);

        var i: usize = 0;
        while (bytes.code_page.codepointAt(i, bytes.slice)) |codepoint| : (i += codepoint.byte_len) {
@ -292,27 +292,27 @@ pub const NameOrOrdinal = union(enum) {

            const c = codepoint.value;
            if (c == Codepoint.invalid) {
                try buf.append(std.mem.nativeToLittle(u16, '�'));
                try buf.append(allocator, std.mem.nativeToLittle(u16, '�'));
            } else if (c < 0x7F) {
                // ASCII chars in names are always converted to uppercase
                try buf.append(std.mem.nativeToLittle(u16, std.ascii.toUpper(@intCast(c))));
                try buf.append(allocator, std.mem.nativeToLittle(u16, std.ascii.toUpper(@intCast(c))));
            } else if (c < 0x10000) {
                const short: u16 = @intCast(c);
                try buf.append(std.mem.nativeToLittle(u16, short));
                try buf.append(allocator, std.mem.nativeToLittle(u16, short));
            } else {
                const high = @as(u16, @intCast((c - 0x10000) >> 10)) + 0xD800;
                try buf.append(std.mem.nativeToLittle(u16, high));
                try buf.append(allocator, std.mem.nativeToLittle(u16, high));

                // Note: This can cut-off in the middle of a UTF-16 surrogate pair,
                // i.e. it can make the string end with an unpaired high surrogate
                if (buf.items.len == 256) break;

                const low = @as(u16, @intCast(c & 0x3FF)) + 0xDC00;
                try buf.append(std.mem.nativeToLittle(u16, low));
                try buf.append(allocator, std.mem.nativeToLittle(u16, low));
            }
        }

        return NameOrOrdinal{ .name = try buf.toOwnedSliceSentinel(0) };
        return NameOrOrdinal{ .name = try buf.toOwnedSliceSentinel(allocator, 0) };
    }

    /// Returns `null` if the bytes do not form a valid number.
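The surrogate branch above is standard UTF-16 encoding: subtract 0x10000, put the high 10 bits in a 0xD800-based high surrogate and the low 10 bits in a 0xDC00-based low surrogate. A worked check of that math (not part of the commit), using U+1F600:

test "surrogate math used by nameFromString" {
    const c: u21 = 0x1F600;
    // high: (0x1F600 - 0x10000) >> 10 = 0x3D, plus 0xD800 = 0xD83D
    const high = @as(u16, @intCast((c - 0x10000) >> 10)) + 0xD800;
    // low: 0x1F600 & 0x3FF = 0x200, plus 0xDC00 = 0xDE00
    const low = @as(u16, @intCast(c & 0x3FF)) + 0xDC00;
    try std.testing.expectEqual(@as(u16, 0xD83D), high);
    try std.testing.expectEqual(@as(u16, 0xDE00), low);
}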
@ -1079,7 +1079,7 @@ pub const FixedFileInfo = struct {
        }
    };

    pub fn write(self: FixedFileInfo, writer: anytype) !void {
    pub fn write(self: FixedFileInfo, writer: *std.Io.Writer) !void {
        try writer.writeInt(u32, signature, .little);
        try writer.writeInt(u32, version, .little);
        try writer.writeInt(u32, self.file_version.mostSignificantCombinedParts(), .little);

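For context: VS_FIXEDFILEINFO packs a four-part version into two u32s, with the major part in the high word of the first. Assuming `mostSignificantCombinedParts` follows that standard Windows layout (an assumption — the helper's body is not shown in this diff), the packing looks like:

// Hypothetical illustration of the usual dwFileVersionMS layout:
// high word = major version, low word = minor version.
fn combineParts(major: u16, minor: u16) u32 {
    return (@as(u32, major) << 16) | minor;
}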
@ -10,7 +10,7 @@ pub const ParseLineCommandsResult = struct {

const CurrentMapping = struct {
    line_num: usize = 1,
    filename: std.ArrayListUnmanaged(u8) = .empty,
    filename: std.ArrayList(u8) = .empty,
    pending: bool = true,
    ignore_contents: bool = false,
};
@ -574,8 +574,8 @@ fn parseFilename(allocator: Allocator, str: []const u8) error{ OutOfMemory, Inva
        escape_u,
    };

    var filename = try std.array_list.Managed(u8).initCapacity(allocator, str.len);
    errdefer filename.deinit();
    var filename = try std.ArrayList(u8).initCapacity(allocator, str.len);
    errdefer filename.deinit(allocator);
    var state: State = .string;
    var index: usize = 0;
    var escape_len: usize = undefined;
@ -693,7 +693,7 @@ fn parseFilename(allocator: Allocator, str: []const u8) error{ OutOfMemory, Inva
        }
    }

    return filename.toOwnedSlice();
    return filename.toOwnedSlice(allocator);
}

fn testParseFilename(expected: []const u8, input: []const u8) !void {
@ -927,7 +927,7 @@ test "SourceMappings collapse" {

/// Same thing as StringTable in Zig's src/Wasm.zig
pub const StringTable = struct {
    data: std.ArrayListUnmanaged(u8) = .empty,
    data: std.ArrayList(u8) = .empty,
    map: std.HashMapUnmanaged(u32, void, std.hash_map.StringIndexContext, std.hash_map.default_max_load_percentage) = .empty,

    pub fn deinit(self: *StringTable, allocator: Allocator) void {

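The `map` here does not key on strings directly: the keys are u32 offsets into `data`, and `std.hash_map.StringIndexContext` hashes and compares the NUL-terminated bytes found at each offset, so every unique string is stored exactly once. A minimal sketch of reading a string back out by offset (an illustrative helper, not part of the struct):

fn stringAt(data: []const u8, offset: u32) []const u8 {
    // The table stores strings NUL-terminated, so slice up to the 0 byte.
    return std.mem.sliceTo(data[offset..], 0);
}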
@ -1,36 +1,5 @@
const std = @import("std");

pub fn windows1252ToUtf8Stream(writer: anytype, reader: anytype) !usize {
    var bytes_written: usize = 0;
    var utf8_buf: [3]u8 = undefined;
    while (true) {
        const c = reader.readByte() catch |err| switch (err) {
            error.EndOfStream => return bytes_written,
            else => |e| return e,
        };
        const codepoint = toCodepoint(c);
        if (codepoint <= 0x7F) {
            try writer.writeByte(c);
            bytes_written += 1;
        } else {
            const utf8_len = std.unicode.utf8Encode(codepoint, &utf8_buf) catch unreachable;
            try writer.writeAll(utf8_buf[0..utf8_len]);
            bytes_written += utf8_len;
        }
    }
}

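The hunk above removes `windows1252ToUtf8Stream` outright rather than porting it. For reference, a hypothetical port to the new concrete interfaces might look like the following — a sketch assuming `Reader.takeByte` and `Writer.writeByte` from the 0.15 std, not part of this commit:

pub fn windows1252ToUtf8StreamPorted(writer: *std.Io.Writer, reader: *std.Io.Reader) !usize {
    var bytes_written: usize = 0;
    var utf8_buf: [3]u8 = undefined;
    while (true) {
        // takeByte replaces the old readByte and still signals EndOfStream.
        const c = reader.takeByte() catch |err| switch (err) {
            error.EndOfStream => return bytes_written,
            else => |e| return e,
        };
        const codepoint = toCodepoint(c);
        if (codepoint <= 0x7F) {
            try writer.writeByte(c);
            bytes_written += 1;
        } else {
            // Windows-1252 maps into the BMP, so at most 3 UTF-8 bytes per char.
            const utf8_len = std.unicode.utf8Encode(codepoint, &utf8_buf) catch unreachable;
            try writer.writeAll(utf8_buf[0..utf8_len]);
            bytes_written += utf8_len;
        }
    }
}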
/// Returns the number of code units written to the writer
pub fn windows1252ToUtf16AllocZ(allocator: std.mem.Allocator, win1252_str: []const u8) ![:0]u16 {
    // Guaranteed to need exactly the same number of code units as Windows-1252 bytes
    var utf16_slice = try allocator.allocSentinel(u16, win1252_str.len, 0);
    errdefer allocator.free(utf16_slice);
    for (win1252_str, 0..) |c, i| {
        utf16_slice[i] = toCodepoint(c);
    }
    return utf16_slice;
}

/// https://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WindowsBestFit/bestfit1252.txt
pub fn toCodepoint(c: u8) u16 {
    return switch (c) {
@ -572,17 +541,3 @@ pub fn bestFitFromCodepoint(codepoint: u21) ?u8 {
        else => null,
    };
}

test "windows-1252 to utf8" {
    var buf = std.array_list.Managed(u8).init(std.testing.allocator);
    defer buf.deinit();

    const input_windows1252 = "\x81pqrstuvwxyz{|}~\x80\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8e\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9e\x9f\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff";
    const expected_utf8 = "\xc2\x81pqrstuvwxyz{|}~€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ";

    var fbs = std.io.fixedBufferStream(input_windows1252);
    const bytes_written = try windows1252ToUtf8Stream(buf.writer(), fbs.reader());

    try std.testing.expectEqualStrings(expected_utf8, buf.items);
    try std.testing.expectEqual(expected_utf8.len, bytes_written);
}