Merge pull request #5097 from Vexu/field

Disallow declarations between fields
Andrew Kelley 2020-04-19 03:52:53 -04:00 committed by GitHub
commit 051620dcaf
11 changed files with 153 additions and 40 deletions
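
The rule this merge enforces: a container-level declaration (a const, fn, test, or comptime block) may still appear before a container's first field or after its last field, but no longer between two fields. A rough sketch of what is now rejected and how the std-library diffs below restructure such code (illustrative snippet, not taken verbatim from the diff):

// Rejected after this change: bar is declared between the fields a and b.
const S = struct {
    a: usize,
    const bar = 2; // error: declarations are not allowed between container fields
    b: usize,
};

// Accepted: hoist declarations above the first field or move them below the last one.
const Fixed = struct {
    a: usize,
    b: usize,

    const bar = 2;
};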

View File

@ -77,7 +77,7 @@ test "encodesTo" {
testing.expectEqual(true, encodesTo("false", "false"));
// totally different
testing.expectEqual(false, encodesTo("false", "true"));
- // differnt lengths
+ // different lengths
testing.expectEqual(false, encodesTo("false", "other"));
// with escape
testing.expectEqual(true, encodesTo("\\", "\\\\"));
@ -1771,22 +1771,20 @@ test "parse into struct with misc fields" {
static_array: [3]f64,
dynamic_array: []f64,
- const Bar = struct {
+ complex: struct {
nested: []const u8,
- };
- complex: Bar,
+ },
- const Baz = struct {
+ veryComplex: []struct {
foo: []const u8,
- };
- veryComplex: []Baz,
+ },
+ a_union: Union,
const Union = union(enum) {
x: u8,
float: f64,
string: []const u8,
};
- a_union: Union,
};
const r = try parse(T, &TokenStream.init(
\\{
@ -2323,13 +2321,14 @@ pub const StringifyOptions = struct {
/// How many indentation levels deep are we?
indent_level: usize = 0,
- pub const Indentation = union(enum) {
+ /// What character(s) should be used for indentation?
+ indent: union(enum) {
Space: u8,
Tab: void,
- };
+ } = .{ .Space = 4 },
- /// What character(s) should be used for indentation?
- indent: Indentation = Indentation{ .Space = 4 },
+ /// After a colon, should whitespace be inserted?
+ separator: bool = true,
fn outputIndent(
whitespace: @This(),
@ -2350,17 +2349,17 @@ pub const StringifyOptions = struct {
n_chars *= whitespace.indent_level;
try out_stream.writeByteNTimes(char, n_chars);
}
- /// After a colon, should whitespace be inserted?
- separator: bool = true,
};
/// Controls the whitespace emitted
whitespace: ?Whitespace = null,
+ string: StringOptions = StringOptions{ .String = .{} },
/// Should []u8 be serialised as a string? or an array?
pub const StringOptions = union(enum) {
Array,
+ String: StringOutputOptions,
/// String output options
const StringOutputOptions = struct {
@ -2370,10 +2369,7 @@ pub const StringifyOptions = struct {
/// Should unicode characters be escaped in strings?
escape_unicode: bool = false,
};
- String: StringOutputOptions,
};
- string: StringOptions = StringOptions{ .String = .{} },
};
fn outputUnicodeEscape(

View File

@ -374,7 +374,7 @@ test "mem.zeroes" {
testing.expect(a.y == 10);
const ZigStruct = struct {
- const IntegralTypes = struct {
+ integral_types: struct {
integer_0: i0,
integer_8: i8,
integer_16: i16,
@ -390,16 +390,13 @@ test "mem.zeroes" {
float_32: f32,
float_64: f64,
- };
+ },
- integral_types: IntegralTypes,
- const Pointers = struct {
+ pointers: struct {
optional: ?*u8,
c_pointer: [*c]u8,
slice: []u8,
- };
- pointers: Pointers,
+ },
array: [2]u32,
optional_int: ?u8,

View File

@ -1226,17 +1226,11 @@ pub const io_cqring_offsets = extern struct {
};
pub const io_uring_sqe = extern struct {
opcode: IORING_OP,
flags: u8,
ioprio: u16,
fd: i32,
pub const union1 = extern union {
off: u64,
addr2: u64,
};
union1: union1,
addr: u64,
len: u32,
pub const union2 = extern union {
rw_flags: kernel_rwf,
fsync_flags: u32,
@ -1250,8 +1244,7 @@ pub const io_uring_sqe = extern struct {
statx_flags: u32,
fadvise_flags: u32,
};
union2: union2,
user_data: u64,
pub const union3 = extern union {
struct1: extern struct {
/// index into fixed buffers, if used
@ -1262,6 +1255,23 @@ pub const io_uring_sqe = extern struct {
},
__pad2: [3]u64,
};
opcode: IORING_OP,
flags: u8,
ioprio: u16,
fd: i32,
opcode: u8,
flags: u8,
ioprio: u16,
fd: i32,
union1: union1,
addr: u64,
len: u32,
union2: union2,
user_data: u64,
union3: union3,
};

View File

@ -384,8 +384,10 @@ pub const EKEYREVOKED = 128;
pub const EKEYREJECTED = 129;
// for robust mutexes
/// Owner died
pub const EOWNERDEAD = 130;
/// State not recoverable
pub const ENOTRECOVERABLE = 131;

View File

@ -122,6 +122,9 @@ pub const NLM_F_CAPPED = 0x100;
pub const NLM_F_ACK_TLVS = 0x200;
pub const NetlinkMessageType = extern enum(u16) {
+ /// < 0x10: reserved control messages
+ pub const MIN_TYPE = 0x10;
/// Nothing.
NOOP = 0x1,
@ -134,9 +137,6 @@ pub const NetlinkMessageType = extern enum(u16) {
/// Data lost
OVERRUN = 0x4,
- /// < 0x10: reserved control messages
- pub const MIN_TYPE = 0x10;
// rtlink types
RTM_NEWLINK = 16,

View File

@ -5,6 +5,8 @@ const gid_t = std.os.linux.gid_t;
const pid_t = std.os.linux.pid_t;
pub const SYS = extern enum(usize) {
+ pub const arch_specific_syscall = 244;
io_setup = 0,
io_destroy = 1,
io_submit = 2,
@ -249,7 +251,6 @@ pub const SYS = extern enum(usize) {
accept4 = 242,
recvmmsg = 243,
- pub const arch_specific_syscall = 244;
riscv_flush_icache = arch_specific_syscall + 15,
wait4 = 260,

View File

@ -164,6 +164,7 @@ pub const Error = union(enum) {
ExpectedLoopExpr: ExpectedLoopExpr,
ExpectedDerefOrUnwrap: ExpectedDerefOrUnwrap,
ExpectedSuffixOp: ExpectedSuffixOp,
+ DeclBetweenFields: DeclBetweenFields,
pub fn render(self: *const Error, tokens: *Tree.TokenList, stream: var) !void {
switch (self.*) {
@ -211,6 +212,7 @@ pub const Error = union(enum) {
.ExpectedLoopExpr => |*x| return x.render(tokens, stream),
.ExpectedDerefOrUnwrap => |*x| return x.render(tokens, stream),
.ExpectedSuffixOp => |*x| return x.render(tokens, stream),
+ .DeclBetweenFields => |*x| return x.render(tokens, stream),
}
}
@ -260,6 +262,7 @@ pub const Error = union(enum) {
.ExpectedLoopExpr => |x| return x.token,
.ExpectedDerefOrUnwrap => |x| return x.token,
.ExpectedSuffixOp => |x| return x.token,
+ .DeclBetweenFields => |x| return x.token,
}
}
@ -304,6 +307,7 @@ pub const Error = union(enum) {
pub const ExtraConstQualifier = SimpleError("Extra const qualifier");
pub const ExtraVolatileQualifier = SimpleError("Extra volatile qualifier");
pub const ExtraAllowZeroQualifier = SimpleError("Extra allowzero qualifier");
+ pub const DeclBetweenFields = SimpleError("Declarations are not allowed between container fields");
pub const ExpectedCall = struct {
node: *Node,

View File

@ -88,6 +88,18 @@ fn parseRoot(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!*Node.Roo
fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !Node.Root.DeclList {
var list = Node.Root.DeclList.init(arena);
+ var field_state: union(enum) {
+ /// no fields have been seen
+ none,
+ /// currently parsing fields
+ seen,
+ /// saw fields and then a declaration after them.
+ /// payload is first token of previous declaration.
+ end: TokenIndex,
+ /// there was a declaration between fields, don't report more errors
+ err,
+ } = .none;
while (true) {
if (try parseContainerDocComments(arena, it, tree)) |node| {
try list.push(node);
@ -97,12 +109,18 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
const doc_comments = try parseDocComment(arena, it, tree);
if (try parseTestDecl(arena, it, tree)) |node| {
+ if (field_state == .seen) {
+ field_state = .{ .end = node.firstToken() };
+ }
node.cast(Node.TestDecl).?.doc_comments = doc_comments;
try list.push(node);
continue;
}
if (try parseTopLevelComptime(arena, it, tree)) |node| {
+ if (field_state == .seen) {
+ field_state = .{ .end = node.firstToken() };
+ }
node.cast(Node.Comptime).?.doc_comments = doc_comments;
try list.push(node);
continue;
@ -111,6 +129,9 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
const visib_token = eatToken(it, .Keyword_pub);
if (try parseTopLevelDecl(arena, it, tree)) |node| {
+ if (field_state == .seen) {
+ field_state = .{ .end = visib_token orelse node.firstToken() };
+ }
switch (node.id) {
.FnProto => {
node.cast(Node.FnProto).?.doc_comments = doc_comments;
@ -146,6 +167,18 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
}
if (try parseContainerField(arena, it, tree)) |node| {
+ switch (field_state) {
+ .none => field_state = .seen,
+ .err, .seen => {},
+ .end => |tok| {
+ try tree.errors.push(.{
+ .DeclBetweenFields = .{ .token = tok },
+ });
+ // continue parsing, error will be reported later
+ field_state = .err;
+ },
+ }
const field = node.cast(Node.ContainerField).?;
field.doc_comments = doc_comments;
try list.push(node);
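
For illustration only (these names do not appear in the commit), the check that parseContainerMembers now performs amounts to a small state machine over the container members, sketched here as a standalone function with a hypothetical Member enum:

const std = @import("std");

const Member = enum { field, decl };

/// Standalone sketch of the detection added above: scan members in order and
/// report whether a declaration sits between two fields.
fn hasDeclBetweenFields(members: []const Member) bool {
    var state: enum { none, seen, end } = .none;
    for (members) |m| {
        switch (m) {
            // declarations before the first field or after the last one stay legal
            .decl => if (state == .seen) {
                state = .end;
            },
            .field => switch (state) {
                .none => state = .seen,
                .seen => {},
                // a field following "field, then declaration" is the rejected pattern
                .end => return true,
            },
        }
    }
    return false;
}

test "decl between fields detection" {
    // decl, field, field, decl -> allowed
    std.testing.expect(!hasDeclBetweenFields(&[_]Member{ .decl, .field, .field, .decl }));
    // field, decl, field -> rejected
    std.testing.expect(hasDeclBetweenFields(&[_]Member{ .field, .decl, .field }));
}

Unlike this sketch, which stops at the first offending field, the parser above records the error once and then switches to an err state so it can keep parsing without reporting the same problem again.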

View File

@ -1,3 +1,18 @@
test "zig fmt: decl between fields" {
try testError(
\\const S = struct {
\\ const foo = 2;
\\ const bar = 2;
\\ const baz = 2;
\\ a: usize,
\\ const foo1 = 2;
\\ const bar1 = 2;
\\ const baz1 = 2;
\\ b: usize,
\\};
);
}
test "zig fmt: errdefer with payload" {
try testCanonical(
\\pub fn main() anyerror!void {
@ -2001,11 +2016,11 @@ test "zig fmt: struct declaration" {
\\ f1: u8,
\\ f3: u8,
\\
+ \\ f2: u8,
\\
\\ fn method(self: *Self) Self {
\\ return self.*;
\\ }
\\
- \\ f2: u8,
\\};
\\
\\const Ps = packed struct {

View File

@ -526,6 +526,15 @@ static void ast_parse_container_doc_comments(ParseContext *pc, Buf *buf) {
}
}
+ enum ContainerFieldState {
+ // no fields have been seen
+ ContainerFieldStateNone,
+ // currently parsing fields
+ ContainerFieldStateSeen,
+ // saw fields and then a declaration after them
+ ContainerFieldStateEnd,
+ };
// ContainerMembers
// <- TestDecl ContainerMembers
// / TopLevelComptime ContainerMembers
@ -537,17 +546,29 @@ static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
AstNodeContainerDecl res = {};
Buf tld_doc_comment_buf = BUF_INIT;
buf_resize(&tld_doc_comment_buf, 0);
+ ContainerFieldState field_state = ContainerFieldStateNone;
+ Token *first_token = nullptr;
for (;;) {
ast_parse_container_doc_comments(pc, &tld_doc_comment_buf);
+ Token *peeked_token = peek_token(pc);
AstNode *test_decl = ast_parse_test_decl(pc);
if (test_decl != nullptr) {
+ if (field_state == ContainerFieldStateSeen) {
+ field_state = ContainerFieldStateEnd;
+ first_token = peeked_token;
+ }
res.decls.append(test_decl);
continue;
}
AstNode *top_level_comptime = ast_parse_top_level_comptime(pc);
if (top_level_comptime != nullptr) {
+ if (field_state == ContainerFieldStateSeen) {
+ field_state = ContainerFieldStateEnd;
+ first_token = peeked_token;
+ }
res.decls.append(top_level_comptime);
continue;
}
@ -555,11 +576,17 @@ static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
Buf doc_comment_buf = BUF_INIT;
ast_parse_doc_comments(pc, &doc_comment_buf);
+ peeked_token = peek_token(pc);
Token *visib_token = eat_token_if(pc, TokenIdKeywordPub);
VisibMod visib_mod = visib_token != nullptr ? VisibModPub : VisibModPrivate;
AstNode *top_level_decl = ast_parse_top_level_decl(pc, visib_mod, &doc_comment_buf);
if (top_level_decl != nullptr) {
+ if (field_state == ContainerFieldStateSeen) {
+ field_state = ContainerFieldStateEnd;
+ first_token = peeked_token;
+ }
res.decls.append(top_level_decl);
continue;
}
@ -572,6 +599,16 @@ static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
AstNode *container_field = ast_parse_container_field(pc);
if (container_field != nullptr) {
+ switch (field_state) {
+ case ContainerFieldStateNone:
+ field_state = ContainerFieldStateSeen;
+ break;
+ case ContainerFieldStateSeen:
+ break;
+ case ContainerFieldStateEnd:
+ ast_error(pc, first_token, "declarations are not allowed between container fields");
+ }
assert(container_field->type == NodeTypeStructField);
container_field->data.struct_field.doc_comments = doc_comment_buf;
container_field->data.struct_field.comptime_token = comptime_token;

View File

@ -2,6 +2,24 @@ const tests = @import("tests.zig");
const std = @import("std");
pub fn addCases(cases: *tests.CompileErrorContext) void {
cases.add("declaration between fields",
\\const S = struct {
\\ const foo = 2;
\\ const bar = 2;
\\ const baz = 2;
\\ a: usize,
\\ const foo1 = 2;
\\ const bar1 = 2;
\\ const baz1 = 2;
\\ b: usize,
\\};
\\comptime {
\\ _ = S;
\\}
, &[_][]const u8{
"tmp.zig:6:5: error: declarations are not allowed between container fields",
});
cases.add("non-extern function with var args",
\\fn foo(args: ...) void {}
\\export fn entry() void {