Mirror of https://github.com/ziglang/zig.git (synced 2026-02-12 20:37:54 +00:00)
translate-c: use mangled name when macro translation fails
Closes #6009
parent cf5932b236, commit 2b28cebf64
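The user-visible effect, as illustrated by the "shadowing primitive types" test case added at the bottom of this diff: when a macro cannot be translated, the @compileError stub is now emitted under the macro's mangled name rather than its raw C name, so the stub can no longer shadow a Zig primitive or an existing declaration. Roughly, taken from that test case:

// C input
#define noreturn _Noreturn

// generated Zig after this change; note the mangled name noreturn_2
pub const noreturn_2 = @compileError("unable to translate C expr: unexpected token .Keyword_noreturn");

Previously the deep parse failure paths reported through failDecl with the raw first token of the macro (it.slice(0), i.e. "noreturn" here), as the removed call sites below show; the new MacroCtx bundles the token list, source location, and mangled name so that m.fail() always reports under the mangled name.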
@@ -3089,7 +3089,7 @@ fn transUnaryExprOrTypeTraitExpr(
.AlignOf => "@alignOf",
.PreferredAlignOf,
.VecStep,
.OpenMPRequiredSimdAlign,
.OpenMPRequiredSimdAlign,
=> return revertAndWarn(
rp,
error.UnsupportedTranslation,
@@ -5220,26 +5220,32 @@ pub fn freeErrors(errors: []ClangErrMsg) void {
ZigClangErrorMsg_delete(errors.ptr, errors.len);
}

const CTokIterator = struct {
const MacroCtx = struct {
source: []const u8,
list: []const CToken,
i: usize = 0,
loc: ZigClangSourceLocation,
name: []const u8,

fn peek(self: *CTokIterator) ?CToken.Id {
fn peek(self: *MacroCtx) ?CToken.Id {
if (self.i >= self.list.len) return null;
return self.list[self.i + 1].id;
}

fn next(self: *CTokIterator) ?CToken.Id {
fn next(self: *MacroCtx) ?CToken.Id {
if (self.i >= self.list.len) return null;
self.i += 1;
return self.list[self.i].id;
}

fn slice(self: *CTokIterator, index: usize) []const u8 {
const tok = self.list[index];
fn slice(self: *MacroCtx) []const u8 {
const tok = self.list[self.i];
return self.source[tok.start..tok.end];
}

fn fail(self: *MacroCtx, c: *Context, comptime fmt: []const u8, args: anytype) !void {
return failDecl(c, self.loc, self.name, fmt, args);
}
};
fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
@@ -5286,18 +5292,21 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
try tok_list.append(tok);
}

var tok_it = CTokIterator{
var macro_ctx = MacroCtx{
.source = slice,
.list = tok_list.items,
.name = mangled_name,
.loc = begin_loc,
};
assert(mem.eql(u8, tok_it.slice(0), name));
assert(mem.eql(u8, macro_ctx.slice(), name));

var macro_fn = false;
switch (tok_it.peek().?) {
switch (macro_ctx.peek().?) {
.Identifier => {
// if it equals itself, ignore. for example, from stdio.h:
// #define stdin stdin
if (mem.eql(u8, name, tok_it.slice(1))) {
const tok = macro_ctx.list[1];
if (mem.eql(u8, name, slice[tok.start..tok.end])) {
continue;
}
},
@@ -5308,15 +5317,15 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
},
.LParen => {
// if the name is immediately followed by a '(' then it is a function
macro_fn = tok_it.list[0].end == tok_it.list[1].start;
macro_fn = macro_ctx.list[0].end == macro_ctx.list[1].start;
},
else => {},
}
(if (macro_fn)
transMacroFnDefine(c, &tok_it, mangled_name, begin_loc)
transMacroFnDefine(c, &macro_ctx)
else
transMacroDefine(c, &tok_it, mangled_name, begin_loc)) catch |err| switch (err) {
transMacroDefine(c, &macro_ctx)) catch |err| switch (err) {
error.ParseError => continue,
error.OutOfMemory => |e| return e,
};
@@ -5326,24 +5335,18 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
}
}
fn transMacroDefine(c: *Context, it: *CTokIterator, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
fn transMacroDefine(c: *Context, m: *MacroCtx) ParseError!void {
const scope = &c.global_scope.base;

const visib_tok = try appendToken(c, .Keyword_pub, "pub");
const mut_tok = try appendToken(c, .Keyword_const, "const");
const name_tok = try appendIdentifier(c, name);
const name_tok = try appendIdentifier(c, m.name);
const eq_token = try appendToken(c, .Equal, "=");

const init_node = try parseCExpr(c, it, source_loc, scope);
const last = it.next().?;
const init_node = try parseCExpr(c, m, scope);
const last = m.next().?;
if (last != .Eof and last != .Nl)
return failDecl(
c,
source_loc,
name,
"unable to translate C expr: unexpected token .{}",
.{@tagName(last)},
);
return m.fail(c, "unable to translate C expr: unexpected token .{}", .{@tagName(last)});

const semicolon_token = try appendToken(c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(c.arena, .{
@@ -5355,10 +5358,10 @@ fn transMacroDefine(c: *Context, it: *CTokIterator, name: []const u8, source_loc
.eq_token = eq_token,
.init_node = init_node,
});
_ = try c.global_scope.macro_table.put(name, &node.base);
_ = try c.global_scope.macro_table.put(m.name, &node.base);
}
fn transMacroFnDefine(c: *Context, it: *CTokIterator, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void {
var block_scope = try Scope.Block.init(c, &c.global_scope.base, null);
defer block_scope.deinit();
const scope = &block_scope.base;
@@ -5366,34 +5369,22 @@ fn transMacroFnDefine(c: *Context, it: *CTokIterator, name: []const u8, source_l
const pub_tok = try appendToken(c, .Keyword_pub, "pub");
const inline_tok = try appendToken(c, .Keyword_inline, "inline");
const fn_tok = try appendToken(c, .Keyword_fn, "fn");
const name_tok = try appendIdentifier(c, name);
const name_tok = try appendIdentifier(c, m.name);
_ = try appendToken(c, .LParen, "(");

if (it.next().? != .LParen) {
return failDecl(
c,
source_loc,
name,
"unable to translate C expr: expected '('",
.{},
);
if (m.next().? != .LParen) {
return m.fail(c, "unable to translate C expr: expected '('", .{});
}

var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(c.gpa);
defer fn_params.deinit();

while (true) {
if (it.next().? != .Identifier) {
return failDecl(
c,
source_loc,
name,
"unable to translate C expr: expected identifier",
.{},
);
if (m.next().? != .Identifier) {
return m.fail(c, "unable to translate C expr: expected identifier", .{});
}

const mangled_name = try block_scope.makeMangledName(c, it.slice(it.i));
const mangled_name = try block_scope.makeMangledName(c, m.slice());
const param_name_tok = try appendIdentifier(c, mangled_name);
_ = try appendToken(c, .Colon, ":");
@@ -5411,20 +5402,14 @@ fn transMacroFnDefine(c: *Context, it: *CTokIterator, name: []const u8, source_l
.param_type = .{ .any_type = &any_type.base },
};

if (it.peek().? != .Comma)
if (m.peek().? != .Comma)
break;
_ = it.next();
_ = m.next();
_ = try appendToken(c, .Comma, ",");
}

if (it.next().? != .RParen) {
return failDecl(
c,
source_loc,
name,
"unable to translate C expr: expected ')'",
.{},
);
if (m.next().? != .RParen) {
return m.fail(c, "unable to translate C expr: expected ')'", .{});
}

_ = try appendToken(c, .RParen, ")");
@@ -5432,16 +5417,10 @@ fn transMacroFnDefine(c: *Context, it: *CTokIterator, name: []const u8, source_l
const type_of = try c.createBuiltinCall("@TypeOf", 1);

const return_kw = try appendToken(c, .Keyword_return, "return");
const expr = try parseCExpr(c, it, source_loc, scope);
const last = it.next().?;
const expr = try parseCExpr(c, m, scope);
const last = m.next().?;
if (last != .Eof and last != .Nl)
return failDecl(
c,
source_loc,
name,
"unable to translate C expr: unexpected token .{}",
.{@tagName(last)},
);
return m.fail(c, "unable to translate C expr: unexpected token .{}", .{@tagName(last)});
_ = try appendToken(c, .Semicolon, ";");
const type_of_arg = if (expr.tag != .Block) expr else blk: {
const blk = @fieldParentPtr(ast.Node.Block, "base", expr);
@@ -5472,32 +5451,26 @@ fn transMacroFnDefine(c: *Context, it: *CTokIterator, name: []const u8, source_l
});
mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);

_ = try c.global_scope.macro_table.put(name, &fn_proto.base);
_ = try c.global_scope.macro_table.put(m.name, &fn_proto.base);
}

const ParseError = Error || error{ParseError};
fn parseCExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
const node = try parseCPrefixOpExpr(c, it, source_loc, scope);
switch (it.next().?) {
fn parseCExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
const node = try parseCPrefixOpExpr(c, m, scope);
switch (m.next().?) {
.QuestionMark => {
// must come immediately after expr
_ = try appendToken(c, .RParen, ")");
const if_node = try transCreateNodeIf(c);
if_node.condition = node;
if_node.body = try parseCPrimaryExpr(c, it, source_loc, scope);
if (it.next().? != .Colon) {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: expected ':'",
.{},
);
if_node.body = try parseCPrimaryExpr(c, m, scope);
if (m.next().? != .Colon) {
try m.fail(c, "unable to translate C expr: expected ':'", .{});
return error.ParseError;
}
if_node.@"else" = try transCreateNodeElse(c);
if_node.@"else".?.body = try parseCPrimaryExpr(c, it, source_loc, scope);
if_node.@"else".?.body = try parseCPrimaryExpr(c, m, scope);
return &if_node.base;
},
.Comma => {
@@ -5520,10 +5493,10 @@ fn parseCExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation
};
try block_scope.statements.append(&op_node.base);

last = try parseCPrefixOpExpr(c, it, source_loc, scope);
last = try parseCPrefixOpExpr(c, m, scope);
_ = try appendToken(c, .Semicolon, ";");
if (it.next().? != .Comma) {
it.i -= 1;
if (m.next().? != .Comma) {
m.i -= 1;
break;
}
}
@@ -5534,16 +5507,16 @@ fn parseCExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation
return &block_node.base;
},
else => {
it.i -= 1;
m.i -= 1;
return node;
},
}
}
fn parseCNumLit(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation) ParseError!*ast.Node {
var lit_bytes = it.slice(it.i);
fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node {
var lit_bytes = m.slice();

switch (it.list[it.i].id) {
switch (m.list[m.i].id) {
.IntegerLiteral => |suffix| {
if (lit_bytes.len > 2 and lit_bytes[0] == '0') {
switch (lit_bytes[1]) {
@@ -5604,8 +5577,8 @@ fn parseCNumLit(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocati
}
}

fn zigifyEscapeSequences(ctx: *Context, source_bytes: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ![]const u8 {
var source = source_bytes;
fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 {
var source = m.slice();
for (source) |c, i| {
if (c == '\"' or c == '\'') {
source = source[i..];
@@ -5677,11 +5650,11 @@ fn zigifyEscapeSequences(ctx: *Context, source_bytes: []const u8, name: []const
bytes[i] = '?';
},
'u', 'U' => {
try failDecl(ctx, source_loc, name, "macro tokenizing failed: TODO unicode escape sequences", .{});
try m.fail(ctx, "macro tokenizing failed: TODO unicode escape sequences", .{});
return error.ParseError;
},
else => {
try failDecl(ctx, source_loc, name, "macro tokenizing failed: unknown escape sequence", .{});
try m.fail(ctx, "macro tokenizing failed: unknown escape sequence", .{});
return error.ParseError;
},
}
@@ -5700,21 +5673,21 @@ fn zigifyEscapeSequences(ctx: *Context, source_bytes: []const u8, name: []const
switch (c) {
'0'...'9' => {
num = std.math.mul(u8, num, 16) catch {
try failDecl(ctx, source_loc, name, "macro tokenizing failed: hex literal overflowed", .{});
try m.fail(ctx, "macro tokenizing failed: hex literal overflowed", .{});
return error.ParseError;
};
num += c - '0';
},
'a'...'f' => {
num = std.math.mul(u8, num, 16) catch {
try failDecl(ctx, source_loc, name, "macro tokenizing failed: hex literal overflowed", .{});
try m.fail(ctx, "macro tokenizing failed: hex literal overflowed", .{});
return error.ParseError;
};
num += c - 'a' + 10;
},
'A'...'F' => {
num = std.math.mul(u8, num, 16) catch {
try failDecl(ctx, source_loc, name, "macro tokenizing failed: hex literal overflowed", .{});
try m.fail(ctx, "macro tokenizing failed: hex literal overflowed", .{});
return error.ParseError;
};
num += c - 'A' + 10;
@@ -5741,7 +5714,7 @@ fn zigifyEscapeSequences(ctx: *Context, source_bytes: []const u8, name: []const
if (accept_digit) {
count += 1;
num = std.math.mul(u8, num, 8) catch {
try failDecl(ctx, source_loc, name, "macro tokenizing failed: octal literal overflowed", .{});
try m.fail(ctx, "macro tokenizing failed: octal literal overflowed", .{});
return error.ParseError;
};
num += c - '0';
@@ -5764,13 +5737,13 @@ fn zigifyEscapeSequences(ctx: *Context, source_bytes: []const u8, name: []const
return bytes[0..i];
}
fn parseCPrimaryExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
const tok = it.next().?;
const slice = it.slice(it.i);
fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
const tok = m.next().?;
const slice = m.slice();
switch (tok) {
.CharLiteral => {
if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) {
const token = try appendToken(c, .CharLiteral, try zigifyEscapeSequences(c, slice, it.slice(0), source_loc));
const token = try appendToken(c, .CharLiteral, try zigifyEscapeSequences(c, m));
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .CharLiteral },
@@ -5788,7 +5761,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceL
}
},
.StringLiteral => {
const token = try appendToken(c, .StringLiteral, try zigifyEscapeSequences(c, slice, it.slice(0), source_loc));
const token = try appendToken(c, .StringLiteral, try zigifyEscapeSequences(c, m));
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .StringLiteral },
@@ -5797,7 +5770,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceL
return &node.base;
},
.IntegerLiteral, .FloatLiteral => {
return parseCNumLit(c, it, source_loc);
return parseCNumLit(c, m);
},
// eventually this will be replaced by std.c.parse which will handle these correctly
.Keyword_void => return transCreateNodeIdentifierUnchecked(c, "c_void"),
@@ -5808,61 +5781,55 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceL
.Keyword_float => return transCreateNodeIdentifierUnchecked(c, "f32"),
.Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_short"),
.Keyword_char => return transCreateNodeIdentifierUnchecked(c, "u8"),
.Keyword_unsigned => if (it.next()) |t| switch (t) {
.Keyword_unsigned => if (m.next()) |t| switch (t) {
.Keyword_char => return transCreateNodeIdentifierUnchecked(c, "u8"),
.Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_ushort"),
.Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_uint"),
.Keyword_long => if (it.peek() != null and it.peek().? == .Keyword_long) {
_ = it.next();
.Keyword_long => if (m.peek() != null and m.peek().? == .Keyword_long) {
_ = m.next();
return transCreateNodeIdentifierUnchecked(c, "c_ulonglong");
} else return transCreateNodeIdentifierUnchecked(c, "c_ulong"),
else => {
it.i -= 1;
m.i -= 1;
return transCreateNodeIdentifierUnchecked(c, "c_uint");
},
} else {
return transCreateNodeIdentifierUnchecked(c, "c_uint");
},
.Keyword_signed => if (it.next()) |t| switch (t) {
.Keyword_signed => if (m.next()) |t| switch (t) {
.Keyword_char => return transCreateNodeIdentifierUnchecked(c, "i8"),
.Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_short"),
.Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_int"),
.Keyword_long => if (it.peek() != null and it.peek().? == .Keyword_long) {
_ = it.next();
.Keyword_long => if (m.peek() != null and m.peek().? == .Keyword_long) {
_ = m.next();
return transCreateNodeIdentifierUnchecked(c, "c_longlong");
} else return transCreateNodeIdentifierUnchecked(c, "c_long"),
else => {
it.i -= 1;
m.i -= 1;
return transCreateNodeIdentifierUnchecked(c, "c_int");
},
} else {
return transCreateNodeIdentifierUnchecked(c, "c_int");
},
.Identifier => {
const mangled_name = scope.getAlias(it.slice(it.i));
const mangled_name = scope.getAlias(slice);
return transCreateNodeIdentifier(c, mangled_name);
},
.LParen => {
const inner_node = try parseCExpr(c, it, source_loc, scope);
const inner_node = try parseCExpr(c, m, scope);
const next_id = it.next().?;
const next_id = m.next().?;
if (next_id != .RParen) {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: expected ')'' instead got: {}",
.{@tagName(next_id)},
);
try m.fail(c, "unable to translate C expr: expected ')'' instead got: {}", .{@tagName(next_id)});
return error.ParseError;
}
var saw_l_paren = false;
var saw_integer_literal = false;
switch (it.peek().?) {
switch (m.peek().?) {
// (type)(to_cast)
.LParen => {
saw_l_paren = true;
_ = it.next();
_ = m.next();
},
// (type)identifier
.Identifier => {},
@@ -5876,16 +5843,10 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceL
// hack to get zig fmt to render a comma in builtin calls
_ = try appendToken(c, .Comma, ",");

const node_to_cast = try parseCExpr(c, it, source_loc, scope);
const node_to_cast = try parseCExpr(c, m, scope);

if (saw_l_paren and it.next().? != .RParen) {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: expected ')''",
.{},
);
if (saw_l_paren and m.next().? != .RParen) {
try m.fail(c, "unable to translate C expr: expected ')''", .{});
return error.ParseError;
}

@@ -5913,13 +5874,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceL
return &group_node.base;
},
else => {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: unexpected token .{}",
.{@tagName(tok)},
);
try m.fail(c, "unable to translate C expr: unexpected token .{}", .{@tagName(tok)});
return error.ParseError;
},
}
@@ -6026,52 +5981,40 @@ fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
return &group_node.base;
}
fn parseCSuffixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
var node = try parseCPrimaryExpr(c, it, source_loc, scope);
fn parseCSuffixOpExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
var node = try parseCPrimaryExpr(c, m, scope);
while (true) {
var op_token: ast.TokenIndex = undefined;
var op_id: ast.Node.Tag = undefined;
var bool_op = false;
switch (it.next().?) {
switch (m.next().?) {
.Period => {
if (it.next().? != .Identifier) {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: expected identifier",
.{},
);
if (m.next().? != .Identifier) {
try m.fail(c, "unable to translate C expr: expected identifier", .{});
return error.ParseError;
}

node = try transCreateNodeFieldAccess(c, node, it.slice(it.i));
node = try transCreateNodeFieldAccess(c, node, m.slice());
continue;
},
.Arrow => {
if (it.next().? != .Identifier) {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: expected identifier",
.{},
);
if (m.next().? != .Identifier) {
try m.fail(c, "unable to translate C expr: expected identifier", .{});
return error.ParseError;
}
const deref = try transCreateNodePtrDeref(c, node);
node = try transCreateNodeFieldAccess(c, deref, it.slice(it.i));
node = try transCreateNodeFieldAccess(c, deref, m.slice());
continue;
},
.Asterisk => {
if (it.peek().? == .RParen) {
if (m.peek().? == .RParen) {
// type *)

// hack to get zig fmt to render a comma in builtin calls
_ = try appendToken(c, .Comma, ",");

// last token of `node`
const prev_id = it.list[it.i - 1].id;
const prev_id = m.list[m.i - 1].id;

if (prev_id == .Keyword_void) {
const ptr = try transCreateNodePtrType(c, false, false, .Asterisk);
@@ -6142,17 +6085,11 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSource
},
.LBracket => {
const arr_node = try transCreateNodeArrayAccess(c, node);
arr_node.index_expr = try parseCPrefixOpExpr(c, it, source_loc, scope);
arr_node.index_expr = try parseCPrefixOpExpr(c, m, scope);
arr_node.rtoken = try appendToken(c, .RBracket, "]");
node = &arr_node.base;
if (it.next().? != .RBracket) {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: expected ']'",
.{},
);
if (m.next().? != .RBracket) {
try m.fail(c, "unable to translate C expr: expected ']'", .{});
return error.ParseError;
}
continue;
@@ -6162,19 +6099,13 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSource
var call_params = std.ArrayList(*ast.Node).init(c.gpa);
defer call_params.deinit();
while (true) {
const arg = try parseCPrefixOpExpr(c, it, source_loc, scope);
const arg = try parseCPrefixOpExpr(c, m, scope);
try call_params.append(arg);
switch (it.next().?) {
switch (m.next().?) {
.Comma => _ = try appendToken(c, .Comma, ","),
.RParen => break,
else => {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: expected ',' or ')'",
.{},
);
try m.fail(c, "unable to translate C expr: expected ',' or ')'", .{});
return error.ParseError;
},
}
@@ -6201,19 +6132,13 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSource
defer init_vals.deinit();

while (true) {
const val = try parseCPrefixOpExpr(c, it, source_loc, scope);
const val = try parseCPrefixOpExpr(c, m, scope);
try init_vals.append(val);
switch (it.next().?) {
switch (m.next().?) {
.Comma => _ = try appendToken(c, .Comma, ","),
.RBrace => break,
else => {
try failDecl(
c,
source_loc,
it.slice(0),
"unable to translate C expr: expected ',' or '}}'",
.{},
);
try m.fail(c, "unable to translate C expr: expected ',' or '}}'", .{});
return error.ParseError;
},
}
@@ -6262,22 +6187,22 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSource
op_id = .ArrayCat;
op_token = try appendToken(c, .PlusPlus, "++");

it.i -= 1;
m.i -= 1;
},
.Identifier => {
op_id = .ArrayCat;
op_token = try appendToken(c, .PlusPlus, "++");

it.i -= 1;
m.i -= 1;
},
else => {
it.i -= 1;
m.i -= 1;
return node;
},
}
const cast_fn = if (bool_op) macroIntToBool else macroBoolToInt;
const lhs_node = try cast_fn(c, node);
const rhs_node = try parseCPrefixOpExpr(c, it, source_loc, scope);
const rhs_node = try parseCPrefixOpExpr(c, m, scope);
const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
op_node.* = .{
.base = .{ .tag = op_id },
@@ -6289,36 +6214,36 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSource
}
}
fn parseCPrefixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
switch (it.next().?) {
fn parseCPrefixOpExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
switch (m.next().?) {
.Bang => {
const node = try transCreateNodeSimplePrefixOp(c, .BoolNot, .Bang, "!");
node.rhs = try parseCPrefixOpExpr(c, it, source_loc, scope);
node.rhs = try parseCPrefixOpExpr(c, m, scope);
return &node.base;
},
.Minus => {
const node = try transCreateNodeSimplePrefixOp(c, .Negation, .Minus, "-");
node.rhs = try parseCPrefixOpExpr(c, it, source_loc, scope);
node.rhs = try parseCPrefixOpExpr(c, m, scope);
return &node.base;
},
.Plus => return try parseCPrefixOpExpr(c, it, source_loc, scope),
.Plus => return try parseCPrefixOpExpr(c, m, scope),
.Tilde => {
const node = try transCreateNodeSimplePrefixOp(c, .BitNot, .Tilde, "~");
node.rhs = try parseCPrefixOpExpr(c, it, source_loc, scope);
node.rhs = try parseCPrefixOpExpr(c, m, scope);
return &node.base;
},
.Asterisk => {
const node = try parseCPrefixOpExpr(c, it, source_loc, scope);
const node = try parseCPrefixOpExpr(c, m, scope);
return try transCreateNodePtrDeref(c, node);
},
.Ampersand => {
const node = try transCreateNodeSimplePrefixOp(c, .AddressOf, .Ampersand, "&");
node.rhs = try parseCPrefixOpExpr(c, it, source_loc, scope);
node.rhs = try parseCPrefixOpExpr(c, m, scope);
return &node.base;
},
else => {
it.i -= 1;
return try parseCSuffixOpExpr(c, it, source_loc, scope);
m.i -= 1;
return try parseCSuffixOpExpr(c, m, scope);
},
}
}
@@ -1679,8 +1679,12 @@ pub fn addCases(cases: *tests.TranslateCContext) void {

cases.add("shadowing primitive types",
\\unsigned anyerror = 2;
\\#define noreturn _Noreturn
, &[_][]const u8{
\\pub export var anyerror_1: c_uint = @bitCast(c_uint, @as(c_int, 2));
,

\\pub const noreturn_2 = @compileError("unable to translate C expr: unexpected token .Keyword_noreturn");
});

cases.add("floats",