Merge remote-tracking branch 'origin/master' into llvm15

This commit is contained in:
Andrew Kelley 2022-09-11 20:26:53 -07:00
commit ab3ac291ac
51 changed files with 2523 additions and 1742 deletions

View File

@ -203,7 +203,7 @@ var zigAnalysis;
if (!("type" in resolvedExpr)) {
return null;
}
let type = zigAnalysis.types[resolvedExpr.type];
let type = getType(resolvedExpr.type);
outer: for (let i = 0; i < 10000; i += 1) {
switch (type.kind) {
@ -212,7 +212,7 @@ var zigAnalysis;
let child = type.child;
let resolvedChild = resolveValue(child);
if ("type" in resolvedChild) {
type = zigAnalysis.types[resolvedChild.type];
type = getType(resolvedChild.type);
continue;
} else {
return null;
@ -276,7 +276,7 @@ var zigAnalysis;
}
if ("declRef" in value.expr) {
value = zigAnalysis.decls[value.expr.declRef].value;
value = getDecl(value.expr.declRef).value;
continue;
}
@ -430,7 +430,7 @@ var zigAnalysis;
curNav.pkgObjs.push(pkg);
}
let currentType = zigAnalysis.types[pkg.main];
let currentType = getType(pkg.main);
curNav.declObjs = [currentType];
for (let i = 0; i < curNav.declNames.length; i += 1) {
let childDecl = findSubDecl(currentType, curNav.declNames[i]);
@ -440,7 +440,7 @@ var zigAnalysis;
let childDeclValue = resolveValue(childDecl.value).expr;
if ("type" in childDeclValue) {
const t = zigAnalysis.types[childDeclValue.type];
const t = getType(childDeclValue.type);
if (t.kind != typeKinds.Fn) {
childDecl = t;
}
@ -478,19 +478,21 @@ var zigAnalysis;
}
if (lastIsDecl && last.kind === "const") {
let typeObj = zigAnalysis.types[resolveValue(last.value).expr.type];
if (typeObj && typeObj.kind === typeKinds.Fn) {
return renderFn(last);
const value = resolveValue(last.value);
if ("type" in value.expr) {
let typeObj = getType(value.expr.type);
if (typeObj.kind === typeKinds.Fn) {
return renderFn(last);
}
}
return renderValue(last);
}
}
function renderDocTest(decl) {
if (!("decltest" in decl)) return;
const astNode = zigAnalysis.astNodes[decl.decltest];
if (!decl.decltest) return;
const astNode = getAstNode(decl.decltest);
domSectDocTests.classList.remove("hidden");
domDocTestsCode.innerHTML = astNode.code;
}
@ -498,7 +500,7 @@ var zigAnalysis;
function renderUnknownDecl(decl) {
domDeclNoRef.classList.remove("hidden");
let docs = zigAnalysis.astNodes[decl.src].docs;
let docs = getAstNode(decl.src).docs;
if (docs != null) {
domTldDocs.innerHTML = markdown(docs);
} else {
@ -509,18 +511,18 @@ var zigAnalysis;
}
function typeIsErrSet(typeIndex) {
let typeObj = zigAnalysis.types[typeIndex];
let typeObj = getType(typeIndex);
return typeObj.kind === typeKinds.ErrorSet;
}
function typeIsStructWithNoFields(typeIndex) {
let typeObj = zigAnalysis.types[typeIndex];
let typeObj = getType(typeIndex);
if (typeObj.kind !== typeKinds.Struct) return false;
return typeObj.fields.length == 0;
}
function typeIsGenericFn(typeIndex) {
let typeObj = zigAnalysis.types[typeIndex];
let typeObj = getType(typeIndex);
if (typeObj.kind !== typeKinds.Fn) {
return false;
}
@ -532,12 +534,12 @@ var zigAnalysis;
let last = fnDecl.value.expr.refPath.length - 1;
let lastExpr = fnDecl.value.expr.refPath[last];
console.assert("declRef" in lastExpr);
fnDecl = zigAnalysis.decls[lastExpr.declRef];
fnDecl = getDecl(lastExpr.declRef);
}
let value = resolveValue(fnDecl.value);
console.assert("type" in value.expr);
let typeObj = zigAnalysis.types[value.expr.type];
let typeObj = getType(value.expr.type);
domFnProtoCode.innerHTML = exprName(value.expr, {
wantHtml: true,
@ -546,7 +548,7 @@ var zigAnalysis;
});
let docsSource = null;
let srcNode = zigAnalysis.astNodes[fnDecl.src];
let srcNode = getAstNode(fnDecl.src);
if (srcNode.docs != null) {
docsSource = srcNode.docs;
}
@ -557,14 +559,14 @@ var zigAnalysis;
if ("type" in retExpr) {
let retIndex = retExpr.type;
let errSetTypeIndex = null;
let retType = zigAnalysis.types[retIndex];
let retType = getType(retIndex);
if (retType.kind === typeKinds.ErrorSet) {
errSetTypeIndex = retIndex;
} else if (retType.kind === typeKinds.ErrorUnion) {
errSetTypeIndex = retType.err.type;
}
if (errSetTypeIndex != null) {
let errSetType = zigAnalysis.types[errSetTypeIndex];
let errSetType = getType(errSetTypeIndex);
renderErrorSet(errSetType);
}
}
@ -578,7 +580,7 @@ var zigAnalysis;
let call = zigAnalysis.calls[resolvedGenericRet.expr.call];
let resolvedFunc = resolveValue({ expr: call.func });
if (!("type" in resolvedFunc.expr)) return;
let callee = zigAnalysis.types[resolvedFunc.expr.type];
let callee = getType(resolvedFunc.expr.type);
if (!callee.generic_ret) return;
resolvedGenericRet = resolveValue({ expr: callee.generic_ret });
}
@ -591,7 +593,7 @@ var zigAnalysis;
}
if (!("type" in resolvedGenericRet.expr)) return;
const genericType = zigAnalysis.types[resolvedGenericRet.expr.type];
const genericType = getType(resolvedGenericRet.expr.type);
if (isContainerType(genericType)) {
renderContainer(genericType);
}
@ -621,7 +623,7 @@ var zigAnalysis;
domFnNoExamples.classList.add("hidden");
}
let protoSrcNode = zigAnalysis.astNodes[protoSrcIndex];
let protoSrcNode = getAstNode(protoSrcIndex);
if (
docsSource == null &&
protoSrcNode != null &&
@ -639,13 +641,13 @@ var zigAnalysis;
function renderFnParamDocs(fnDecl, typeObj) {
let docCount = 0;
let fnNode = zigAnalysis.astNodes[fnDecl.src];
let fnNode = getAstNode(fnDecl.src);
let fields = fnNode.fields;
let isVarArgs = fnNode.varArgs;
for (let i = 0; i < fields.length; i += 1) {
let field = fields[i];
let fieldNode = zigAnalysis.astNodes[field];
let fieldNode = getAstNode(field);
if (fieldNode.docs != null) {
docCount += 1;
}
@ -659,7 +661,7 @@ var zigAnalysis;
for (let i = 0; i < fields.length; i += 1) {
let field = fields[i];
let fieldNode = zigAnalysis.astNodes[field];
let fieldNode = getAstNode(field);
let docs = fieldNode.docs;
if (fieldNode.docs == null) {
continue;
@ -967,17 +969,17 @@ var zigAnalysis;
}
case "switchOp": {
let condExpr = zigAnalysis.exprs[expr.switchOp.cond_index];
let ast = zigAnalysis.astNodes[expr.switchOp.ast];
let ast = getAstNode(expr.switchOp.src);
let file_name = expr.switchOp.file_name;
let outer_decl_index = expr.switchOp.outer_decl;
let outer_decl = zigAnalysis.types[outer_decl_index];
let outer_decl = getType(outer_decl_index);
let line = 0;
// console.log(expr.switchOp)
// console.log(outer_decl)
while (outer_decl_index !== 0 && outer_decl.line_number > 0) {
line += outer_decl.line_number;
outer_decl_index = outer_decl.outer_decl;
outer_decl = zigAnalysis.types[outer_decl_index];
outer_decl = getType(outer_decl_index);
// console.log(outer_decl)
}
line += ast.line + 1;
@ -1028,8 +1030,8 @@ var zigAnalysis;
case "fieldRef": {
const enumObj = exprName({ type: expr.fieldRef.type }, opts);
const field =
zigAnalysis.astNodes[enumObj.ast].fields[expr.fieldRef.index];
const name = zigAnalysis.astNodes[field].name;
getAstNode(enumObj.src).fields[expr.fieldRef.index];
const name = getAstNode(field).name;
return name;
}
case "enumToInt": {
@ -1452,13 +1454,13 @@ var zigAnalysis;
return print_lhs + " " + operator + " " + print_rhs;
}
case "errorSets": {
const errUnionObj = zigAnalysis.types[expr.errorSets];
const errUnionObj = getType(expr.errorSets);
let lhs = exprName(errUnionObj.lhs, opts);
let rhs = exprName(errUnionObj.rhs, opts);
return lhs + " || " + rhs;
}
case "errorUnion": {
const errUnionObj = zigAnalysis.types[expr.errorUnion];
const errUnionObj = getType(expr.errorUnion);
let lhs = exprName(errUnionObj.lhs, opts);
let rhs = exprName(errUnionObj.rhs, opts);
return lhs + "!" + rhs;
@ -1574,7 +1576,7 @@ var zigAnalysis;
return exprName(exprArg, opts);
}
case "declRef": {
return zigAnalysis.decls[expr.declRef].name;
return getDecl(expr.declRef).name;
}
case "refPath": {
return expr.refPath.map((x) => exprName(x, opts)).join(".");
@ -1611,7 +1613,7 @@ var zigAnalysis;
let name = "";
let typeObj = expr.type;
if (typeof typeObj === "number") typeObj = zigAnalysis.types[typeObj];
if (typeof typeObj === "number") typeObj = getType(typeObj);
switch (typeObj.kind) {
default:
throw "TODO";
@ -1865,7 +1867,7 @@ var zigAnalysis;
if (fnObj.params) {
let fields = null;
let isVarArgs = false;
let fnNode = zigAnalysis.astNodes[fnObj.src];
let fnNode = getAstNode(fnObj.src);
fields = fnNode.fields;
isVarArgs = fnNode.varArgs;
@ -1880,7 +1882,7 @@ var zigAnalysis;
let paramValue = resolveValue({ expr: value });
if (fields != null) {
let paramNode = zigAnalysis.astNodes[fields[i]];
let paramNode = getAstNode(fields[i]);
if (paramNode.varArgs) {
payloadHtml += "...";
@ -2046,7 +2048,7 @@ var zigAnalysis;
function shouldSkipParamName(typeRef, paramName) {
let resolvedTypeRef = resolveValue({ expr: typeRef });
if ("type" in resolvedTypeRef) {
let typeObj = zigAnalysis.types[resolvedTypeRef.type];
let typeObj = getType(resolvedTypeRef.type);
if (typeObj.kind === typeKinds.Pointer) {
let ptrObj = typeObj;
if (getPtrSize(ptrObj) === pointerSizeEnum.One) {
@ -2067,7 +2069,7 @@ var zigAnalysis;
if (
rootIsStd &&
typeObj ===
zigAnalysis.types[zigAnalysis.packages[zigAnalysis.rootPkg].main]
getType(zigAnalysis.packages[zigAnalysis.rootPkg].main)
) {
name = "std";
} else {
@ -2189,7 +2191,7 @@ var zigAnalysis;
if (resolvedValue.expr.fieldRef) {
const declRef = decl.value.expr.refPath[0].declRef;
const type = zigAnalysis.decls[declRef];
const type = getDecl(declRef);
domFnProtoCode.innerHTML =
'<span class="tok-kw">const</span> ' +
escapeHtml(decl.name) +
@ -2229,7 +2231,7 @@ var zigAnalysis;
";";
}
let docs = zigAnalysis.astNodes[decl.src].docs;
let docs = getAstNode(decl.src).docs;
if (docs != null) {
domTldDocs.innerHTML = markdown(docs);
domTldDocs.classList.remove("hidden");
@ -2246,7 +2248,7 @@ var zigAnalysis;
": " +
typeValueName(declTypeRef, true, true);
let docs = zigAnalysis.astNodes[decl.src].docs;
let docs = getAstNode(decl.src).docs;
if (docs != null) {
domTldDocs.innerHTML = markdown(docs);
domTldDocs.classList.remove("hidden");
@ -2266,7 +2268,7 @@ var zigAnalysis;
testsList
) {
for (let i = 0; i < decls.length; i += 1) {
let decl = zigAnalysis.decls[decls[i]];
let decl = getDecl(decls[i]);
let declValue = resolveValue(decl.value);
if (decl.isTest) {
@ -2282,7 +2284,7 @@ var zigAnalysis;
if (decl.kind === "const") {
if ("type" in declValue.expr) {
// We have the actual type expression at hand.
const typeExpr = zigAnalysis.types[declValue.expr.type];
const typeExpr = getType(declValue.expr.type);
if (typeExpr.kind == typeKinds.Fn) {
const funcRetExpr = resolveValue({
expr: typeExpr.ret,
@ -2310,7 +2312,7 @@ var zigAnalysis;
typesList.push(decl);
}
}
} else if ("typeRef" in declValue) {
} else if (declValue.typeRef) {
if ("type" in declValue.typeRef && declValue.typeRef == typeTypeId) {
// We don't know what the type expression is, but we know it's a type.
typesList.push(decl);
@ -2324,7 +2326,7 @@ var zigAnalysis;
}
}
function renderSourceFileLink(decl) {
let srcNode = zigAnalysis.astNodes[decl.src];
let srcNode = getAstNode(decl.src);
return "<a style=\"float: right;\" href=\"" +
sourceFileUrlTemplate.replace("{{file}}",
@ -2377,7 +2379,7 @@ var zigAnalysis;
testsList.sort(byNameProperty);
if (container.src != null) {
let docs = zigAnalysis.astNodes[container.src].docs;
let docs = getAstNode(container.src).docs;
if (docs != null) {
domTldDocs.innerHTML = markdown(docs);
domTldDocs.classList.remove("hidden");
@ -2457,7 +2459,7 @@ var zigAnalysis;
});
tdFnSrc.innerHTML = renderSourceFileLink(decl);
let docs = zigAnalysis.astNodes[decl.src].docs;
let docs = getAstNode(decl.src).docs;
if (docs != null) {
tdDesc.innerHTML = shortDescMarkdown(docs);
} else {
@ -2467,12 +2469,12 @@ var zigAnalysis;
domSectFns.classList.remove("hidden");
}
let containerNode = zigAnalysis.astNodes[container.src];
let containerNode = getAstNode(container.src);
if (containerNode.fields && containerNode.fields.length > 0) {
resizeDomList(domListFields, containerNode.fields.length, "<div></div>");
for (let i = 0; i < containerNode.fields.length; i += 1) {
let fieldNode = zigAnalysis.astNodes[containerNode.fields[i]];
let fieldNode = getAstNode(containerNode.fields[i]);
let divDom = domListFields.children[i];
let fieldName = fieldNode.name;
let docs = fieldNode.docs;
@ -2528,7 +2530,7 @@ var zigAnalysis;
tdType.innerHTML = typeValueName(typeOfDecl(decl), true, true);
let docs = zigAnalysis.astNodes[decl.src].docs;
let docs = getAstNode(decl.src).docs;
if (docs != null) {
tdDesc.innerHTML = shortDescMarkdown(docs);
} else {
@ -2561,7 +2563,7 @@ var zigAnalysis;
wantLink: true,
});
let docs = zigAnalysis.astNodes[decl.src].docs;
let docs = getAstNode(decl.src).docs;
if (docs != null) {
tdDesc.innerHTML = shortDescMarkdown(docs);
} else {
@ -2594,7 +2596,7 @@ var zigAnalysis;
wantLink: true,
});
let docs = zigAnalysis.astNodes[decl.src].docs;
let docs = getAstNode(decl.src).docs;
if (docs != null) {
tdDesc.innerHTML = shortDescMarkdown(docs);
} else {
@ -2668,7 +2670,7 @@ var zigAnalysis;
function findTypeTypeId() {
for (let i = 0; i < zigAnalysis.types.length; i += 1) {
if (zigAnalysis.types[i].kind == typeKinds.Type) {
if (getType(i).kind == typeKinds.Type) {
return i;
}
}
@ -2732,11 +2734,11 @@ var zigAnalysis;
if ("value" in parentType) {
const rv = resolveValue(parentType.value);
if ("type" in rv.expr) {
const t = zigAnalysis.types[rv.expr.type];
const t = getType(rv.expr.type);
if (t.kind == typeKinds.Fn && t.generic_ret != null) {
const rgr = resolveValue({ expr: t.generic_ret });
if ("type" in rgr.expr) {
parentType = zigAnalysis.types[rgr.expr.type];
parentType = getType(rgr.expr.type);
}
}
}
@ -2746,7 +2748,7 @@ var zigAnalysis;
if (!parentType.pubDecls) return null;
for (let i = 0; i < parentType.pubDecls.length; i += 1) {
let declIndex = parentType.pubDecls[i];
let childDecl = zigAnalysis.decls[declIndex];
let childDecl = getDecl(declIndex);
if (childDecl.name === childName) {
return childDecl;
}
@ -2754,7 +2756,7 @@ var zigAnalysis;
if (!parentType.privDecls) return null;
for (let i = 0; i < parentType.privDecls.length; i += 1) {
let declIndex = parentType.privDecls[i];
let childDecl = zigAnalysis.decls[declIndex];
let childDecl = getDecl(declIndex);
if (childDecl.name === childName) {
return childDecl;
}
@ -2805,7 +2807,7 @@ var zigAnalysis;
let stack = [
{
declNames: [],
type: zigAnalysis.types[pkg.main],
type: getType(pkg.main),
},
];
while (stack.length !== 0) {
@ -2819,7 +2821,7 @@ var zigAnalysis;
let mainDeclIndex = t.pubDecls[declI];
if (list[mainDeclIndex] != null) continue;
let decl = zigAnalysis.decls[mainDeclIndex];
let decl = getDecl(mainDeclIndex);
let declVal = resolveValue(decl.value);
let declNames = item.declNames.concat([decl.name]);
list[mainDeclIndex] = {
@ -2827,7 +2829,7 @@ var zigAnalysis;
declNames: declNames,
};
if ("type" in declVal.expr) {
let value = zigAnalysis.types[declVal.expr.type];
let value = getType(declVal.expr.type);
if (declCanRepresentTypeKind(value.kind)) {
canonTypeDecls[declVal.type] = mainDeclIndex;
}
@ -2843,7 +2845,7 @@ var zigAnalysis;
if (value.kind == typeKinds.Fn && value.generic_ret != null) {
let resolvedVal = resolveValue({ expr: value.generic_ret });
if ("type" in resolvedVal.expr) {
let generic_type = zigAnalysis.types[resolvedVal.expr.type];
let generic_type = getType(resolvedVal.expr.type);
if (isContainerType(generic_type)) {
stack.push({
declNames: declNames,
@ -3394,11 +3396,11 @@ var zigAnalysis;
let canonPath = getCanonDeclPath(declIndex);
if (canonPath == null) continue;
let decl = zigAnalysis.decls[declIndex];
let decl = getDecl(declIndex);
let lastPkgName = canonPath.pkgNames[canonPath.pkgNames.length - 1];
let fullPathSearchText =
lastPkgName + "." + canonPath.declNames.join(".");
let astNode = zigAnalysis.astNodes[decl.src];
let astNode = getAstNode(decl.src);
let fileAndDocs = ""; //zigAnalysis.files[astNode.file];
// TODO: understand what this piece of code is trying to achieve
// also right now `files` are expressed as a hashmap.
@ -3513,4 +3515,169 @@ var zigAnalysis;
// Sort comparator for Array.prototype.sort: orders records by their `name`
// property, delegating the actual string comparison to operatorCompare.
function byNameProperty(lhs, rhs) {
  return operatorCompare(lhs.name, rhs.name);
}
// Decls are serialized as positional arrays in zigAnalysis.decls;
// rehydrate the entry at `idx` into a named record.
function getDecl(idx) {
  const [name, kind, isTest, src, value, decltest] = zigAnalysis.decls[idx];
  return { name, kind, isTest, src, value, decltest };
}
// AST nodes are serialized as positional arrays in zigAnalysis.astNodes;
// rehydrate the entry at `idx` into a named record.
function getAstNode(idx) {
  const [file, line, col, name, code, docs, fields, comptime] =
    zigAnalysis.astNodes[idx];
  return { file, line, col, name, code, docs, fields, comptime };
}
// Types are serialized as positional arrays in zigAnalysis.types, with the
// numeric kind tag at index 0. Rehydrate the entry at `idx` into a named
// record whose field layout depends on the kind.
function getType(idx) {
  const ty = zigAnalysis.types[idx];
  const tag = ty[0];
  if (tag === 0) throw "unanalyzed type!"; // Unanalyzed
  // Kinds that carry only a display name.
  const SIMPLE = ["kind", "name"];
  // Field layout per kind tag; position in the list == position in `ty`.
  const LAYOUTS = {
    1: SIMPLE, // Type
    2: SIMPLE, // Void
    3: SIMPLE, // Bool
    4: SIMPLE, // NoReturn
    5: SIMPLE, // Int
    6: SIMPLE, // Float
    7: [ // Pointer
      "kind", "size", "child", "sentinel", "align", "address_space",
      "bit_start", "host_size", "is_ref", "is_allowzero", "is_mutable",
      "is_volatile", "has_sentinel", "has_align", "has_addrspace",
      "has_bit_range",
    ],
    8: ["kind", "len", "child", "sentinel"], // Array
    9: [ // Struct
      "kind", "name", "src", "privDecls", "pubDecls", "fields",
      "line_number", "outer_decl",
    ],
    10: SIMPLE, // ComptimeExpr
    11: SIMPLE, // ComptimeFloat
    12: SIMPLE, // ComptimeInt
    13: SIMPLE, // Undefined
    14: SIMPLE, // Null
    15: ["kind", "name", "child"], // Optional
    16: ["kind", "lhs", "rhs"], // ErrorUnion
    17: ["kind", "payload"], // InferredErrorUnion
    18: ["kind", "name", "fields"], // ErrorSet
    19: ["kind", "name", "src", "privDecls", "pubDecls"], // Enum
    20: ["kind", "name", "src", "privDecls", "pubDecls", "fields"], // Union
    21: [ // Fn
      "kind", "name", "src", "ret", "generic_ret", "params", "lib_name",
      "is_var_args", "is_inferred_error", "has_lib_name", "has_cc", "cc",
      "align", "has_align", "is_test", "is_extern",
    ],
    22: SIMPLE, // BoundFn
    23: ["kind", "name", "src", "privDecls", "pubDecls"], // Opaque
    24: SIMPLE, // Frame
    25: SIMPLE, // AnyFrame
    26: SIMPLE, // Vector
    27: SIMPLE, // EnumLiteral
  };
  const layout = LAYOUTS[tag];
  if (layout === undefined) throw "unhandled type kind!";
  const out = {};
  layout.forEach((field, i) => {
    out[field] = ty[i];
  });
  return out;
}
})();

View File

@ -508,7 +508,7 @@ pub const ChildProcess = struct {
// it, that's the error code returned by the child process.
_ = std.os.poll(&fd, 0) catch unreachable;
// According to eventfd(2) the descriptro is readable if the counter
// According to eventfd(2) the descriptor is readable if the counter
// has a value greater than 0
if ((fd[0].revents & std.os.POLL.IN) != 0) {
const err_int = try readIntFd(err_pipe[0]);

View File

@ -2,8 +2,6 @@ const std = @import("std.zig");
const debug = std.debug;
const assert = debug.assert;
const testing = std.testing;
const mem = std.mem;
const Allocator = mem.Allocator;
/// A singly-linked list is headed by a single forward pointer. The elements
/// are singly linked for minimum space and pointer manipulation overhead at

View File

@ -280,8 +280,8 @@ pub fn generateZirData(self: *Autodoc) !void {
try std.json.stringify(
data,
.{
.whitespace = .{ .indent = if (builtin.mode == .Debug) .{ .Space = 4 } else .None },
.emit_null_optional_fields = false,
.whitespace = .{ .indent = .None, .separator = false },
.emit_null_optional_fields = true,
},
out,
);
@ -404,6 +404,7 @@ const DocData = struct {
w: anytype,
) !void {
var jsw = std.json.writeStream(w, 15);
if (opts.whitespace) |ws| jsw.whitespace = ws;
try jsw.beginObject();
inline for (comptime std.meta.tags(std.meta.FieldEnum(DocData))) |f| {
const f_name = @tagName(f);
@ -449,6 +450,8 @@ const DocData = struct {
w: anytype,
) !void {
var jsw = std.json.writeStream(w, 15);
if (opts.whitespace) |ws| jsw.whitespace = ws;
try jsw.beginObject();
inline for (comptime std.meta.tags(std.meta.FieldEnum(DocPackage))) |f| {
const f_name = @tagName(f);
@ -474,6 +477,22 @@ const DocData = struct {
// The index in astNodes of the `test declname { }` node
decltest: ?usize = null,
_analyzed: bool, // omitted in json data
pub fn jsonStringify(
self: Decl,
opts: std.json.StringifyOptions,
w: anytype,
) !void {
var jsw = std.json.writeStream(w, 15);
if (opts.whitespace) |ws| jsw.whitespace = ws;
try jsw.beginArray();
inline for (comptime std.meta.fields(Decl)) |f| {
try jsw.arrayElem();
try std.json.stringify(@field(self, f.name), opts, w);
jsw.state_index -= 1;
}
try jsw.endArray();
}
};
const AstNode = struct {
@ -485,6 +504,22 @@ const DocData = struct {
docs: ?[]const u8 = null,
fields: ?[]usize = null, // index into astNodes
@"comptime": bool = false,
pub fn jsonStringify(
self: AstNode,
opts: std.json.StringifyOptions,
w: anytype,
) !void {
var jsw = std.json.writeStream(w, 15);
if (opts.whitespace) |ws| jsw.whitespace = ws;
try jsw.beginArray();
inline for (comptime std.meta.fields(AstNode)) |f| {
try jsw.arrayElem();
try std.json.stringify(@field(self, f.name), opts, w);
jsw.state_index -= 1;
}
try jsw.endArray();
}
};
const Type = union(enum) {
@ -525,7 +560,6 @@ const DocData = struct {
fields: ?[]Expr = null, // (use src->fields to find names)
line_number: usize,
outer_decl: usize,
ast: usize,
},
ComptimeExpr: struct { name: []const u8 },
ComptimeFloat: struct { name: []const u8 },
@ -548,7 +582,6 @@ const DocData = struct {
src: usize, // index into astNodes
privDecls: []usize = &.{}, // index into decls
pubDecls: []usize = &.{}, // index into decls
ast: usize,
// (use src->fields to find field names)
},
Union: struct {
@ -557,7 +590,6 @@ const DocData = struct {
privDecls: []usize = &.{}, // index into decls
pubDecls: []usize = &.{}, // index into decls
fields: []Expr = &.{}, // (use src->fields to find names)
ast: usize,
},
Fn: struct {
name: []const u8,
@ -582,7 +614,6 @@ const DocData = struct {
src: usize, // index into astNodes
privDecls: []usize = &.{}, // index into decls
pubDecls: []usize = &.{}, // index into decls
ast: usize,
},
Frame: struct { name: []const u8 },
AnyFrame: struct { name: []const u8 },
@ -601,14 +632,15 @@ const DocData = struct {
) !void {
const active_tag = std.meta.activeTag(self);
var jsw = std.json.writeStream(w, 15);
try jsw.beginObject();
try jsw.objectField("kind");
if (opts.whitespace) |ws| jsw.whitespace = ws;
try jsw.beginArray();
try jsw.arrayElem();
try jsw.emitNumber(@enumToInt(active_tag));
inline for (comptime std.meta.fields(Type)) |case| {
if (@field(Type, case.name) == active_tag) {
const current_value = @field(self, case.name);
inline for (comptime std.meta.fields(case.field_type)) |f| {
try jsw.objectField(f.name);
try jsw.arrayElem();
if (f.field_type == std.builtin.TypeInfo.Pointer.Size) {
try jsw.emitNumber(@enumToInt(@field(current_value, f.name)));
} else {
@ -618,7 +650,7 @@ const DocData = struct {
}
}
}
try jsw.endObject();
try jsw.endArray();
}
};
@ -686,7 +718,7 @@ const DocData = struct {
const SwitchOp = struct {
cond_index: usize,
file_name: []const u8,
ast: usize,
src: usize,
outer_decl: usize, // index in `types`
};
const BuiltinBin = struct {
@ -704,7 +736,15 @@ const DocData = struct {
end: ?usize = null,
sentinel: ?usize = null, // index in `exprs`
};
const Cmpxchg = struct { name: []const u8, type: usize, ptr: usize, expected_value: usize, new_value: usize, success_order: usize, failure_order: usize };
const Cmpxchg = struct {
name: []const u8,
type: usize,
ptr: usize,
expected_value: usize,
new_value: usize,
success_order: usize,
failure_order: usize,
};
const As = struct {
typeRefArg: ?usize, // index in `exprs`
exprArg: usize, // index in `exprs`
@ -721,11 +761,12 @@ const DocData = struct {
pub fn jsonStringify(
self: Expr,
opt: std.json.StringifyOptions,
opts: std.json.StringifyOptions,
w: anytype,
) !void {
const active_tag = std.meta.activeTag(self);
var jsw = std.json.writeStream(w, 15);
if (opts.whitespace) |ws| jsw.whitespace = ws;
try jsw.beginObject();
try jsw.objectField(@tagName(active_tag));
switch (self) {
@ -742,7 +783,7 @@ const DocData = struct {
if (comptime std.mem.eql(u8, case.name, "builtinField"))
continue;
if (@field(Expr, case.name) == active_tag) {
try std.json.stringify(@field(self, case.name), opt, w);
try std.json.stringify(@field(self, case.name), opts, w);
jsw.state_index -= 1;
// TODO: we should not reach into the state of the
// json writer, but alas, this is what's
@ -1874,7 +1915,12 @@ fn walkInstruction(
// log.debug("{s}", .{sep});
const switch_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .switchOp = .{ .cond_index = cond_index, .file_name = file.sub_file_path, .ast = ast_index, .outer_decl = type_index } });
try self.exprs.append(self.arena, .{ .switchOp = .{
.cond_index = cond_index,
.file_name = file.sub_file_path,
.src = ast_index,
.outer_decl = type_index,
} });
return DocData.WalkResult{
.typeRef = .{ .type = @enumToInt(Ref.type_type) },
@ -2505,7 +2551,6 @@ fn walkInstruction(
.src = self_ast_node_index,
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
.ast = self_ast_node_index,
},
};
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
@ -2644,7 +2689,13 @@ fn walkInstruction(
self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
self.types.items[type_slot_index] = .{
.Union = .{ .name = "todo_name", .src = self_ast_node_index, .privDecls = priv_decl_indexes.items, .pubDecls = decl_indexes.items, .fields = field_type_refs.items, .ast = self_ast_node_index },
.Union = .{
.name = "todo_name",
.src = self_ast_node_index,
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
.fields = field_type_refs.items,
},
};
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
@ -2796,7 +2847,12 @@ fn walkInstruction(
self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
self.types.items[type_slot_index] = .{
.Enum = .{ .name = "todo_name", .src = self_ast_node_index, .privDecls = priv_decl_indexes.items, .pubDecls = decl_indexes.items, .ast = self_ast_node_index },
.Enum = .{
.name = "todo_name",
.src = self_ast_node_index,
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
},
};
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
for (paths.items) |resume_info| {
@ -2910,7 +2966,15 @@ fn walkInstruction(
self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
self.types.items[type_slot_index] = .{
.Struct = .{ .name = "todo_name", .src = self_ast_node_index, .privDecls = priv_decl_indexes.items, .pubDecls = decl_indexes.items, .fields = field_type_refs.items, .line_number = self.ast_nodes.items[self_ast_node_index].line, .outer_decl = type_slot_index - 1, .ast = self_ast_node_index },
.Struct = .{
.name = "todo_name",
.src = self_ast_node_index,
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
.fields = field_type_refs.items,
.line_number = self.ast_nodes.items[self_ast_node_index].line,
.outer_decl = type_slot_index - 1,
},
};
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
for (paths.items) |resume_info| {

View File

@ -1238,7 +1238,6 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
options.target,
options.is_native_abi,
link_libc,
options.system_lib_names.len != 0 or options.frameworks.count() != 0,
options.libc_installation,
options.native_darwin_sdk != null,
);
@ -4522,7 +4521,6 @@ fn detectLibCIncludeDirs(
target: Target,
is_native_abi: bool,
link_libc: bool,
link_system_libs: bool,
libc_installation: ?*const LibCInstallation,
has_macos_sdk: bool,
) !LibCDirs {
@ -4539,7 +4537,7 @@ fn detectLibCIncludeDirs(
// If linking system libraries and targeting the native abi, default to
// using the system libc installation.
if (link_system_libs and is_native_abi and !target.isMinGW()) {
if (is_native_abi and !target.isMinGW()) {
if (target.isDarwin()) {
return if (has_macos_sdk)
// For Darwin/macOS, we are all set with getDarwinSDK found earlier.
@ -4551,74 +4549,29 @@ fn detectLibCIncludeDirs(
getZigShippedLibCIncludeDirsDarwin(arena, zig_lib_dir, target);
}
const libc = try arena.create(LibCInstallation);
libc.* = try LibCInstallation.findNative(.{ .allocator = arena, .verbose = true });
libc.* = LibCInstallation.findNative(.{ .allocator = arena }) catch |err| switch (err) {
error.CCompilerExitCode,
error.CCompilerCrashed,
error.CCompilerCannotFindHeaders,
error.UnableToSpawnCCompiler,
=> |e| {
// We tried to integrate with the native system C compiler,
// however, it is not installed. So we must rely on our bundled
// libc files.
if (target_util.canBuildLibC(target)) {
return detectLibCFromBuilding(arena, zig_lib_dir, target, has_macos_sdk);
}
return e;
},
else => |e| return e,
};
return detectLibCFromLibCInstallation(arena, target, libc);
}
// If not linking system libraries, build and provide our own libc by
// default if possible.
if (target_util.canBuildLibC(target)) {
switch (target.os.tag) {
.macos => return if (has_macos_sdk)
// For Darwin/macOS, we are all set with getDarwinSDK found earlier.
LibCDirs{
.libc_include_dir_list = &[0][]u8{},
.libc_installation = null,
}
else
getZigShippedLibCIncludeDirsDarwin(arena, zig_lib_dir, target),
else => {
const generic_name = target_util.libCGenericName(target);
// Some architectures are handled by the same set of headers.
const arch_name = if (target.abi.isMusl())
musl.archName(target.cpu.arch)
else if (target.cpu.arch.isThumb())
// ARM headers are valid for Thumb too.
switch (target.cpu.arch) {
.thumb => "arm",
.thumbeb => "armeb",
else => unreachable,
}
else
@tagName(target.cpu.arch);
const os_name = @tagName(target.os.tag);
// Musl's headers are ABI-agnostic and so they all have the "musl" ABI name.
const abi_name = if (target.abi.isMusl()) "musl" else @tagName(target.abi);
const s = std.fs.path.sep_str;
const arch_include_dir = try std.fmt.allocPrint(
arena,
"{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{s}-{s}-{s}",
.{ zig_lib_dir, arch_name, os_name, abi_name },
);
const generic_include_dir = try std.fmt.allocPrint(
arena,
"{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "generic-{s}",
.{ zig_lib_dir, generic_name },
);
const generic_arch_name = target_util.osArchName(target);
const arch_os_include_dir = try std.fmt.allocPrint(
arena,
"{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{s}-{s}-any",
.{ zig_lib_dir, generic_arch_name, os_name },
);
const generic_os_include_dir = try std.fmt.allocPrint(
arena,
"{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "any-{s}-any",
.{ zig_lib_dir, os_name },
);
const list = try arena.alloc([]const u8, 4);
list[0] = arch_include_dir;
list[1] = generic_include_dir;
list[2] = arch_os_include_dir;
list[3] = generic_os_include_dir;
return LibCDirs{
.libc_include_dir_list = list,
.libc_installation = null,
};
},
}
return detectLibCFromBuilding(arena, zig_lib_dir, target, has_macos_sdk);
}
// If zig can't build the libc for the target and we are targeting the
@ -4677,6 +4630,75 @@ fn detectLibCFromLibCInstallation(arena: Allocator, target: Target, lci: *const
};
}
/// Returns the list of Zig-shipped libc include directories for `target`,
/// rooted at `zig_lib_dir`. All path strings are allocated from `arena`.
/// No external `LibCInstallation` is consulted (`libc_installation` is null).
fn detectLibCFromBuilding(
    arena: Allocator,
    zig_lib_dir: []const u8,
    target: std.Target,
    has_macos_sdk: bool,
) !LibCDirs {
    if (target.os.tag == .macos) {
        // For Darwin/macOS, we are all set with getDarwinSDK found earlier.
        if (has_macos_sdk) return LibCDirs{
            .libc_include_dir_list = &[0][]u8{},
            .libc_installation = null,
        };
        return getZigShippedLibCIncludeDirsDarwin(arena, zig_lib_dir, target);
    }

    const generic_name = target_util.libCGenericName(target);
    // Some architectures are handled by the same set of headers.
    const arch_name = if (target.abi.isMusl())
        musl.archName(target.cpu.arch)
    else if (target.cpu.arch.isThumb())
        // ARM headers are valid for Thumb too.
        switch (target.cpu.arch) {
            .thumb => "arm",
            .thumbeb => "armeb",
            else => unreachable,
        }
    else
        @tagName(target.cpu.arch);
    const os_name = @tagName(target.os.tag);
    // Musl's headers are ABI-agnostic and so they all have the "musl" ABI name.
    const abi_name = if (target.abi.isMusl()) "musl" else @tagName(target.abi);
    const sep = std.fs.path.sep_str;

    const dir_arch_os_abi = try std.fmt.allocPrint(
        arena,
        "{s}" ++ sep ++ "libc" ++ sep ++ "include" ++ sep ++ "{s}-{s}-{s}",
        .{ zig_lib_dir, arch_name, os_name, abi_name },
    );
    const dir_generic_abi = try std.fmt.allocPrint(
        arena,
        "{s}" ++ sep ++ "libc" ++ sep ++ "include" ++ sep ++ "generic-{s}",
        .{ zig_lib_dir, generic_name },
    );
    const generic_arch_name = target_util.osArchName(target);
    const dir_arch_os_any = try std.fmt.allocPrint(
        arena,
        "{s}" ++ sep ++ "libc" ++ sep ++ "include" ++ sep ++ "{s}-{s}-any",
        .{ zig_lib_dir, generic_arch_name, os_name },
    );
    const dir_any_os_any = try std.fmt.allocPrint(
        arena,
        "{s}" ++ sep ++ "libc" ++ sep ++ "include" ++ sep ++ "any-{s}-any",
        .{ zig_lib_dir, os_name },
    );

    // Order is preserved from the original implementation:
    // most specific directory first, then the fallbacks.
    const list = try arena.dupe([]const u8, &[_][]const u8{
        dir_arch_os_abi,
        dir_generic_abi,
        dir_arch_os_any,
        dir_any_os_any,
    });
    return LibCDirs{
        .libc_include_dir_list = list,
        .libc_installation = null,
    };
}
pub fn get_libc_crt_file(comp: *Compilation, arena: Allocator, basename: []const u8) ![]const u8 {
if (comp.wantBuildGLibCFromSource() or
comp.wantBuildMuslFromSource() or

View File

@ -139,21 +139,10 @@ const MCValue = union(enum) {
/// If the type is a pointer, it means the pointer address is at
/// this memory location.
memory: u64,
/// The value is in memory referenced indirectly via a GOT entry
/// index.
///
/// If the type is a pointer, it means the pointer is referenced
/// indirectly via GOT. When lowered, linker will emit
/// relocations of type ARM64_RELOC_GOT_LOAD_PAGE21 and
/// ARM64_RELOC_GOT_LOAD_PAGEOFF12.
got_load: u32,
/// The value is in memory referenced directly via symbol index.
///
/// If the type is a pointer, it means the pointer is referenced
/// directly via symbol index. When lowered, linker will emit a
/// relocation of type ARM64_RELOC_PAGE21 and
/// ARM64_RELOC_PAGEOFF12.
direct_load: u32,
/// The value is in memory but requires a linker relocation fixup:
/// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
/// * direct - the value is referenced directly via symbol index (the linker emits a displacement reloc)
linker_load: struct { @"type": enum { got, direct }, sym_index: u32 },
/// The value is one of the stack variables.
///
/// If the type is a pointer, it means the pointer address is in
@ -2959,8 +2948,7 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
.memory,
.stack_offset,
.stack_argument_offset,
.got_load,
.direct_load,
.linker_load,
=> {
const addr_reg = try self.copyToTmpRegister(ptr_ty, ptr);
try self.load(dst_mcv, .{ .register = addr_reg }, ptr_ty);
@ -3197,8 +3185,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
.memory,
.stack_offset,
.stack_argument_offset,
.got_load,
.direct_load,
.linker_load,
=> {
const addr_reg = try self.copyToTmpRegister(ptr_ty, ptr);
try self.store(.{ .register = addr_reg }, value, ptr_ty, value_ty);
@ -3493,7 +3480,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
const func = func_payload.data;
const fn_owner_decl = mod.declPtr(func.owner_decl);
try self.genSetReg(Type.initTag(.u64), .x30, .{
.got_load = fn_owner_decl.link.macho.sym_index,
.linker_load = .{
.@"type" = .got,
.sym_index = fn_owner_decl.link.macho.sym_index,
},
});
// blr x30
_ = try self.addInst(.{
@ -4427,8 +4417,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
.register = cond_reg,
});
},
.got_load,
.direct_load,
.linker_load,
.memory,
.stack_argument_offset,
.stack_offset,
@ -4479,13 +4468,10 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
});
},
.memory => |addr| try self.genSetReg(Type.usize, src_reg, .{ .immediate = addr }),
.got_load,
.direct_load,
=> |sym_index| {
const tag: Mir.Inst.Tag = switch (mcv) {
.got_load => .load_memory_ptr_got,
.direct_load => .load_memory_ptr_direct,
else => unreachable,
.linker_load => |load_struct| {
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_ptr_got,
.direct => .load_memory_ptr_direct,
};
const mod = self.bin_file.options.module.?;
_ = try self.addInst(.{
@ -4494,7 +4480,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(src_reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.sym_index = sym_index,
.sym_index = load_struct.sym_index,
}),
},
});
@ -4594,13 +4580,10 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
});
},
.register_with_overflow => unreachable, // doesn't fit into a register
.got_load,
.direct_load,
=> |sym_index| {
const tag: Mir.Inst.Tag = switch (mcv) {
.got_load => .load_memory_got,
.direct_load => .load_memory_direct,
else => unreachable,
.linker_load => |load_struct| {
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_got,
.direct => .load_memory_direct,
};
const mod = self.bin_file.options.module.?;
_ = try self.addInst(.{
@ -4609,7 +4592,7 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.sym_index = sym_index,
.sym_index = load_struct.sym_index,
}),
},
});
@ -4741,8 +4724,7 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
.register_with_overflow => {
return self.fail("TODO implement genSetStackArgument {}", .{mcv});
},
.got_load,
.direct_load,
.linker_load,
.memory,
.stack_argument_offset,
.stack_offset,
@ -4785,13 +4767,10 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
});
},
.memory => |addr| try self.genSetReg(ptr_ty, src_reg, .{ .immediate = @intCast(u32, addr) }),
.got_load,
.direct_load,
=> |sym_index| {
const tag: Mir.Inst.Tag = switch (mcv) {
.got_load => .load_memory_ptr_got,
.direct_load => .load_memory_ptr_direct,
else => unreachable,
.linker_load => |load_struct| {
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_ptr_got,
.direct => .load_memory_ptr_direct,
};
const mod = self.bin_file.options.module.?;
_ = try self.addInst(.{
@ -4800,7 +4779,7 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(src_reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.sym_index = sym_index,
.sym_index = load_struct.sym_index,
}),
},
});
@ -5107,7 +5086,10 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
// Because MachO is PIE-always-on, we defer memory address resolution until
// the linker has enough info to perform relocations.
assert(decl.link.macho.sym_index != 0);
return MCValue{ .got_load = decl.link.macho.sym_index };
return MCValue{ .linker_load = .{
.@"type" = .got,
.sym_index = decl.link.macho.sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO codegen COFF const Decl pointer", .{});
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
@ -5129,7 +5111,10 @@ fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
const vaddr = elf_file.local_symbols.items[local_sym_index].st_value;
return MCValue{ .memory = vaddr };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
return MCValue{ .direct_load = local_sym_index };
return MCValue{ .linker_load = .{
.@"type" = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO lower unnamed const in COFF", .{});
} else if (self.bin_file.cast(link.File.Plan9)) |_| {

View File

@ -681,12 +681,10 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) !void {
};
// Add relocation to the decl.
const atom = macho_file.atom_by_index_table.get(relocation.atom_index).?;
const target = macho_file.getGlobalByIndex(relocation.sym_index);
try atom.relocs.append(emit.bin_file.allocator, .{
.offset = offset,
.target = .{
.sym_index = relocation.sym_index,
.file = null,
},
.target = target,
.addend = 0,
.subtractor = null,
.pcrel = true,

File diff suppressed because it is too large Load Diff

View File

@ -11,6 +11,7 @@ const link = @import("../../link.zig");
const Module = @import("../../Module.zig");
const Type = @import("../../type.zig").Type;
const ErrorMsg = Module.ErrorMsg;
const Target = std.Target;
const assert = std.debug.assert;
const DW = std.dwarf;
const leb128 = std.leb;
@ -93,6 +94,8 @@ pub fn emitMir(
.sub => try emit.mirDataProcessing(inst),
.subs => try emit.mirDataProcessing(inst),
.sub_sp_scratch_r0 => try emit.mirSubStackPointer(inst),
.asr => try emit.mirShift(inst),
.lsl => try emit.mirShift(inst),
.lsr => try emit.mirShift(inst),
@ -190,6 +193,24 @@ fn instructionSize(emit: *Emit, inst: Mir.Inst.Index) usize {
.dbg_epilogue_begin,
.dbg_prologue_end,
=> return 0,
.sub_sp_scratch_r0 => {
const imm32 = emit.mir.instructions.items(.data)[inst].imm32;
if (imm32 == 0) {
return 0 * 4;
} else if (Instruction.Operand.fromU32(imm32) != null) {
// sub
return 1 * 4;
} else if (Target.arm.featureSetHas(emit.target.cpu.features, .has_v7)) {
// movw; movt; sub
return 3 * 4;
} else {
// mov; orr; orr; orr; sub
return 5 * 4;
}
},
else => return 4,
}
}
@ -385,20 +406,75 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
fn mirDataProcessing(emit: *Emit, inst: Mir.Inst.Index) !void {
const tag = emit.mir.instructions.items(.tag)[inst];
const cond = emit.mir.instructions.items(.cond)[inst];
const rr_op = emit.mir.instructions.items(.data)[inst].rr_op;
switch (tag) {
.add => try emit.writeInstruction(Instruction.add(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.adds => try emit.writeInstruction(Instruction.adds(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.@"and" => try emit.writeInstruction(Instruction.@"and"(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.cmp => try emit.writeInstruction(Instruction.cmp(cond, rr_op.rn, rr_op.op)),
.eor => try emit.writeInstruction(Instruction.eor(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.mov => try emit.writeInstruction(Instruction.mov(cond, rr_op.rd, rr_op.op)),
.mvn => try emit.writeInstruction(Instruction.mvn(cond, rr_op.rd, rr_op.op)),
.orr => try emit.writeInstruction(Instruction.orr(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.rsb => try emit.writeInstruction(Instruction.rsb(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.sub => try emit.writeInstruction(Instruction.sub(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.subs => try emit.writeInstruction(Instruction.subs(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.add,
.adds,
.@"and",
.eor,
.orr,
.rsb,
.sub,
.subs,
=> {
const rr_op = emit.mir.instructions.items(.data)[inst].rr_op;
switch (tag) {
.add => try emit.writeInstruction(Instruction.add(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.adds => try emit.writeInstruction(Instruction.adds(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.@"and" => try emit.writeInstruction(Instruction.@"and"(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.eor => try emit.writeInstruction(Instruction.eor(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.orr => try emit.writeInstruction(Instruction.orr(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.rsb => try emit.writeInstruction(Instruction.rsb(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.sub => try emit.writeInstruction(Instruction.sub(cond, rr_op.rd, rr_op.rn, rr_op.op)),
.subs => try emit.writeInstruction(Instruction.subs(cond, rr_op.rd, rr_op.rn, rr_op.op)),
else => unreachable,
}
},
.cmp => {
const r_op_cmp = emit.mir.instructions.items(.data)[inst].r_op_cmp;
try emit.writeInstruction(Instruction.cmp(cond, r_op_cmp.rn, r_op_cmp.op));
},
.mov,
.mvn,
=> {
const r_op_mov = emit.mir.instructions.items(.data)[inst].r_op_mov;
switch (tag) {
.mov => try emit.writeInstruction(Instruction.mov(cond, r_op_mov.rd, r_op_mov.op)),
.mvn => try emit.writeInstruction(Instruction.mvn(cond, r_op_mov.rd, r_op_mov.op)),
else => unreachable,
}
},
else => unreachable,
}
}
/// Emits machine code for the `sub_sp_scratch_r0` pseudo-instruction:
/// subtracts the instruction's 32-bit immediate from the stack pointer,
/// using r0 as a scratch register when the immediate cannot be encoded
/// as a single ARM operand.
fn mirSubStackPointer(emit: *Emit, inst: Mir.Inst.Index) !void {
    const tag = emit.mir.instructions.items(.tag)[inst];
    const cond = emit.mir.instructions.items(.cond)[inst];
    const imm32 = emit.mir.instructions.items(.data)[inst].imm32;
    switch (tag) {
        .sub_sp_scratch_r0 => {
            // Zero adjustment emits no instructions (must agree with the
            // 0-byte size reported by instructionSize for this case).
            if (imm32 == 0) return;
            const operand = Instruction.Operand.fromU32(imm32) orelse blk: {
                // The immediate is not encodable directly; materialize it
                // into the r0 scratch register first.
                const scratch: Register = .r0;
                if (Target.arm.featureSetHas(emit.target.cpu.features, .has_v7)) {
                    // ARMv7+: movw/movt load the low and high 16 bits (2 instructions).
                    try emit.writeInstruction(Instruction.movw(cond, scratch, @truncate(u16, imm32)));
                    try emit.writeInstruction(Instruction.movt(cond, scratch, @truncate(u16, imm32 >> 16)));
                } else {
                    // Pre-v7: build the value byte by byte with mov + 3 orr.
                    // NOTE(review): the second argument of Operand.imm appears to be
                    // the rotation selector that positions each byte within the word
                    // — TODO confirm against bits.Instruction.Operand.imm semantics.
                    try emit.writeInstruction(Instruction.mov(cond, scratch, Instruction.Operand.imm(@truncate(u8, imm32), 0)));
                    try emit.writeInstruction(Instruction.orr(cond, scratch, scratch, Instruction.Operand.imm(@truncate(u8, imm32 >> 8), 12)));
                    try emit.writeInstruction(Instruction.orr(cond, scratch, scratch, Instruction.Operand.imm(@truncate(u8, imm32 >> 16), 8)));
                    try emit.writeInstruction(Instruction.orr(cond, scratch, scratch, Instruction.Operand.imm(@truncate(u8, imm32 >> 24), 4)));
                }
                break :blk Instruction.Operand.reg(scratch, Instruction.Operand.Shift.none);
            };
            try emit.writeInstruction(Instruction.sub(cond, .sp, .sp, operand));
        },
        // Only sub_sp_scratch_r0 is dispatched to this handler.
        else => unreachable,
    }
}

View File

@ -111,6 +111,11 @@ pub const Inst = struct {
strh,
/// Subtract
sub,
/// Pseudo-instruction: Subtract 32-bit immediate from stack
///
/// r0 can be used by Emit as a scratch register for loading
/// the immediate
sub_sp_scratch_r0,
/// Subtract, update condition flags
subs,
/// Supervisor Call
@ -144,6 +149,10 @@ pub const Inst = struct {
///
/// Used by e.g. svc
imm24: u24,
/// A 32-bit immediate value.
///
/// Used by e.g. sub_sp_scratch_r0
imm32: u32,
/// Index into `extra`. Meaning of what can be found there is context-dependent.
///
/// Used by e.g. load_memory
@ -166,6 +175,20 @@ pub const Inst = struct {
rd: Register,
imm16: u16,
},
/// A register and an operand
///
/// Used by mov and mvn
r_op_mov: struct {
rd: Register,
op: bits.Instruction.Operand,
},
/// A register and an operand
///
/// Used by cmp
r_op_cmp: struct {
rn: Register,
op: bits.Instruction.Operand,
},
/// Two registers and a shift amount
///
/// Used by e.g. lsl

View File

@ -128,15 +128,11 @@ pub const MCValue = union(enum) {
/// The value is in memory at a hard-coded address.
/// If the type is a pointer, it means the pointer address is at this memory location.
memory: u64,
/// The value is in memory referenced indirectly via a GOT entry index.
/// If the type is a pointer, it means the pointer is referenced indirectly via GOT.
/// When lowered, linker will emit a relocation of type X86_64_RELOC_GOT.
got_load: u32,
imports_load: u32,
/// The value is in memory referenced directly via symbol index.
/// If the type is a pointer, it means the pointer is referenced directly via symbol index.
/// When lowered, linker will emit a relocation of type X86_64_RELOC_SIGNED.
direct_load: u32,
/// The value is in memory but requires a linker relocation fixup:
/// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
/// * direct - the value is referenced directly via symbol index (the linker emits a displacement reloc)
/// * import - the value is referenced indirectly via import entry index (the linker emits an import-type reloc)
linker_load: struct { @"type": enum { got, direct, import }, sym_index: u32 },
/// The value is one of the stack variables.
/// If the type is a pointer, it means the pointer address is in the stack at this offset.
stack_offset: i32,
@ -150,9 +146,7 @@ pub const MCValue = union(enum) {
.memory,
.stack_offset,
.ptr_stack_offset,
.direct_load,
.got_load,
.imports_load,
.linker_load,
=> true,
else => false,
};
@ -165,26 +159,6 @@ pub const MCValue = union(enum) {
};
}
fn isMutable(mcv: MCValue) bool {
return switch (mcv) {
.none => unreachable,
.unreach => unreachable,
.dead => unreachable,
.immediate,
.memory,
.eflags,
.ptr_stack_offset,
.undef,
.register_overflow,
=> false,
.register,
.stack_offset,
=> true,
};
}
fn isRegister(mcv: MCValue) bool {
return switch (mcv) {
.register => true,
@ -2307,11 +2281,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
.data = .{ .imm = @bitCast(u32, -off) },
});
},
.memory,
.got_load,
.direct_load,
.imports_load,
=> {
.memory, .linker_load => {
try self.loadMemPtrIntoRegister(addr_reg, Type.usize, array);
},
else => return self.fail("TODO implement array_elem_val when array is {}", .{array}),
@ -2652,11 +2622,7 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
else => return self.fail("TODO implement loading from register into {}", .{dst_mcv}),
}
},
.memory,
.got_load,
.direct_load,
.imports_load,
=> {
.memory, .linker_load => {
const reg = try self.copyToTmpRegister(ptr_ty, ptr);
try self.load(dst_mcv, .{ .register = reg }, ptr_ty);
},
@ -2691,10 +2657,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue) InnerError!void {
switch (ptr) {
.got_load,
.direct_load,
.imports_load,
=> |sym_index| {
.linker_load => |load_struct| {
const abi_size = @intCast(u32, ptr_ty.abiSize(self.target.*));
const mod = self.bin_file.options.module.?;
const fn_owner_decl = mod.declPtr(self.mod_fn.owner_decl);
@ -2702,11 +2665,10 @@ fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue
fn_owner_decl.link.macho.sym_index
else
fn_owner_decl.link.coff.sym_index;
const flags: u2 = switch (ptr) {
.got_load => 0b00,
.direct_load => 0b01,
.imports_load => 0b10,
else => unreachable,
const flags: u2 = switch (load_struct.@"type") {
.got => 0b00,
.direct => 0b01,
.import => 0b10,
};
_ = try self.addInst(.{
.tag = .lea_pic,
@ -2717,7 +2679,7 @@ fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue
.data = .{
.relocation = .{
.atom_index = atom_index,
.sym_index = sym_index,
.sym_index = load_struct.sym_index,
},
},
});
@ -2801,9 +2763,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
.register => |src_reg| {
try self.genInlineMemcpyRegisterRegister(value_ty, reg, src_reg, 0);
},
.got_load,
.direct_load,
.imports_load,
.linker_load,
.memory,
.stack_offset,
=> {
@ -2822,11 +2782,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
},
}
},
.got_load,
.direct_load,
.imports_load,
.memory,
=> {
.linker_load, .memory => {
const value_lock: ?RegisterLock = switch (value) {
.register => |reg| self.register_manager.lockReg(reg),
else => null,
@ -2894,11 +2850,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
.register => {
return self.store(new_ptr, value, ptr_ty, value_ty);
},
.got_load,
.direct_load,
.imports_load,
.memory,
=> {
.linker_load, .memory => {
if (abi_size <= 8) {
const tmp_reg = try self.register_manager.allocReg(null, gp);
const tmp_reg_lock = self.register_manager.lockRegAssumeUnused(tmp_reg);
@ -3606,9 +3558,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MCValu
});
},
.memory,
.got_load,
.direct_load,
.imports_load,
.linker_load,
.eflags,
=> {
assert(abi_size <= 8);
@ -3694,10 +3644,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MCValu
=> {
return self.fail("TODO implement x86 ADD/SUB/CMP source memory", .{});
},
.got_load,
.direct_load,
.imports_load,
=> {
.linker_load => {
return self.fail("TODO implement x86 ADD/SUB/CMP source symbol at index in linker", .{});
},
.eflags => {
@ -3708,10 +3655,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MCValu
.memory => {
return self.fail("TODO implement x86 ADD/SUB/CMP destination memory", .{});
},
.got_load,
.direct_load,
.imports_load,
=> {
.linker_load => {
return self.fail("TODO implement x86 ADD/SUB/CMP destination symbol at index", .{});
},
}
@ -3779,10 +3723,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
.memory => {
return self.fail("TODO implement x86 multiply source memory", .{});
},
.got_load,
.direct_load,
.imports_load,
=> {
.linker_load => {
return self.fail("TODO implement x86 multiply source symbol at index in linker", .{});
},
.eflags => {
@ -3826,10 +3767,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
.memory, .stack_offset => {
return self.fail("TODO implement x86 multiply source memory", .{});
},
.got_load,
.direct_load,
.imports_load,
=> {
.linker_load => {
return self.fail("TODO implement x86 multiply source symbol at index in linker", .{});
},
.eflags => {
@ -3840,10 +3778,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
.memory => {
return self.fail("TODO implement x86 multiply destination memory", .{});
},
.got_load,
.direct_load,
.imports_load,
=> {
.linker_load => {
return self.fail("TODO implement x86 multiply destination symbol at index in linker", .{});
},
}
@ -4006,9 +3941,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
.unreach => unreachable,
.dead => unreachable,
.memory => unreachable,
.got_load => unreachable,
.direct_load => unreachable,
.imports_load => unreachable,
.linker_load => unreachable,
.eflags => unreachable,
.register_overflow => unreachable,
}
@ -4066,7 +3999,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
const func = func_payload.data;
const fn_owner_decl = mod.declPtr(func.owner_decl);
try self.genSetReg(Type.initTag(.usize), .rax, .{
.got_load = fn_owner_decl.link.coff.sym_index,
.linker_load = .{
.@"type" = .got,
.sym_index = fn_owner_decl.link.coff.sym_index,
},
});
_ = try self.addInst(.{
.tag = .call,
@ -4087,7 +4023,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
}
const sym_index = try coff_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
try self.genSetReg(Type.initTag(.usize), .rax, .{
.imports_load = sym_index,
.linker_load = .{
.@"type" = .import,
.sym_index = sym_index,
},
});
_ = try self.addInst(.{
.tag = .call,
@ -4119,7 +4058,12 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
const func = func_payload.data;
const fn_owner_decl = mod.declPtr(func.owner_decl);
const sym_index = fn_owner_decl.link.macho.sym_index;
try self.genSetReg(Type.initTag(.usize), .rax, .{ .got_load = sym_index });
try self.genSetReg(Type.initTag(.usize), .rax, .{
.linker_load = .{
.@"type" = .got,
.sym_index = sym_index,
},
});
// callq *%rax
_ = try self.addInst(.{
.tag = .call,
@ -4505,11 +4449,7 @@ fn genVarDbgInfo(
leb128.writeILEB128(dbg_info.writer(), -off) catch unreachable;
dbg_info.items[fixup] += @intCast(u8, dbg_info.items.len - fixup - 2);
},
.memory,
.got_load,
.direct_load,
.imports_load,
=> {
.memory, .linker_load => {
const ptr_width = @intCast(u8, @divExact(self.target.cpu.arch.ptrBitWidth(), 8));
const is_ptr = switch (tag) {
.dbg_var_ptr => true,
@ -4540,10 +4480,11 @@ fn genVarDbgInfo(
try dbg_info.append(DW.OP.deref);
}
switch (mcv) {
.got_load,
.direct_load,
.imports_load,
=> |index| try dw.addExprlocReloc(index, offset, is_ptr),
.linker_load => |load_struct| try dw.addExprlocReloc(
load_struct.sym_index,
offset,
is_ptr,
),
else => {},
}
},
@ -5587,11 +5528,7 @@ fn genSetStackArg(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue) InnerE
else => return self.fail("TODO implement inputs on stack for {} with abi size > 8", .{mcv}),
}
},
.memory,
.direct_load,
.got_load,
.imports_load,
=> {
.memory, .linker_load => {
if (abi_size <= 8) {
const reg = try self.copyToTmpRegister(ty, mcv);
return self.genSetStackArg(ty, stack_offset, MCValue{ .register = reg });
@ -5835,11 +5772,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue, opts: Inl
},
}
},
.memory,
.got_load,
.direct_load,
.imports_load,
=> {
.memory, .linker_load => {
if (abi_size <= 8) {
const reg = try self.copyToTmpRegister(ty, mcv);
return self.genSetStack(ty, stack_offset, MCValue{ .register = reg }, opts);
@ -5959,11 +5892,7 @@ fn genInlineMemcpy(
const tmp_reg = regs[4].to8();
switch (dst_ptr) {
.memory,
.got_load,
.direct_load,
.imports_load,
=> {
.memory, .linker_load => {
try self.loadMemPtrIntoRegister(dst_addr_reg, Type.usize, dst_ptr);
},
.ptr_stack_offset, .stack_offset => |off| {
@ -5992,11 +5921,7 @@ fn genInlineMemcpy(
}
switch (src_ptr) {
.memory,
.got_load,
.direct_load,
.imports_load,
=> {
.memory, .linker_load => {
try self.loadMemPtrIntoRegister(src_addr_reg, Type.usize, src_ptr);
},
.ptr_stack_offset, .stack_offset => |off| {
@ -6120,11 +6045,7 @@ fn genInlineMemset(
const index_reg = regs[1].to64();
switch (dst_ptr) {
.memory,
.got_load,
.direct_load,
.imports_load,
=> {
.memory, .linker_load => {
try self.loadMemPtrIntoRegister(addr_reg, Type.usize, dst_ptr);
},
.ptr_stack_offset, .stack_offset => |off| {
@ -6356,10 +6277,7 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
.data = undefined,
});
},
.direct_load,
.got_load,
.imports_load,
=> {
.linker_load => {
switch (ty.zigTypeTag()) {
.Float => {
const base_reg = try self.register_manager.allocReg(null, gp);
@ -6753,11 +6671,7 @@ fn airMemcpy(self: *Self, inst: Air.Inst.Index) !void {
// TODO Is this the only condition for pointer dereference for memcpy?
const src: MCValue = blk: {
switch (src_ptr) {
.got_load,
.direct_load,
.imports_load,
.memory,
=> {
.linker_load, .memory => {
const reg = try self.register_manager.allocReg(null, gp);
try self.loadMemPtrIntoRegister(reg, src_ty, src_ptr);
_ = try self.addInst(.{
@ -6997,10 +6911,16 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
return MCValue{ .memory = got_addr };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
assert(decl.link.macho.sym_index != 0);
return MCValue{ .got_load = decl.link.macho.sym_index };
return MCValue{ .linker_load = .{
.@"type" = .got,
.sym_index = decl.link.macho.sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
assert(decl.link.coff.sym_index != 0);
return MCValue{ .got_load = decl.link.coff.sym_index };
return MCValue{ .linker_load = .{
.@"type" = .got,
.sym_index = decl.link.coff.sym_index,
} };
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
try p9.seeDecl(decl_index);
const got_addr = p9.bases.data + decl.link.plan9.got_index.? * ptr_bytes;
@ -7019,9 +6939,15 @@ fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
const vaddr = elf_file.local_symbols.items[local_sym_index].st_value;
return MCValue{ .memory = vaddr };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
return MCValue{ .direct_load = local_sym_index };
return MCValue{ .linker_load = .{
.@"type" = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return MCValue{ .direct_load = local_sym_index };
return MCValue{ .linker_load = .{
.@"type" = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Plan9)) |_| {
return self.fail("TODO lower unnamed const in Plan9", .{});
} else {

View File

@ -1021,10 +1021,14 @@ fn mirLeaPic(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
.@"type" = switch (ops.flags) {
0b00 => .got,
0b01 => .direct,
0b10 => .imports,
0b10 => .import,
else => unreachable,
},
.target = switch (ops.flags) {
0b00, 0b01 => .{ .sym_index = relocation.sym_index, .file = null },
0b10 => coff_file.getGlobalByIndex(relocation.sym_index),
else => unreachable,
},
.target = .{ .sym_index = relocation.sym_index, .file = null },
.offset = @intCast(u32, end_offset - 4),
.addend = 0,
.pcrel = true,
@ -1142,12 +1146,10 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
// Add relocation to the decl.
const atom = macho_file.atom_by_index_table.get(relocation.atom_index).?;
const target = macho_file.getGlobalByIndex(relocation.sym_index);
try atom.relocs.append(emit.bin_file.allocator, .{
.offset = offset,
.target = .{
.sym_index = relocation.sym_index,
.file = null,
},
.target = target,
.addend = 0,
.subtractor = null,
.pcrel = true,
@ -1157,16 +1159,17 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
} else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
// Add relocation to the decl.
const atom = coff_file.atom_by_index_table.get(relocation.atom_index).?;
const target = coff_file.getGlobalByIndex(relocation.sym_index);
try atom.addRelocation(coff_file, .{
.@"type" = .direct,
.target = .{ .sym_index = relocation.sym_index, .file = null },
.target = target,
.offset = offset,
.addend = 0,
.pcrel = true,
.length = 2,
});
} else {
return emit.fail("TODO implement call_extern for linking backends different than MachO", .{});
return emit.fail("TODO implement call_extern for linking backends different than MachO and COFF", .{});
}
}

View File

@ -137,7 +137,7 @@ pub fn genHtml(
);
const source = try src.getSource(allocator);
try tokenizeAndPrintRaw(allocator, out, source.bytes);
try tokenizeAndPrintRaw(out, source.bytes);
try out.writeAll(
\\</body>
\\</html>
@ -150,13 +150,9 @@ const end_line = "</span>\n";
var line_counter: usize = 1;
pub fn tokenizeAndPrintRaw(
allocator: Allocator,
out: anytype,
raw_src: [:0]const u8,
src: [:0]const u8,
) !void {
const src = try allocator.dupeZ(u8, raw_src);
defer allocator.free(src);
line_counter = 1;
try out.print("<pre><code>" ++ start_line, .{line_counter});

View File

@ -719,17 +719,16 @@ pub fn buildSharedObjects(comp: *Compilation) !void {
.lt => continue,
.gt => {
// TODO Expose via compile error mechanism instead of log.
log.err("invalid target glibc version: {}", .{target_version});
log.warn("invalid target glibc version: {}", .{target_version});
return error.InvalidTargetGLibCVersion;
},
}
} else {
} else blk: {
const latest_index = metadata.all_versions.len - 1;
// TODO Expose via compile error mechanism instead of log.
log.err("zig does not yet provide glibc version {}, the max provided version is {}", .{
log.warn("zig cannot build new glibc version {}; providing instead {}", .{
target_version, metadata.all_versions[latest_index],
});
return error.InvalidTargetGLibCVersion;
break :blk latest_index;
};
{

View File

@ -127,7 +127,7 @@ pub const Reloc = struct {
@"type": enum {
got,
direct,
imports,
import,
},
target: SymbolWithLoc,
offset: u32,
@ -141,7 +141,7 @@ pub const Reloc = struct {
switch (self.@"type") {
.got => return coff_file.getGotAtomForSymbol(self.target),
.direct => return coff_file.getAtomForSymbol(self.target),
.imports => return coff_file.getImportAtomForSymbol(self.target),
.import => return coff_file.getImportAtomForSymbol(self.target),
}
}
};
@ -1423,23 +1423,22 @@ fn resolveGlobalSymbol(self: *Coff, current: SymbolWithLoc) !void {
const sym = self.getSymbol(current);
const sym_name = self.getSymbolName(current);
const global_index = self.resolver.get(sym_name) orelse {
const name = try gpa.dupe(u8, sym_name);
const global_index = try self.allocateGlobal();
self.globals.items[global_index] = current;
try self.resolver.putNoClobber(gpa, name, global_index);
const gop = try self.getOrPutGlobalPtr(sym_name);
if (!gop.found_existing) {
gop.value_ptr.* = current;
if (sym.section_number == .UNDEFINED) {
try self.unresolved.putNoClobber(gpa, global_index, false);
try self.unresolved.putNoClobber(gpa, self.getGlobalIndex(sym_name).?, false);
}
return;
};
}
log.debug("TODO finish resolveGlobalSymbols implementation", .{});
if (sym.section_number == .UNDEFINED) return;
_ = self.unresolved.swapRemove(global_index);
self.globals.items[global_index] = current;
_ = self.unresolved.swapRemove(self.getGlobalIndex(sym_name).?);
gop.value_ptr.* = current;
}
pub fn flush(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Node) !void {
@ -1544,25 +1543,26 @@ pub fn getDeclVAddr(
}
pub fn getGlobalSymbol(self: *Coff, name: []const u8) !u32 {
if (self.resolver.get(name)) |global_index| {
return self.globals.items[global_index].sym_index;
const gop = try self.getOrPutGlobalPtr(name);
const global_index = self.getGlobalIndex(name).?;
if (gop.found_existing) {
return global_index;
}
const gpa = self.base.allocator;
const sym_index = try self.allocateSymbol();
const global_index = try self.allocateGlobal();
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
self.globals.items[global_index] = sym_loc;
gop.value_ptr.* = sym_loc;
const gpa = self.base.allocator;
const sym_name = try gpa.dupe(u8, name);
const sym = self.getSymbolPtr(sym_loc);
try self.setSymbolName(sym, sym_name);
sym.storage_class = .EXTERNAL;
try self.resolver.putNoClobber(gpa, sym_name, global_index);
try self.unresolved.putNoClobber(gpa, global_index, true);
return sym_index;
return global_index;
}
pub fn updateDeclLineNumber(self: *Coff, module: *Module, decl: *Module.Decl) !void {
@ -2061,6 +2061,49 @@ pub fn getSymbolName(self: *const Coff, sym_loc: SymbolWithLoc) []const u8 {
return self.strtab.get(offset).?;
}
/// Returns pointer to the global entry for `name` if one exists.
pub fn getGlobalPtr(self: *Coff, name: []const u8) ?*SymbolWithLoc {
const global_index = self.resolver.get(name) orelse return null;
return &self.globals.items[global_index];
}
/// Returns the global entry for `name` if one exists.
pub fn getGlobal(self: *const Coff, name: []const u8) ?SymbolWithLoc {
const global_index = self.resolver.get(name) orelse return null;
return self.globals.items[global_index];
}
/// Returns the index of the global entry for `name` if one exists.
pub fn getGlobalIndex(self: *const Coff, name: []const u8) ?u32 {
return self.resolver.get(name);
}
/// Returns global entry at `index`.
pub fn getGlobalByIndex(self: *const Coff, index: u32) SymbolWithLoc {
assert(index < self.globals.items.len);
return self.globals.items[index];
}
const GetOrPutGlobalPtrResult = struct {
found_existing: bool,
value_ptr: *SymbolWithLoc,
};
/// Return pointer to the global entry for `name` if one exists.
/// Puts a new global entry for `name` if one doesn't exist, and
/// returns a pointer to it.
pub fn getOrPutGlobalPtr(self: *Coff, name: []const u8) !GetOrPutGlobalPtrResult {
if (self.getGlobalPtr(name)) |ptr| {
return GetOrPutGlobalPtrResult{ .found_existing = true, .value_ptr = ptr };
}
const gpa = self.base.allocator;
const global_index = try self.allocateGlobal();
const global_name = try gpa.dupe(u8, name);
_ = try self.resolver.put(gpa, global_name, global_index);
const ptr = &self.globals.items[global_index];
return GetOrPutGlobalPtrResult{ .found_existing = false, .value_ptr = ptr };
}
/// Returns atom if there is an atom referenced by the symbol described by `sym_loc` descriptor.
/// Returns null on failure.
pub fn getAtomForSymbol(self: *Coff, sym_loc: SymbolWithLoc) ?*Atom {

View File

@ -111,13 +111,3 @@ pub fn addBaseRelocation(self: *Atom, coff_file: *Coff, offset: u32) !void {
}
try gop.value_ptr.append(gpa, offset);
}
pub fn addBinding(self: *Atom, coff_file: *Coff, target: SymbolWithLoc) !void {
const gpa = coff_file.base.allocator;
log.debug(" (adding binding to target %{d} in %{d})", .{ target.sym_index, self.sym_index });
const gop = try coff_file.bindings.getOrPut(gpa, self);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
}
try gop.value_ptr.append(gpa, target);
}

View File

@ -131,17 +131,12 @@ la_symbol_ptr_section_index: ?u8 = null,
data_section_index: ?u8 = null,
locals: std.ArrayListUnmanaged(macho.nlist_64) = .{},
globals: std.StringArrayHashMapUnmanaged(SymbolWithLoc) = .{},
// FIXME Jakub
// TODO storing index into globals might be dangerous if we delete a global
// while not having everything resolved. Actually, perhaps `unresolved`
// should not be stored at the global scope? Is this possible?
// Otherwise, audit if this can be a problem.
// An alternative, which I still need to investigate for perf reasons is to
// store all global names in an adapted with context strtab.
globals: std.ArrayListUnmanaged(SymbolWithLoc) = .{},
resolver: std.StringHashMapUnmanaged(u32) = .{},
unresolved: std.AutoArrayHashMapUnmanaged(u32, bool) = .{},
locals_free_list: std.ArrayListUnmanaged(u32) = .{},
globals_free_list: std.ArrayListUnmanaged(u32) = .{},
dyld_stub_binder_index: ?u32 = null,
dyld_private_atom: ?*Atom = null,
@ -1917,7 +1912,7 @@ fn allocateSpecialSymbols(self: *MachO) !void {
"___dso_handle",
"__mh_execute_header",
}) |name| {
const global = self.globals.get(name) orelse continue;
const global = self.getGlobal(name) orelse continue;
if (global.file != null) continue;
const sym = self.getSymbolPtr(global);
const seg = self.segments.items[self.text_segment_cmd_index.?];
@ -2048,16 +2043,11 @@ fn writeAtomsIncremental(self: *MachO) !void {
pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !*Atom {
const gpa = self.base.allocator;
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = 0,
.n_type = macho.N_SECT,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
const sym_index = try self.allocateSymbol();
const atom = try MachO.createEmptyAtom(gpa, sym_index, @sizeOf(u64), 3);
const sym = atom.getSymbolPtr(self);
sym.n_type = macho.N_SECT;
try atom.relocs.append(gpa, .{
.offset = 0,
.target = target,
@ -2074,7 +2064,7 @@ pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !*Atom {
const target_sym = self.getSymbol(target);
if (target_sym.undf()) {
const global = self.globals.get(self.getSymbolName(target)).?;
const global = self.getGlobal(self.getSymbolName(target)).?;
try atom.bindings.append(gpa, .{
.target = global,
.offset = 0,
@ -2093,20 +2083,15 @@ pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !*Atom {
pub fn createTlvPtrAtom(self: *MachO, target: SymbolWithLoc) !*Atom {
const gpa = self.base.allocator;
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = 0,
.n_type = macho.N_SECT,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
const sym_index = try self.allocateSymbol();
const atom = try MachO.createEmptyAtom(gpa, sym_index, @sizeOf(u64), 3);
const sym = atom.getSymbolPtr(self);
sym.n_type = macho.N_SECT;
const target_sym = self.getSymbol(target);
assert(target_sym.undf());
const global = self.globals.get(self.getSymbolName(target)).?;
const global = self.getGlobal(self.getSymbolName(target)).?;
try atom.bindings.append(gpa, .{
.target = global,
.offset = 0,
@ -2130,15 +2115,10 @@ fn createDyldPrivateAtom(self: *MachO) !void {
if (self.dyld_private_atom != null) return;
const gpa = self.base.allocator;
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = 0,
.n_type = macho.N_SECT,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
const sym_index = try self.allocateSymbol();
const atom = try MachO.createEmptyAtom(gpa, sym_index, @sizeOf(u64), 3);
const sym = atom.getSymbolPtr(self);
sym.n_type = macho.N_SECT;
self.dyld_private_atom = atom;
try self.allocateAtomCommon(atom, self.data_section_index.?);
@ -2163,15 +2143,11 @@ fn createStubHelperPreambleAtom(self: *MachO) !void {
.aarch64 => 2,
else => unreachable,
};
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = 0,
.n_type = macho.N_SECT,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
const sym_index = try self.allocateSymbol();
const atom = try MachO.createEmptyAtom(gpa, sym_index, size, alignment);
const sym = atom.getSymbolPtr(self);
sym.n_type = macho.N_SECT;
const dyld_private_sym_index = self.dyld_private_atom.?.sym_index;
switch (arch) {
.x86_64 => {
@ -2288,15 +2264,11 @@ pub fn createStubHelperAtom(self: *MachO) !*Atom {
.aarch64 => 2,
else => unreachable,
};
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = 0,
.n_type = macho.N_SECT,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
const sym_index = try self.allocateSymbol();
const atom = try MachO.createEmptyAtom(gpa, sym_index, stub_size, alignment);
const sym = atom.getSymbolPtr(self);
sym.n_type = macho.N_SECT;
try atom.relocs.ensureTotalCapacity(gpa, 1);
switch (arch) {
@ -2352,15 +2324,11 @@ pub fn createStubHelperAtom(self: *MachO) !*Atom {
pub fn createLazyPointerAtom(self: *MachO, stub_sym_index: u32, target: SymbolWithLoc) !*Atom {
const gpa = self.base.allocator;
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = 0,
.n_type = macho.N_SECT,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
const sym_index = try self.allocateSymbol();
const atom = try MachO.createEmptyAtom(gpa, sym_index, @sizeOf(u64), 3);
const sym = atom.getSymbolPtr(self);
sym.n_type = macho.N_SECT;
try atom.relocs.append(gpa, .{
.offset = 0,
.target = .{ .sym_index = stub_sym_index, .file = null },
@ -2376,7 +2344,7 @@ pub fn createLazyPointerAtom(self: *MachO, stub_sym_index: u32, target: SymbolWi
});
try atom.rebases.append(gpa, 0);
const global = self.globals.get(self.getSymbolName(target)).?;
const global = self.getGlobal(self.getSymbolName(target)).?;
try atom.lazy_bindings.append(gpa, .{
.target = global,
.offset = 0,
@ -2403,15 +2371,11 @@ pub fn createStubAtom(self: *MachO, laptr_sym_index: u32) !*Atom {
.aarch64 => 3 * @sizeOf(u32),
else => unreachable, // unhandled architecture type
};
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = 0,
.n_type = macho.N_SECT,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
const sym_index = try self.allocateSymbol();
const atom = try MachO.createEmptyAtom(gpa, sym_index, stub_size, alignment);
const sym = atom.getSymbolPtr(self);
sym.n_type = macho.N_SECT;
switch (arch) {
.x86_64 => {
// jmp
@ -2472,7 +2436,7 @@ pub fn createStubAtom(self: *MachO, laptr_sym_index: u32) !*Atom {
fn createTentativeDefAtoms(self: *MachO) !void {
const gpa = self.base.allocator;
for (self.globals.values()) |global| {
for (self.globals.items) |global| {
const sym = self.getSymbolPtr(global);
if (!sym.tentative()) continue;
@ -2516,51 +2480,44 @@ fn createTentativeDefAtoms(self: *MachO) !void {
fn createMhExecuteHeaderSymbol(self: *MachO) !void {
if (self.base.options.output_mode != .Exe) return;
if (self.globals.get("__mh_execute_header")) |global| {
if (self.getGlobal("__mh_execute_header")) |global| {
const sym = self.getSymbol(global);
if (!sym.undf() and !(sym.pext() or sym.weakDef())) return;
}
const gpa = self.base.allocator;
const n_strx = try self.strtab.insert(gpa, "__mh_execute_header");
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = n_strx,
const sym_index = try self.allocateSymbol();
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
const sym = self.getSymbolPtr(sym_loc);
sym.* = .{
.n_strx = try self.strtab.insert(gpa, "__mh_execute_header"),
.n_type = macho.N_SECT | macho.N_EXT,
.n_sect = 0,
.n_desc = macho.REFERENCED_DYNAMICALLY,
.n_value = 0,
});
const name = try gpa.dupe(u8, "__mh_execute_header");
const gop = try self.globals.getOrPut(gpa, name);
defer if (gop.found_existing) gpa.free(name);
gop.value_ptr.* = .{
.sym_index = sym_index,
.file = null,
};
const gop = try self.getOrPutGlobalPtr("__mh_execute_header");
gop.value_ptr.* = sym_loc;
}
fn createDsoHandleSymbol(self: *MachO) !void {
const global = self.globals.getPtr("___dso_handle") orelse return;
const sym = self.getSymbolPtr(global.*);
if (!sym.undf()) return;
const global = self.getGlobalPtr("___dso_handle") orelse return;
if (!self.getSymbol(global.*).undf()) return;
const gpa = self.base.allocator;
const n_strx = try self.strtab.insert(gpa, "___dso_handle");
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = n_strx,
const sym_index = try self.allocateSymbol();
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
const sym = self.getSymbolPtr(sym_loc);
sym.* = .{
.n_strx = try self.strtab.insert(gpa, "___dso_handle"),
.n_type = macho.N_SECT | macho.N_EXT,
.n_sect = 0,
.n_desc = macho.N_WEAK_DEF,
.n_value = 0,
});
global.* = .{
.sym_index = sym_index,
.file = null,
};
_ = self.unresolved.swapRemove(@intCast(u32, self.globals.getIndex("___dso_handle").?));
global.* = sym_loc;
_ = self.unresolved.swapRemove(self.getGlobalIndex("___dso_handle").?);
}
fn resolveGlobalSymbol(self: *MachO, current: SymbolWithLoc) !void {
@ -2568,19 +2525,14 @@ fn resolveGlobalSymbol(self: *MachO, current: SymbolWithLoc) !void {
const sym = self.getSymbol(current);
const sym_name = self.getSymbolName(current);
const name = try gpa.dupe(u8, sym_name);
const global_index = @intCast(u32, self.globals.values().len);
const gop = try self.globals.getOrPut(gpa, name);
defer if (gop.found_existing) gpa.free(name);
const gop = try self.getOrPutGlobalPtr(sym_name);
if (!gop.found_existing) {
gop.value_ptr.* = current;
if (sym.undf() and !sym.tentative()) {
try self.unresolved.putNoClobber(gpa, global_index, false);
try self.unresolved.putNoClobber(gpa, self.getGlobalIndex(sym_name).?, false);
}
return;
}
const global = gop.value_ptr.*;
const global_sym = self.getSymbol(global);
@ -2619,7 +2571,7 @@ fn resolveGlobalSymbol(self: *MachO, current: SymbolWithLoc) !void {
}
if (sym.undf() and !sym.tentative()) return;
_ = self.unresolved.swapRemove(@intCast(u32, self.globals.getIndex(name).?));
_ = self.unresolved.swapRemove(self.getGlobalIndex(sym_name).?);
gop.value_ptr.* = current;
}
@ -2664,7 +2616,7 @@ fn resolveSymbolsInObject(self: *MachO, object_id: u16) !void {
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = object_id };
self.resolveGlobalSymbol(sym_loc) catch |err| switch (err) {
error.MultipleSymbolDefinitions => {
const global = self.globals.get(sym_name).?;
const global = self.getGlobal(sym_name).?;
log.err("symbol '{s}' defined multiple times", .{sym_name});
if (global.file) |file| {
log.err(" first definition in '{s}'", .{self.objects.items[file].name});
@ -2684,7 +2636,8 @@ fn resolveSymbolsInArchives(self: *MachO) !void {
const cpu_arch = self.base.options.target.cpu.arch;
var next_sym: usize = 0;
loop: while (next_sym < self.unresolved.count()) {
const global = self.globals.values()[self.unresolved.keys()[next_sym]];
const global_index = self.unresolved.keys()[next_sym];
const global = self.globals.items[global_index];
const sym_name = self.getSymbolName(global);
for (self.archives.items) |archive| {
@ -2710,10 +2663,11 @@ fn resolveSymbolsInArchives(self: *MachO) !void {
fn resolveSymbolsInDylibs(self: *MachO) !void {
if (self.dylibs.items.len == 0) return;
const gpa = self.base.allocator;
var next_sym: usize = 0;
loop: while (next_sym < self.unresolved.count()) {
const global_index = self.unresolved.keys()[next_sym];
const global = self.globals.values()[global_index];
const global = self.globals.items[global_index];
const sym = self.getSymbolPtr(global);
const sym_name = self.getSymbolName(global);
@ -2722,7 +2676,7 @@ fn resolveSymbolsInDylibs(self: *MachO) !void {
const dylib_id = @intCast(u16, id);
if (!self.referenced_dylibs.contains(dylib_id)) {
try self.referenced_dylibs.putNoClobber(self.base.allocator, dylib_id, {});
try self.referenced_dylibs.putNoClobber(gpa, dylib_id, {});
}
const ordinal = self.referenced_dylibs.getIndex(dylib_id) orelse unreachable;
@ -2760,7 +2714,7 @@ fn resolveSymbolsAtLoading(self: *MachO) !void {
var next_sym: usize = 0;
while (next_sym < self.unresolved.count()) {
const global_index = self.unresolved.keys()[next_sym];
const global = self.globals.values()[global_index];
const global = self.globals.items[global_index];
const sym = self.getSymbolPtr(global);
const sym_name = self.getSymbolName(global);
@ -2800,26 +2754,27 @@ fn resolveDyldStubBinder(self: *MachO) !void {
if (self.unresolved.count() == 0) return; // no need for a stub binder if we don't have any imports
const gpa = self.base.allocator;
const n_strx = try self.strtab.insert(gpa, "dyld_stub_binder");
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = n_strx,
const sym_index = try self.allocateSymbol();
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
const sym = self.getSymbolPtr(sym_loc);
const sym_name = "dyld_stub_binder";
sym.* = .{
.n_strx = try self.strtab.insert(gpa, sym_name),
.n_type = macho.N_UNDF,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
const sym_name = try gpa.dupe(u8, "dyld_stub_binder");
const global = SymbolWithLoc{ .sym_index = sym_index, .file = null };
try self.globals.putNoClobber(gpa, sym_name, global);
const sym = &self.locals.items[sym_index];
};
const gop = try self.getOrPutGlobalPtr(sym_name);
gop.value_ptr.* = sym_loc;
const global = gop.value_ptr.*;
for (self.dylibs.items) |dylib, id| {
if (!dylib.symbols.contains(sym_name)) continue;
const dylib_id = @intCast(u16, id);
if (!self.referenced_dylibs.contains(dylib_id)) {
try self.referenced_dylibs.putNoClobber(self.base.allocator, dylib_id, {});
try self.referenced_dylibs.putNoClobber(gpa, dylib_id, {});
}
const ordinal = self.referenced_dylibs.getIndex(dylib_id) orelse unreachable;
@ -3050,14 +3005,20 @@ pub fn deinit(self: *MachO) void {
self.stubs_free_list.deinit(gpa);
self.stubs_table.deinit(gpa);
self.strtab.deinit(gpa);
self.locals.deinit(gpa);
self.globals.deinit(gpa);
self.locals_free_list.deinit(gpa);
self.globals_free_list.deinit(gpa);
self.unresolved.deinit(gpa);
for (self.globals.keys()) |key| {
gpa.free(key);
{
var it = self.resolver.keyIterator();
while (it.next()) |key_ptr| {
gpa.free(key_ptr.*);
}
self.resolver.deinit(gpa);
}
self.globals.deinit(gpa);
for (self.objects.items) |*object| {
object.deinit(gpa);
@ -3211,6 +3172,29 @@ fn allocateSymbol(self: *MachO) !u32 {
return index;
}
fn allocateGlobal(self: *MachO) !u32 {
try self.globals.ensureUnusedCapacity(self.base.allocator, 1);
const index = blk: {
if (self.globals_free_list.popOrNull()) |index| {
log.debug(" (reusing global index {d})", .{index});
break :blk index;
} else {
log.debug(" (allocating symbol index {d})", .{self.globals.items.len});
const index = @intCast(u32, self.globals.items.len);
_ = self.globals.addOneAssumeCapacity();
break :blk index;
}
};
self.globals.items[index] = .{
.sym_index = 0,
.file = null,
};
return index;
}
pub fn allocateGotEntry(self: *MachO, target: SymbolWithLoc) !u32 {
const gpa = self.base.allocator;
try self.got_entries.ensureUnusedCapacity(gpa, 1);
@ -3832,7 +3816,7 @@ pub fn updateDeclExports(
self.resolveGlobalSymbol(sym_loc) catch |err| switch (err) {
error.MultipleSymbolDefinitions => {
const global = self.globals.get(exp_name).?;
const global = self.getGlobal(exp_name).?;
if (sym_loc.sym_index != global.sym_index and global.file != null) {
_ = try module.failed_exports.put(module.gpa, exp, try Module.ErrorMsg.create(
gpa,
@ -3869,11 +3853,13 @@ pub fn deleteExport(self: *MachO, exp: Export) void {
};
self.locals_free_list.append(gpa, sym_index) catch {};
if (self.globals.get(sym_name)) |global| blk: {
if (global.sym_index != sym_index) break :blk;
if (global.file != null) break :blk;
const kv = self.globals.fetchSwapRemove(sym_name);
gpa.free(kv.?.key);
if (self.resolver.fetchRemove(sym_name)) |entry| {
defer gpa.free(entry.key);
self.globals_free_list.append(gpa, entry.value) catch {};
self.globals.items[entry.value] = .{
.sym_index = 0,
.file = null,
};
}
}
@ -4864,32 +4850,26 @@ pub fn addAtomToSection(self: *MachO, atom: *Atom, sect_id: u8) !void {
pub fn getGlobalSymbol(self: *MachO, name: []const u8) !u32 {
const gpa = self.base.allocator;
const sym_name = try std.fmt.allocPrint(gpa, "_{s}", .{name});
const global_index = @intCast(u32, self.globals.values().len);
const gop = try self.globals.getOrPut(gpa, sym_name);
defer if (gop.found_existing) gpa.free(sym_name);
defer gpa.free(sym_name);
const gop = try self.getOrPutGlobalPtr(sym_name);
const global_index = self.getGlobalIndex(sym_name).?;
if (gop.found_existing) {
// TODO audit this: can we ever reference anything from outside the Zig module?
assert(gop.value_ptr.file == null);
return gop.value_ptr.sym_index;
return global_index;
}
const sym_index = @intCast(u32, self.locals.items.len);
try self.locals.append(gpa, .{
.n_strx = try self.strtab.insert(gpa, sym_name),
.n_type = macho.N_UNDF,
.n_sect = 0,
.n_desc = 0,
.n_value = 0,
});
gop.value_ptr.* = .{
.sym_index = sym_index,
.file = null,
};
const sym_index = try self.allocateSymbol();
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
gop.value_ptr.* = sym_loc;
const sym = self.getSymbolPtr(sym_loc);
sym.n_strx = try self.strtab.insert(gpa, sym_name);
try self.unresolved.putNoClobber(gpa, global_index, true);
return sym_index;
return global_index;
}
fn getSegmentAllocBase(self: MachO, indices: []const ?u8) struct { vmaddr: u64, fileoff: u64 } {
@ -5055,7 +5035,7 @@ fn writeDyldInfoData(self: *MachO, ncmds: *u32, lc_writer: anytype) !void {
if (self.base.options.output_mode == .Exe) {
for (&[_]SymbolWithLoc{
try self.getEntryPoint(),
self.globals.get("__mh_execute_header").?,
self.getGlobal("__mh_execute_header").?,
}) |global| {
const sym = self.getSymbol(global);
const sym_name = self.getSymbolName(global);
@ -5068,7 +5048,7 @@ fn writeDyldInfoData(self: *MachO, ncmds: *u32, lc_writer: anytype) !void {
}
} else {
assert(self.base.options.output_mode == .Lib);
for (self.globals.values()) |global| {
for (self.globals.items) |global| {
const sym = self.getSymbol(global);
if (sym.undf()) continue;
@ -5271,9 +5251,9 @@ fn writeFunctionStarts(self: *MachO, ncmds: *u32, lc_writer: anytype) !void {
// We need to sort by address first
var addresses = std.ArrayList(u64).init(gpa);
defer addresses.deinit();
try addresses.ensureTotalCapacityPrecise(self.globals.count());
try addresses.ensureTotalCapacityPrecise(self.globals.items.len);
for (self.globals.values()) |global| {
for (self.globals.items) |global| {
const sym = self.getSymbol(global);
if (sym.undf()) continue;
if (sym.n_desc == N_DESC_GCED) continue;
@ -5453,7 +5433,7 @@ fn writeSymtab(self: *MachO, lc: *macho.symtab_command) !SymtabCtx {
if (sym.n_desc == N_DESC_GCED) continue; // GCed, skip
const sym_loc = SymbolWithLoc{ .sym_index = @intCast(u32, sym_id), .file = null };
if (self.symbolIsTemp(sym_loc)) continue; // local temp symbol, skip
if (self.globals.contains(self.getSymbolName(sym_loc))) continue; // global symbol is either an export or import, skip
if (self.getGlobal(self.getSymbolName(sym_loc)) != null) continue; // global symbol is either an export or import, skip
try locals.append(sym);
}
@ -5463,7 +5443,7 @@ fn writeSymtab(self: *MachO, lc: *macho.symtab_command) !SymtabCtx {
if (sym.n_desc == N_DESC_GCED) continue; // GCed, skip
const sym_loc = SymbolWithLoc{ .sym_index = @intCast(u32, sym_id), .file = @intCast(u32, object_id) };
if (self.symbolIsTemp(sym_loc)) continue; // local temp symbol, skip
if (self.globals.contains(self.getSymbolName(sym_loc))) continue; // global symbol is either an export or import, skip
if (self.getGlobal(self.getSymbolName(sym_loc)) != null) continue; // global symbol is either an export or import, skip
var out_sym = sym;
out_sym.n_strx = try self.strtab.insert(gpa, self.getSymbolName(sym_loc));
try locals.append(out_sym);
@ -5477,7 +5457,7 @@ fn writeSymtab(self: *MachO, lc: *macho.symtab_command) !SymtabCtx {
var exports = std.ArrayList(macho.nlist_64).init(gpa);
defer exports.deinit();
for (self.globals.values()) |global| {
for (self.globals.items) |global| {
const sym = self.getSymbol(global);
if (sym.undf()) continue; // import, skip
if (sym.n_desc == N_DESC_GCED) continue; // GCed, skip
@ -5491,7 +5471,7 @@ fn writeSymtab(self: *MachO, lc: *macho.symtab_command) !SymtabCtx {
var imports_table = std.AutoHashMap(SymbolWithLoc, u32).init(gpa);
for (self.globals.values()) |global| {
for (self.globals.items) |global| {
const sym = self.getSymbol(global);
if (sym.n_strx == 0) continue; // no name, skip
if (!sym.undf()) continue; // not an import, skip
@ -5798,6 +5778,49 @@ pub fn getSymbolName(self: *MachO, sym_with_loc: SymbolWithLoc) []const u8 {
}
}
/// Returns pointer to the global entry for `name` if one exists.
pub fn getGlobalPtr(self: *MachO, name: []const u8) ?*SymbolWithLoc {
const global_index = self.resolver.get(name) orelse return null;
return &self.globals.items[global_index];
}
/// Returns the global entry for `name` if one exists.
pub fn getGlobal(self: *const MachO, name: []const u8) ?SymbolWithLoc {
const global_index = self.resolver.get(name) orelse return null;
return self.globals.items[global_index];
}
/// Returns the index of the global entry for `name` if one exists.
pub fn getGlobalIndex(self: *const MachO, name: []const u8) ?u32 {
return self.resolver.get(name);
}
/// Returns global entry at `index`.
pub fn getGlobalByIndex(self: *const MachO, index: u32) SymbolWithLoc {
assert(index < self.globals.items.len);
return self.globals.items[index];
}
const GetOrPutGlobalPtrResult = struct {
found_existing: bool,
value_ptr: *SymbolWithLoc,
};
/// Return pointer to the global entry for `name` if one exists.
/// Puts a new global entry for `name` if one doesn't exist, and
/// returns a pointer to it.
pub fn getOrPutGlobalPtr(self: *MachO, name: []const u8) !GetOrPutGlobalPtrResult {
if (self.getGlobalPtr(name)) |ptr| {
return GetOrPutGlobalPtrResult{ .found_existing = true, .value_ptr = ptr };
}
const gpa = self.base.allocator;
const global_index = try self.allocateGlobal();
const global_name = try gpa.dupe(u8, name);
_ = try self.resolver.put(gpa, global_name, global_index);
const ptr = &self.globals.items[global_index];
return GetOrPutGlobalPtrResult{ .found_existing = false, .value_ptr = ptr };
}
/// Returns atom if there is an atom referenced by the symbol described by `sym_with_loc` descriptor.
/// Returns null on failure.
pub fn getAtomForSymbol(self: *MachO, sym_with_loc: SymbolWithLoc) ?*Atom {
@ -5834,7 +5857,7 @@ pub fn getTlvPtrAtomForSymbol(self: *MachO, sym_with_loc: SymbolWithLoc) ?*Atom
/// Asserts output mode is executable.
pub fn getEntryPoint(self: MachO) error{MissingMainEntrypoint}!SymbolWithLoc {
const entry_name = self.base.options.entry orelse "_main";
const global = self.globals.get(entry_name) orelse {
const global = self.getGlobal(entry_name) orelse {
log.err("entrypoint '{s}' not found", .{entry_name});
return error.MissingMainEntrypoint;
};
@ -6342,9 +6365,9 @@ fn logSymtab(self: *MachO) void {
}
log.debug("globals table:", .{});
for (self.globals.keys()) |name, id| {
const value = self.globals.values()[id];
log.debug(" {s} => %{d} in object({?d})", .{ name, value.sym_index, value.file });
for (self.globals.items) |global| {
const name = self.getSymbolName(global);
log.debug(" {s} => %{d} in object({?d})", .{ name, global.sym_index, global.file });
}
log.debug("GOT entries:", .{});

View File

@ -272,7 +272,7 @@ pub fn parseRelocs(self: *Atom, relocs: []align(1) const macho.relocation_info,
subtractor = sym_loc;
} else {
const sym_name = context.macho_file.getSymbolName(sym_loc);
subtractor = context.macho_file.globals.get(sym_name).?;
subtractor = context.macho_file.getGlobal(sym_name).?;
}
// Verify that *_SUBTRACTOR is followed by *_UNSIGNED.
if (relocs.len <= i + 1) {
@ -339,7 +339,7 @@ pub fn parseRelocs(self: *Atom, relocs: []align(1) const macho.relocation_info,
break :target sym_loc;
} else {
const sym_name = context.macho_file.getSymbolName(sym_loc);
break :target context.macho_file.globals.get(sym_name).?;
break :target context.macho_file.getGlobal(sym_name).?;
}
};
const offset = @intCast(u32, rel.r_address - context.base_offset);
@ -579,7 +579,7 @@ pub fn resolveRelocs(self: *Atom, macho_file: *MachO) !void {
// If there is no atom for target, we still need to check for special, atom-less
// symbols such as `___dso_handle`.
const target_name = macho_file.getSymbolName(rel.target);
assert(macho_file.globals.contains(target_name));
assert(macho_file.getGlobal(target_name) != null);
const atomless_sym = macho_file.getSymbol(rel.target);
log.debug(" | atomless target '{s}'", .{target_name});
break :blk atomless_sym.n_value;

View File

@ -480,7 +480,7 @@ fn writeSymtab(self: *DebugSymbols, lc: *macho.symtab_command) !void {
if (sym.n_desc == MachO.N_DESC_GCED) continue; // GCed, skip
const sym_loc = MachO.SymbolWithLoc{ .sym_index = @intCast(u32, sym_id), .file = null };
if (self.base.symbolIsTemp(sym_loc)) continue; // local temp symbol, skip
if (self.base.globals.contains(self.base.getSymbolName(sym_loc))) continue; // global symbol is either an export or import, skip
if (self.base.getGlobal(self.base.getSymbolName(sym_loc)) != null) continue; // global symbol is either an export or import, skip
var out_sym = sym;
out_sym.n_strx = try self.strtab.insert(gpa, self.base.getSymbolName(sym_loc));
try locals.append(out_sym);
@ -489,7 +489,7 @@ fn writeSymtab(self: *DebugSymbols, lc: *macho.symtab_command) !void {
var exports = std.ArrayList(macho.nlist_64).init(gpa);
defer exports.deinit();
for (self.base.globals.values()) |global| {
for (self.base.globals.items) |global| {
const sym = self.base.getSymbol(global);
if (sym.undf()) continue; // import, skip
if (sym.n_desc == MachO.N_DESC_GCED) continue; // GCed, skip

View File

@ -62,7 +62,7 @@ fn collectRoots(roots: *std.AutoHashMap(*Atom, void), macho_file: *MachO) !void
else => |other| {
assert(other == .Lib);
// Add exports as GC roots
for (macho_file.globals.values()) |global| {
for (macho_file.globals.items) |global| {
const sym = macho_file.getSymbol(global);
if (!sym.sect()) continue;
const atom = macho_file.getAtomForSymbol(global) orelse {
@ -77,7 +77,7 @@ fn collectRoots(roots: *std.AutoHashMap(*Atom, void), macho_file: *MachO) !void
}
// TODO just a temp until we learn how to parse unwind records
if (macho_file.globals.get("___gxx_personality_v0")) |global| {
if (macho_file.getGlobal("___gxx_personality_v0")) |global| {
if (macho_file.getAtomForSymbol(global)) |atom| {
_ = try roots.getOrPut(atom);
log.debug("adding root", .{});

View File

@ -5957,20 +5957,36 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 {
return bytes[0..i];
}
/// non-ASCII characters (c > 127) are also treated as non-printable by fmtSliceEscapeLower.
/// If a C string literal or char literal in a macro is not valid UTF-8, we need to escape
/// non-ASCII characters so that the Zig source we output will itself be UTF-8.
fn escapeUnprintables(ctx: *Context, m: *MacroCtx) ![]const u8 {
const zigified = try zigifyEscapeSequences(ctx, m);
if (std.unicode.utf8ValidateSlice(zigified)) return zigified;
const formatter = std.fmt.fmtSliceEscapeLower(zigified);
const encoded_size = @intCast(usize, std.fmt.count("{s}", .{formatter}));
var output = try ctx.arena.alloc(u8, encoded_size);
return std.fmt.bufPrint(output, "{s}", .{formatter}) catch |err| switch (err) {
error.NoSpaceLeft => unreachable,
else => |e| return e,
};
}
fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
const tok = m.next().?;
const slice = m.slice();
switch (tok) {
.CharLiteral => {
if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) {
return Tag.char_literal.create(c.arena, try zigifyEscapeSequences(c, m));
return Tag.char_literal.create(c.arena, try escapeUnprintables(c, m));
} else {
const str = try std.fmt.allocPrint(c.arena, "0x{s}", .{std.fmt.fmtSliceHexLower(slice[1 .. slice.len - 1])});
return Tag.integer_literal.create(c.arena, str);
}
},
.StringLiteral => {
return Tag.string_literal.create(c.arena, try zigifyEscapeSequences(c, m));
return Tag.string_literal.create(c.arena, try escapeUnprintables(c, m));
},
.IntegerLiteral, .FloatLiteral => {
return parseCNumLit(c, m);

View File

@ -2042,6 +2042,9 @@ pub const Type = extern union {
try writer.writeAll("fn(");
for (fn_info.param_types) |param_ty, i| {
if (i != 0) try writer.writeAll(", ");
if (fn_info.paramIsComptime(i)) {
try writer.writeAll("comptime ");
}
if (std.math.cast(u5, i)) |index| if (@truncate(u1, fn_info.noalias_bits >> index) != 0) {
try writer.writeAll("noalias ");
};

View File

@ -13,7 +13,6 @@ const Foo = struct {
test "@alignOf(T) before referencing T" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
comptime try expect(@alignOf(Foo) != maxInt(usize));
if (native_arch == .x86_64) {
comptime try expect(@alignOf(Foo) == 4);

View File

@ -175,7 +175,6 @@ test "nested arrays of integers" {
test "implicit comptime in array type size" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
var arr: [plusOne(10)]bool = undefined;
try expect(arr.len == 11);
@ -245,7 +244,6 @@ const Sub = struct { b: u8 };
const Str = struct { a: []Sub };
test "set global var array via slice embedded in struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
var s = Str{ .a = s_array[0..] };
@ -298,7 +296,6 @@ fn testArrayByValAtComptime(b: [2]u8) u8 {
test "comptime evaluating function that takes array by value" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const arr = [_]u8{ 1, 2 };
const x = comptime testArrayByValAtComptime(arr);
@ -427,7 +424,6 @@ test "anonymous literal in array" {
test "access the null element of a null terminated array" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
@ -484,7 +480,6 @@ test "sentinel element count towards the ABI size calculation" {
test "zero-sized array with recursive type definition" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const U = struct {
fn foo(comptime T: type, comptime n: usize) type {

View File

@ -465,7 +465,6 @@ fn nine() u8 {
test "struct inside function" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
try testStructInFn();
comptime try testStructInFn();
@ -514,7 +513,6 @@ var global_foo: *i32 = undefined;
test "peer result location with typed parent, runtime condition, comptime prongs" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
const S = struct {
fn doTheTest(arg: i32) i32 {
@ -643,7 +641,6 @@ test "global constant is loaded with a runtime-known index" {
test "multiline string literal is null terminated" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
const s1 =
\\one
@ -1060,7 +1057,6 @@ comptime {
test "switch inside @as gets correct type" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
var a: u32 = 0;
var b: [2]u32 = undefined;

View File

@ -138,7 +138,6 @@ test "@bitCast extern structs at runtime and comptime" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
const Full = extern struct {
number: u16,

View File

@ -523,7 +523,6 @@ fn testCastConstArrayRefToConstSlice() !void {
test "peer type resolution: error and [N]T" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
try expect(mem.eql(u8, try testPeerErrorAndArray(0), "OK"));
comptime try expect(mem.eql(u8, try testPeerErrorAndArray(0), "OK"));
@ -548,7 +547,6 @@ fn testPeerErrorAndArray2(x: u8) anyerror![]const u8 {
test "single-item pointer of array to slice to unknown length pointer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
try testCastPtrOfArrayToSliceAndPtr();
comptime try testCastPtrOfArrayToSliceAndPtr();
@ -578,7 +576,6 @@ fn testCastPtrOfArrayToSliceAndPtr() !void {
test "cast *[1][*]const u8 to [*]const ?[*]const u8" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
const window_name = [1][*]const u8{"window name"};
@ -649,7 +646,6 @@ test "@floatCast cast down" {
test "peer type resolution: unreachable, error set, unreachable" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const Error = error{
FileDescriptorAlreadyPresentInSet,
@ -922,7 +918,6 @@ test "peer cast *[N:x]T to *[N]T" {
test "peer cast [*:x]T to [*]T" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
const S = struct {
@ -964,7 +959,6 @@ test "peer cast [:x]T to [*:x]T" {
test "peer type resolution implicit cast to return type" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
@ -984,7 +978,6 @@ test "peer type resolution implicit cast to return type" {
test "peer type resolution implicit cast to variable type" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
const S = struct {
@ -1009,7 +1002,6 @@ test "variable initialization uses result locations properly with regards to the
test "cast between C pointer with different but compatible types" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = struct {
fn foo(arg: [*]c_ushort) u16 {
@ -1026,7 +1018,6 @@ test "cast between C pointer with different but compatible types" {
test "peer type resolve string lit with sentinel-terminated mutable slice" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
var array: [4:0]u8 = undefined;
@ -1079,7 +1070,6 @@ test "comptime float casts" {
test "pointer reinterpret const float to int" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
// The hex representation is 0x3fe3333333333303.
const float: f64 = 5.99999999999994648725e-01;

View File

@ -87,7 +87,6 @@ fn bigToNativeEndian(comptime T: type, v: T) T {
test "type pun endianness" {
if (builtin.zig_backend == .stage1) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
comptime {
const StructOfBytes = extern struct { x: [4]u8 };
@ -398,7 +397,6 @@ test "offset field ptr by enclosing array element size" {
test "accessing reinterpreted memory of parent object" {
if (builtin.zig_backend == .stage1) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = extern struct {
a: f32,
b: [4]u8,

View File

@ -9,7 +9,6 @@ var argv: [*]const [*]const u8 = undefined;
test "const slice child" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const strs = [_][*]const u8{ "one", "two", "three" };
argv = &strs;

View File

@ -606,7 +606,6 @@ fn testEnumWithSpecifiedTagValues(x: MultipleChoice) !void {
}
test "enum with specified tag values" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
try testEnumWithSpecifiedTagValues(MultipleChoice.C);
@ -614,7 +613,6 @@ test "enum with specified tag values" {
}
test "non-exhaustive enum" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
const S = struct {
@ -677,7 +675,6 @@ test "empty non-exhaustive enum" {
}
test "single field non-exhaustive enum" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
const S = struct {
@ -741,7 +738,6 @@ test "cast integer literal to enum" {
}
test "enum with specified and unspecified tag values" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
try testEnumWithSpecifiedAndUnspecifiedTagValues(MultipleChoice2.D);
@ -925,7 +921,6 @@ test "enum literal casting to tagged union" {
const Bar = enum { A, B, C, D };
test "enum literal casting to error union with payload enum" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
var bar: error{B}!Bar = undefined;
@ -1132,7 +1127,6 @@ test "tag name functions are unique" {
test "size of enum with only one tag which has explicit integer tag type" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
const E = enum(u8) { nope = 10 };

View File

@ -222,7 +222,6 @@ fn testErrorSetType() !void {
test "explicit error set cast" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
try testExplicitErrorSetCast(Set1.A);
@ -282,7 +281,6 @@ test "inferred empty error set comptime catch" {
}
test "error union peer type resolution" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
try testErrorUnionPeerTypeResolution(1);
@ -327,7 +325,6 @@ fn foo3(b: usize) Error!usize {
test "error: Infer error set from literals" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
_ = nullLiteral("n") catch |err| handleErrors(err);
_ = floatLiteral("n") catch |err| handleErrors(err);
@ -700,7 +697,6 @@ test "ret_ptr doesn't cause own inferred error set to be resolved" {
test "simple else prong allowed even when all errors handled" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
const S = struct {

View File

@ -69,7 +69,6 @@ fn constExprEvalOnSingleExprBlocksFn(x: i32, b: bool) i32 {
}
test "constant expressions" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
var array: [array_size]u8 = undefined;
@ -138,7 +137,6 @@ test "pointer to type" {
test "a type constructed in a global expression" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
var l: List = undefined;
l.array[0] = 10;
@ -338,7 +336,6 @@ fn doesAlotT(comptime T: type, value: usize) T {
}
test "@setEvalBranchQuota at same scope as generic function call" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
try expect(doesAlotT(u32, 2) == 2);
@ -565,7 +562,6 @@ test "inlined loop has array literal with elided runtime scope on first iteratio
}
test "ptr to local array argument at comptime" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
comptime {
@ -806,7 +802,6 @@ test "array concatenation sets the sentinel - value" {
test "array concatenation sets the sentinel - pointer" {
if (builtin.zig_backend == .stage1) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
var a = [2]u3{ 1, 7 };
@ -956,7 +951,6 @@ test "const local with comptime init through array init" {
test "closure capture type of runtime-known parameter" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = struct {
fn b(c: anytype) !void {
@ -1074,7 +1068,6 @@ test "comptime break operand passing through runtime switch converted to runtime
test "no dependency loop for alignment of self struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
@ -1111,7 +1104,6 @@ test "no dependency loop for alignment of self struct" {
test "no dependency loop for alignment of self bare union" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
@ -1148,7 +1140,6 @@ test "no dependency loop for alignment of self bare union" {
test "no dependency loop for alignment of self tagged union" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
@ -1336,7 +1327,6 @@ test "lazy sizeof is resolved in division" {
}
test "lazy value is resolved as slice operand" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
const A = struct { a: u32 };

View File

@ -2,7 +2,6 @@ const expect = @import("std").testing.expect;
const builtin = @import("builtin");
test "@fieldParentPtr non-first field" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
@ -11,7 +10,6 @@ test "@fieldParentPtr non-first field" {
}
test "@fieldParentPtr first field" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;

View File

@ -5,7 +5,6 @@ const expectEqual = std.testing.expectEqual;
const mem = std.mem;
test "continue in for loop" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
const array = [_]i32{ 1, 2, 3, 4, 5 };
@ -130,7 +129,6 @@ test "for with null and T peer types and inferred result location type" {
}
test "2 break statements and an else" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
const S = struct {
@ -177,7 +175,6 @@ fn mangleString(s: []u8) void {
}
test "for copies its payload" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
const S = struct {
@ -213,7 +210,6 @@ test "for on slice with allowzero ptr" {
test "else continue outer for" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
var i: usize = 6;
var buf: [5]u8 = undefined;

View File

@ -91,7 +91,6 @@ fn max_f64(a: f64, b: f64) f64 {
test "type constructed by comptime function call" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
var l: SimpleList(10) = undefined;

View File

@ -12,7 +12,6 @@ fn foo() C!void {
}
test "merge error sets" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (foo()) {

View File

@ -17,8 +17,6 @@ fn testDerefPtr() !void {
}
test "pointer arithmetic" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
var ptr: [*]const u8 = "abcd";
@ -65,8 +63,6 @@ test "initialize const optional C pointer to null" {
}
test "assigning integer to C pointer" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
var x: i32 = 0;
@ -83,8 +79,6 @@ test "assigning integer to C pointer" {
}
test "C pointer comparison and arithmetic" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
var ptr1: [*c]u32 = 0;
@ -150,7 +144,6 @@ test "peer type resolution with C pointer and const pointer" {
}
test "implicit casting between C pointer and optional non-C pointer" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
@ -281,8 +274,6 @@ test "array initialization types" {
test "null terminated pointer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
@ -299,8 +290,6 @@ test "null terminated pointer" {
test "allow any sentinel" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
@ -315,8 +304,6 @@ test "allow any sentinel" {
test "pointer sentinel with enums" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
const S = struct {
const Number = enum {
@ -337,7 +324,6 @@ test "pointer sentinel with enums" {
test "pointer sentinel with optional element" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {

View File

@ -4,7 +4,6 @@ const expect = std.testing.expect;
const native_endian = builtin.target.cpu.arch.endian();
test "reinterpret bytes as integer with nonzero offset" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
try testReinterpretBytesAsInteger();
@ -39,7 +38,6 @@ fn testReinterpretWithOffsetAndNoWellDefinedLayout() !void {
}
test "reinterpret bytes inside auto-layout struct as integer with nonzero offset" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
try testReinterpretStructWrappedBytesAsInteger();
@ -179,7 +177,6 @@ test "lower reinterpreted comptime field ptr" {
}
test "reinterpret struct field at comptime" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
const numNative = comptime Bytes.init(0x12345678);

View File

@ -18,7 +18,6 @@ test "@sizeOf on compile-time types" {
}
test "@TypeOf() with multiple arguments" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
{
@ -77,7 +76,6 @@ const P = packed struct {
};
test "@offsetOf" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
// Packed structs have fixed memory layout

View File

@ -28,7 +28,6 @@ comptime {
test "slicing" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
var array: [20]i32 = undefined;
@ -269,7 +268,6 @@ fn sliceSum(comptime q: []const u8) i32 {
test "slice type with custom alignment" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
const LazilyResolvedType = struct {
anything: i32,
@ -283,7 +281,6 @@ test "slice type with custom alignment" {
test "obtaining a null terminated slice" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
// here we have a normal array
var buf: [50]u8 = undefined;

View File

@ -10,7 +10,6 @@ top_level_field: i32,
test "top level fields" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
var instance = @This(){
.top_level_field = 1234,
@ -104,7 +103,6 @@ fn testMutation(foo: *StructFoo) void {
test "struct byval assign" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
var foo1: StructFoo = undefined;
var foo2: StructFoo = undefined;
@ -240,7 +238,6 @@ test "usingnamespace within struct scope" {
test "struct field init with catch" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -281,7 +278,6 @@ const Val = struct {
test "struct point to self" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
var root: Node = undefined;
root.val.x = 1;
@ -297,7 +293,6 @@ test "struct point to self" {
test "void struct fields" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const foo = VoidStructFieldsFoo{
.a = void{},
@ -761,7 +756,6 @@ test "packed struct with u0 field access" {
}
test "access to global struct fields" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
@ -1260,7 +1254,6 @@ test "typed init through error unions and optionals" {
test "initialize struct with empty literal" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const S = struct { x: i32 = 1234 };
var s: S = .{};
@ -1362,7 +1355,6 @@ test "store to comptime field" {
test "struct field init value is size of the struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const namespace = struct {
const S = extern struct {

View File

@ -348,7 +348,6 @@ test "switch on const enum with var" {
}
test "anon enum literal used in switch on union enum" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
const Foo = union(enum) {
@ -490,7 +489,6 @@ test "switch prongs with error set cases make a new error set type for capture v
}
test "return result loc and then switch with range implicit casted to error union" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
const S = struct {

View File

@ -25,7 +25,6 @@ test "this refer to module call private fn" {
}
test "this refer to container" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
var pt: Point(i32) = undefined;

View File

@ -5,6 +5,7 @@ const expectEqual = std.testing.expectEqual;
const expectEqualStrings = std.testing.expectEqualStrings;
const h = @cImport(@cInclude("behavior/translate_c_macros.h"));
const latin1 = @cImport(@cInclude("behavior/translate_c_macros_not_utf8.h"));
test "casting to void with a macro" {
h.IGNORE_ME_1(42);
@ -134,3 +135,14 @@ test "string literal macro with embedded tab character" {
try expectEqualStrings("hello\t", h.EMBEDDED_TAB);
}
test "string and char literals that are not UTF-8 encoded. Issue #12784" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
try expectEqual(@as(u8, '\xA9'), latin1.UNPRINTABLE_CHAR);
try expectEqualStrings("\xA9\xA9\xA9", latin1.UNPRINTABLE_STRING);
}

View File

@ -0,0 +1,5 @@
// Note: This file is encoded with ISO/IEC 8859-1 (latin1), not UTF-8.
// Do not change the encoding
#define UNPRINTABLE_STRING "©©©"
#define UNPRINTABLE_CHAR '©'

View File

@ -3,7 +3,6 @@ const builtin = @import("builtin");
const expect = std.testing.expect;
test "try on error union" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
try tryOnErrorUnionImpl();

View File

@ -122,7 +122,7 @@ test "top level decl" {
);
// generic fn
try expectEqualStrings(
"fn(type) type",
"fn(comptime type) type",
@typeName(@TypeOf(TypeFromFn)),
);
}
@ -244,5 +244,5 @@ test "comptime parameters not converted to anytype in function type" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const T = fn (fn (type) void, void) void;
try expectEqualStrings("fn(fn(type) void, void) void", @typeName(T));
try expectEqualStrings("fn(comptime fn(comptime type) void, void) void", @typeName(T));
}

View File

@ -92,7 +92,6 @@ const FooExtern = extern union {
};
test "basic extern unions" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
var foo = FooExtern{ .int = 1 };

View File

@ -58,7 +58,6 @@ test "two files usingnamespace import each other" {
}
test {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
const AA = struct {

View File

@ -175,7 +175,6 @@ test "while with optional as condition with else" {
test "while with error union condition" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
numbers_left = 10;

View File

@ -0,0 +1,20 @@
pub export fn entry() void {
    comptime var x: fn (comptime i32, comptime i32) void = undefined; // target type: both params comptime
    x = bar; // expected error: bar's second parameter is not comptime (manifest line :3:9)
}
pub export fn entry1() void {
    comptime var x: fn (i32, i32) void = undefined; // target type: non-generic fn
    x = foo; // expected error: foo is generic (comptime params) and cannot coerce (manifest line :7:9)
}
fn foo(comptime _: i32, comptime _: i32) void {} // fully generic: every param comptime
fn bar(comptime _: i32, _: i32) void {} // mixed: only the first param is comptime
// error
// backend=stage2
// target=native
//
// :3:9: error: expected type 'fn(comptime i32, comptime i32) void', found 'fn(comptime i32, i32) void'
// :3:9: note: non-comptime parameter 1 cannot cast into a comptime parameter
// :7:9: error: expected type 'fn(i32, i32) void', found 'fn(comptime i32, comptime i32) void'
// :7:9: note: generic function cannot cast into a non-generic function