Merge remote-tracking branch 'origin/master' into llvm7

Andrew Kelley 2018-05-10 11:37:25 -04:00
commit 284ab109c4
30 changed files with 6232 additions and 5271 deletions


@ -416,8 +416,8 @@ set(ZIG_CPP_SOURCES
set(ZIG_STD_FILES
"array_list.zig"
"atomic/index.zig"
"atomic/stack.zig"
"atomic/queue.zig"
"atomic/stack.zig"
"base64.zig"
"buf_map.zig"
"buf_set.zig"
@ -427,13 +427,13 @@ set(ZIG_STD_FILES
"c/index.zig"
"c/linux.zig"
"c/windows.zig"
"crypto/blake2.zig"
"crypto/hmac.zig"
"crypto/index.zig"
"crypto/md5.zig"
"crypto/sha1.zig"
"crypto/sha2.zig"
"crypto/sha3.zig"
"crypto/blake2.zig"
"crypto/hmac.zig"
"cstr.zig"
"debug/failing_allocator.zig"
"debug/index.zig"
@ -445,12 +445,12 @@ set(ZIG_STD_FILES
"fmt/errol/index.zig"
"fmt/errol/lookup.zig"
"fmt/index.zig"
"hash_map.zig"
"hash/index.zig"
"hash/adler.zig"
"hash/crc.zig"
"hash/fnv.zig"
"hash/index.zig"
"hash/siphash.zig"
"hash_map.zig"
"heap.zig"
"index.zig"
"io.zig"
@ -466,6 +466,28 @@ set(ZIG_STD_FILES
"math/atanh.zig"
"math/cbrt.zig"
"math/ceil.zig"
"math/complex/abs.zig"
"math/complex/acos.zig"
"math/complex/acosh.zig"
"math/complex/arg.zig"
"math/complex/asin.zig"
"math/complex/asinh.zig"
"math/complex/atan.zig"
"math/complex/atanh.zig"
"math/complex/conj.zig"
"math/complex/cos.zig"
"math/complex/cosh.zig"
"math/complex/exp.zig"
"math/complex/index.zig"
"math/complex/ldexp.zig"
"math/complex/log.zig"
"math/complex/pow.zig"
"math/complex/proj.zig"
"math/complex/sin.zig"
"math/complex/sinh.zig"
"math/complex/sqrt.zig"
"math/complex/tan.zig"
"math/complex/tanh.zig"
"math/copysign.zig"
"math/cos.zig"
"math/cosh.zig"
@ -502,33 +524,12 @@ set(ZIG_STD_FILES
"math/tan.zig"
"math/tanh.zig"
"math/trunc.zig"
"math/complex/abs.zig"
"math/complex/acosh.zig"
"math/complex/acos.zig"
"math/complex/arg.zig"
"math/complex/asinh.zig"
"math/complex/asin.zig"
"math/complex/atanh.zig"
"math/complex/atan.zig"
"math/complex/conj.zig"
"math/complex/cosh.zig"
"math/complex/cos.zig"
"math/complex/exp.zig"
"math/complex/index.zig"
"math/complex/ldexp.zig"
"math/complex/log.zig"
"math/complex/pow.zig"
"math/complex/proj.zig"
"math/complex/sinh.zig"
"math/complex/sin.zig"
"math/complex/sqrt.zig"
"math/complex/tanh.zig"
"math/complex/tan.zig"
"mem.zig"
"net.zig"
"os/child_process.zig"
"os/darwin.zig"
"os/darwin_errno.zig"
"os/epoch.zig"
"os/file.zig"
"os/get_user_id.zig"
"os/index.zig"
@ -538,13 +539,13 @@ set(ZIG_STD_FILES
"os/linux/x86_64.zig"
"os/path.zig"
"os/time.zig"
"os/epoch.zig"
"os/windows/error.zig"
"os/windows/index.zig"
"os/windows/util.zig"
"os/zen.zig"
"rand/index.zig"
"rand/ziggurat.zig"
"segmented_list.zig"
"sort.zig"
"special/bootstrap.zig"
"special/bootstrap_lib.zig"
@ -575,7 +576,8 @@ set(ZIG_STD_FILES
"unicode.zig"
"zig/ast.zig"
"zig/index.zig"
"zig/parser.zig"
"zig/parse.zig"
"zig/render.zig"
"zig/tokenizer.zig"
)


@ -671,34 +671,46 @@ fn cmdFmt(allocator: &Allocator, args: []const []const u8) !void {
};
defer allocator.free(source_code);
var tokenizer = std.zig.Tokenizer.init(source_code);
var parser = std.zig.Parser.init(&tokenizer, allocator, file_path);
defer parser.deinit();
var tree = parser.parse() catch |err| {
var tree = std.zig.parse(allocator, source_code) catch |err| {
try stderr.print("error parsing file '{}': {}\n", file_path, err);
continue;
};
defer tree.deinit();
var original_file_backup = try Buffer.init(allocator, file_path);
defer original_file_backup.deinit();
try original_file_backup.append(".backup");
try os.rename(allocator, file_path, original_file_backup.toSliceConst());
var error_it = tree.errors.iterator(0);
while (error_it.next()) |parse_error| {
const token = tree.tokens.at(parse_error.loc());
const loc = tree.tokenLocation(0, parse_error.loc());
try stderr.print("{}:{}:{}: error: ", file_path, loc.line + 1, loc.column + 1);
try tree.renderError(parse_error, stderr);
try stderr.print("\n{}\n", source_code[loc.line_start..loc.line_end]);
{
var i: usize = 0;
while (i < loc.column) : (i += 1) {
try stderr.write(" ");
}
}
{
const caret_count = token.end - token.start;
var i: usize = 0;
while (i < caret_count) : (i += 1) {
try stderr.write("~");
}
}
try stderr.write("\n");
}
if (tree.errors.len != 0) {
continue;
}
try stderr.print("{}\n", file_path);
// TODO: BufferedAtomicFile has some access problems.
var out_file = try os.File.openWrite(allocator, file_path);
defer out_file.close();
const baf = try io.BufferedAtomicFile.create(allocator, file_path);
defer baf.destroy();
var out_file_stream = io.FileOutStream.init(&out_file);
try parser.renderSource(out_file_stream.stream, tree.root_node);
if (!flags.present("keep-backups")) {
try os.deleteFile(allocator, original_file_backup.toSliceConst());
}
try std.zig.render(allocator, baf.stream(), &tree);
try baf.finish();
}
}


@ -8,9 +8,7 @@ const c = @import("c.zig");
const builtin = @import("builtin");
const Target = @import("target.zig").Target;
const warn = std.debug.warn;
const Tokenizer = std.zig.Tokenizer;
const Token = std.zig.Token;
const Parser = std.zig.Parser;
const ArrayList = std.ArrayList;
pub const Module = struct {
@ -246,34 +244,17 @@ pub const Module = struct {
warn("{}", source_code);
warn("====tokenization:====\n");
{
var tokenizer = Tokenizer.init(source_code);
while (true) {
const token = tokenizer.next();
tokenizer.dump(token);
if (token.id == Token.Id.Eof) {
break;
}
}
}
warn("====parse:====\n");
var tokenizer = Tokenizer.init(source_code);
var parser = Parser.init(&tokenizer, self.allocator, root_src_real_path);
defer parser.deinit();
var tree = try parser.parse();
var tree = try std.zig.parse(self.allocator, source_code);
defer tree.deinit();
var stderr_file = try std.io.getStdErr();
var stderr_file_out_stream = std.io.FileOutStream.init(&stderr_file);
const out_stream = &stderr_file_out_stream.stream;
try parser.renderAst(out_stream, tree.root_node);
warn("====fmt:====\n");
try parser.renderSource(out_stream, tree.root_node);
try std.zig.render(self.allocator, out_stream, &tree);
warn("====ir:====\n");
warn("TODO\n\n");


@ -5931,8 +5931,8 @@ size_t type_id_len() {
return array_length(all_type_ids);
}
size_t type_id_index(TypeTableEntryId id) {
switch (id) {
size_t type_id_index(TypeTableEntry *entry) {
switch (entry->id) {
case TypeTableEntryIdInvalid:
zig_unreachable();
case TypeTableEntryIdMetaType:
@ -5952,6 +5952,8 @@ size_t type_id_index(TypeTableEntryId id) {
case TypeTableEntryIdArray:
return 7;
case TypeTableEntryIdStruct:
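// a slice is represented as a struct internally, but it reports the
// dedicated Slice type id, which is appended after the 25 other type ids
// in define_builtin_compile_vars (hence index 25)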
if (entry->data.structure.is_slice)
return 25;
return 8;
case TypeTableEntryIdNumLitFloat:
return 9;


@ -174,7 +174,7 @@ void update_compile_var(CodeGen *g, Buf *name, ConstExprValue *value);
const char *type_id_name(TypeTableEntryId id);
TypeTableEntryId type_id_at_index(size_t index);
size_t type_id_len();
size_t type_id_index(TypeTableEntryId id);
size_t type_id_index(TypeTableEntry *entry);
TypeTableEntry *get_generic_fn_type(CodeGen *g, FnTypeId *fn_type_id);
bool type_is_copyable(CodeGen *g, TypeTableEntry *type_entry);
LinkLib *create_link_lib(Buf *name);


@ -1259,12 +1259,11 @@ void bigint_and(BigInt *dest, const BigInt *op1, const BigInt *op2) {
bigint_normalize(dest);
return;
}
// TODO this code path is untested
uint64_t first_digit = dest->data.digit;
dest->digit_count = max(op1->digit_count, op2->digit_count);
dest->data.digits = allocate_nonzero<uint64_t>(dest->digit_count);
dest->data.digits[0] = first_digit;
size_t i = 1;
size_t i = 0;
for (; i < op1->digit_count && i < op2->digit_count; i += 1) {
dest->data.digits[i] = op1_digits[i] & op2_digits[i];
}
@ -1412,7 +1411,6 @@ void bigint_shr(BigInt *dest, const BigInt *op1, const BigInt *op2) {
return;
}
// TODO this code path is untested
size_t digit_shift_count = shift_amt / 64;
size_t leftover_shift_count = shift_amt % 64;
@ -1427,7 +1425,7 @@ void bigint_shr(BigInt *dest, const BigInt *op1, const BigInt *op2) {
uint64_t digit = op1_digits[op_digit_index];
size_t dest_digit_index = op_digit_index - digit_shift_count;
dest->data.digits[dest_digit_index] = carry | (digit >> leftover_shift_count);
carry = (0xffffffffffffffffULL << leftover_shift_count) & digit;
carry = digit << (64 - leftover_shift_count);
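// the low leftover_shift_count bits shifted out of this digit carry
// into the high end of the next lower destination digit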
if (dest_digit_index == 0) { break; }
op_digit_index -= 1;


@ -6260,6 +6260,7 @@ static void define_builtin_compile_vars(CodeGen *g) {
const TypeTableEntryId id = type_id_at_index(i);
buf_appendf(contents, " %s,\n", type_id_name(id));
}
buf_appendf(contents, " Slice,\n");
buf_appendf(contents, "};\n\n");
}
{
@ -6272,6 +6273,7 @@ static void define_builtin_compile_vars(CodeGen *g) {
" Int: Int,\n"
" Float: Float,\n"
" Pointer: Pointer,\n"
" Slice: Slice,\n"
" Array: Array,\n"
" Struct: Struct,\n"
" FloatLiteral: void,\n"
@ -6307,6 +6309,8 @@ static void define_builtin_compile_vars(CodeGen *g) {
" child: type,\n"
" };\n"
"\n"
" pub const Slice = Pointer;\n"
"\n"
" pub const Array = struct {\n"
" len: usize,\n"
" child: type,\n"


@ -13736,7 +13736,16 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
create_const_enum(child_type, &field->value), child_type,
ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
}
} else if (child_type->id == TypeTableEntryIdUnion &&
}
ScopeDecls *container_scope = get_container_scope(child_type);
if (container_scope != nullptr) {
auto entry = container_scope->decl_table.maybe_get(field_name);
Tld *tld = entry ? entry->value : nullptr;
if (tld) {
return ir_analyze_decl_ref(ira, &field_ptr_instruction->base, tld);
}
}
if (child_type->id == TypeTableEntryIdUnion &&
(child_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr ||
child_type->data.unionation.decl_node->data.container_decl.auto_enum))
{
@ -13753,14 +13762,6 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
}
}
ScopeDecls *container_scope = get_container_scope(child_type);
if (container_scope != nullptr) {
auto entry = container_scope->decl_table.maybe_get(field_name);
Tld *tld = entry ? entry->value : nullptr;
if (tld) {
return ir_analyze_decl_ref(ira, &field_ptr_instruction->base, tld);
}
}
ir_add_error(ira, &field_ptr_instruction->base,
buf_sprintf("container '%s' has no member called '%s'",
buf_ptr(&child_type->name), buf_ptr(field_name)));
@ -14708,7 +14709,7 @@ static IrInstruction *ir_analyze_union_tag(IrAnalyze *ira, IrInstruction *source
}
if (value->value.type->id != TypeTableEntryIdUnion) {
ir_add_error(ira, source_instr,
ir_add_error(ira, value,
buf_sprintf("expected enum or union type, found '%s'", buf_ptr(&value->value.type->name)));
return ira->codegen->invalid_instruction;
}
@ -15784,11 +15785,10 @@ static TypeTableEntry *ir_type_info_get_type(IrAnalyze *ira, const char *type_na
Buf field_name = BUF_INIT;
buf_init_from_str(&field_name, type_name);
auto entry = type_info_scope->decl_table.maybe_get(&field_name);
auto entry = type_info_scope->decl_table.get(&field_name);
buf_deinit(&field_name);
assert(entry != nullptr);
TldVar *tld = (TldVar *)entry->value;
TldVar *tld = (TldVar *)entry;
assert(tld->base.id == TldIdVar);
VariableTableEntry *var = tld->var;
@ -16070,6 +16070,38 @@ static ConstExprValue *ir_make_type_info_value(IrAnalyze *ira, TypeTableEntry *t
enum_field_val->data.x_struct.fields = inner_fields;
};
const auto create_ptr_like_type_info = [ira](const char *name, TypeTableEntry *ptr_type_entry) {
ConstExprValue *result = create_const_vals(1);
result->special = ConstValSpecialStatic;
result->type = ir_type_info_get_type(ira, name);
ConstExprValue *fields = create_const_vals(4);
result->data.x_struct.fields = fields;
// is_const: bool
ensure_field_index(result->type, "is_const", 0);
fields[0].special = ConstValSpecialStatic;
fields[0].type = ira->codegen->builtin_types.entry_bool;
fields[0].data.x_bool = ptr_type_entry->data.pointer.is_const;
// is_volatile: bool
ensure_field_index(result->type, "is_volatile", 1);
fields[1].special = ConstValSpecialStatic;
fields[1].type = ira->codegen->builtin_types.entry_bool;
fields[1].data.x_bool = ptr_type_entry->data.pointer.is_volatile;
// alignment: u32
ensure_field_index(result->type, "alignment", 2);
fields[2].special = ConstValSpecialStatic;
fields[2].type = ira->codegen->builtin_types.entry_u32;
bigint_init_unsigned(&fields[2].data.x_bigint, ptr_type_entry->data.pointer.alignment);
// child: type
ensure_field_index(result->type, "child", 3);
fields[3].special = ConstValSpecialStatic;
fields[3].type = ira->codegen->builtin_types.entry_type;
fields[3].data.x_type = ptr_type_entry->data.pointer.child_type;
return result;
};
ConstExprValue *result = nullptr;
switch (type_entry->id)
{
@ -16138,34 +16170,7 @@ static ConstExprValue *ir_make_type_info_value(IrAnalyze *ira, TypeTableEntry *t
}
case TypeTableEntryIdPointer:
{
result = create_const_vals(1);
result->special = ConstValSpecialStatic;
result->type = ir_type_info_get_type(ira, "Pointer");
ConstExprValue *fields = create_const_vals(4);
result->data.x_struct.fields = fields;
// is_const: bool
ensure_field_index(result->type, "is_const", 0);
fields[0].special = ConstValSpecialStatic;
fields[0].type = ira->codegen->builtin_types.entry_bool;
fields[0].data.x_bool = type_entry->data.pointer.is_const;
// is_volatile: bool
ensure_field_index(result->type, "is_volatile", 1);
fields[1].special = ConstValSpecialStatic;
fields[1].type = ira->codegen->builtin_types.entry_bool;
fields[1].data.x_bool = type_entry->data.pointer.is_volatile;
// alignment: u32
ensure_field_index(result->type, "alignment", 2);
fields[2].special = ConstValSpecialStatic;
fields[2].type = ira->codegen->builtin_types.entry_u32;
bigint_init_unsigned(&fields[2].data.x_bigint, type_entry->data.pointer.alignment);
// child: type
ensure_field_index(result->type, "child", 3);
fields[3].special = ConstValSpecialStatic;
fields[3].type = ira->codegen->builtin_types.entry_type;
fields[3].data.x_type = type_entry->data.pointer.child_type;
result = create_ptr_like_type_info("Pointer", type_entry);
break;
}
case TypeTableEntryIdArray:
@ -16435,6 +16440,17 @@ static ConstExprValue *ir_make_type_info_value(IrAnalyze *ira, TypeTableEntry *t
}
case TypeTableEntryIdStruct:
{
if (type_entry->data.structure.is_slice) {
Buf ptr_field_name = BUF_INIT;
buf_init_from_str(&ptr_field_name, "ptr");
TypeTableEntry *ptr_type = type_entry->data.structure.fields_by_name.get(&ptr_field_name)->type_entry;
ensure_complete_type(ira->codegen, ptr_type);
buf_deinit(&ptr_field_name);
result = create_ptr_like_type_info("Slice", ptr_type);
break;
}
result = create_const_vals(1);
result->special = ConstValSpecialStatic;
result->type = ir_type_info_get_type(ira, "Struct");
@ -16621,7 +16637,7 @@ static TypeTableEntry *ir_analyze_instruction_type_info(IrAnalyze *ira,
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
out_val->type = result_type;
bigint_init_unsigned(&out_val->data.x_union.tag, type_id_index(type_entry->id));
bigint_init_unsigned(&out_val->data.x_union.tag, type_id_index(type_entry));
ConstExprValue *payload = ir_make_type_info_value(ira, type_entry);
out_val->data.x_union.payload = payload;
@ -16649,7 +16665,7 @@ static TypeTableEntry *ir_analyze_instruction_type_id(IrAnalyze *ira,
TypeTableEntry *result_type = var_value->data.x_type;
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
bigint_init_unsigned(&out_val->data.x_enum_tag, type_id_index(type_entry->id));
bigint_init_unsigned(&out_val->data.x_enum_tag, type_id_index(type_entry));
return result_type;
}


@ -3669,6 +3669,7 @@ static AstNode *resolve_typedef_decl(Context *c, const TypedefNameDecl *typedef_
if (existing_entry) {
return existing_entry->value;
}
QualType child_qt = typedef_decl->getUnderlyingType();
Buf *type_name = buf_create_from_str(decl_name(typedef_decl));
@ -3702,16 +3703,19 @@ static AstNode *resolve_typedef_decl(Context *c, const TypedefNameDecl *typedef_
// use the name of this typedef
// TODO
// trans_qual_type here might cause us to look at this typedef again, so we put the item in the map first
AstNode *symbol_node = trans_create_node_symbol(c, type_name);
c->decl_table.put(typedef_decl->getCanonicalDecl(), symbol_node);
AstNode *type_node = trans_qual_type(c, child_qt, typedef_decl->getLocation());
if (type_node == nullptr) {
emit_warning(c, typedef_decl->getLocation(), "typedef %s - unresolved child type", buf_ptr(type_name));
c->decl_table.put(typedef_decl, nullptr);
// TODO add global var with type_name equal to @compileError("unable to resolve C type")
return nullptr;
}
add_global_var(c, type_name, type_node);
AstNode *symbol_node = trans_create_node_symbol(c, type_name);
c->decl_table.put(typedef_decl->getCanonicalDecl(), symbol_node);
return symbol_node;
}


@ -28,11 +28,11 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type{
};
}
pub fn deinit(l: &Self) void {
pub fn deinit(l: &const Self) void {
l.allocator.free(l.items);
}
pub fn toSlice(l: &Self) []align(A) T {
pub fn toSlice(l: &const Self) []align(A) T {
return l.items[0..l.len];
}
@ -150,7 +150,7 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type{
}
};
pub fn iterator(self: &Self) Iterator {
pub fn iterator(self: &const Self) Iterator {
return Iterator { .list = self, .count = 0 };
}
};
@ -168,6 +168,14 @@ test "basic ArrayList test" {
assert(list.items[i] == i32(i + 1));
}}
for (list.toSlice()) |v, i| {
assert(v == i32(i + 1));
}
for (list.toSliceConst()) |v, i| {
assert(v == i32(i + 1));
}
assert(list.pop() == 10);
assert(list.len == 9);
@ -228,4 +236,4 @@ test "insert ArrayList test" {
const items = []const i32 { 1 };
try list.insertSlice(0, items[0..0]);
assert(list.items[0] == 5);
}
}


@ -18,10 +18,10 @@ pub const BufMap = struct {
return self;
}
pub fn deinit(self: &BufMap) void {
pub fn deinit(self: &const BufMap) void {
var it = self.hash_map.iterator();
while (true) {
const entry = it.next() ?? break;
const entry = it.next() ?? break;
self.free(entry.key);
self.free(entry.value);
}
@ -38,7 +38,7 @@ pub const BufMap = struct {
_ = try self.hash_map.put(key_copy, value_copy);
}
pub fn get(self: &BufMap, key: []const u8) ?[]const u8 {
pub fn get(self: &const BufMap, key: []const u8) ?[]const u8 {
const entry = self.hash_map.get(key) ?? return null;
return entry.value;
}
@ -57,11 +57,11 @@ pub const BufMap = struct {
return self.hash_map.iterator();
}
fn free(self: &BufMap, value: []const u8) void {
fn free(self: &const BufMap, value: []const u8) void {
self.hash_map.allocator.free(value);
}
fn copy(self: &BufMap, value: []const u8) ![]const u8 {
fn copy(self: &const BufMap, value: []const u8) ![]const u8 {
return mem.dupe(self.hash_map.allocator, u8, value);
}
};
@ -87,4 +87,4 @@ test "BufMap" {
bufmap.delete("x");
assert(0 == bufmap.count());
}
}


@ -1,6 +1,8 @@
const std = @import("index.zig");
const HashMap = @import("hash_map.zig").HashMap;
const mem = @import("mem.zig");
const Allocator = mem.Allocator;
const assert = std.debug.assert;
pub const BufSet = struct {
hash_map: BufSetHashMap,
@ -14,10 +16,10 @@ pub const BufSet = struct {
return self;
}
pub fn deinit(self: &BufSet) void {
pub fn deinit(self: &const BufSet) void {
var it = self.hash_map.iterator();
while (true) {
const entry = it.next() ?? break;
const entry = it.next() ?? break;
self.free(entry.key);
}
@ -49,13 +51,30 @@ pub const BufSet = struct {
return self.hash_map.allocator;
}
fn free(self: &BufSet, value: []const u8) void {
fn free(self: &const BufSet, value: []const u8) void {
self.hash_map.allocator.free(value);
}
fn copy(self: &BufSet, value: []const u8) ![]const u8 {
fn copy(self: &const BufSet, value: []const u8) ![]const u8 {
const result = try self.hash_map.allocator.alloc(u8, value.len);
mem.copy(u8, result, value);
return result;
}
};
test "BufSet" {
var direct_allocator = std.heap.DirectAllocator.init();
defer direct_allocator.deinit();
var bufset = BufSet.init(&direct_allocator.allocator);
defer bufset.deinit();
try bufset.put("x");
assert(bufset.count() == 1);
bufset.delete("x");
assert(bufset.count() == 0);
try bufset.put("x");
try bufset.put("y");
try bufset.put("z");
}


@ -66,7 +66,7 @@ pub const Buffer = struct {
self.list.deinit();
}
pub fn toSlice(self: &Buffer) []u8 {
pub fn toSlice(self: &const Buffer) []u8 {
return self.list.toSlice()[0..self.len()];
}
@ -166,5 +166,5 @@ test "simple Buffer" {
assert(buf.endsWith("orld"));
try buf2.resize(4);
assert(buf.startsWith(buf2.toSliceConst()));
assert(buf.startsWith(buf2.toSlice()));
}


@ -74,7 +74,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
};
}
pub fn deinit(hm: &Self) void {
pub fn deinit(hm: &const Self) void {
hm.allocator.free(hm.entries);
}
@ -114,14 +114,14 @@ pub fn HashMap(comptime K: type, comptime V: type,
return hm.internalPut(key, value);
}
pub fn get(hm: &Self, key: K) ?&Entry {
pub fn get(hm: &const Self, key: K) ?&Entry {
if (hm.entries.len == 0) {
return null;
}
return hm.internalGet(key);
}
pub fn contains(hm: &Self, key: K) bool {
pub fn contains(hm: &const Self, key: K) bool {
return hm.get(key) != null;
}
@ -230,7 +230,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
unreachable; // put into a full map
}
fn internalGet(hm: &Self, key: K) ?&Entry {
fn internalGet(hm: &const Self, key: K) ?&Entry {
const start_index = hm.keyToIndex(key);
{var roll_over: usize = 0; while (roll_over <= hm.max_distance_from_start_index) : (roll_over += 1) {
const index = (start_index + roll_over) % hm.entries.len;
@ -242,7 +242,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
return null;
}
fn keyToIndex(hm: &Self, key: K) usize {
fn keyToIndex(hm: &const Self, key: K) usize {
return usize(hash(key)) % hm.entries.len;
}
};
@ -264,6 +264,7 @@ test "basic hash map usage" {
assert(??(map.put(5, 66) catch unreachable) == 55);
assert(??(map.put(5, 55) catch unreachable) == 66);
assert(map.contains(2));
assert((??map.get(2)).value == 22);
_ = map.remove(2);
assert(map.remove(2) == null);
@ -273,7 +274,7 @@ test "basic hash map usage" {
test "iterator hash map" {
var direct_allocator = std.heap.DirectAllocator.init();
defer direct_allocator.deinit();
var reset_map = HashMap(i32, i32, hash_i32, eql_i32).init(&direct_allocator.allocator);
defer reset_map.deinit();
@ -315,4 +316,4 @@ fn hash_i32(x: i32) u32 {
fn eql_i32(a: i32, b: i32) bool {
return a == b;
}
}


@ -7,6 +7,7 @@ pub const BufferOutStream = @import("buffer.zig").BufferOutStream;
pub const HashMap = @import("hash_map.zig").HashMap;
pub const LinkedList = @import("linked_list.zig").LinkedList;
pub const IntrusiveLinkedList = @import("linked_list.zig").IntrusiveLinkedList;
pub const SegmentedList = @import("segmented_list.zig").SegmentedList;
pub const atomic = @import("atomic/index.zig");
pub const base64 = @import("base64.zig");
@ -43,6 +44,7 @@ test "std" {
_ = @import("buffer.zig");
_ = @import("hash_map.zig");
_ = @import("linked_list.zig");
_ = @import("segmented_list.zig");
_ = @import("base64.zig");
_ = @import("build.zig");


@ -558,6 +558,32 @@ test "math.floorPowerOfTwo" {
comptime testFloorPowerOfTwo();
}
pub fn log2_int(comptime T: type, x: T) Log2Int(T) {
assert(x != 0);
return Log2Int(T)(T.bit_count - 1 - @clz(x));
}
}
pub fn log2_int_ceil(comptime T: type, x: T) Log2Int(T) {
assert(x != 0);
const log2_val = log2_int(T, x);
if (T(1) << log2_val == x)
return log2_val;
return log2_val + 1;
}
test "std.math.log2_int_ceil" {
assert(log2_int_ceil(u32, 1) == 0);
assert(log2_int_ceil(u32, 2) == 1);
assert(log2_int_ceil(u32, 3) == 2);
assert(log2_int_ceil(u32, 4) == 2);
assert(log2_int_ceil(u32, 5) == 3);
assert(log2_int_ceil(u32, 6) == 3);
assert(log2_int_ceil(u32, 7) == 3);
assert(log2_int_ceil(u32, 8) == 3);
assert(log2_int_ceil(u32, 9) == 4);
assert(log2_int_ceil(u32, 10) == 4);
}
fn testFloorPowerOfTwo() void {
assert(floorPowerOfTwo(u32, 63) == 32);
assert(floorPowerOfTwo(u32, 64) == 64);


@ -31,17 +31,12 @@ pub fn log2(x: var) @typeOf(x) {
return result;
},
TypeId.Int => {
return log2_int(T, x);
return math.log2_int(T, x);
},
else => @compileError("log2 not implemented for " ++ @typeName(T)),
}
}
pub fn log2_int(comptime T: type, x: T) T {
assert(x != 0);
return T.bit_count - 1 - T(@clz(x));
}
}
pub fn log2_32(x_: f32) f32 {
const ivln2hi: f32 = 1.4428710938e+00;
const ivln2lo: f32 = -1.7605285393e-04;

std/segmented_list.zig (new file, 379 lines)

@ -0,0 +1,379 @@
const std = @import("index.zig");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
// Imagine that `fn at(self: &Self, index: usize) &T` is a customer asking for a box
// from a warehouse, with the boxes conceptually ordered in a flat array from 0 to N - 1.
// But the warehouse actually stores boxes on shelves whose sizes are increasing
// powers of 2. So when the customer requests a box index, we have to translate it
// to a shelf index and a box index within that shelf. Illustration:
//
// customer indexes:
// shelf 0: 0
// shelf 1: 1 2
// shelf 2: 3 4 5 6
// shelf 3: 7 8 9 10 11 12 13 14
// shelf 4: 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
// shelf 5: 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62
// ...
//
// warehouse indexes:
// shelf 0: 0
// shelf 1: 0 1
// shelf 2: 0 1 2 3
// shelf 3: 0 1 2 3 4 5 6 7
// shelf 4: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
// shelf 5: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
// ...
//
// With this arrangement, here are the equations to get the shelf index and
// box index based on customer box index:
//
// shelf_index = floor(log2(customer_index + 1))
// shelf_count = ceil(log2(box_count + 1))
// box_index = customer_index + 1 - 2 ** shelf_index
// shelf_size = 2 ** shelf_index
//
// Now we complicate it a little bit further by adding a preallocated shelf, which must be
// a power of 2:
// prealloc=4
//
// customer indexes:
// prealloc: 0 1 2 3
// shelf 0: 4 5 6 7 8 9 10 11
// shelf 1: 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27
// shelf 2: 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59
// ...
//
// warehouse indexes:
// prealloc: 0 1 2 3
// shelf 0: 0 1 2 3 4 5 6 7
// shelf 1: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
// shelf 2: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
// ...
//
// Now the equations are:
//
// shelf_index = floor(log2(customer_index + prealloc)) - log2(prealloc) - 1
// shelf_count = ceil(log2(box_count + prealloc)) - log2(prealloc) - 1
// box_index = customer_index + prealloc - 2 ** (log2(prealloc) + 1 + shelf_index)
// shelf_size = prealloc * 2 ** (shelf_index + 1)
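//
// For example, with prealloc = 4 and customer_index = 20:
//
// shelf_index = floor(log2(20 + 4)) - log2(4) - 1 = 4 - 2 - 1 = 1
// box_index = 20 + 4 - 2 ** (log2(4) + 1 + 1) = 24 - 16 = 8
// shelf_size = 4 * 2 ** (1 + 1) = 16
//
// which matches the tables above: customer box 20 sits on shelf 1
// (customer boxes 12 through 27) at warehouse position 8, and shelf 1
// holds 16 boxes.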
/// This is a stack data structure where pointers to elements have the same lifetime as the
/// data structure itself, unlike ArrayList, where push() may invalidate all existing element pointers.
/// The tradeoff is that elements are not guaranteed to be contiguous. For that, use ArrayList.
/// Note however that most elements are contiguous, making this data structure cache-friendly.
///
/// Because it never has to copy elements from an old location to a new location, it does not require
/// its elements to be copyable, and it avoids wasting memory when backed by an ArenaAllocator.
/// Note that the push() and pop() convenience methods perform a copy, but you can instead use
/// addOne(), at(), setCapacity(), and shrinkCapacity() to avoid copying items.
///
/// This data structure has O(1) push and O(1) pop.
///
/// It supports preallocated elements, making it especially well suited when the expected maximum
/// size is small. `prealloc_item_count` must be 0, or a power of 2.
pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type {
return struct {
const Self = this;
const prealloc_exp = blk: {
// we don't use the prealloc_exp constant when prealloc_item_count is 0.
assert(prealloc_item_count != 0);
const value = std.math.log2_int(usize, prealloc_item_count);
assert((1 << value) == prealloc_item_count); // prealloc_item_count must be a power of 2
break :blk @typeOf(1)(value);
};
const ShelfIndex = std.math.Log2Int(usize);
prealloc_segment: [prealloc_item_count]T,
dynamic_segments: []&T,
allocator: &Allocator,
len: usize,
pub const prealloc_count = prealloc_item_count;
/// Deinitialize with `deinit`
pub fn init(allocator: &Allocator) Self {
return Self {
.allocator = allocator,
.len = 0,
.prealloc_segment = undefined,
.dynamic_segments = []&T{},
};
}
pub fn deinit(self: &Self) void {
self.freeShelves(ShelfIndex(self.dynamic_segments.len), 0);
self.allocator.free(self.dynamic_segments);
*self = undefined;
}
pub fn at(self: &Self, i: usize) &T {
assert(i < self.len);
return self.uncheckedAt(i);
}
pub fn count(self: &const Self) usize {
return self.len;
}
pub fn push(self: &Self, item: &const T) !void {
const new_item_ptr = try self.addOne();
*new_item_ptr = *item;
}
pub fn pushMany(self: &Self, items: []const T) !void {
for (items) |item| {
try self.push(item);
}
}
pub fn pop(self: &Self) ?T {
if (self.len == 0)
return null;
const index = self.len - 1;
const result = *self.uncheckedAt(index);
self.len = index;
return result;
}
pub fn addOne(self: &Self) !&T {
const new_length = self.len + 1;
try self.growCapacity(new_length);
const result = self.uncheckedAt(self.len);
self.len = new_length;
return result;
}
/// Grows or shrinks capacity to match usage.
pub fn setCapacity(self: &Self, new_capacity: usize) !void {
if (new_capacity <= usize(1) << (prealloc_exp + self.dynamic_segments.len)) {
return self.shrinkCapacity(new_capacity);
} else {
return self.growCapacity(new_capacity);
}
}
/// Only grows capacity, or retains current capacity
pub fn growCapacity(self: &Self, new_capacity: usize) !void {
const new_cap_shelf_count = shelfCount(new_capacity);
const old_shelf_count = ShelfIndex(self.dynamic_segments.len);
if (new_cap_shelf_count > old_shelf_count) {
self.dynamic_segments = try self.allocator.realloc(&T, self.dynamic_segments, new_cap_shelf_count);
var i = old_shelf_count;
errdefer {
self.freeShelves(i, old_shelf_count);
self.dynamic_segments = self.allocator.shrink(&T, self.dynamic_segments, old_shelf_count);
}
while (i < new_cap_shelf_count) : (i += 1) {
self.dynamic_segments[i] = (try self.allocator.alloc(T, shelfSize(i))).ptr;
}
}
}
/// Only shrinks capacity or retains current capacity
pub fn shrinkCapacity(self: &Self, new_capacity: usize) void {
if (new_capacity <= prealloc_item_count) {
const len = ShelfIndex(self.dynamic_segments.len);
self.freeShelves(len, 0);
self.allocator.free(self.dynamic_segments);
self.dynamic_segments = []&T{};
return;
}
const new_cap_shelf_count = shelfCount(new_capacity);
const old_shelf_count = ShelfIndex(self.dynamic_segments.len);
assert(new_cap_shelf_count <= old_shelf_count);
if (new_cap_shelf_count == old_shelf_count) {
return;
}
self.freeShelves(old_shelf_count, new_cap_shelf_count);
self.dynamic_segments = self.allocator.shrink(&T, self.dynamic_segments, new_cap_shelf_count);
}
pub fn uncheckedAt(self: &Self, index: usize) &T {
if (index < prealloc_item_count) {
return &self.prealloc_segment[index];
}
const shelf_index = shelfIndex(index);
const box_index = boxIndex(index, shelf_index);
return &self.dynamic_segments[shelf_index][box_index];
}
fn shelfCount(box_count: usize) ShelfIndex {
if (prealloc_item_count == 0) {
return std.math.log2_int_ceil(usize, box_count + 1);
}
return std.math.log2_int_ceil(usize, box_count + prealloc_item_count) - prealloc_exp - 1;
}
fn shelfSize(shelf_index: ShelfIndex) usize {
if (prealloc_item_count == 0) {
return usize(1) << shelf_index;
}
return usize(1) << (shelf_index + (prealloc_exp + 1));
}
fn shelfIndex(list_index: usize) ShelfIndex {
if (prealloc_item_count == 0) {
return std.math.log2_int(usize, list_index + 1);
}
return std.math.log2_int(usize, list_index + prealloc_item_count) - prealloc_exp - 1;
}
fn boxIndex(list_index: usize, shelf_index: ShelfIndex) usize {
if (prealloc_item_count == 0) {
return (list_index + 1) - (usize(1) << shelf_index);
}
return list_index + prealloc_item_count - (usize(1) << ((prealloc_exp + 1) + shelf_index));
}
fn freeShelves(self: &Self, from_count: ShelfIndex, to_count: ShelfIndex) void {
var i = from_count;
while (i != to_count) {
i -= 1;
self.allocator.free(self.dynamic_segments[i][0..shelfSize(i)]);
}
}
pub const Iterator = struct {
list: &Self,
index: usize,
box_index: usize,
shelf_index: ShelfIndex,
shelf_size: usize,
pub fn next(it: &Iterator) ?&T {
if (it.index >= it.list.len)
return null;
if (it.index < prealloc_item_count) {
const ptr = &it.list.prealloc_segment[it.index];
it.index += 1;
if (it.index == prealloc_item_count) {
it.box_index = 0;
it.shelf_index = 0;
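// the first dynamic shelf holds prealloc_item_count * 2 items (see shelfSize)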
it.shelf_size = prealloc_item_count * 2;
}
return ptr;
}
const ptr = &it.list.dynamic_segments[it.shelf_index][it.box_index];
it.index += 1;
it.box_index += 1;
if (it.box_index == it.shelf_size) {
it.shelf_index += 1;
it.box_index = 0;
it.shelf_size *= 2;
}
return ptr;
}
pub fn prev(it: &Iterator) ?&T {
if (it.index == 0)
return null;
it.index -= 1;
if (it.index < prealloc_item_count)
return &it.list.prealloc_segment[it.index];
if (it.box_index == 0) {
it.shelf_index -= 1;
it.shelf_size /= 2;
it.box_index = it.shelf_size - 1;
} else {
it.box_index -= 1;
}
return &it.list.dynamic_segments[it.shelf_index][it.box_index];
}
pub fn peek(it: &Iterator) ?&T {
if (it.index >= it.list.len)
return null;
if (it.index < prealloc_item_count)
return &it.list.prealloc_segment[it.index];
return &it.list.dynamic_segments[it.shelf_index][it.box_index];
}
};
pub fn iterator(self: &Self, start_index: usize) Iterator {
var it = Iterator {
.list = self,
.index = start_index,
.shelf_index = undefined,
.box_index = undefined,
.shelf_size = undefined,
};
if (start_index >= prealloc_item_count) {
it.shelf_index = shelfIndex(start_index);
it.box_index = boxIndex(start_index, it.shelf_index);
it.shelf_size = shelfSize(it.shelf_index);
}
return it;
}
};
}
test "std.SegmentedList" {
var da = std.heap.DirectAllocator.init();
defer da.deinit();
var a = &da.allocator;
try testSegmentedList(0, a);
try testSegmentedList(1, a);
try testSegmentedList(2, a);
try testSegmentedList(4, a);
try testSegmentedList(8, a);
try testSegmentedList(16, a);
}
fn testSegmentedList(comptime prealloc: usize, allocator: &Allocator) !void {
var list = SegmentedList(i32, prealloc).init(allocator);
defer list.deinit();
{var i: usize = 0; while (i < 100) : (i += 1) {
try list.push(i32(i + 1));
assert(list.len == i + 1);
}}
{var i: usize = 0; while (i < 100) : (i += 1) {
assert(*list.at(i) == i32(i + 1));
}}
{
var it = list.iterator(0);
var x: i32 = 0;
while (it.next()) |item| {
x += 1;
assert(*item == x);
}
assert(x == 100);
while (it.prev()) |item| : (x -= 1) {
assert(*item == x);
}
assert(x == 0);
}
assert(??list.pop() == 100);
assert(list.len == 99);
try list.pushMany([]i32 { 1, 2, 3 });
assert(list.len == 102);
assert(??list.pop() == 3);
assert(??list.pop() == 2);
assert(??list.pop() == 1);
assert(list.len == 99);
try list.pushMany([]const i32 {});
assert(list.len == 99);
var i: i32 = 99;
while (list.pop()) |item| : (i -= 1) {
assert(item == i);
list.shrinkCapacity(list.len);
}
}

File diff suppressed because it is too large

std/zig/bench.zig (new file, 38 lines)

@ -0,0 +1,38 @@
const std = @import("std");
const mem = std.mem;
const warn = std.debug.warn;
const Tokenizer = std.zig.Tokenizer;
const Parser = std.zig.Parser;
const io = std.io;
const source = @embedFile("../os/index.zig");
var fixed_buffer_mem: [10 * 1024 * 1024]u8 = undefined;
pub fn main() !void {
var i: usize = 0;
var timer = try std.os.time.Timer.start();
const start = timer.lap();
const iterations = 100;
var memory_used: usize = 0;
while (i < iterations) : (i += 1) {
memory_used += testOnce();
}
const end = timer.read();
memory_used /= iterations;
const elapsed_s = f64(end - start) / std.os.time.ns_per_s;
const bytes_per_sec = f64(source.len * iterations) / elapsed_s;
const mb_per_sec = bytes_per_sec / (1024 * 1024);
var stdout_file = try std.io.getStdOut();
const stdout = &std.io.FileOutStream.init(&stdout_file).stream;
try stdout.print("{.3} MB/s, {} KB used \n", mb_per_sec, memory_used / 1024);
}
fn testOnce() usize {
var fixed_buf_alloc = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
var allocator = &fixed_buf_alloc.allocator;
var tokenizer = Tokenizer.init(source);
var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
_ = parser.parse() catch @panic("parse failure");
return fixed_buf_alloc.end_index;
}


@ -1,11 +1,13 @@
const tokenizer = @import("tokenizer.zig");
pub const Token = tokenizer.Token;
pub const Tokenizer = tokenizer.Tokenizer;
pub const Parser = @import("parser.zig").Parser;
pub const parse = @import("parse.zig").parse;
pub const render = @import("render.zig").render;
pub const ast = @import("ast.zig");
test "std.zig tests" {
_ = @import("tokenizer.zig");
_ = @import("parser.zig");
_ = @import("ast.zig");
_ = @import("parse.zig");
_ = @import("render.zig");
_ = @import("tokenizer.zig");
}

std/zig/parse.zig (new file, 3503 lines)

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -1,29 +1,82 @@
// TODO
//if (sr > n_uword_bits - 1) // d > r
// return 0;
test "zig fmt: same-line comment after a statement" {
try testCanonical(
\\test "" {
\\ a = b;
\\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption
\\ a = b;
\\}
\\
);
}
// TODO switch with no body
// format(&size, error{}, countSize, fmt, args) catch |err| switch (err) {};
test "zig fmt: same-line comment after var decl in struct" {
try testCanonical(
\\pub const vfs_cap_data = extern struct {
\\ const Data = struct {}; // when on disk.
\\};
\\
);
}
test "zig fmt: same-line comment after field decl" {
try testCanonical(
\\pub const dirent = extern struct {
\\ d_name: u8,
\\ d_name: u8, // comment 1
\\ d_name: u8,
\\ d_name: u8, // comment 2
\\ d_name: u8,
\\};
\\
);
}
//TODO
//test "zig fmt: same-line comptime" {
// try testCanonical(
// \\test "" {
// \\ comptime assert(@typeId(T) == builtin.TypeId.Int); // must pass an integer to absInt
// \\}
// \\
// );
//}
test "zig fmt: same-line comment after switch prong" {
try testCanonical(
\\test "" {
\\ switch (err) {
\\ error.PathAlreadyExists => {}, // comment 2
\\ else => return err, // comment 1
\\ }
\\}
\\
);
}
test "zig fmt: same-line comment after non-block if expression" {
try testCanonical(
\\comptime {
\\ if (sr > n_uword_bits - 1) // d > r
\\ return 0;
\\}
\\
);
}
//TODO
//test "zig fmt: number literals" {
// try testCanonical(
// \\pub const f64_true_min = 4.94065645841246544177e-324;
// \\
// );
//}
test "zig fmt: same-line comment on comptime expression" {
try testCanonical(
\\test "" {
\\ comptime assert(@typeId(T) == builtin.TypeId.Int); // must pass an integer to absInt
\\}
\\
);
}
test "zig fmt: switch with empty body" {
try testCanonical(
\\test "" {
\\ foo() catch |err| switch (err) {};
\\}
\\
);
}
test "zig fmt: float literal with exponent" {
try testCanonical(
\\pub const f64_true_min = 4.94065645841246544177e-324;
\\
);
}
test "zig fmt: line comments in struct initializer" {
try testCanonical(
@ -144,18 +197,6 @@ test "zig fmt: comments before switch prong" {
);
}
test "zig fmt: same-line comment after switch prong" {
try testCanonical(
\\test "" {
\\ switch (err) {
\\ error.PathAlreadyExists => {}, // comment 2
\\ else => return err, // comment 1
\\ }
\\}
\\
);
}
test "zig fmt: comments before var decl in struct" {
try testCanonical(
\\pub const vfs_cap_data = extern struct {
@ -181,28 +222,6 @@ test "zig fmt: comments before var decl in struct" {
);
}
test "zig fmt: same-line comment after var decl in struct" {
try testCanonical(
\\pub const vfs_cap_data = extern struct {
\\ const Data = struct {}; // when on disk.
\\};
\\
);
}
test "zig fmt: same-line comment after field decl" {
try testCanonical(
\\pub const dirent = extern struct {
\\ d_name: u8,
\\ d_name: u8, // comment 1
\\ d_name: u8,
\\ d_name: u8, // comment 2
\\ d_name: u8,
\\};
\\
);
}
test "zig fmt: array literal with 1 item on 1 line" {
try testCanonical(
\\var s = []const u64{0} ** 25;
@ -210,17 +229,6 @@ test "zig fmt: array literal with 1 item on 1 line" {
);
}
test "zig fmt: same-line comment after a statement" {
try testCanonical(
\\test "" {
\\ a = b;
\\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption
\\ a = b;
\\}
\\
);
}
test "zig fmt: comments before global variables" {
try testCanonical(
\\/// Foo copies keys and values before they go into the map, and
@ -1084,37 +1092,60 @@ test "zig fmt: error return" {
const std = @import("std");
const mem = std.mem;
const warn = std.debug.warn;
const Tokenizer = std.zig.Tokenizer;
const Parser = std.zig.Parser;
const io = std.io;
var fixed_buffer_mem: [100 * 1024]u8 = undefined;
fn testParse(source: []const u8, allocator: &mem.Allocator) ![]u8 {
var tokenizer = Tokenizer.init(source);
var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
defer parser.deinit();
var stderr_file = try io.getStdErr();
var stderr = &io.FileOutStream.init(&stderr_file).stream;
var tree = try parser.parse();
var tree = try std.zig.parse(allocator, source);
defer tree.deinit();
var error_it = tree.errors.iterator(0);
while (error_it.next()) |parse_error| {
const token = tree.tokens.at(parse_error.loc());
const loc = tree.tokenLocation(0, parse_error.loc());
try stderr.print("(memory buffer):{}:{}: error: ", loc.line + 1, loc.column + 1);
try tree.renderError(parse_error, stderr);
try stderr.print("\n{}\n", source[loc.line_start..loc.line_end]);
{
var i: usize = 0;
while (i < loc.column) : (i += 1) {
try stderr.write(" ");
}
}
{
const caret_count = token.end - token.start;
var i: usize = 0;
while (i < caret_count) : (i += 1) {
try stderr.write("~");
}
}
try stderr.write("\n");
}
if (tree.errors.len != 0) {
return error.ParseError;
}
var buffer = try std.Buffer.initSize(allocator, 0);
errdefer buffer.deinit();
var buffer_out_stream = io.BufferOutStream.init(&buffer);
try parser.renderSource(&buffer_out_stream.stream, tree.root_node);
try std.zig.render(allocator, &buffer_out_stream.stream, &tree);
return buffer.toOwnedSlice();
}
fn testCanonical(source: []const u8) !void {
fn testTransform(source: []const u8, expected_source: []const u8) !void {
const needed_alloc_count = x: {
// Try it once with unlimited memory, make sure it works
var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, @maxValue(usize));
const result_source = try testParse(source, &failing_allocator.allocator);
if (!mem.eql(u8, result_source, source)) {
if (!mem.eql(u8, result_source, expected_source)) {
warn("\n====== expected this output: =========\n");
warn("{}", source);
warn("{}", expected_source);
warn("\n======== instead found this: =========\n");
warn("{}", result_source);
warn("\n======================================\n");
@ -1141,7 +1172,12 @@ fn testCanonical(source: []const u8) !void {
}
},
error.ParseError => @panic("test failed"),
else => @panic("test failed"),
}
}
}
fn testCanonical(source: []const u8) !void {
return testTransform(source, source);
}

std/zig/render.zig (new file, 1270 lines)

File diff suppressed because it is too large


@ -6,60 +6,60 @@ pub const Token = struct {
start: usize,
end: usize,
const KeywordId = struct {
const Keyword = struct {
bytes: []const u8,
id: Id,
};
const keywords = []KeywordId {
KeywordId{.bytes="align", .id = Id.Keyword_align},
KeywordId{.bytes="and", .id = Id.Keyword_and},
KeywordId{.bytes="asm", .id = Id.Keyword_asm},
KeywordId{.bytes="async", .id = Id.Keyword_async},
KeywordId{.bytes="await", .id = Id.Keyword_await},
KeywordId{.bytes="break", .id = Id.Keyword_break},
KeywordId{.bytes="catch", .id = Id.Keyword_catch},
KeywordId{.bytes="cancel", .id = Id.Keyword_cancel},
KeywordId{.bytes="comptime", .id = Id.Keyword_comptime},
KeywordId{.bytes="const", .id = Id.Keyword_const},
KeywordId{.bytes="continue", .id = Id.Keyword_continue},
KeywordId{.bytes="defer", .id = Id.Keyword_defer},
KeywordId{.bytes="else", .id = Id.Keyword_else},
KeywordId{.bytes="enum", .id = Id.Keyword_enum},
KeywordId{.bytes="errdefer", .id = Id.Keyword_errdefer},
KeywordId{.bytes="error", .id = Id.Keyword_error},
KeywordId{.bytes="export", .id = Id.Keyword_export},
KeywordId{.bytes="extern", .id = Id.Keyword_extern},
KeywordId{.bytes="false", .id = Id.Keyword_false},
KeywordId{.bytes="fn", .id = Id.Keyword_fn},
KeywordId{.bytes="for", .id = Id.Keyword_for},
KeywordId{.bytes="if", .id = Id.Keyword_if},
KeywordId{.bytes="inline", .id = Id.Keyword_inline},
KeywordId{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
KeywordId{.bytes="noalias", .id = Id.Keyword_noalias},
KeywordId{.bytes="null", .id = Id.Keyword_null},
KeywordId{.bytes="or", .id = Id.Keyword_or},
KeywordId{.bytes="packed", .id = Id.Keyword_packed},
KeywordId{.bytes="promise", .id = Id.Keyword_promise},
KeywordId{.bytes="pub", .id = Id.Keyword_pub},
KeywordId{.bytes="resume", .id = Id.Keyword_resume},
KeywordId{.bytes="return", .id = Id.Keyword_return},
KeywordId{.bytes="section", .id = Id.Keyword_section},
KeywordId{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
KeywordId{.bytes="struct", .id = Id.Keyword_struct},
KeywordId{.bytes="suspend", .id = Id.Keyword_suspend},
KeywordId{.bytes="switch", .id = Id.Keyword_switch},
KeywordId{.bytes="test", .id = Id.Keyword_test},
KeywordId{.bytes="this", .id = Id.Keyword_this},
KeywordId{.bytes="true", .id = Id.Keyword_true},
KeywordId{.bytes="try", .id = Id.Keyword_try},
KeywordId{.bytes="undefined", .id = Id.Keyword_undefined},
KeywordId{.bytes="union", .id = Id.Keyword_union},
KeywordId{.bytes="unreachable", .id = Id.Keyword_unreachable},
KeywordId{.bytes="use", .id = Id.Keyword_use},
KeywordId{.bytes="var", .id = Id.Keyword_var},
KeywordId{.bytes="volatile", .id = Id.Keyword_volatile},
KeywordId{.bytes="while", .id = Id.Keyword_while},
const keywords = []Keyword {
Keyword{.bytes="align", .id = Id.Keyword_align},
Keyword{.bytes="and", .id = Id.Keyword_and},
Keyword{.bytes="asm", .id = Id.Keyword_asm},
Keyword{.bytes="async", .id = Id.Keyword_async},
Keyword{.bytes="await", .id = Id.Keyword_await},
Keyword{.bytes="break", .id = Id.Keyword_break},
Keyword{.bytes="catch", .id = Id.Keyword_catch},
Keyword{.bytes="cancel", .id = Id.Keyword_cancel},
Keyword{.bytes="comptime", .id = Id.Keyword_comptime},
Keyword{.bytes="const", .id = Id.Keyword_const},
Keyword{.bytes="continue", .id = Id.Keyword_continue},
Keyword{.bytes="defer", .id = Id.Keyword_defer},
Keyword{.bytes="else", .id = Id.Keyword_else},
Keyword{.bytes="enum", .id = Id.Keyword_enum},
Keyword{.bytes="errdefer", .id = Id.Keyword_errdefer},
Keyword{.bytes="error", .id = Id.Keyword_error},
Keyword{.bytes="export", .id = Id.Keyword_export},
Keyword{.bytes="extern", .id = Id.Keyword_extern},
Keyword{.bytes="false", .id = Id.Keyword_false},
Keyword{.bytes="fn", .id = Id.Keyword_fn},
Keyword{.bytes="for", .id = Id.Keyword_for},
Keyword{.bytes="if", .id = Id.Keyword_if},
Keyword{.bytes="inline", .id = Id.Keyword_inline},
Keyword{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
Keyword{.bytes="noalias", .id = Id.Keyword_noalias},
Keyword{.bytes="null", .id = Id.Keyword_null},
Keyword{.bytes="or", .id = Id.Keyword_or},
Keyword{.bytes="packed", .id = Id.Keyword_packed},
Keyword{.bytes="promise", .id = Id.Keyword_promise},
Keyword{.bytes="pub", .id = Id.Keyword_pub},
Keyword{.bytes="resume", .id = Id.Keyword_resume},
Keyword{.bytes="return", .id = Id.Keyword_return},
Keyword{.bytes="section", .id = Id.Keyword_section},
Keyword{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
Keyword{.bytes="struct", .id = Id.Keyword_struct},
Keyword{.bytes="suspend", .id = Id.Keyword_suspend},
Keyword{.bytes="switch", .id = Id.Keyword_switch},
Keyword{.bytes="test", .id = Id.Keyword_test},
Keyword{.bytes="this", .id = Id.Keyword_this},
Keyword{.bytes="true", .id = Id.Keyword_true},
Keyword{.bytes="try", .id = Id.Keyword_try},
Keyword{.bytes="undefined", .id = Id.Keyword_undefined},
Keyword{.bytes="union", .id = Id.Keyword_union},
Keyword{.bytes="unreachable", .id = Id.Keyword_unreachable},
Keyword{.bytes="use", .id = Id.Keyword_use},
Keyword{.bytes="var", .id = Id.Keyword_var},
Keyword{.bytes="volatile", .id = Id.Keyword_volatile},
Keyword{.bytes="while", .id = Id.Keyword_while},
};
fn getKeyword(bytes: []const u8) ?Id {
@ -195,37 +195,6 @@ pub const Tokenizer = struct {
index: usize,
pending_invalid_token: ?Token,
pub const Location = struct {
line: usize,
column: usize,
line_start: usize,
line_end: usize,
};
pub fn getTokenLocation(self: &Tokenizer, start_index: usize, token: &const Token) Location {
var loc = Location {
.line = 0,
.column = 0,
.line_start = start_index,
.line_end = self.buffer.len,
};
for (self.buffer[start_index..]) |c, i| {
if (i + start_index == token.start) {
loc.line_end = i + start_index;
while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
return loc;
}
if (c == '\n') {
loc.line += 1;
loc.column = 0;
loc.line_start = i + 1;
} else {
loc.column += 1;
}
}
return loc;
}
/// For debugging purposes
pub fn dump(self: &Tokenizer, token: &const Token) void {
std.debug.warn("{} \"{}\"\n", @tagName(token.id), self.buffer[token.start..token.end]);
@ -912,10 +881,10 @@ pub const Tokenizer = struct {
},
},
State.FloatFraction => switch (c) {
'p', 'P' => {
'p', 'P', 'e', 'E' => {
state = State.FloatExponentUnsigned;
},
'0'...'9', 'a'...'f', 'A'...'F' => {},
'0'...'9' => {},
else => break,
},
State.FloatExponentUnsigned => switch (c) {
@ -1047,10 +1016,6 @@ pub const Tokenizer = struct {
return result;
}
pub fn getTokenSlice(self: &const Tokenizer, token: &const Token) []const u8 {
return self.buffer[token.start..token.end];
}
fn checkLiteralCharacter(self: &Tokenizer) void {
if (self.pending_invalid_token != null) return;
const invalid_length = self.getInvalidCharacterLength();
@ -1108,6 +1073,15 @@ test "tokenizer" {
});
}
test "tokenizer - float literal" {
testTokenize("a = 4.94065645841246544177e-324;\n", []Token.Id {
Token.Id.Identifier,
Token.Id.Equal,
Token.Id.FloatLiteral,
Token.Id.Semicolon,
});
}
test "tokenizer - chars" {
testTokenize("'c'", []Token.Id {Token.Id.CharLiteral});
}


@ -349,6 +349,31 @@ test "big number shifting" {
}
}
test "big number multi-limb shift and mask" {
comptime {
var a = 0xefffffffa0000001eeeeeeefaaaaaaab;
assert(u32(a & 0xffffffff) == 0xaaaaaaab);
a >>= 32;
assert(u32(a & 0xffffffff) == 0xeeeeeeef);
a >>= 32;
assert(u32(a & 0xffffffff) == 0xa0000001);
a >>= 32;
assert(u32(a & 0xffffffff) == 0xefffffff);
a >>= 32;
assert(a == 0);
}
}
test "big number multi-limb partial shift right" {
comptime {
var a = 0x1ffffffffeeeeeeee;
a >>= 16;
assert(a == 0x1ffffffffeeee);
}
}
test "xor" {
test_xor();
comptime test_xor();


@ -25,7 +25,7 @@ test "type info: integer, floating point type info" {
}
}
test "type info: pointer, array and nullable type info" {
test "type info: pointer type info" {
comptime {
const u32_ptr_info = @typeInfo(&u32);
assert(TypeId(u32_ptr_info) == TypeId.Pointer);
@ -33,12 +33,31 @@ test "type info: pointer, array and nullable type info" {
assert(u32_ptr_info.Pointer.is_volatile == false);
assert(u32_ptr_info.Pointer.alignment == 4);
assert(u32_ptr_info.Pointer.child == u32);
}
}
test "type info: slice type info" {
comptime {
const u32_slice_info = @typeInfo([]u32);
assert(TypeId(u32_slice_info) == TypeId.Slice);
assert(u32_slice_info.Slice.is_const == false);
assert(u32_slice_info.Slice.is_volatile == false);
assert(u32_slice_info.Slice.alignment == 4);
assert(u32_slice_info.Slice.child == u32);
}
}
test "type info: array type info" {
comptime {
const arr_info = @typeInfo([42]bool);
assert(TypeId(arr_info) == TypeId.Array);
assert(arr_info.Array.len == 42);
assert(arr_info.Array.child == bool);
}
}
test "type info: nullable type info" {
comptime {
const null_info = @typeInfo(?void);
assert(TypeId(null_info) == TypeId.Nullable);
assert(null_info.Nullable.child == void);
@ -100,11 +119,11 @@ test "type info: union info" {
assert(TypeId(typeinfo_info) == TypeId.Union);
assert(typeinfo_info.Union.layout == TypeInfo.ContainerLayout.Auto);
assert(typeinfo_info.Union.tag_type == TypeId);
assert(typeinfo_info.Union.fields.len == 25);
assert(typeinfo_info.Union.fields.len == 26);
assert(typeinfo_info.Union.fields[4].enum_field != null);
assert((??typeinfo_info.Union.fields[4].enum_field).value == 4);
assert(typeinfo_info.Union.fields[4].field_type == @typeOf(@typeInfo(u8).Int));
assert(typeinfo_info.Union.defs.len == 20);
assert(typeinfo_info.Union.defs.len == 21);
const TestNoTagUnion = union {
Foo: void,


@ -272,3 +272,15 @@ const PartialInstWithPayload = union(enum) {
Compiled: i32,
};
test "access a member of tagged union with conflicting enum tag name" {
const Bar = union(enum) {
A: A,
B: B,
const A = u8;
const B = void;
};
comptime assert(Bar.A == u8);
}


@ -1,6 +1,27 @@
const tests = @import("tests.zig");
pub fn addCases(cases: &tests.TranslateCContext) void {
cases.add("double define struct",
\\typedef struct Bar Bar;
\\typedef struct Foo Foo;
\\
\\struct Foo {
\\ Foo *a;
\\};
\\
\\struct Bar {
\\ Foo *a;
\\};
,
\\pub const struct_Foo = extern struct {
\\ a: ?&Foo,
\\};
\\pub const Foo = struct_Foo;
\\pub const struct_Bar = extern struct {
\\ a: ?&Foo,
\\};
);
cases.addAllowWarnings("simple data types",
\\#include <stdint.h>
\\int foo(char a, unsigned char b, signed char c);