add implicit cast from [0]T to %[]T

closes #347

also add std.os.path.relative
This commit is contained in:
Andrew Kelley 2017-04-29 19:23:33 -04:00
parent d04d3ec775
commit cbfe4b4bae
4 changed files with 159 additions and 6 deletions

View File

@ -5963,7 +5963,7 @@ static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira,
return ImplicitCastMatchResultYes;
}
// implicit conversion from error child type to error type
// implicit T to %T
if (expected_type->id == TypeTableEntryIdErrorUnion &&
ir_types_match_with_implicit_cast(ira, expected_type->data.error.child_type, actual_type, value))
{
@ -6012,7 +6012,7 @@ static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira,
return ImplicitCastMatchResultYes;
}
// implicit array to slice conversion
// implicit [N]T to []const T
if (expected_type->id == TypeTableEntryIdStruct &&
expected_type->data.structure.is_slice &&
actual_type->id == TypeTableEntryIdArray)
@ -6027,6 +6027,21 @@ static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira,
}
}
//// implicit [N]T to %[]const T
//if (expected_type->id == TypeTableEntryIdErrorUnion &&
// is_slice(expected_type->data.error.child_type) &&
// actual_type->id == TypeTableEntryIdArray)
//{
// TypeTableEntry *ptr_type =
// expected_type->data.error.child_type->data.structure.fields[slice_ptr_index].type_entry;
// assert(ptr_type->id == TypeTableEntryIdPointer);
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
// types_match_const_cast_only(ptr_type->data.pointer.child_type, actual_type->data.array.child_type))
// {
// return ImplicitCastMatchResultYes;
// }
//}
// implicit [N]T to &const []const N
if (expected_type->id == TypeTableEntryIdPointer &&
expected_type->data.pointer.is_const &&
@ -6799,6 +6814,8 @@ static IrInstruction *ir_analyze_array_to_slice(IrAnalyze *ira, IrInstruction *s
IrInstruction *array, TypeTableEntry *wanted_type)
{
assert(is_slice(wanted_type));
// In this function we honor the const-ness of wanted_type, because
// we may be casting [0]T to []const T which is perfectly valid.
TypeTableEntry *array_type = array->value.type;
assert(array_type->id == TypeTableEntryIdArray);
@ -6807,6 +6824,7 @@ static IrInstruction *ir_analyze_array_to_slice(IrAnalyze *ira, IrInstruction *s
IrInstruction *result = ir_create_const(&ira->new_irb, source_instr->scope,
source_instr->source_node, wanted_type);
init_const_slice(ira->codegen, &result->value, &array->value, 0, array_type->data.array.len, true);
result->value.type = wanted_type;
return result;
}
@ -6822,8 +6840,7 @@ static IrInstruction *ir_analyze_array_to_slice(IrAnalyze *ira, IrInstruction *s
IrInstruction *result = ir_build_slice(&ira->new_irb, source_instr->scope,
source_instr->source_node, array_ptr, start, end, false);
TypeTableEntry *child_type = array_type->data.array.child_type;
result->value.type = get_slice_type(ira->codegen, child_type, true);
result->value.type = wanted_type;
ir_add_alloca(ira, result, result->value.type);
return result;
@ -7200,6 +7217,29 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
// explicit cast from [N]T to %[]const T
if (wanted_type->id == TypeTableEntryIdErrorUnion &&
is_slice(wanted_type->data.error.child_type) &&
actual_type->id == TypeTableEntryIdArray)
{
TypeTableEntry *ptr_type =
wanted_type->data.error.child_type->data.structure.fields[slice_ptr_index].type_entry;
assert(ptr_type->id == TypeTableEntryIdPointer);
if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
types_match_const_cast_only(ptr_type->data.pointer.child_type, actual_type->data.array.child_type))
{
IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error.child_type, value);
if (type_is_invalid(cast1->value.type))
return ira->codegen->invalid_instruction;
IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
if (type_is_invalid(cast2->value.type))
return ira->codegen->invalid_instruction;
return cast2;
}
}
// explicit cast from pure error to error union type
if (wanted_type->id == TypeTableEntryIdErrorUnion &&
actual_type->id == TypeTableEntryIdPureError)

View File

@ -10,8 +10,10 @@ error NoMem;
pub const Allocator = struct {
allocFn: fn (self: &Allocator, n: usize) -> %[]u8,
/// Note that old_mem may be a slice of length 0, in which case reallocFn
/// should simply call allocFn.
reallocFn: fn (self: &Allocator, old_mem: []u8, new_size: usize) -> %[]u8,
/// Note that mem may be a slice of length 0, in which case freeFn
/// should do nothing.
freeFn: fn (self: &Allocator, mem: []u8),
/// Aborts the program if an allocation fails.
@ -228,6 +230,10 @@ pub fn eql_slice_u8(a: []const u8, b: []const u8) -> bool {
return eql(u8, a, b);
}
/// Returns an iterator that iterates over the slices of ::s that are
/// separated by the byte ::c, e.g.
/// split(" abc def ghi ")
/// Will return slices for "abc", "def", "ghi", null, in that order.
pub fn split(s: []const u8, c: u8) -> SplitIterator {
SplitIterator {
.index = 0,
@ -236,6 +242,14 @@ pub fn split(s: []const u8, c: u8) -> SplitIterator {
}
}
// Exercises split() on a string with leading, trailing, and repeated
// separators: only the non-empty tokens are yielded, then null.
// `??` is the 2017-era unwrap operator; it asserts the optional is non-null.
test "mem.split" {
var it = split("   abc def   ghi  ", ' ');
assert(eql(u8, ??it.next(), "abc"));
assert(eql(u8, ??it.next(), "def"));
assert(eql(u8, ??it.next(), "ghi"));
assert(it.next() == null);
}
/// Returns true if ::haystack begins with the full contents of ::needle,
/// compared element-wise via eql. A needle longer than the haystack can
/// never match, so that case short-circuits to false before slicing.
pub fn startsWith(comptime T: type, haystack: []const T, needle: []const T) -> bool {
return if (needle.len > haystack.len) false else eql(T, haystack[0...needle.len], needle);
}
@ -259,6 +273,14 @@ const SplitIterator = struct {
return self.s[start...end];
}
/// Returns a slice of the remaining bytes. Does not affect iterator state.
pub fn rest(self: &const SplitIterator) -> []const u8 {
// move to beginning of token
// Skip any separator bytes at the current position so the returned slice
// starts at the next token (old-style `while (cond; continue-expr)` form).
var index: usize = self.index;
while (index < self.s.len and self.s[index] == self.c; index += 1) {}
return self.s[index...];
}
};
test "testStringEquality" {

View File

@ -3,8 +3,16 @@ const assert = debug.assert;
const mem = @import("../mem.zig");
const Allocator = mem.Allocator;
const os = @import("index.zig");
const math = @import("../math.zig");
/// The native path separator for the target OS:
/// backslash on Windows, forward slash everywhere else.
// NOTE(review): the diff rendering also showed the stale pre-change line
// `pub const sep = '/';`, which would be a duplicate declaration of `sep`;
// only the switch form belongs in the file.
pub const sep = switch (@compileVar("os")) {
Os.windows => '\\',
else => '/',
};
/// The delimiter used between entries of PATH-style environment variables:
/// ';' on Windows, ':' everywhere else.
pub const delimiter = switch (@compileVar("os")) {
Os.windows => ';',
else => ':',
};
/// Naively combines a series of paths with the native path separator.
/// Allocates memory for the result, which must be freed by the caller.
@ -134,6 +142,7 @@ test "os.path.resolve" {
assert(mem.eql(u8, testResolve("/a/b", "c", "//d", "e///"), "/d/e"));
assert(mem.eql(u8, testResolve("/a/b/c", "..", "../"), "/a"));
assert(mem.eql(u8, testResolve("/", "..", ".."), "/"));
assert(mem.eql(u8, testResolve("/a/b/c/"), "/a/b/c"));
}
// Test helper: resolves the variadic path components with the global
// debug allocator; `%%` asserts that resolve() cannot fail here.
fn testResolve(args: ...) -> []u8 {
return %%resolve(&debug.global_allocator, args);
}
// Test helper: asserts dirname(input) equals expected_output byte-for-byte.
fn testDirname(input: []const u8, expected_output: []const u8) {
assert(mem.eql(u8, dirname(input), expected_output));
}
/// Returns the relative path from ::from to ::to. If ::from and ::to each
/// resolve to the same path (after calling ::resolve on each), a zero-length
/// string is returned.
/// Caller owns the returned slice and must free it with ::allocator.
pub fn relative(allocator: &Allocator, from: []const u8, to: []const u8) -> %[]u8 {
const resolved_from = %return resolve(allocator, from);
defer allocator.free(resolved_from);
const resolved_to = %return resolve(allocator, to);
defer allocator.free(resolved_to);
var from_it = mem.split(resolved_from, '/');
var to_it = mem.split(resolved_to, '/');
while (true) {
// ::from exhausted: everything remaining in ::to is the relative path.
const from_component = from_it.next() ?? return mem.dupe(allocator, u8, to_it.rest());
const to_rest = to_it.rest();
// Skip components the two paths share. (Fixed: the rendered source had
// `test(...)` here, which is invalid; payload-capture `if` is intended.)
if (to_it.next()) |to_component| {
if (mem.eql(u8, from_component, to_component))
continue;
}
// Paths diverge at this component: emit one "../" for it plus one for
// each component still left in ::from, then append the rest of ::to.
var up_count: usize = 1;
while (true) {
_ = from_it.next() ?? break;
up_count += 1;
}
const up_index_end = up_count * "../".len;
const result = %return allocator.alloc(u8, up_index_end + to_rest.len);
%defer allocator.free(result);
var result_index: usize = 0;
while (result_index < up_index_end) {
result[result_index] = '.';
result_index += 1;
result[result_index] = '.';
result_index += 1;
result[result_index] = '/';
result_index += 1;
}
if (to_rest.len == 0) {
// shave off the trailing slash
return result[0...result_index - 1];
}
mem.copy(u8, result[result_index...], to_rest);
return result;
}
// NOTE(review): the loop above only exits via return, so this line looks
// unreachable; kept from the original as a defensive fallback.
return []u8{};
}
// Cases mirror Node.js path.relative() semantics: shared prefix components
// are dropped, remaining ::from components become "..", and sibling names
// that merely share a prefix ("baz" vs "baz-quux") are NOT treated as equal.
test "os.path.relative" {
testRelative("/var/lib", "/var", "..");
testRelative("/var/lib", "/bin", "../../bin");
testRelative("/var/lib", "/var/lib", "");
testRelative("/var/lib", "/var/apache", "../apache");
testRelative("/var/", "/var/lib", "lib");
testRelative("/", "/var/lib", "var/lib");
testRelative("/foo/test", "/foo/test/bar/package.json", "bar/package.json");
testRelative("/Users/a/web/b/test/mails", "/Users/a/web/b", "../..");
testRelative("/foo/bar/baz-quux", "/foo/bar/baz", "../baz");
testRelative("/foo/bar/baz", "/foo/bar/baz-quux", "../baz-quux");
testRelative("/baz-quux", "/baz", "../baz");
testRelative("/baz", "/baz-quux", "../baz-quux");
}
// Test helper: computes relative(from, to) with the global debug allocator
// (`%%` asserts no error) and checks the result byte-for-byte.
fn testRelative(from: []const u8, to: []const u8, expected_output: []const u8) {
const result = %%relative(&debug.global_allocator, from, to);
assert(mem.eql(u8, result, expected_output));
}

View File

@ -159,3 +159,17 @@ test "implicitly cast from [N]T to ?[]const T" {
// Returns a string literal, relying on the implicit cast from a fixed-size
// array to an optional slice ?[]const u8 (the feature under test).
fn castToMaybeSlice() -> ?[]const u8 {
return "hi";
}
// Runs the zero-length-array-to-error-union-slice cast both at runtime and
// at comptime, exercising the new implicit cast added by this commit.
test "implicitly cast from [0]T to %[]T" {
testCastZeroArrayToErrSliceMut();
comptime testCastZeroArrayToErrSliceMut();
}
// Unwraps the error union (`%%` asserts success) and checks the resulting
// slice is empty, matching the [0]u8 source array.
fn testCastZeroArrayToErrSliceMut() {
assert((%%gimmeErrOrSlice()).len == 0);
}
// Returns an empty [0]u8 array literal; the implicit [0]T -> %[]T cast
// (valid even for a mutable slice, since there is nothing to mutate)
// converts it to the %[]u8 error-union return type.
fn gimmeErrOrSlice() -> %[]u8 {
return []u8{};
}