Mirror of https://github.com/ziglang/zig.git (synced 2026-02-20 00:08:56 +00:00)

Commit b48948d6e8: Merge branch 'master' into llvm7
.gitignore (vendored): 11 lines changed
@@ -1,3 +1,14 @@
+# This file is for zig-specific build artifacts.
+# If you have OS-specific or editor-specific files to ignore,
+# such as *.swp or .DS_Store, put those in your global
+# ~/.gitignore and put this in your ~/.gitconfig:
+#
+# [core]
+# excludesfile = ~/.gitignore
+#
+# Cheers!
+# -andrewrk
+
 zig-cache/
 build/
 build-*/
@@ -4690,9 +4690,9 @@ test "coroutine suspend with block" {
 var a_promise: promise = undefined;
 var result = false;
 async fn testSuspendBlock() void {
-    suspend |p| {
-        comptime assert(@typeOf(p) == promise->void);
-        a_promise = p;
+    suspend {
+        comptime assert(@typeOf(@handle()) == promise->void);
+        a_promise = @handle();
     }
     result = true;
 }
@@ -4733,8 +4733,8 @@ test "resume from suspend" {
     std.debug.assert(my_result == 2);
 }
 async fn testResumeFromSuspend(my_result: *i32) void {
-    suspend |p| {
-        resume p;
+    suspend {
+        resume @handle();
     }
     my_result.* += 1;
     suspend;
@@ -4791,9 +4791,9 @@ async fn amain() void {
 }
 async fn another() i32 {
     seq('c');
-    suspend |p| {
+    suspend {
         seq('d');
-        a_promise = p;
+        a_promise = @handle();
     }
     seq('g');
     return 1234;
@@ -5383,6 +5383,16 @@ test "main" {
 This function is only valid within function scope.
 </p>
 {#header_close#}
+{#header_open|@handle#}
+<pre><code class="zig">@handle()</code></pre>
+<p>
+This function returns a <code>promise->T</code> type, where <code>T</code>
+is the return type of the async function in scope.
+</p>
+<p>
+This function is only valid within an async function scope.
+</p>
+{#header_close#}
 {#header_open|@import#}
 <pre><code class="zig">@import(comptime path: []u8) (namespace)</code></pre>
 <p>
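A minimal sketch of the documented behaviour, modelled on the langref test earlier in this diff (the surrounding test harness is omitted; the variable and function names are taken from that test):

    const assert = @import("std").debug.assert;

    var a_promise: promise = undefined;

    async fn testSuspendBlock() void {
        suspend {
            // @handle() is the promise of the coroutine we are currently in,
            // so for a function returning void its type is promise->void.
            comptime assert(@typeOf(@handle()) == promise->void);
            a_promise = @handle(); // stash it so the caller can resume us later
        }
    }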
@@ -7388,7 +7398,7 @@ Defer(body) = ("defer" | "deferror") body
 
 IfExpression(body) = "if" "(" Expression ")" body option("else" BlockExpression(body))
 
-SuspendExpression(body) = "suspend" option(("|" Symbol "|" body))
+SuspendExpression(body) = "suspend" option( body )
 
 IfErrorExpression(body) = "if" "(" Expression ")" option("|" option("*") Symbol "|") body "else" "|" Symbol "|" BlockExpression(body)
 
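In practice the revised production accepts a bare suspend statement or a suspend with a block body; the |p| payload form no longer parses. A short illustrative sketch (the function name is made up):

    async fn pauseOnce() void {
        suspend; // no body: stay suspended until the caller resumes the coroutine

        suspend {
            // with a block body: resuming our own handle here means execution
            // continues right after the block, as in testResumeFromSuspend above
            resume @handle();
        }
    }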
@ -899,7 +899,6 @@ struct AstNodeAwaitExpr {
|
||||
|
||||
struct AstNodeSuspend {
|
||||
AstNode *block;
|
||||
AstNode *promise_symbol;
|
||||
};
|
||||
|
||||
struct AstNodePromiseType {
|
||||
@ -1358,6 +1357,7 @@ enum BuiltinFnId {
|
||||
BuiltinFnIdBreakpoint,
|
||||
BuiltinFnIdReturnAddress,
|
||||
BuiltinFnIdFrameAddress,
|
||||
BuiltinFnIdHandle,
|
||||
BuiltinFnIdEmbedFile,
|
||||
BuiltinFnIdCmpxchgWeak,
|
||||
BuiltinFnIdCmpxchgStrong,
|
||||
@ -1714,6 +1714,7 @@ struct CodeGen {
|
||||
LLVMValueRef coro_save_fn_val;
|
||||
LLVMValueRef coro_promise_fn_val;
|
||||
LLVMValueRef coro_alloc_helper_fn_val;
|
||||
LLVMValueRef coro_frame_fn_val;
|
||||
LLVMValueRef merge_err_ret_traces_fn_val;
|
||||
LLVMValueRef add_error_return_trace_addr_fn_val;
|
||||
LLVMValueRef stacksave_fn_val;
|
||||
@ -2074,6 +2075,7 @@ enum IrInstructionId {
|
||||
IrInstructionIdBreakpoint,
|
||||
IrInstructionIdReturnAddress,
|
||||
IrInstructionIdFrameAddress,
|
||||
IrInstructionIdHandle,
|
||||
IrInstructionIdAlignOf,
|
||||
IrInstructionIdOverflowOp,
|
||||
IrInstructionIdTestErr,
|
||||
@ -2791,6 +2793,10 @@ struct IrInstructionFrameAddress {
|
||||
IrInstruction base;
|
||||
};
|
||||
|
||||
struct IrInstructionHandle {
|
||||
IrInstruction base;
|
||||
};
|
||||
|
||||
enum IrOverflowOp {
|
||||
IrOverflowOpAdd,
|
||||
IrOverflowOpSub,
|
||||
|
||||
@ -1112,9 +1112,6 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
|
||||
{
|
||||
fprintf(ar->f, "suspend");
|
||||
if (node->data.suspend.block != nullptr) {
|
||||
fprintf(ar->f, " |");
|
||||
render_node_grouped(ar, node->data.suspend.promise_symbol);
|
||||
fprintf(ar->f, "| ");
|
||||
render_node_grouped(ar, node->data.suspend.block);
|
||||
}
|
||||
break;
|
||||
|
||||
@ -4057,6 +4057,26 @@ static LLVMValueRef ir_render_frame_address(CodeGen *g, IrExecutable *executable
|
||||
return LLVMBuildCall(g->builder, get_frame_address_fn_val(g), &zero, 1, "");
|
||||
}
|
||||
|
||||
static LLVMValueRef get_handle_fn_val(CodeGen *g) {
|
||||
if (g->coro_frame_fn_val)
|
||||
return g->coro_frame_fn_val;
|
||||
|
||||
LLVMTypeRef fn_type = LLVMFunctionType( LLVMPointerType(LLVMInt8Type(), 0)
|
||||
, nullptr, 0, false);
|
||||
Buf *name = buf_sprintf("llvm.coro.frame");
|
||||
g->coro_frame_fn_val = LLVMAddFunction(g->module, buf_ptr(name), fn_type);
|
||||
assert(LLVMGetIntrinsicID(g->coro_frame_fn_val));
|
||||
|
||||
return g->coro_frame_fn_val;
|
||||
}
|
||||
|
||||
static LLVMValueRef ir_render_handle(CodeGen *g, IrExecutable *executable,
|
||||
IrInstructionHandle *instruction)
|
||||
{
|
||||
LLVMValueRef zero = LLVMConstNull(g->builtin_types.entry_promise->type_ref);
|
||||
return LLVMBuildCall(g->builder, get_handle_fn_val(g), &zero, 0, "");
|
||||
}
|
||||
|
||||
static LLVMValueRef render_shl_with_overflow(CodeGen *g, IrInstructionOverflowOp *instruction) {
|
||||
TypeTableEntry *int_type = instruction->result_ptr_type;
|
||||
assert(int_type->id == TypeTableEntryIdInt);
|
||||
@ -4821,6 +4841,8 @@ static LLVMValueRef ir_render_instruction(CodeGen *g, IrExecutable *executable,
|
||||
return ir_render_return_address(g, executable, (IrInstructionReturnAddress *)instruction);
|
||||
case IrInstructionIdFrameAddress:
|
||||
return ir_render_frame_address(g, executable, (IrInstructionFrameAddress *)instruction);
|
||||
case IrInstructionIdHandle:
|
||||
return ir_render_handle(g, executable, (IrInstructionHandle *)instruction);
|
||||
case IrInstructionIdOverflowOp:
|
||||
return ir_render_overflow_op(g, executable, (IrInstructionOverflowOp *)instruction);
|
||||
case IrInstructionIdTestErr:
|
||||
@ -5916,6 +5938,7 @@ static void do_code_gen(CodeGen *g) {
|
||||
ir_render(g, fn_table_entry);
|
||||
|
||||
}
|
||||
|
||||
assert(!g->errors.length);
|
||||
|
||||
if (buf_len(&g->global_asm) != 0) {
|
||||
@ -6255,6 +6278,7 @@ static void define_builtin_fns(CodeGen *g) {
|
||||
create_builtin_fn(g, BuiltinFnIdBreakpoint, "breakpoint", 0);
|
||||
create_builtin_fn(g, BuiltinFnIdReturnAddress, "returnAddress", 0);
|
||||
create_builtin_fn(g, BuiltinFnIdFrameAddress, "frameAddress", 0);
|
||||
create_builtin_fn(g, BuiltinFnIdHandle, "handle", 0);
|
||||
create_builtin_fn(g, BuiltinFnIdMemcpy, "memcpy", 3);
|
||||
create_builtin_fn(g, BuiltinFnIdMemset, "memset", 3);
|
||||
create_builtin_fn(g, BuiltinFnIdSizeof, "sizeOf", 1);
|
||||
|
||||
src/ir.cpp: 78 lines changed
@ -580,6 +580,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionFrameAddress *)
|
||||
return IrInstructionIdFrameAddress;
|
||||
}
|
||||
|
||||
static constexpr IrInstructionId ir_instruction_id(IrInstructionHandle *) {
|
||||
return IrInstructionIdHandle;
|
||||
}
|
||||
|
||||
static constexpr IrInstructionId ir_instruction_id(IrInstructionAlignOf *) {
|
||||
return IrInstructionIdAlignOf;
|
||||
}
|
||||
@ -2240,6 +2244,17 @@ static IrInstruction *ir_build_frame_address_from(IrBuilder *irb, IrInstruction
|
||||
return new_instruction;
|
||||
}
|
||||
|
||||
static IrInstruction *ir_build_handle(IrBuilder *irb, Scope *scope, AstNode *source_node) {
|
||||
IrInstructionHandle *instruction = ir_build_instruction<IrInstructionHandle>(irb, scope, source_node);
|
||||
return &instruction->base;
|
||||
}
|
||||
|
||||
static IrInstruction *ir_build_handle_from(IrBuilder *irb, IrInstruction *old_instruction) {
|
||||
IrInstruction *new_instruction = ir_build_handle(irb, old_instruction->scope, old_instruction->source_node);
|
||||
ir_link_new_instruction(new_instruction, old_instruction);
|
||||
return new_instruction;
|
||||
}
|
||||
|
||||
static IrInstruction *ir_build_overflow_op(IrBuilder *irb, Scope *scope, AstNode *source_node,
|
||||
IrOverflowOp op, IrInstruction *type_value, IrInstruction *op1, IrInstruction *op2,
|
||||
IrInstruction *result_ptr, TypeTableEntry *result_ptr_type)
|
||||
@ -3317,7 +3332,15 @@ static VariableTableEntry *create_local_var(CodeGen *codegen, AstNode *node, Sco
|
||||
static VariableTableEntry *ir_create_var(IrBuilder *irb, AstNode *node, Scope *scope, Buf *name,
|
||||
bool src_is_const, bool gen_is_const, bool is_shadowable, IrInstruction *is_comptime)
|
||||
{
|
||||
VariableTableEntry *var = create_local_var(irb->codegen, node, scope, name, src_is_const, gen_is_const, is_shadowable, is_comptime);
|
||||
bool is_underscored = name ? buf_eql_str(name, "_") : false;
|
||||
VariableTableEntry *var = create_local_var( irb->codegen
|
||||
, node
|
||||
, scope
|
||||
, (is_underscored ? nullptr : name)
|
||||
, src_is_const
|
||||
, gen_is_const
|
||||
, (is_underscored ? true : is_shadowable)
|
||||
, is_comptime );
|
||||
if (is_comptime != nullptr || gen_is_const) {
|
||||
var->mem_slot_index = exec_next_mem_slot(irb->exec);
|
||||
var->owner_exec = irb->exec;
|
||||
@ -3843,6 +3866,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
|
||||
return irb->codegen->invalid_instruction;
|
||||
}
|
||||
|
||||
bool is_async = exec_is_async(irb->exec);
|
||||
|
||||
switch (builtin_fn->id) {
|
||||
case BuiltinFnIdInvalid:
|
||||
zig_unreachable();
|
||||
@ -4475,6 +4500,16 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
|
||||
return ir_lval_wrap(irb, scope, ir_build_return_address(irb, scope, node), lval);
|
||||
case BuiltinFnIdFrameAddress:
|
||||
return ir_lval_wrap(irb, scope, ir_build_frame_address(irb, scope, node), lval);
|
||||
case BuiltinFnIdHandle:
|
||||
if (!irb->exec->fn_entry) {
|
||||
add_node_error(irb->codegen, node, buf_sprintf("@handle() called outside of function definition"));
|
||||
return irb->codegen->invalid_instruction;
|
||||
}
|
||||
if (!is_async) {
|
||||
add_node_error(irb->codegen, node, buf_sprintf("@handle() in non-async function"));
|
||||
return irb->codegen->invalid_instruction;
|
||||
}
|
||||
return ir_lval_wrap(irb, scope, ir_build_handle(irb, scope, node), lval);
|
||||
case BuiltinFnIdAlignOf:
|
||||
{
|
||||
AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
|
||||
@ -5159,6 +5194,11 @@ static IrInstruction *ir_gen_var_decl(IrBuilder *irb, Scope *scope, AstNode *nod
|
||||
|
||||
AstNodeVariableDeclaration *variable_declaration = &node->data.variable_declaration;
|
||||
|
||||
if (buf_eql_str(variable_declaration->symbol, "_")) {
|
||||
add_node_error(irb->codegen, node, buf_sprintf("`_` is not a declarable symbol"));
|
||||
return irb->codegen->invalid_instruction;
|
||||
}
|
||||
|
||||
IrInstruction *type_instruction;
|
||||
if (variable_declaration->type != nullptr) {
|
||||
type_instruction = ir_gen_node(irb, variable_declaration->type, scope);
|
||||
@ -5171,6 +5211,7 @@ static IrInstruction *ir_gen_var_decl(IrBuilder *irb, Scope *scope, AstNode *nod
|
||||
bool is_shadowable = false;
|
||||
bool is_const = variable_declaration->is_const;
|
||||
bool is_extern = variable_declaration->is_extern;
|
||||
|
||||
IrInstruction *is_comptime = ir_build_const_bool(irb, scope, node,
|
||||
ir_should_inline(irb->exec, scope) || variable_declaration->is_comptime);
|
||||
VariableTableEntry *var = ir_create_var(irb, node, scope, variable_declaration->symbol,
|
||||
@ -7069,19 +7110,8 @@ static IrInstruction *ir_gen_suspend(IrBuilder *irb, Scope *parent_scope, AstNod
|
||||
if (node->data.suspend.block == nullptr) {
|
||||
suspend_code = ir_build_coro_suspend(irb, parent_scope, node, nullptr, const_bool_false);
|
||||
} else {
|
||||
assert(node->data.suspend.promise_symbol != nullptr);
|
||||
assert(node->data.suspend.promise_symbol->type == NodeTypeSymbol);
|
||||
Buf *promise_symbol_name = node->data.suspend.promise_symbol->data.symbol_expr.symbol;
|
||||
Scope *child_scope;
|
||||
if (!buf_eql_str(promise_symbol_name, "_")) {
|
||||
VariableTableEntry *promise_var = ir_create_var(irb, node, parent_scope, promise_symbol_name,
|
||||
true, true, false, const_bool_false);
|
||||
ir_build_var_decl(irb, parent_scope, node, promise_var, nullptr, nullptr, irb->exec->coro_handle);
|
||||
child_scope = promise_var->child_scope;
|
||||
} else {
|
||||
child_scope = parent_scope;
|
||||
}
|
||||
ScopeSuspend *suspend_scope = create_suspend_scope(node, child_scope);
|
||||
ScopeSuspend *suspend_scope = create_suspend_scope(node, parent_scope);
|
||||
suspend_scope->resume_block = resume_block;
|
||||
child_scope = &suspend_scope->base;
|
||||
IrInstruction *save_token = ir_build_coro_save(irb, child_scope, node, irb->exec->coro_handle);
|
||||
@ -9598,6 +9628,9 @@ static ConstExprValue *ir_resolve_const(IrAnalyze *ira, IrInstruction *value, Un
|
||||
case ConstValSpecialStatic:
|
||||
return &value->value;
|
||||
case ConstValSpecialRuntime:
|
||||
if (!type_has_bits(value->value.type)) {
|
||||
return &value->value;
|
||||
}
|
||||
ir_add_error(ira, value, buf_sprintf("unable to evaluate constant expression"));
|
||||
return nullptr;
|
||||
case ConstValSpecialUndef:
|
||||
@ -16099,8 +16132,14 @@ static TypeTableEntry *ir_analyze_container_init_fields_union(IrAnalyze *ira, Ir
|
||||
if (casted_field_value == ira->codegen->invalid_instruction)
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
|
||||
type_ensure_zero_bits_known(ira->codegen, casted_field_value->value.type);
|
||||
if (type_is_invalid(casted_field_value->value.type))
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
|
||||
bool is_comptime = ir_should_inline(ira->new_irb.exec, instruction->scope);
|
||||
if (is_comptime || casted_field_value->value.special != ConstValSpecialRuntime) {
|
||||
if (is_comptime || casted_field_value->value.special != ConstValSpecialRuntime ||
|
||||
!type_has_bits(casted_field_value->value.type))
|
||||
{
|
||||
ConstExprValue *field_val = ir_resolve_const(ira, casted_field_value, UndefOk);
|
||||
if (!field_val)
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
@ -19007,6 +19046,14 @@ static TypeTableEntry *ir_analyze_instruction_frame_address(IrAnalyze *ira, IrIn
|
||||
return u8_ptr_const;
|
||||
}
|
||||
|
||||
static TypeTableEntry *ir_analyze_instruction_handle(IrAnalyze *ira, IrInstructionHandle *instruction) {
|
||||
ir_build_handle_from(&ira->new_irb, &instruction->base);
|
||||
|
||||
FnTableEntry *fn_entry = exec_fn_entry(ira->new_irb.exec);
|
||||
assert(fn_entry != nullptr);
|
||||
return get_promise_type(ira->codegen, fn_entry->type_entry->data.fn.fn_type_id.return_type);
|
||||
}
|
||||
|
||||
static TypeTableEntry *ir_analyze_instruction_align_of(IrAnalyze *ira, IrInstructionAlignOf *instruction) {
|
||||
IrInstruction *type_value = instruction->type_value->other;
|
||||
if (type_is_invalid(type_value->value.type))
|
||||
@ -20982,6 +21029,8 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
|
||||
return ir_analyze_instruction_return_address(ira, (IrInstructionReturnAddress *)instruction);
|
||||
case IrInstructionIdFrameAddress:
|
||||
return ir_analyze_instruction_frame_address(ira, (IrInstructionFrameAddress *)instruction);
|
||||
case IrInstructionIdHandle:
|
||||
return ir_analyze_instruction_handle(ira, (IrInstructionHandle *)instruction);
|
||||
case IrInstructionIdAlignOf:
|
||||
return ir_analyze_instruction_align_of(ira, (IrInstructionAlignOf *)instruction);
|
||||
case IrInstructionIdOverflowOp:
|
||||
@ -21274,6 +21323,7 @@ bool ir_has_side_effects(IrInstruction *instruction) {
|
||||
case IrInstructionIdAlignOf:
|
||||
case IrInstructionIdReturnAddress:
|
||||
case IrInstructionIdFrameAddress:
|
||||
case IrInstructionIdHandle:
|
||||
case IrInstructionIdTestErr:
|
||||
case IrInstructionIdUnwrapErrCode:
|
||||
case IrInstructionIdOptionalWrap:
|
||||
|
||||
@ -791,6 +791,10 @@ static void ir_print_frame_address(IrPrint *irp, IrInstructionFrameAddress *inst
|
||||
fprintf(irp->f, "@frameAddress()");
|
||||
}
|
||||
|
||||
static void ir_print_handle(IrPrint *irp, IrInstructionHandle *instruction) {
|
||||
fprintf(irp->f, "@handle()");
|
||||
}
|
||||
|
||||
static void ir_print_return_address(IrPrint *irp, IrInstructionReturnAddress *instruction) {
|
||||
fprintf(irp->f, "@returnAddress()");
|
||||
}
|
||||
@ -1556,6 +1560,9 @@ static void ir_print_instruction(IrPrint *irp, IrInstruction *instruction) {
|
||||
case IrInstructionIdFrameAddress:
|
||||
ir_print_frame_address(irp, (IrInstructionFrameAddress *)instruction);
|
||||
break;
|
||||
case IrInstructionIdHandle:
|
||||
ir_print_handle(irp, (IrInstructionHandle *)instruction);
|
||||
break;
|
||||
case IrInstructionIdAlignOf:
|
||||
ir_print_align_of(irp, (IrInstructionAlignOf *)instruction);
|
||||
break;
|
||||
|
||||
@ -648,12 +648,11 @@ static AstNode *ast_parse_asm_expr(ParseContext *pc, size_t *token_index, bool m
|
||||
}
|
||||
|
||||
/*
|
||||
SuspendExpression(body) = "suspend" option(("|" Symbol "|" body))
|
||||
SuspendExpression(body) = "suspend" option( body )
|
||||
*/
|
||||
static AstNode *ast_parse_suspend_block(ParseContext *pc, size_t *token_index, bool mandatory) {
|
||||
size_t orig_token_index = *token_index;
|
||||
|
||||
Token *suspend_token = &pc->tokens->at(*token_index);
|
||||
|
||||
if (suspend_token->id == TokenIdKeywordSuspend) {
|
||||
*token_index += 1;
|
||||
} else if (mandatory) {
|
||||
@ -663,23 +662,18 @@ static AstNode *ast_parse_suspend_block(ParseContext *pc, size_t *token_index, b
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
Token *bar_token = &pc->tokens->at(*token_index);
|
||||
if (bar_token->id == TokenIdBinOr) {
|
||||
*token_index += 1;
|
||||
Token *lbrace = &pc->tokens->at(*token_index);
|
||||
if (lbrace->id == TokenIdLBrace) {
|
||||
AstNode *node = ast_create_node(pc, NodeTypeSuspend, suspend_token);
|
||||
node->data.suspend.block = ast_parse_block(pc, token_index, true);
|
||||
return node;
|
||||
} else if (mandatory) {
|
||||
ast_expect_token(pc, suspend_token, TokenIdBinOr);
|
||||
ast_expect_token(pc, lbrace, TokenIdLBrace);
|
||||
zig_unreachable();
|
||||
} else {
|
||||
*token_index = orig_token_index;
|
||||
*token_index -= 1;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
AstNode *node = ast_create_node(pc, NodeTypeSuspend, suspend_token);
|
||||
node->data.suspend.promise_symbol = ast_parse_symbol(pc, token_index);
|
||||
ast_eat_token(pc, token_index, TokenIdBinOr);
|
||||
node->data.suspend.block = ast_parse_block(pc, token_index, true);
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
/*
|
||||
@ -3134,7 +3128,6 @@ void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *cont
|
||||
visit_field(&node->data.await_expr.expr, visit, context);
|
||||
break;
|
||||
case NodeTypeSuspend:
|
||||
visit_field(&node->data.suspend.promise_symbol, visit, context);
|
||||
visit_field(&node->data.suspend.block, visit, context);
|
||||
break;
|
||||
}
|
||||
|
||||
@ -71,10 +71,10 @@ pub fn Channel(comptime T: type) type {
|
||||
/// puts a data item in the channel. The promise completes when the value has been added to the
|
||||
/// buffer, or in the case of a zero size buffer, when the item has been retrieved by a getter.
|
||||
pub async fn put(self: *SelfChannel, data: T) void {
|
||||
suspend |handle| {
|
||||
suspend {
|
||||
var my_tick_node = Loop.NextTickNode{
|
||||
.next = undefined,
|
||||
.data = handle,
|
||||
.data = @handle(),
|
||||
};
|
||||
var queue_node = std.atomic.Queue(PutNode).Node{
|
||||
.data = PutNode{
|
||||
@ -96,10 +96,10 @@ pub fn Channel(comptime T: type) type {
|
||||
// TODO integrate this function with named return values
|
||||
// so we can get rid of this extra result copy
|
||||
var result: T = undefined;
|
||||
suspend |handle| {
|
||||
suspend {
|
||||
var my_tick_node = Loop.NextTickNode{
|
||||
.next = undefined,
|
||||
.data = handle,
|
||||
.data = @handle(),
|
||||
};
|
||||
var queue_node = std.atomic.Queue(GetNode).Node{
|
||||
.data = GetNode{
|
||||
|
||||
@ -100,8 +100,8 @@ test "std.event.Future" {
|
||||
}
|
||||
|
||||
async fn testFuture(loop: *Loop) void {
|
||||
suspend |p| {
|
||||
resume p;
|
||||
suspend {
|
||||
resume @handle();
|
||||
}
|
||||
var future = Future(i32).init(loop);
|
||||
|
||||
@ -115,15 +115,15 @@ async fn testFuture(loop: *Loop) void {
|
||||
}
|
||||
|
||||
async fn waitOnFuture(future: *Future(i32)) i32 {
|
||||
suspend |p| {
|
||||
resume p;
|
||||
suspend {
|
||||
resume @handle();
|
||||
}
|
||||
return (await (async future.get() catch @panic("memory"))).*;
|
||||
}
|
||||
|
||||
async fn resolveFuture(future: *Future(i32)) void {
|
||||
suspend |p| {
|
||||
resume p;
|
||||
suspend {
|
||||
resume @handle();
|
||||
}
|
||||
future.data = 6;
|
||||
future.resolve();
|
||||
|
||||
@ -54,10 +54,10 @@ pub fn Group(comptime ReturnType: type) type {
|
||||
const S = struct {
|
||||
async fn asyncFunc(node: **Stack.Node, args2: ...) ReturnType {
|
||||
// TODO this is a hack to make the memory following be inside the coro frame
|
||||
suspend |p| {
|
||||
suspend {
|
||||
var my_node: Stack.Node = undefined;
|
||||
node.* = &my_node;
|
||||
resume p;
|
||||
resume @handle();
|
||||
}
|
||||
|
||||
// TODO this allocation elision should be guaranteed because we await it in
|
||||
|
||||
@ -90,10 +90,10 @@ pub const Lock = struct {
|
||||
}
|
||||
|
||||
pub async fn acquire(self: *Lock) Held {
|
||||
suspend |handle| {
|
||||
suspend {
|
||||
// TODO explicitly put this memory in the coroutine frame #1194
|
||||
var my_tick_node = Loop.NextTickNode{
|
||||
.data = handle,
|
||||
.data = @handle(),
|
||||
.next = undefined,
|
||||
};
|
||||
|
||||
@ -106,35 +106,12 @@ pub const Lock = struct {
|
||||
// will attempt to grab the lock.
|
||||
_ = @atomicRmw(u8, &self.queue_empty_bit, AtomicRmwOp.Xchg, 0, AtomicOrder.SeqCst);
|
||||
|
||||
while (true) {
|
||||
const old_bit = @atomicRmw(u8, &self.shared_bit, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst);
|
||||
if (old_bit != 0) {
|
||||
// We did not obtain the lock. Trust that our queue entry will resume us, and allow
|
||||
// suspend to complete.
|
||||
break;
|
||||
}
|
||||
// We got the lock. However we might have already been resumed from the queue.
|
||||
const old_bit = @atomicRmw(u8, &self.shared_bit, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst);
|
||||
if (old_bit == 0) {
|
||||
if (self.queue.get()) |node| {
|
||||
// Whether this node is us or someone else, we tail resume it.
|
||||
resume node.data;
|
||||
break;
|
||||
} else {
|
||||
// We already got resumed, and there are none left in the queue, which means that
|
||||
// we aren't even supposed to hold the lock right now.
|
||||
_ = @atomicRmw(u8, &self.queue_empty_bit, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst);
|
||||
_ = @atomicRmw(u8, &self.shared_bit, AtomicRmwOp.Xchg, 0, AtomicOrder.SeqCst);
|
||||
|
||||
// There might be a queue item. If we know the queue is empty, we can be done,
|
||||
// because the other actor will try to obtain the lock.
|
||||
// But if there's a queue item, we are the actor which must loop and attempt
|
||||
// to grab the lock again.
|
||||
if (@atomicLoad(u8, &self.queue_empty_bit, AtomicOrder.SeqCst) == 1) {
|
||||
break;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
unreachable;
|
||||
}
|
||||
}
|
||||
|
||||
@ -164,8 +141,8 @@ test "std.event.Lock" {
|
||||
|
||||
async fn testLock(loop: *Loop, lock: *Lock) void {
|
||||
// TODO explicitly put next tick node memory in the coroutine frame #1194
|
||||
suspend |p| {
|
||||
resume p;
|
||||
suspend {
|
||||
resume @handle();
|
||||
}
|
||||
const handle1 = async lockRunner(lock) catch @panic("out of memory");
|
||||
var tick_node1 = Loop.NextTickNode{
|
||||
|
||||
@ -331,11 +331,11 @@ pub const Loop = struct {
|
||||
|
||||
pub async fn waitFd(self: *Loop, fd: i32) !void {
|
||||
defer self.removeFd(fd);
|
||||
suspend |p| {
|
||||
suspend {
|
||||
// TODO explicitly put this memory in the coroutine frame #1194
|
||||
var resume_node = ResumeNode{
|
||||
.id = ResumeNode.Id.Basic,
|
||||
.handle = p,
|
||||
.handle = @handle(),
|
||||
};
|
||||
try self.addFd(fd, &resume_node);
|
||||
}
|
||||
@ -417,11 +417,11 @@ pub const Loop = struct {
|
||||
pub fn call(self: *Loop, comptime func: var, args: ...) !(promise->@typeOf(func).ReturnType) {
|
||||
const S = struct {
|
||||
async fn asyncFunc(loop: *Loop, handle: *promise->@typeOf(func).ReturnType, args2: ...) @typeOf(func).ReturnType {
|
||||
suspend |p| {
|
||||
handle.* = p;
|
||||
suspend {
|
||||
handle.* = @handle();
|
||||
var my_tick_node = Loop.NextTickNode{
|
||||
.next = undefined,
|
||||
.data = p,
|
||||
.data = @handle(),
|
||||
};
|
||||
loop.onNextTick(&my_tick_node);
|
||||
}
|
||||
@ -439,10 +439,10 @@ pub const Loop = struct {
|
||||
/// CPU bound tasks would be waiting in the event loop but never get started because no async I/O
|
||||
/// is performed.
|
||||
pub async fn yield(self: *Loop) void {
|
||||
suspend |p| {
|
||||
suspend {
|
||||
var my_tick_node = Loop.NextTickNode{
|
||||
.next = undefined,
|
||||
.data = p,
|
||||
.data = @handle(),
|
||||
};
|
||||
self.onNextTick(&my_tick_node);
|
||||
}
|
||||
|
||||
@ -88,8 +88,8 @@ pub const Server = struct {
|
||||
},
|
||||
error.ProcessFdQuotaExceeded => {
|
||||
errdefer std.os.emfile_promise_queue.remove(&self.waiting_for_emfile_node);
|
||||
suspend |p| {
|
||||
self.waiting_for_emfile_node = PromiseNode.init(p);
|
||||
suspend {
|
||||
self.waiting_for_emfile_node = PromiseNode.init( @handle() );
|
||||
std.os.emfile_promise_queue.append(&self.waiting_for_emfile_node);
|
||||
}
|
||||
continue;
|
||||
@ -141,8 +141,8 @@ test "listen on a port, send bytes, receive bytes" {
|
||||
(await next_handler) catch |err| {
|
||||
std.debug.panic("unable to handle connection: {}\n", err);
|
||||
};
|
||||
suspend |p| {
|
||||
cancel p;
|
||||
suspend {
|
||||
cancel @handle();
|
||||
}
|
||||
}
|
||||
async fn errorableHandler(self: *Self, _addr: *const std.net.Address, _socket: *const std.os.File) !void {
|
||||
|
||||
@ -248,6 +248,11 @@ pub fn formatIntValue(
|
||||
return formatAsciiChar(value, context, Errors, output);
|
||||
}
|
||||
},
|
||||
'b' => {
|
||||
radix = 2;
|
||||
uppercase = false;
|
||||
width = 0;
|
||||
},
|
||||
'd' => {
|
||||
radix = 10;
|
||||
uppercase = false;
|
||||
@ -874,6 +879,10 @@ test "fmt.format" {
|
||||
const value: u8 = 'a';
|
||||
try testFmt("u8: a\n", "u8: {c}\n", value);
|
||||
}
|
||||
{
|
||||
const value: u8 = 0b1100;
|
||||
try testFmt("u8: 0b1100\n", "u8: 0b{b}\n", value);
|
||||
}
|
||||
{
|
||||
const value: [3]u8 = "abc";
|
||||
try testFmt("array: abc\n", "array: {}\n", value);
|
||||
|
||||
@ -130,16 +130,10 @@ pub fn getRandomBytes(buf: []u8) !void {
|
||||
try posixRead(fd, buf);
|
||||
},
|
||||
Os.windows => {
|
||||
var hCryptProv: windows.HCRYPTPROV = undefined;
|
||||
if (windows.CryptAcquireContextA(&hCryptProv, null, null, windows.PROV_RSA_FULL, 0) == 0) {
|
||||
const err = windows.GetLastError();
|
||||
return switch (err) {
|
||||
else => unexpectedErrorWindows(err),
|
||||
};
|
||||
}
|
||||
defer _ = windows.CryptReleaseContext(hCryptProv, 0);
|
||||
|
||||
if (windows.CryptGenRandom(hCryptProv, @intCast(windows.DWORD, buf.len), buf.ptr) == 0) {
|
||||
// Call RtlGenRandom() instead of CryptGetRandom() on Windows
|
||||
// https://github.com/rust-lang-nursery/rand/issues/111
|
||||
// https://bugzilla.mozilla.org/show_bug.cgi?id=504270
|
||||
if (windows.RtlGenRandom(buf.ptr, buf.len) == 0) {
|
||||
const err = windows.GetLastError();
|
||||
return switch (err) {
|
||||
else => unexpectedErrorWindows(err),
|
||||
@ -159,8 +153,14 @@ pub fn getRandomBytes(buf: []u8) !void {
|
||||
}
|
||||
|
||||
test "os.getRandomBytes" {
|
||||
var buf: [50]u8 = undefined;
|
||||
try getRandomBytes(buf[0..]);
|
||||
var buf_a: [50]u8 = undefined;
|
||||
var buf_b: [50]u8 = undefined;
|
||||
// Call Twice
|
||||
try getRandomBytes(buf_a[0..]);
|
||||
try getRandomBytes(buf_b[0..]);
|
||||
|
||||
// Check if random (not 100% conclusive)
|
||||
assert( !mem.eql(u8, buf_a, buf_b) );
|
||||
}
|
||||
|
||||
/// Raises a signal in the current kernel thread, ending its execution.
|
||||
@ -2790,7 +2790,7 @@ pub fn cpuCount(fallback_allocator: *mem.Allocator) CpuCountError!usize {
|
||||
builtin.Os.macosx => {
|
||||
var count: c_int = undefined;
|
||||
var count_len: usize = @sizeOf(c_int);
|
||||
const rc = posix.sysctlbyname(c"hw.ncpu", @ptrCast(*c_void, &count), &count_len, null, 0);
|
||||
const rc = posix.sysctlbyname(c"hw.logicalcpu", @ptrCast(*c_void, &count), &count_len, null, 0);
|
||||
const err = posix.getErrno(rc);
|
||||
switch (err) {
|
||||
0 => return @intCast(usize, count),
|
||||
|
||||
@@ -944,7 +944,7 @@ pub fn setgroups(size: usize, list: *const u32) usize {
 }
 
 pub fn getpid() i32 {
-    return @bitCast(i32, u32(syscall0(SYS_getpid)));
+    return @bitCast(i32, @truncate(u32, syscall0(SYS_getpid)));
 }
 
 pub fn sigprocmask(flags: u32, noalias set: *const sigset_t, noalias oldset: ?*sigset_t) usize {
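The cast change above is about truncation semantics: syscall0 returns usize, u32(...) is a checked narrowing cast, while @truncate(u32, ...) explicitly keeps only the low 32 bits before the bit-cast to a signed pid. A hypothetical helper to illustrate (the name is made up):

    fn pidFromSyscallResult(raw: usize) i32 {
        // keep the low 32 bits of the kernel's return value, then
        // reinterpret them as a signed pid
        return @bitCast(i32, @truncate(u32, raw));
    }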
@ -3,6 +3,10 @@ const builtin = @import("builtin");
|
||||
const linux = std.os.linux;
|
||||
const assert = std.debug.assert;
|
||||
|
||||
test "getpid" {
|
||||
assert(linux.getpid() != 0);
|
||||
}
|
||||
|
||||
test "timer" {
|
||||
const epoll_fd = linux.epoll_create();
|
||||
var err = linux.getErrno(epoll_fd);
|
||||
|
||||
@ -28,3 +28,8 @@ pub extern "advapi32" stdcallcc fn RegOpenKeyExW(hKey: HKEY, lpSubKey: LPCWSTR,
|
||||
|
||||
pub extern "advapi32" stdcallcc fn RegQueryValueExW(hKey: HKEY, lpValueName: LPCWSTR, lpReserved: LPDWORD,
|
||||
lpType: LPDWORD, lpData: LPBYTE, lpcbData: LPDWORD,) LSTATUS;
|
||||
|
||||
// RtlGenRandom is known as SystemFunction036 under advapi32
|
||||
// http://msdn.microsoft.com/en-us/library/windows/desktop/aa387694.aspx */
|
||||
pub extern "advapi32" stdcallcc fn SystemFunction036(output: [*]u8, length: usize) BOOL;
|
||||
pub const RtlGenRandom = SystemFunction036;
|
||||
|
||||
@ -166,7 +166,7 @@ pub fn windowsUnloadDll(hModule: windows.HMODULE) void {
|
||||
}
|
||||
|
||||
test "InvalidDll" {
|
||||
if (builtin.os != builtin.Os.windows) return;
|
||||
if (builtin.os != builtin.Os.windows) return error.SkipZigTest;
|
||||
|
||||
const DllName = "asdf.dll";
|
||||
const allocator = std.debug.global_allocator;
|
||||
|
||||
@@ -30,7 +30,7 @@ pub const DefaultCsprng = Isaac64;
 pub const Random = struct {
     fillFn: fn (r: *Random, buf: []u8) void,
 
-    /// Read random bytes into the specified buffer until fill.
+    /// Read random bytes into the specified buffer until full.
     pub fn bytes(r: *Random, buf: []u8) void {
         r.fillFn(r, buf);
     }
@@ -48,10 +48,10 @@
         }
     }
 
-    /// Get a random unsigned integer with even distribution between `start`
-    /// inclusive and `end` exclusive.
+    /// Return a random integer with even distribution between `start`
+    /// inclusive and `end` exclusive. `start` must be less than `end`.
    pub fn range(r: *Random, comptime T: type, start: T, end: T) T {
-        assert(start <= end);
+        assert(start < end);
         if (T.is_signed) {
             const uint = @IntType(false, T.bit_count);
             if (start >= 0 and end >= 0) {
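A small usage sketch of the contract documented above (the helper name is illustrative): range is half-open, so a six-sided die is range(u8, 1, 7), and after this change range(r, u8, 1, 1) fails the assert instead of silently returning 1.

    const std = @import("std");

    fn rollDie(r: *std.rand.Random) u8 {
        // start inclusive, end exclusive: yields 1 through 6
        return r.range(u8, 1, 7);
    }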
@ -664,6 +664,7 @@ test "Random range" {
|
||||
testRange(&prng.random, -4, 3);
|
||||
testRange(&prng.random, -4, -1);
|
||||
testRange(&prng.random, 10, 14);
|
||||
// TODO: test that prng.random.range(1, 1) causes an assertion error
|
||||
}
|
||||
|
||||
fn testRange(r: *Random, start: i32, end: i32) void {
|
||||
|
||||
@ -1778,19 +1778,12 @@ pub const Node = struct {
|
||||
|
||||
pub const Suspend = struct {
|
||||
base: Node,
|
||||
label: ?TokenIndex,
|
||||
suspend_token: TokenIndex,
|
||||
payload: ?*Node,
|
||||
body: ?*Node,
|
||||
|
||||
pub fn iterate(self: *Suspend, index: usize) ?*Node {
|
||||
var i = index;
|
||||
|
||||
if (self.payload) |payload| {
|
||||
if (i < 1) return payload;
|
||||
i -= 1;
|
||||
}
|
||||
|
||||
if (self.body) |body| {
|
||||
if (i < 1) return body;
|
||||
i -= 1;
|
||||
@ -1800,7 +1793,6 @@ pub const Node = struct {
|
||||
}
|
||||
|
||||
pub fn firstToken(self: *Suspend) TokenIndex {
|
||||
if (self.label) |label| return label;
|
||||
return self.suspend_token;
|
||||
}
|
||||
|
||||
@ -1809,10 +1801,6 @@ pub const Node = struct {
|
||||
return body.lastToken();
|
||||
}
|
||||
|
||||
if (self.payload) |payload| {
|
||||
return payload.lastToken();
|
||||
}
|
||||
|
||||
return self.suspend_token;
|
||||
}
|
||||
};
|
||||
|
||||
@ -852,19 +852,6 @@ pub fn parse(allocator: *mem.Allocator, source: []const u8) !ast.Tree {
|
||||
}) catch unreachable;
|
||||
continue;
|
||||
},
|
||||
Token.Id.Keyword_suspend => {
|
||||
const node = try arena.create(ast.Node.Suspend{
|
||||
.base = ast.Node{ .id = ast.Node.Id.Suspend },
|
||||
.label = ctx.label,
|
||||
.suspend_token = token_index,
|
||||
.payload = null,
|
||||
.body = null,
|
||||
});
|
||||
ctx.opt_ctx.store(&node.base);
|
||||
stack.append(State{ .SuspendBody = node }) catch unreachable;
|
||||
try stack.append(State{ .Payload = OptionalCtx{ .Optional = &node.payload } });
|
||||
continue;
|
||||
},
|
||||
Token.Id.Keyword_inline => {
|
||||
stack.append(State{
|
||||
.Inline = InlineCtx{
|
||||
@ -1415,10 +1402,21 @@ pub fn parse(allocator: *mem.Allocator, source: []const u8) !ast.Tree {
|
||||
},
|
||||
|
||||
State.SuspendBody => |suspend_node| {
|
||||
if (suspend_node.payload != null) {
|
||||
try stack.append(State{ .AssignmentExpressionBegin = OptionalCtx{ .RequiredNull = &suspend_node.body } });
|
||||
const token = nextToken(&tok_it, &tree);
|
||||
switch (token.ptr.id) {
|
||||
Token.Id.Semicolon => {
|
||||
prevToken(&tok_it, &tree);
|
||||
continue;
|
||||
},
|
||||
Token.Id.LBrace => {
|
||||
prevToken(&tok_it, &tree);
|
||||
try stack.append(State{ .AssignmentExpressionBegin = OptionalCtx{ .RequiredNull = &suspend_node.body } });
|
||||
continue;
|
||||
},
|
||||
else => {
|
||||
((try tree.errors.addOne())).* = Error{ .InvalidToken = Error.InvalidToken{ .token = token.index } };
|
||||
},
|
||||
}
|
||||
continue;
|
||||
},
|
||||
State.AsyncAllocator => |async_node| {
|
||||
if (eatToken(&tok_it, &tree, Token.Id.AngleBracketLeft) == null) {
|
||||
@ -3086,15 +3084,12 @@ fn parseBlockExpr(stack: *std.ArrayList(State), arena: *mem.Allocator, ctx: *con
|
||||
Token.Id.Keyword_suspend => {
|
||||
const node = try arena.create(ast.Node.Suspend{
|
||||
.base = ast.Node{ .id = ast.Node.Id.Suspend },
|
||||
.label = null,
|
||||
.suspend_token = token_index,
|
||||
.payload = null,
|
||||
.body = null,
|
||||
});
|
||||
ctx.store(&node.base);
|
||||
|
||||
stack.append(State{ .SuspendBody = node }) catch unreachable;
|
||||
try stack.append(State{ .Payload = OptionalCtx{ .Optional = &node.payload } });
|
||||
return true;
|
||||
},
|
||||
Token.Id.Keyword_if => {
|
||||
|
||||
@ -898,11 +898,11 @@ test "zig fmt: union(enum(u32)) with assigned enum values" {
|
||||
);
|
||||
}
|
||||
|
||||
test "zig fmt: labeled suspend" {
|
||||
test "zig fmt: resume from suspend block" {
|
||||
try testCanonical(
|
||||
\\fn foo() void {
|
||||
\\ s: suspend |p| {
|
||||
\\ break :s;
|
||||
\\ suspend {
|
||||
\\ resume @handle();
|
||||
\\ }
|
||||
\\}
|
||||
\\
|
||||
@ -1784,7 +1784,7 @@ test "zig fmt: coroutines" {
|
||||
\\ x += 1;
|
||||
\\ suspend;
|
||||
\\ x += 1;
|
||||
\\ suspend |p| {}
|
||||
\\ suspend;
|
||||
\\ const p: promise->void = async simpleAsyncFn() catch unreachable;
|
||||
\\ await p;
|
||||
\\}
|
||||
|
||||
@ -323,21 +323,7 @@ fn renderExpression(
|
||||
ast.Node.Id.Suspend => {
|
||||
const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
|
||||
|
||||
if (suspend_node.label) |label| {
|
||||
try renderToken(tree, stream, label, indent, start_col, Space.None);
|
||||
try renderToken(tree, stream, tree.nextToken(label), indent, start_col, Space.Space);
|
||||
}
|
||||
|
||||
if (suspend_node.payload) |payload| {
|
||||
if (suspend_node.body) |body| {
|
||||
try renderToken(tree, stream, suspend_node.suspend_token, indent, start_col, Space.Space);
|
||||
try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space);
|
||||
return renderExpression(allocator, stream, tree, indent, start_col, body, space);
|
||||
} else {
|
||||
try renderToken(tree, stream, suspend_node.suspend_token, indent, start_col, Space.Space);
|
||||
return renderExpression(allocator, stream, tree, indent, start_col, payload, space);
|
||||
}
|
||||
} else if (suspend_node.body) |body| {
|
||||
if (suspend_node.body) |body| {
|
||||
try renderToken(tree, stream, suspend_node.suspend_token, indent, start_col, Space.Space);
|
||||
return renderExpression(allocator, stream, tree, indent, start_col, body, space);
|
||||
} else {
|
||||
|
||||
@ -60,6 +60,7 @@ comptime {
|
||||
_ = @import("cases/try.zig");
|
||||
_ = @import("cases/type_info.zig");
|
||||
_ = @import("cases/undefined.zig");
|
||||
_ = @import("cases/underscore.zig");
|
||||
_ = @import("cases/union.zig");
|
||||
_ = @import("cases/var_args.zig");
|
||||
_ = @import("cases/void.zig");
|
||||
|
||||
@ -85,8 +85,8 @@ async fn b4() void {
|
||||
defer {
|
||||
defer_b4 = true;
|
||||
}
|
||||
suspend |p| {
|
||||
b4_handle = p;
|
||||
suspend {
|
||||
b4_handle = @handle();
|
||||
}
|
||||
suspend;
|
||||
}
|
||||
|
||||
@ -30,9 +30,9 @@ async fn await_amain() void {
|
||||
}
|
||||
async fn await_another() Foo {
|
||||
await_seq('c');
|
||||
suspend |p| {
|
||||
suspend {
|
||||
await_seq('d');
|
||||
await_a_promise = p;
|
||||
await_a_promise = @handle();
|
||||
}
|
||||
await_seq('g');
|
||||
return Foo{ .x = 1234 };
|
||||
|
||||
@ -62,10 +62,15 @@ test "coroutine suspend with block" {
|
||||
var a_promise: promise = undefined;
|
||||
var result = false;
|
||||
async fn testSuspendBlock() void {
|
||||
suspend |p| {
|
||||
comptime assert(@typeOf(p) == promise->void);
|
||||
a_promise = p;
|
||||
suspend {
|
||||
comptime assert(@typeOf(@handle()) == promise->void);
|
||||
a_promise = @handle();
|
||||
}
|
||||
|
||||
//Test to make sure that @handle() works as advertised (issue #1296)
|
||||
//var our_handle: promise = @handle();
|
||||
assert( a_promise == @handle() );
|
||||
|
||||
result = true;
|
||||
}
|
||||
|
||||
@ -93,9 +98,9 @@ async fn await_amain() void {
|
||||
}
|
||||
async fn await_another() i32 {
|
||||
await_seq('c');
|
||||
suspend |p| {
|
||||
suspend {
|
||||
await_seq('d');
|
||||
await_a_promise = p;
|
||||
await_a_promise = @handle();
|
||||
}
|
||||
await_seq('g');
|
||||
return 1234;
|
||||
@ -244,8 +249,8 @@ test "break from suspend" {
|
||||
std.debug.assert(my_result == 2);
|
||||
}
|
||||
async fn testBreakFromSuspend(my_result: *i32) void {
|
||||
suspend |p| {
|
||||
resume p;
|
||||
suspend {
|
||||
resume @handle();
|
||||
}
|
||||
my_result.* += 1;
|
||||
suspend;
|
||||
|
||||
test/cases/underscore.zig (new file): 28 lines
@ -0,0 +1,28 @@
|
||||
const std = @import("std");
|
||||
const assert = std.debug.assert;
|
||||
|
||||
test "ignore lval with underscore" {
|
||||
_ = false;
|
||||
}
|
||||
|
||||
test "ignore lval with underscore (for loop)" {
|
||||
for ([]void{}) |_, i| {
|
||||
for ([]void{}) |_, j| {
|
||||
break;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
test "ignore lval with underscore (while loop)" {
|
||||
while (optionalReturnError()) |_| {
|
||||
while (optionalReturnError()) |_| {
|
||||
break;
|
||||
} else |_| { }
|
||||
break;
|
||||
} else |_| { }
|
||||
}
|
||||
|
||||
fn optionalReturnError() !?u32 {
|
||||
return error.optionalReturnError;
|
||||
}
|
||||
@ -297,3 +297,17 @@ test "access a member of tagged union with conflicting enum tag name" {
|
||||
|
||||
comptime assert(Bar.A == u8);
|
||||
}
|
||||
|
||||
test "tagged union initialization with runtime void" {
|
||||
assert(testTaggedUnionInit({}));
|
||||
}
|
||||
|
||||
const TaggedUnionWithAVoid = union(enum) {
|
||||
A,
|
||||
B: i32,
|
||||
};
|
||||
|
||||
fn testTaggedUnionInit(x: var) bool {
|
||||
const y = TaggedUnionWithAVoid{ .A = x };
|
||||
return @TagType(TaggedUnionWithAVoid)(y) == TaggedUnionWithAVoid.A;
|
||||
}
|
||||
|
||||
@ -1,6 +1,85 @@
|
||||
const tests = @import("tests.zig");
|
||||
|
||||
pub fn addCases(cases: *tests.CompileErrorContext) void {
|
||||
cases.add(
|
||||
"@handle() called outside of function definition",
|
||||
\\var handle_undef: promise = undefined;
|
||||
\\var handle_dummy: promise = @handle();
|
||||
\\export fn entry() bool {
|
||||
\\ return handle_undef == handle_dummy;
|
||||
\\}
|
||||
,
|
||||
".tmp_source.zig:2:29: error: @handle() called outside of function definition",
|
||||
);
|
||||
|
||||
cases.add(
|
||||
"@handle() in non-async function",
|
||||
\\export fn entry() bool {
|
||||
\\ var handle_undef: promise = undefined;
|
||||
\\ return handle_undef == @handle();
|
||||
\\}
|
||||
,
|
||||
".tmp_source.zig:3:28: error: @handle() in non-async function",
|
||||
);
|
||||
|
||||
cases.add(
|
||||
"`_` is not a declarable symbol",
|
||||
\\export fn f1() usize {
|
||||
\\ var _: usize = 2;
|
||||
\\ return _;
|
||||
\\}
|
||||
,
|
||||
".tmp_source.zig:2:5: error: `_` is not a declarable symbol",
|
||||
".tmp_source.zig:3:12: error: use of undeclared identifier '_'",
|
||||
);
|
||||
|
||||
cases.add(
|
||||
"`_` should not be usable inside for",
|
||||
\\export fn returns() void {
|
||||
\\ for ([]void{}) |_, i| {
|
||||
\\ for ([]void{}) |_, j| {
|
||||
\\ return _;
|
||||
\\ }
|
||||
\\ }
|
||||
\\}
|
||||
,
|
||||
".tmp_source.zig:4:20: error: use of undeclared identifier '_'",
|
||||
);
|
||||
|
||||
cases.add(
|
||||
"`_` should not be usable inside while",
|
||||
\\export fn returns() void {
|
||||
\\ while (optionalReturn()) |_| {
|
||||
\\ while (optionalReturn()) |_| {
|
||||
\\ return _;
|
||||
\\ }
|
||||
\\ }
|
||||
\\}
|
||||
\\fn optionalReturn() ?u32 {
|
||||
\\ return 1;
|
||||
\\}
|
||||
,
|
||||
".tmp_source.zig:4:20: error: use of undeclared identifier '_'",
|
||||
);
|
||||
|
||||
cases.add(
|
||||
"`_` should not be usable inside while else",
|
||||
\\export fn returns() void {
|
||||
\\ while (optionalReturnError()) |_| {
|
||||
\\ while (optionalReturnError()) |_| {
|
||||
\\ return;
|
||||
\\ } else |_| {
|
||||
\\ if (_ == error.optionalReturnError) return;
|
||||
\\ }
|
||||
\\ }
|
||||
\\}
|
||||
\\fn optionalReturnError() !?u32 {
|
||||
\\ return error.optionalReturnError;
|
||||
\\}
|
||||
,
|
||||
".tmp_source.zig:6:17: error: use of undeclared identifier '_'",
|
||||
);
|
||||
|
||||
cases.add(
|
||||
"while loop body expression ignored",
|
||||
\\fn returns() usize {
|
||||
@ -367,8 +446,8 @@ pub fn addCases(cases: *tests.CompileErrorContext) void {
|
||||
\\}
|
||||
\\
|
||||
\\async fn foo() void {
|
||||
\\ suspend |p| {
|
||||
\\ suspend |p1| {
|
||||
\\ suspend {
|
||||
\\ suspend {
|
||||
\\ }
|
||||
\\ }
|
||||
\\}
|
||||
|
||||