@@ -1102,7 +1102,8 @@ static IrInstruction *ir_build_union_field_ptr_from(IrBuilder *irb, IrInstructio
 
 static IrInstruction *ir_build_call(IrBuilder *irb, Scope *scope, AstNode *source_node,
         FnTableEntry *fn_entry, IrInstruction *fn_ref, size_t arg_count, IrInstruction **args,
-        bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator)
+        bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator,
+        IrInstruction *new_stack)
 {
     IrInstructionCall *call_instruction = ir_build_instruction<IrInstructionCall>(irb, scope, source_node);
     call_instruction->fn_entry = fn_entry;
@@ -1113,23 +1114,27 @@ static IrInstruction *ir_build_call(IrBuilder *irb, Scope *scope, AstNode *sourc
     call_instruction->arg_count = arg_count;
     call_instruction->is_async = is_async;
     call_instruction->async_allocator = async_allocator;
+    call_instruction->new_stack = new_stack;
 
     if (fn_ref)
         ir_ref_instruction(fn_ref, irb->current_basic_block);
     for (size_t i = 0; i < arg_count; i += 1)
         ir_ref_instruction(args[i], irb->current_basic_block);
     if (async_allocator)
         ir_ref_instruction(async_allocator, irb->current_basic_block);
+    if (new_stack != nullptr)
+        ir_ref_instruction(new_stack, irb->current_basic_block);
 
     return &call_instruction->base;
 }
 
 static IrInstruction *ir_build_call_from(IrBuilder *irb, IrInstruction *old_instruction,
         FnTableEntry *fn_entry, IrInstruction *fn_ref, size_t arg_count, IrInstruction **args,
-        bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator)
+        bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator,
+        IrInstruction *new_stack)
 {
     IrInstruction *new_instruction = ir_build_call(irb, old_instruction->scope,
-            old_instruction->source_node, fn_entry, fn_ref, arg_count, args, is_comptime, fn_inline, is_async, async_allocator);
+            old_instruction->source_node, fn_entry, fn_ref, arg_count, args, is_comptime, fn_inline, is_async, async_allocator, new_stack);
     ir_link_new_instruction(new_instruction, old_instruction);
     return new_instruction;
 }
@@ -4303,7 +4308,37 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
                 }
                 FnInline fn_inline = (builtin_fn->id == BuiltinFnIdInlineCall) ? FnInlineAlways : FnInlineNever;
 
-                IrInstruction *call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, fn_inline, false, nullptr);
+                IrInstruction *call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, fn_inline, false, nullptr, nullptr);
+                return ir_lval_wrap(irb, scope, call, lval);
+            }
+        case BuiltinFnIdNewStackCall:
+            {
+                if (node->data.fn_call_expr.params.length < 2) { // need the new stack plus the function to call
+                    add_node_error(irb->codegen, node, buf_sprintf("expected at least 2 arguments, found %" ZIG_PRI_usize "", node->data.fn_call_expr.params.length));
+                    return irb->codegen->invalid_instruction;
+                }
+
+                AstNode *new_stack_node = node->data.fn_call_expr.params.at(0);
+                IrInstruction *new_stack = ir_gen_node(irb, new_stack_node, scope);
+                if (new_stack == irb->codegen->invalid_instruction)
+                    return new_stack;
+
+                AstNode *fn_ref_node = node->data.fn_call_expr.params.at(1);
+                IrInstruction *fn_ref = ir_gen_node(irb, fn_ref_node, scope);
+                if (fn_ref == irb->codegen->invalid_instruction)
+                    return fn_ref;
+
+                size_t arg_count = node->data.fn_call_expr.params.length - 2;
+
+                IrInstruction **args = allocate<IrInstruction*>(arg_count);
+                for (size_t i = 0; i < arg_count; i += 1) {
+                    AstNode *arg_node = node->data.fn_call_expr.params.at(i + 2);
+                    args[i] = ir_gen_node(irb, arg_node, scope);
+                    if (args[i] == irb->codegen->invalid_instruction)
+                        return args[i];
+                }
+
+                IrInstruction *call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, false, nullptr, new_stack);
                 return ir_lval_wrap(irb, scope, call, lval);
             }
         case BuiltinFnIdTypeId:
@@ -4513,7 +4548,7 @@ static IrInstruction *ir_gen_fn_call(IrBuilder *irb, Scope *scope, AstNode *node
         }
     }
 
-    IrInstruction *fn_call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator);
+    IrInstruction *fn_call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator, nullptr);
     return ir_lval_wrap(irb, scope, fn_call, lval);
 }
 
@@ -6825,7 +6860,7 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
         IrInstruction **args = allocate<IrInstruction *>(arg_count);
         args[0] = implicit_allocator_ptr; // self
         args[1] = mem_slice; // old_mem
-        ir_build_call(irb, scope, node, nullptr, free_fn, arg_count, args, false, FnInlineAuto, false, nullptr);
+        ir_build_call(irb, scope, node, nullptr, free_fn, arg_count, args, false, FnInlineAuto, false, nullptr, nullptr);
 
         IrBasicBlock *resume_block = ir_create_basic_block(irb, scope, "Resume");
         ir_build_cond_br(irb, scope, node, resume_awaiter, resume_block, irb->exec->coro_suspend_block, const_bool_false);
@@ -11992,7 +12027,7 @@ static IrInstruction *ir_analyze_async_call(IrAnalyze *ira, IrInstructionCall *c
     TypeTableEntry *async_return_type = get_error_union_type(ira->codegen, alloc_fn_error_set_type, promise_type);
 
     IrInstruction *result = ir_build_call(&ira->new_irb, call_instruction->base.scope, call_instruction->base.source_node,
-            fn_entry, fn_ref, arg_count, casted_args, false, FnInlineAuto, true, async_allocator_inst);
+            fn_entry, fn_ref, arg_count, casted_args, false, FnInlineAuto, true, async_allocator_inst, nullptr);
     result->value.type = async_return_type;
     return result;
 }
@@ -12362,6 +12397,19 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
         return ir_finish_anal(ira, return_type);
     }
 
+    IrInstruction *casted_new_stack = nullptr;
+    if (call_instruction->new_stack != nullptr) {
+        TypeTableEntry *u8_ptr = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, false);
+        TypeTableEntry *u8_slice = get_slice_type(ira->codegen, u8_ptr);
+        IrInstruction *new_stack = call_instruction->new_stack->other;
+        if (type_is_invalid(new_stack->value.type))
+            return ira->codegen->builtin_types.entry_invalid;
+
+        casted_new_stack = ir_implicit_cast(ira, new_stack, u8_slice);
+        if (type_is_invalid(casted_new_stack->value.type))
+            return ira->codegen->builtin_types.entry_invalid;
+    }
+
     if (fn_type->data.fn.is_generic) {
         if (!fn_entry) {
             ir_add_error(ira, call_instruction->fn_ref,
@@ -12588,7 +12636,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
         assert(async_allocator_inst == nullptr);
         IrInstruction *new_call_instruction = ir_build_call_from(&ira->new_irb, &call_instruction->base,
                 impl_fn, nullptr, impl_param_count, casted_args, false, fn_inline,
-                call_instruction->is_async, nullptr);
+                call_instruction->is_async, nullptr, casted_new_stack);
 
         ir_add_alloca(ira, new_call_instruction, return_type);
 
@@ -12679,7 +12727,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
 
 
     IrInstruction *new_call_instruction = ir_build_call_from(&ira->new_irb, &call_instruction->base,
-            fn_entry, fn_ref, call_param_count, casted_args, false, fn_inline, false, nullptr);
+            fn_entry, fn_ref, call_param_count, casted_args, false, fn_inline, false, nullptr, casted_new_stack);
 
     ir_add_alloca(ira, new_call_instruction, return_type);
     return ir_finish_anal(ira, return_type);