1use super::*;
2
/// Lowers the "call family" of MIR instructions (`Call`, `CallIntrinsic`,
/// `CallExtern`) into wasm instructions appended to `body`.
///
/// Returns `Ok(true)` when `instr` was one of the call-family variants and was
/// lowered, `Ok(false)` when it is some other instruction the caller should
/// handle, and `Err` on any lookup failure or unsupported shape.
///
/// NOTE(review): emission order is load-bearing throughout — the wasm operand
/// stack discipline depends on the exact sequence of `body.*` calls.
pub(crate) fn lower_call_family_instr(
    instr: &ruka_mir::MirInstr,
    body: &mut walrus::InstrSeqBuilder,
    ctx: &LowerCtx<'_>,
) -> Result<bool, LowerError> {
    match instr {
        ruka_mir::MirInstr::Call { callee, args, dst } => {
            // Resolve everything we know about the callee from the lowering
            // context side tables, all keyed by the callee's MIR function id.
            let callee_u32 = callee.as_u32();
            let callee_index = *ctx
                .func_id_by_mir
                .get(&callee_u32)
                .ok_or(LowerError::UnknownCallee)?;
            // Which MIR args actually become runtime (wasm) parameters.
            let param_mask = ctx
                .callee_param_runtime_mask
                .get(&callee_u32)
                .ok_or(LowerError::UnknownCallee)?;
            // Expected wasm ValType of each runtime parameter.
            let param_types = ctx
                .callee_param_runtime_types
                .get(&callee_u32)
                .ok_or(LowerError::UnknownCallee)?;
            // Which parameters are inout (value flows back to the caller).
            let param_inout_mask = ctx
                .callee_param_inout_mask
                .get(&callee_u32)
                .ok_or(LowerError::UnknownCallee)?;
            // All three side tables must be parallel to `args`.
            if param_mask.len() != args.len() {
                return Err(LowerError::UnsupportedInstruction(
                    "callee arg arity mismatch",
                ));
            }
            if param_types.len() != args.len() {
                return Err(LowerError::UnsupportedInstruction(
                    "callee arg type arity mismatch",
                ));
            }
            if param_inout_mask.len() != args.len() {
                return Err(LowerError::UnsupportedInstruction(
                    "callee arg inout arity mismatch",
                ));
            }
            let returns_via_out_slot = *ctx
                .callee_returns_via_out_slot
                .get(&callee_u32)
                .ok_or(LowerError::UnknownCallee)?;
            let dst_runtime = runtime_local(ctx.local_indices, dst.as_u32())?;
            if returns_via_out_slot {
                // Out-slot convention: the destination's address is pushed as a
                // hidden leading argument, so the callee writes the result in
                // place. The destination must therefore be an addressable
                // (shadow-stack) local.
                let dst_local = dst_runtime.ok_or(LowerError::UnsupportedInstruction(
                    "out-slot return requires destination local",
                ))?;
                if !is_shadow_stack_local(ctx, *dst) {
                    return Err(LowerError::UnsupportedInstruction(
                        "out-slot return destination must be shadow-stack local",
                    ));
                }
                body.local_get(dst_local);
            }
            // Inout parameters leave their updated values on the wasm stack
            // after the call; remember (local, type) so we can write them back.
            let mut inout_writebacks = Vec::<(ruka_mir::MirLocalId, ValType)>::new();
            for (((arg, needs_runtime), expected_ty), inout_param) in args
                .iter()
                .zip(param_mask.iter())
                .zip(param_types.iter())
                .zip(param_inout_mask.iter())
            {
                // Zero-sized / compile-time-only args emit nothing.
                if !*needs_runtime {
                    continue;
                }
                emit_call_arg(body, ctx, arg, "call arg", Some(*expected_ty), *inout_param)?;
                if *inout_param {
                    inout_writebacks.push((arg.local, *expected_ty));
                }
            }
            body.call(callee_index);

            let returns_runtime = *ctx
                .callee_returns_runtime
                .get(&callee_u32)
                .ok_or(LowerError::UnknownCallee)?;
            // Reconcile what the callee leaves on the stack with whether the
            // destination has a runtime local.
            match (returns_runtime, dst_runtime) {
                (true, Some(dst_local)) => {
                    body.local_set(dst_local);
                }
                (false, None) => {
                    // An out-slot return still needs a destination local (it
                    // was pushed as the hidden argument above).
                    if returns_via_out_slot {
                        return Err(LowerError::UnsupportedInstruction(
                            "out-slot return missing destination local",
                        ));
                    }
                }
                (true, None) => {
                    // Callee produced a value nobody stores; discard it.
                    body.drop();
                }
                (false, Some(_)) if !returns_via_out_slot => {
                    return Err(LowerError::UnsupportedInstruction(
                        "call unit/value mismatch",
                    ));
                }
                // Out-slot return: callee already wrote through the hidden
                // pointer, nothing on the stack to move.
                (false, Some(_)) => {}
            }
            // Write the inout results back. They sit on the operand stack in
            // push order, so we pop in reverse: the last-pushed inout arg's
            // value is on top.
            for (local, local_ty) in inout_writebacks.into_iter().rev() {
                let local_runtime =
                    runtime_local_index(ctx.local_indices, local.as_u32(), "inout dst")?;
                let local_repr = local_repr(ctx.local_reprs, local.as_u32(), "inout dst repr")?;
                if local_repr.is_place() && !is_passthrough_place_local(ctx, local) {
                    // Place repr: the local holds an address; store the value
                    // through it. Spill the stack value to a scratch local
                    // first so the address can be loaded underneath it.
                    match local_ty {
                        ValType::I64 => {
                            body.local_set(ctx.scratch_i64_local)
                                .local_get(local_runtime)
                                .local_get(ctx.scratch_i64_local)
                                .instr(Store {
                                    memory: ctx.memory_id,
                                    kind: StoreKind::I64 { atomic: false },
                                    arg: MemArg {
                                        align: 8,
                                        offset: 0,
                                    },
                                });
                        }
                        ValType::I32 => {
                            // i32 result widened to the 8-byte slot with a
                            // ZERO extension.
                            // NOTE(review): assumes the i32 payload is
                            // unsigned/handle-like — confirm for signed i32
                            // values, which would need a sign extension.
                            body.local_set(ctx.scratch_i32_local)
                                .local_get(local_runtime)
                                .local_get(ctx.scratch_i32_local)
                                .unop(UnaryOp::I64ExtendUI32)
                                .instr(Store {
                                    memory: ctx.memory_id,
                                    kind: StoreKind::I64 { atomic: false },
                                    arg: MemArg {
                                        align: 8,
                                        offset: 0,
                                    },
                                });
                        }
                        _ => {
                            return Err(LowerError::UnsupportedInstruction(
                                "unsupported inout writeback type",
                            ));
                        }
                    }
                } else {
                    // Value repr: convert the stack value to the local's wasm
                    // type if needed, then set the local directly.
                    let dst_ty = runtime_local_valtype(
                        ctx.local_runtime_types,
                        local.as_u32(),
                        "inout dst ty",
                    )?;
                    match (local_ty, dst_ty) {
                        // Same type: no conversion needed.
                        (ValType::I64, ValType::I64)
                        | (ValType::I32, ValType::I32)
                        | (ValType::F32, ValType::F32)
                        | (ValType::F64, ValType::F64) => {}
                        (ValType::I32, ValType::I64) => {
                            // NOTE(review): unsigned widening here too — see
                            // the place-repr branch above.
                            body.unop(UnaryOp::I64ExtendUI32);
                        }
                        (ValType::I64, ValType::I32) => {
                            body.unop(UnaryOp::I32WrapI64);
                        }
                        (ValType::F32, ValType::F64) => {
                            body.unop(UnaryOp::F64PromoteF32);
                        }
                        (ValType::F64, ValType::F32) => {
                            body.unop(UnaryOp::F32DemoteF64);
                        }
                        _ => {
                            return Err(LowerError::UnsupportedInstruction(
                                "unsupported inout writeback conversion",
                            ));
                        }
                    }
                    body.local_set(local_runtime);
                }
            }
            // Ownership bookkeeping: for args moved into the callee, null out
            // the caller's heap handle — presumably so later release/drop code
            // in this function sees an empty handle and skips freeing it
            // (avoiding a double free). TODO(review): confirm against the
            // epilogue/release lowering.
            for arg in args {
                let arg_binding = ctx.function.call_arg_binding(arg);
                if !arg_binding.is_owned_move() {
                    continue;
                }
                let ownership = local_heap_ownership(
                    ctx.local_heap_ownership,
                    arg.local.as_u32(),
                    "call owned-move arg ownership",
                )?;
                if !ownership.uses_heap_ops() {
                    continue;
                }
                let local = runtime_local_index(
                    ctx.local_indices,
                    arg.local.as_u32(),
                    "call owned-move arg local",
                )?;
                body.i32_const(0).local_set(local);
            }
            Ok(true)
        }
        ruka_mir::MirInstr::CallIntrinsic {
            intrinsic,
            args,
            dst,
            source_pos,
        } => match intrinsic {
            // Boxes a value on the heap and stores the pointer in `dst`.
            // NOTE(review): the error text says "@box" while the variant is
            // `Ptr` — presumably "@box" is the surface-syntax name; confirm.
            ruka_mir::MirIntrinsic::Ptr => {
                if args.len() != 1 {
                    return Err(LowerError::UnsupportedInstruction("@box arity"));
                }
                let src_mir_local = args[0].local.as_u32();
                let src_local =
                    runtime_local_index(ctx.local_indices, args[0].local.as_u32(), "ptr arg")?;
                let src_ty = runtime_local_valtype(
                    ctx.local_runtime_types,
                    args[0].local.as_u32(),
                    "ptr arg valtype",
                )?;
                let dst_local = runtime_local_index(ctx.local_indices, dst.as_u32(), "ptr dst")?;
                let src_mir_ty = local_ty(ctx.local_tys, src_mir_local, "ptr arg type")?;
                let src_ownership = local_heap_ownership(
                    ctx.local_heap_ownership,
                    src_mir_local,
                    "ptr arg ownership",
                )?;

                // Shadow-stack structs/tuples can't be boxed in place: make a
                // heap clone first and box the clone instead.
                let mut pointer_src_local = src_local;
                let mut pointer_src_ty = src_ty;
                let cloned_shadow_stack_value =
                    ctx.shadow_stack_offsets.contains_key(&src_mir_local)
                        && matches!(src_mir_ty, Ty::Struct { .. } | Ty::Tuple(_));
                if cloned_shadow_stack_value {
                    emit_clone_for_type(
                        body,
                        ctx.runtime,
                        ctx.memory_id,
                        *source_pos,
                        ctx.function_line,
                        src_local,
                        src_mir_ty,
                        ctx.structs,
                        ctx.enums,
                        ctx.scratch_i32_local,
                        ctx.scratch_i32_local_b,
                        ctx.scratch_i32_local_c,
                        ctx.scratch_i32_local_d,
                        ctx.scratch_i32_local_e,
                        ctx.scratch_i64_local,
                        // Clone result lands in scratch C (also passed as a
                        // working scratch above — apparently safe per the
                        // helper's contract; confirm).
                        ctx.scratch_i32_local_c,
                    )?;
                    pointer_src_local = ctx.scratch_i32_local_c;
                    pointer_src_ty = ValType::I32;
                }
                emit_pointer_alloc(
                    body,
                    ctx.runtime,
                    ctx.memory_id,
                    *source_pos,
                    ctx.function_line,
                    pointer_src_local,
                    pointer_src_ty,
                    dst_local,
                )?;
                let arg_binding = ctx.function.call_arg_binding(&args[0]);
                if arg_binding.is_owned_move() {
                    if cloned_shadow_stack_value {
                        // The clone owns the heap children now; release the
                        // original's children and free its backing storage.
                        let free = runtime_function(ctx.runtime, wasm_api::RT_FREE_BYTES_SYMBOL)?;
                        let release_plan = build_release_plan(src_mir_ty, ctx.structs, ctx.enums)?;
                        // Store 1 into the original's header word first —
                        // presumably marks it so release logic treats it as a
                        // single owner / already-detached value; confirm
                        // against the header layout.
                        body.local_get(src_local).i32_const(1).instr(Store {
                            memory: ctx.memory_id,
                            kind: StoreKind::I32 { atomic: false },
                            arg: MemArg {
                                align: 4,
                                offset: ARRAY_HEADER_OFFSET,
                            },
                        });
                        emit_release_plan_with_free(
                            body,
                            ctx.memory_id,
                            src_local,
                            &release_plan,
                            ctx.pointer_drop_functions,
                            ctx.scratch_i32_local,
                            ctx.scratch_i32_local_b,
                            ctx.scratch_i32_local_d,
                            ctx.scratch_i32_local_c,
                            ctx.scratch_i64_local,
                            false,
                            free.function_id,
                        );
                    } else if src_ownership.uses_heap_ops() {
                        // Moved into the box: null the source handle so later
                        // drops skip it (same pattern as owned-move call args).
                        body.i32_const(0).local_set(src_local);
                    }
                }
                Ok(true)
            }
            // `@array(init, len)`: allocates an array of `len` elements, each
            // initialized from `init` (cloned per element when it owns heap
            // data).
            ruka_mir::MirIntrinsic::Array => {
                if args.len() != 2 {
                    return Err(LowerError::UnsupportedInstruction("@array arity"));
                }
                let init_arg = &args[0];
                let init_local =
                    runtime_local_index(ctx.local_indices, args[0].local.as_u32(), "array init")?;
                let len_local =
                    runtime_local_index(ctx.local_indices, args[1].local.as_u32(), "array len")?;
                let init_mir_ty = local_ty(ctx.local_tys, args[0].local.as_u32(), "array init ty")?;
                let init_ownership = local_heap_ownership(
                    ctx.local_heap_ownership,
                    args[0].local.as_u32(),
                    "array init ownership",
                )?;
                let init_ty = runtime_local_valtype(
                    ctx.local_runtime_types,
                    args[0].local.as_u32(),
                    "array init valtype",
                )?;
                let dst_local = runtime_local_index(ctx.local_indices, dst.as_u32(), "array dst")?;
                // Length lives in an i64 local; the allocator takes an i32
                // count in scratch A. NOTE(review): the wrap silently
                // truncates lengths >= 2^32 — presumably guarded upstream.
                body.local_get(len_local)
                    .unop(UnaryOp::I32WrapI64)
                    .local_set(ctx.scratch_i32_local);
                aggregate::emit_array_alloc(
                    body,
                    ctx.runtime,
                    *source_pos,
                    ctx.function_line,
                    ctx.memory_id,
                    ctx.scratch_i32_local,
                    dst_local,
                    ctx.scratch_i32_local_b,
                )?;
                // Fill loop: scratch i64 is the induction counter i in
                // [0, len); scratch B holds i as i32 for the element store.
                body.i64_const(0).local_set(ctx.scratch_i64_local);
                // `emit_clone_for_type` is fallible but runs inside the walrus
                // closure, which cannot return `Err` itself — stash the error
                // and surface it after the block is built.
                let mut clone_error = None;
                body.block(None, |done| {
                    let done_id = done.id();
                    done.loop_(None, |loop_| {
                        let loop_id = loop_.id();
                        // if !(i < len) break;
                        loop_
                            .local_get(ctx.scratch_i64_local)
                            .local_get(len_local)
                            .binop(BinaryOp::I64LtS)
                            .unop(UnaryOp::I32Eqz)
                            .br_if(done_id)
                            .local_get(ctx.scratch_i64_local)
                            .unop(UnaryOp::I32WrapI64)
                            .local_set(ctx.scratch_i32_local_b);
                        let mut item_local = init_local;
                        if init_ownership.uses_heap_ops() {
                            // Heap-owning init value: each element gets its
                            // own deep clone (result in scratch C).
                            if let Err(err) = emit_clone_for_type(
                                loop_,
                                ctx.runtime,
                                ctx.memory_id,
                                *source_pos,
                                ctx.function_line,
                                init_local,
                                init_mir_ty,
                                ctx.structs,
                                ctx.enums,
                                ctx.scratch_i32_local,
                                ctx.scratch_i32_local_b,
                                ctx.scratch_i32_local_c,
                                ctx.scratch_i32_local_d,
                                ctx.scratch_i32_local_e,
                                ctx.scratch_i64_local,
                                ctx.scratch_i32_local_c,
                            ) {
                                clone_error = Some(err);
                                return;
                            }
                            item_local = ctx.scratch_i32_local_c;
                        }
                        aggregate::emit_array_store(
                            loop_,
                            ctx.memory_id,
                            dst_local,
                            ctx.scratch_i32_local_b,
                            item_local,
                            init_ty,
                        );
                        // i += 1; continue;
                        loop_
                            .local_get(ctx.scratch_i64_local)
                            .i64_const(1)
                            .binop(BinaryOp::I64Add)
                            .local_set(ctx.scratch_i64_local)
                            .br(loop_id);
                    });
                });
                if let Some(err) = clone_error {
                    return Err(err);
                }
                // If the init value was moved in and owns heap data, release
                // the original now that every element holds its own clone.
                let init_binding = ctx.function.call_arg_binding(init_arg);
                if init_binding.is_owned_move() {
                    if init_ownership.uses_heap_ops() {
                        match init_mir_ty {
                            Ty::Pointer(item) => emit_pointer_release(
                                body,
                                ctx.runtime,
                                ctx.memory_id,
                                init_local,
                                item.as_ref(),
                                ctx.structs,
                                ctx.enums,
                                ctx.pointer_drop_functions,
                                ctx.scratch_i32_local,
                                ctx.scratch_i32_local_b,
                                ctx.scratch_i32_local_d,
                                ctx.scratch_i32_local_c,
                                ctx.scratch_i64_local,
                            )?,
                            Ty::String => emit_string_release(
                                body,
                                ctx.runtime,
                                ctx.memory_id,
                                init_local,
                                ctx.scratch_i32_local,
                                ctx.scratch_i32_local_b,
                                ctx.scratch_i32_local_c,
                            )?,
                            Ty::Array { item, .. } => emit_array_release(
                                body,
                                ctx.runtime,
                                ctx.memory_id,
                                init_local,
                                item.as_ref(),
                                ctx.structs,
                                ctx.enums,
                                ctx.pointer_drop_functions,
                                ctx.scratch_i32_local,
                                ctx.scratch_i32_local_b,
                                ctx.scratch_i32_local_c,
                                ctx.scratch_i32_local_d,
                                ctx.scratch_i64_local,
                            )?,
                            // Slices share the array release path.
                            Ty::Slice(item) => emit_array_release(
                                body,
                                ctx.runtime,
                                ctx.memory_id,
                                init_local,
                                item.as_ref(),
                                ctx.structs,
                                ctx.enums,
                                ctx.pointer_drop_functions,
                                ctx.scratch_i32_local,
                                ctx.scratch_i32_local_b,
                                ctx.scratch_i32_local_c,
                                ctx.scratch_i32_local_d,
                                ctx.scratch_i64_local,
                            )?,
                            Ty::Enum { .. } => emit_enum_release(
                                body,
                                ctx.runtime,
                                ctx.memory_id,
                                init_local,
                                init_mir_ty,
                                ctx.structs,
                                ctx.enums,
                                ctx.pointer_drop_functions,
                                ctx.scratch_i32_local,
                                ctx.scratch_i32_local_b,
                                ctx.scratch_i32_local_c,
                                ctx.scratch_i32_local_d,
                                ctx.scratch_i64_local,
                            )?,
                            // Other types own no heap data worth releasing.
                            _ => {}
                        }
                    }
                }
                Ok(true)
            }
        },
        // Direct call into a runtime-provided (extern) function, resolved by
        // symbol name; arity and types come from the runtime descriptor.
        ruka_mir::MirInstr::CallExtern {
            symbol, args, dst, ..
        } => {
            let runtime_fn = ctx
                .runtime
                .resolve(symbol.as_str())
                .ok_or_else(|| LowerError::UnknownRuntimeSymbol(symbol.clone()))?;
            if runtime_fn.descriptor.params.len() != args.len() {
                return Err(LowerError::UnsupportedInstruction(
                    "extern arg arity mismatch",
                ));
            }
            for (arg, expected_ty) in args.iter().zip(runtime_fn.descriptor.params.iter()) {
                // Externs never take inout parameters (final `false`).
                emit_call_arg(
                    body,
                    ctx,
                    arg,
                    "extern arg",
                    Some(wasm_valtype(*expected_ty)),
                    false,
                )?;
            }
            body.call(runtime_fn.function_id);

            // Reconcile the extern's declared result with the destination
            // local, mirroring the plain-call result handling above (but with
            // no out-slot convention and no implicit conversions).
            let dst_runtime = runtime_local(ctx.local_indices, dst.as_u32())?;
            match (runtime_fn.descriptor.result, dst_runtime) {
                (Some(result_ty), Some(dst_local)) => {
                    let dst_ty =
                        runtime_local_valtype(ctx.local_runtime_types, dst.as_u32(), "extern dst")?;
                    if dst_ty != wasm_valtype(result_ty) {
                        return Err(LowerError::UnsupportedInstruction(
                            "extern result type mismatch",
                        ));
                    }
                    body.local_set(dst_local);
                }
                (None, None) => {}
                (Some(_), None) => {
                    // Result unused; keep the stack balanced.
                    body.drop();
                }
                (None, Some(_)) => {
                    return Err(LowerError::UnsupportedInstruction(
                        "extern unit/value mismatch",
                    ));
                }
            }
            Ok(true)
        }
        // Not a call-family instruction; let the caller lower it.
        _ => Ok(false),
    }
}