1use super::*;
2
3pub(crate) fn block_pc(
4 block_id_to_pc: &BTreeMap<u32, i32>,
5 block_id: ruka_mir::MirBlockId,
6) -> Result<i32, LowerError> {
7 block_id_to_pc
8 .get(&block_id.as_u32())
9 .copied()
10 .ok_or(LowerError::MissingBlockPc(block_id.as_u32()))
11}
12
13pub(crate) fn int32_from_u32(value: u32, context: &'static str) -> Result<i32, LowerError> {
15 i32::try_from(value).map_err(|_| LowerError::Int32Overflow(context))
16}
17
18pub(crate) fn int32_from_usize(value: usize, context: &'static str) -> Result<i32, LowerError> {
20 i32::try_from(value).map_err(|_| LowerError::Int32Overflow(context))
21}
22
23pub(crate) fn runtime_local(
25 local_indices: &BTreeMap<u32, Option<LocalId>>,
26 local: u32,
27) -> Result<Option<LocalId>, LowerError> {
28 local_indices
29 .get(&local)
30 .copied()
31 .ok_or(LowerError::UnsupportedInstruction("missing local"))
32}
33
34pub(crate) fn runtime_local_index(
36 local_indices: &BTreeMap<u32, Option<LocalId>>,
37 local: u32,
38 context: &'static str,
39) -> Result<LocalId, LowerError> {
40 runtime_local(local_indices, local)?.ok_or(LowerError::UnsupportedInstruction(context))
41}
42
43pub(crate) fn runtime_local_valtype(
45 local_runtime_types: &BTreeMap<u32, Option<ValType>>,
46 local: u32,
47 context: &'static str,
48) -> Result<ValType, LowerError> {
49 local_runtime_types
50 .get(&local)
51 .and_then(|value| *value)
52 .ok_or(LowerError::UnsupportedInstruction(context))
53}
54
55pub(crate) fn local_ty<'a>(
57 local_tys: &'a BTreeMap<u32, Ty>,
58 local: u32,
59 context: &'static str,
60) -> Result<&'a Ty, LowerError> {
61 local_tys
62 .get(&local)
63 .ok_or(LowerError::UnsupportedInstruction(context))
64}
65
66pub(crate) fn local_repr(
68 local_reprs: &BTreeMap<u32, ruka_mir::MirLocalRepr>,
69 local: u32,
70 context: &'static str,
71) -> Result<ruka_mir::MirLocalRepr, LowerError> {
72 local_reprs
73 .get(&local)
74 .copied()
75 .ok_or(LowerError::UnsupportedInstruction(context))
76}
77
78pub(crate) fn local_heap_ownership(
80 local_heap_ownership: &BTreeMap<u32, ruka_mir::MirHeapOwnership>,
81 local: u32,
82 context: &'static str,
83) -> Result<ruka_mir::MirHeapOwnership, LowerError> {
84 local_heap_ownership
85 .get(&local)
86 .copied()
87 .ok_or(LowerError::UnsupportedInstruction(context))
88}
89
/// Emits the wasm instructions that push one call argument onto the operand
/// stack.
///
/// Depending on the chosen [`CallArgStrategy`], the argument is either pushed
/// directly with `local.get` (by value) or treated as a pointer and
/// dereferenced through linear memory (an i64 load, optionally wrapped to
/// i32). When the binding is an owned copy passed by value and the local's
/// ownership uses heap operations, a clone is emitted first so the callee
/// receives its own copy.
///
/// * `expected_ty` — when `Some`, the argument's runtime type is checked
///   against it; a mismatch becomes an `UnsupportedInstruction` error.
/// * `mutable_borrow_inout` — forwarded to [`call_arg_strategy`]; controls
///   whether mutable borrows participate in the deref protocol.
pub(crate) fn emit_call_arg(
    body: &mut walrus::InstrSeqBuilder,
    ctx: &LowerCtx<'_>,
    arg: &ruka_mir::MirCallArg,
    context: &'static str,
    expected_ty: Option<ValType>,
    mutable_borrow_inout: bool,
) -> Result<(), LowerError> {
    let binding = ctx.function.call_arg_binding(arg);
    let local = runtime_local_index(ctx.local_indices, arg.local.as_u32(), context)?;
    let strategy = call_arg_strategy(ctx, arg, context, mutable_borrow_inout)?;
    // Default to pushing the argument's own runtime slot; the clone path
    // below may redirect this to a scratch local holding the cloned value.
    let mut by_value_local = local;
    if binding.is_owned_copy() && matches!(strategy, CallArgStrategy::ByValue) {
        let ownership = local_heap_ownership(
            ctx.local_heap_ownership,
            arg.local.as_u32(),
            "call arg ownership",
        )?;
        if ownership.uses_heap_ops() {
            // Owned-copy heap values must be cloned so the callee does not
            // alias the caller's allocation.
            let arg_ty = local_ty(ctx.local_tys, arg.local.as_u32(), context)?;
            // NOTE(review): `scratch_i32_local_c` is passed both as a general
            // scratch local and (last argument) as what appears to be the
            // clone destination — confirm `emit_clone_for_type` tolerates
            // this aliasing and leaves the result in `scratch_i32_local_c`.
            emit_clone_for_type(
                body,
                ctx.runtime,
                ctx.memory_id,
                None,
                ctx.function_line,
                local,
                arg_ty,
                ctx.structs,
                ctx.enums,
                ctx.scratch_i32_local,
                ctx.scratch_i32_local_b,
                ctx.scratch_i32_local_c,
                ctx.scratch_i32_local_d,
                ctx.scratch_i32_local_e,
                ctx.scratch_i64_local,
                ctx.scratch_i32_local_c,
            )?;
            by_value_local = ctx.scratch_i32_local_c;
        }
    }
    // Optional runtime-type check requested by the caller.
    if let Some(expected) = expected_ty {
        let actual = call_arg_valtype(ctx, arg, strategy, context)?;
        if actual != expected {
            return Err(LowerError::UnsupportedInstruction(
                "call arg runtime type mismatch",
            ));
        }
    }
    match strategy {
        CallArgStrategy::ByValue => {
            body.local_get(by_value_local);
            Ok(())
        }
        CallArgStrategy::DerefToI64 => {
            // The local holds a pointer; load the 8-byte value it points at.
            // NOTE(review): `align: 8` assumes walrus's `MemArg::align` is a
            // byte alignment (not log2) — confirm against the walrus docs.
            body.local_get(local).instr(Load {
                memory: ctx.memory_id,
                kind: LoadKind::I64 { atomic: false },
                arg: MemArg {
                    align: 8,
                    offset: 0,
                },
            });
            Ok(())
        }
        CallArgStrategy::DerefToI32 => {
            // Same load as above, then truncate the i64 slot down to i32.
            body.local_get(local)
                .instr(Load {
                    memory: ctx.memory_id,
                    kind: LoadKind::I64 { atomic: false },
                    arg: MemArg {
                        align: 8,
                        offset: 0,
                    },
                })
                .unop(UnaryOp::I32WrapI64);
            Ok(())
        }
    }
}
171
172pub(crate) fn call_arg_valtype(
173 ctx: &LowerCtx<'_>,
174 arg: &ruka_mir::MirCallArg,
175 strategy: CallArgStrategy,
176 context: &'static str,
177) -> Result<ValType, LowerError> {
178 match strategy {
179 CallArgStrategy::ByValue => {
180 runtime_local_valtype(ctx.local_runtime_types, arg.local.as_u32(), context)
181 }
182 CallArgStrategy::DerefToI64 => Ok(ValType::I64),
183 CallArgStrategy::DerefToI32 => Ok(ValType::I32),
184 }
185}
186
187pub(crate) fn call_arg_strategy(
189 ctx: &LowerCtx<'_>,
190 arg: &ruka_mir::MirCallArg,
191 context: &'static str,
192 mutable_borrow_inout: bool,
193) -> Result<CallArgStrategy, LowerError> {
194 let binding = ctx.function.call_arg_binding(arg);
195 let arg_ty = local_ty(ctx.local_tys, arg.local.as_u32(), context)?;
196 let local_repr = local_repr(ctx.local_reprs, arg.local.as_u32(), context)?;
197 debug_assert_eq!(
198 binding.local_ty(),
199 arg_ty,
200 "call arg type metadata should agree"
201 );
202 debug_assert_eq!(
203 binding.local.repr, local_repr,
204 "call arg repr metadata should agree"
205 );
206 if binding.is_mutable_borrow() && !mutable_borrow_inout {
207 return Ok(CallArgStrategy::ByValue);
208 }
209 if binding.is_mutable_borrow() {
210 if binding.requires_deref_read() {
211 let inner = binding
212 .place_item_ty()
213 .expect("place reads should expose item type");
214 if binding.is_slice_place() {
215 return Ok(CallArgStrategy::ByValue);
216 }
217 if !binding.is_place() {
218 return Ok(CallArgStrategy::ByValue);
219 }
220 if is_passthrough_place_local(ctx, arg.local) {
221 return Ok(CallArgStrategy::ByValue);
222 }
223 return match ty_to_valtype(inner) {
224 ValType::I64 => Ok(CallArgStrategy::DerefToI64),
225 ValType::I32 => Ok(CallArgStrategy::DerefToI32),
226 _ => Err(LowerError::UnsupportedInstruction(
227 "unsupported dereferenced call arg type",
228 )),
229 };
230 }
231 return Ok(CallArgStrategy::ByValue);
232 }
233 if binding.requires_deref_read() {
234 let inner = binding
235 .place_item_ty()
236 .expect("place reads should expose item type");
237 if binding.is_slice_place() {
238 Ok(CallArgStrategy::ByValue)
239 } else if !binding.is_place() {
240 Ok(CallArgStrategy::ByValue)
241 } else if is_passthrough_place_local(ctx, arg.local) {
242 Ok(CallArgStrategy::ByValue)
243 } else {
244 match ty_to_valtype(inner) {
245 ValType::I64 => Ok(CallArgStrategy::DerefToI64),
246 ValType::I32 => Ok(CallArgStrategy::DerefToI32),
247 _ => Err(LowerError::UnsupportedInstruction(
248 "unsupported dereferenced call arg type",
249 )),
250 }
251 }
252 } else {
253 Ok(CallArgStrategy::ByValue)
254 }
255}
256
/// How a call argument is materialized on the wasm operand stack.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum CallArgStrategy {
    /// Push the local's runtime value directly with `local.get`.
    ByValue,
    /// Treat the local as a pointer and load an i64 from linear memory.
    DerefToI64,
    /// Load an i64 from linear memory, then wrap it down to i32.
    DerefToI32,
}
264
265pub(crate) fn wasm_valtype(ty: WasmValType) -> ValType {
267 match ty {
268 WasmValType::I32 => ValType::I32,
269 WasmValType::I64 => ValType::I64,
270 }
271}
272
273pub(crate) fn emit_widen_to_i64(
275 body: &mut walrus::InstrSeqBuilder,
276 value_ty: ValType,
277) -> Result<(), LowerError> {
278 match value_ty {
279 ValType::I64 => Ok(()),
280 ValType::I32 => {
281 body.unop(UnaryOp::I64ExtendUI32);
282 Ok(())
283 }
284 _ => Err(LowerError::UnsupportedInstruction(
285 "unsupported slot value type",
286 )),
287 }
288}