1use super::*;
2
/// Recipe describing how to release (drop) a runtime value of a given type.
///
/// A plan is built once per type by [`build_release_plan`] and then walked by
/// the `emit_*_release_with_free` helpers, which translate it into wasm
/// instructions that free the value's heap allocations.
#[derive(Clone)]
pub(crate) enum ReleasePlan {
    /// Nothing to release (scalars, or a recursive-struct cycle break).
    None,
    /// Pointer whose pointee may itself need releasing. `pointer_ty_key`
    /// identifies a pre-built drop function, when one was registered.
    Pointer {
        pointee: Box<ReleasePlan>,
        pointer_ty_key: String,
    },
    /// Heap-allocated string.
    String,
    /// Array/slice: release each item with the inner plan, then the buffer.
    Array(Box<ReleasePlan>),
    /// Struct or tuple payload of `payload_bytes` bytes. Each entry is
    /// `(byte offset, stored inline?, plan)`; fields whose plan would be
    /// `None` are omitted at build time.
    Aggregate {
        payload_bytes: u32,
        fields: Vec<(u32, bool, ReleasePlan)>,
    },
    /// Enum payload: one field list per variant, selected at runtime by tag.
    Enum {
        payload_bytes: u32,
        variants: Vec<Vec<(u32, bool, ReleasePlan)>>,
    },
}
21
22pub(crate) fn build_release_plan(
23 ty: &Ty,
24 structs: &[ruka_mir::MirStructDecl],
25 enums: &[ruka_mir::MirEnumDecl],
26) -> Result<ReleasePlan, LowerError> {
27 build_release_plan_with_visiting(ty, structs, enums, &mut BTreeSet::new())
28}
29
/// Recursively builds a [`ReleasePlan`] for `ty`.
///
/// `visiting_structs` holds the names of struct types currently being
/// expanded; re-entering one (a recursive struct) yields [`ReleasePlan::None`]
/// to break the cycle instead of recursing forever.
///
/// # Errors
/// Propagates [`LowerError`] from the layout helpers, and reports a missing
/// struct declaration as `UnsupportedInstruction`.
pub(crate) fn build_release_plan_with_visiting(
    ty: &Ty,
    structs: &[ruka_mir::MirStructDecl],
    enums: &[ruka_mir::MirEnumDecl],
    visiting_structs: &mut BTreeSet<String>,
) -> Result<ReleasePlan, LowerError> {
    match ty {
        // `Option<T>` is lowered to the two-variant `Option` enum and handled
        // by the generic enum arm below.
        Ty::Option(item) => {
            let lowered = Ty::Enum {
                name: "Option".to_owned(),
                args: vec![(**item).clone()],
            };
            build_release_plan_with_visiting(&lowered, structs, enums, visiting_structs)
        }
        Ty::Pointer(item) => Ok(ReleasePlan::Pointer {
            pointee: Box::new(build_release_plan_with_visiting(
                item,
                structs,
                enums,
                visiting_structs,
            )?),
            // Key used at emit time to look up a pre-built drop function.
            pointer_ty_key: ty_key(ty),
        }),
        Ty::String => Ok(ReleasePlan::String),
        // Arrays and slices share a release shape: items, then the buffer.
        Ty::Array { item, .. } | Ty::Slice(item) => Ok(ReleasePlan::Array(Box::new(
            build_release_plan_with_visiting(item, structs, enums, visiting_structs)?,
        ))),
        Ty::Tuple(items) => {
            let mut fields = Vec::<(u32, bool, ReleasePlan)>::new();
            for (index, field_ty) in items.iter().enumerate() {
                let plan =
                    build_release_plan_with_visiting(field_ty, structs, enums, visiting_structs)?;
                // Fields with nothing to release are dropped from the plan.
                if matches!(plan, ReleasePlan::None) {
                    continue;
                }
                let inline = aggregate::is_inline_aggregate_ty(field_ty);
                // Tuple fields are addressed by their stringified index.
                let field_name = index.to_string();
                let offset =
                    aggregate::aggregate_field_offset(ty, field_name.as_str(), structs, enums)?;
                fields.push((offset, inline, plan));
            }
            Ok(ReleasePlan::Aggregate {
                payload_bytes: aggregate::aggregate_payload_bytes(ty, structs, enums)?,
                fields,
            })
        }
        Ty::Struct { name, .. } => {
            // Cycle guard: if this struct is already being expanded, emit an
            // empty plan rather than recursing forever.
            if !visiting_structs.insert(name.clone()) {
                return Ok(ReleasePlan::None);
            }
            let decl = structs.iter().find(|decl| decl.name == *name).ok_or(
                LowerError::UnsupportedInstruction("missing struct declaration"),
            )?;
            let mut fields = Vec::<(u32, bool, ReleasePlan)>::new();
            for field in &decl.fields {
                let field_ty =
                    aggregate::aggregate_field_ty(ty, field.name.as_str(), structs, enums)?;
                let plan =
                    build_release_plan_with_visiting(&field_ty, structs, enums, visiting_structs)?;
                if matches!(plan, ReleasePlan::None) {
                    continue;
                }
                let inline = aggregate::is_inline_aggregate_ty(&field_ty);
                let offset =
                    aggregate::aggregate_field_offset(ty, field.name.as_str(), structs, enums)?;
                fields.push((offset, inline, plan));
            }
            // NOTE(review): if a `?` above returns early, `name` is never
            // removed from `visiting_structs`; callers appear to abandon the
            // whole plan on error, so this is likely harmless — confirm.
            let _ = visiting_structs.remove(name);
            Ok(ReleasePlan::Aggregate {
                payload_bytes: aggregate::aggregate_payload_bytes(ty, structs, enums)?,
                fields,
            })
        }
        Ty::Enum { name, args } => {
            let decl = enum_decl_by_name(enums, name)?;
            // Substitute the enum's type parameters with the concrete `args`
            // so variant payload types can be resolved below.
            let bindings = decl
                .type_params
                .iter()
                .cloned()
                .zip(args.iter().cloned())
                .collect::<BTreeMap<_, _>>();
            let mut variants =
                Vec::<Vec<(u32, bool, ReleasePlan)>>::with_capacity(decl.variants.len());
            for variant in &decl.variants {
                let mut field_plans = Vec::<(u32, bool, ReleasePlan)>::new();
                for (index, payload) in variant.payload.iter().enumerate() {
                    let field_ty = mir_type_expr_to_ty_with_subst(payload, &bindings)?;
                    let plan = build_release_plan_with_visiting(
                        &field_ty,
                        structs,
                        enums,
                        visiting_structs,
                    )?;
                    // Skip fields that need no release work.
                    if matches!(plan, ReleasePlan::None) {
                        continue;
                    }
                    let field_offset =
                        aggregate::enum_field_layout(ty, &variant.name, index, structs, enums)?
                            .offset;
                    let inline = aggregate::is_inline_aggregate_ty(&field_ty);
                    field_plans.push((field_offset, inline, plan));
                }
                variants.push(field_plans);
            }
            Ok(ReleasePlan::Enum {
                payload_bytes: aggregate::enum_payload_bytes(ty, structs, enums)?,
                variants,
            })
        }
        // Scalars and any other type without heap payload need no release.
        _ => Ok(ReleasePlan::None),
    }
}
142
/// Emits wasm that releases the value in `value_local` according to `plan`.
///
/// Dispatcher over [`ReleasePlan`]; the per-shape `emit_*` helpers do the
/// actual instruction emission. All `scratch_*` locals are caller-provided
/// temporaries that may be clobbered. `free_container` controls whether the
/// value's own allocation is handed to `free_id` after its contents are
/// released (callers pass `false` for inline storage).
pub(crate) fn emit_release_plan_with_free(
    body: &mut walrus::InstrSeqBuilder,
    memory_id: MemoryId,
    value_local: LocalId,
    plan: &ReleasePlan,
    pointer_drop_functions: &BTreeMap<String, FunctionId>,
    scratch_ptr_local: LocalId,
    scratch_count_local: LocalId,
    scratch_size_local: LocalId,
    scratch_value_local: LocalId,
    scratch_i64_local: LocalId,
    free_container: bool,
    free_id: FunctionId,
) {
    match plan {
        // Nothing to do.
        ReleasePlan::None => {}
        ReleasePlan::Pointer {
            pointee,
            pointer_ty_key,
        } => {
            // Prefer a pre-registered drop function for this pointer type;
            // this also keeps the emitted code small for recursive types.
            if let Some(function_id) = pointer_drop_functions.get(pointer_ty_key) {
                body.local_get(value_local).call(*function_id);
            } else {
                // No registered drop function: inline the release sequence.
                emit_pointer_release_with_free(
                    body,
                    memory_id,
                    value_local,
                    pointee.as_ref(),
                    pointer_drop_functions,
                    scratch_ptr_local,
                    scratch_count_local,
                    scratch_size_local,
                    scratch_value_local,
                    scratch_i64_local,
                    free_id,
                );
            }
        }
        ReleasePlan::String => emit_string_release_with_free(
            body,
            memory_id,
            value_local,
            scratch_ptr_local,
            scratch_count_local,
            scratch_size_local,
            free_id,
        ),
        ReleasePlan::Array(item_plan) => emit_array_release_with_free(
            body,
            memory_id,
            value_local,
            item_plan.as_ref(),
            pointer_drop_functions,
            scratch_ptr_local,
            scratch_count_local,
            scratch_size_local,
            scratch_value_local,
            scratch_i64_local,
            free_container,
            free_id,
        ),
        ReleasePlan::Aggregate {
            payload_bytes,
            fields,
        } => emit_aggregate_release_with_free(
            body,
            memory_id,
            value_local,
            *payload_bytes,
            fields,
            pointer_drop_functions,
            scratch_ptr_local,
            scratch_count_local,
            scratch_size_local,
            scratch_value_local,
            scratch_i64_local,
            free_container,
            free_id,
        ),
        ReleasePlan::Enum {
            payload_bytes,
            variants,
        } => emit_enum_release_with_free(
            body,
            memory_id,
            value_local,
            *payload_bytes,
            variants,
            pointer_drop_functions,
            scratch_ptr_local,
            scratch_count_local,
            scratch_size_local,
            scratch_value_local,
            scratch_i64_local,
            free_container,
            free_id,
        ),
    }
}
242
243pub(crate) fn nested_release_value_local(
244 value_local: LocalId,
245 scratch_size_local: LocalId,
246 scratch_value_local: LocalId,
247) -> LocalId {
248 if value_local == scratch_size_local {
249 scratch_value_local
250 } else {
251 scratch_size_local
252 }
253}
254
/// Emits wasm that releases a heap-allocated enum value held in `enum_local`.
///
/// Layout assumed by this code: a header word at `ARRAY_HEADER_OFFSET`, then
/// at `ARRAY_DATA_OFFSET` the variant tag (i32) followed by the payload.
/// Nothing is emitted for a null pointer, and the release body runs only when
/// the header word equals exactly 1 (presumably "uniquely owned" —
/// TODO(review) confirm the header-word semantics). The header is zeroed
/// before any recursive work so a re-entrant release becomes a no-op.
pub(crate) fn emit_enum_release_with_free(
    body: &mut walrus::InstrSeqBuilder,
    memory_id: MemoryId,
    enum_local: LocalId,
    payload_bytes: u32,
    variants: &[Vec<(u32, bool, ReleasePlan)>],
    pointer_drop_functions: &BTreeMap<String, FunctionId>,
    scratch_ptr_local: LocalId,
    scratch_count_local: LocalId,
    scratch_size_local: LocalId,
    scratch_value_local: LocalId,
    scratch_i64_local: LocalId,
    free_container: bool,
    free_id: FunctionId,
) {
    // Scratch local for nested field values, chosen to avoid the enum-pointer
    // copy kept in `scratch_size_local`.
    // NOTE(review): when `nested_value_local == scratch_size_local` (i.e.
    // `enum_local != scratch_size_local`), the `local_set(nested_value_local)`
    // below overwrites the saved enum pointer; that looks safe only if each
    // variant has at most one releasable field or callers pass
    // `enum_local == scratch_size_local` — verify against call sites.
    let nested_value_local =
        nested_release_value_local(enum_local, scratch_size_local, scratch_value_local);
    // Guard: skip everything for a null enum pointer.
    body.local_get(enum_local)
        .i32_const(0)
        .binop(BinaryOp::I32Ne)
        .if_else(
            None,
            |then_| {
                // Copy the pointer and load the header word.
                then_
                    .local_get(enum_local)
                    .local_set(scratch_size_local)
                    .local_get(scratch_size_local)
                    .instr(Load {
                        memory: memory_id,
                        kind: LoadKind::I32 { atomic: false },
                        arg: MemArg {
                            align: 4,
                            offset: ARRAY_HEADER_OFFSET,
                        },
                    })
                    .local_set(scratch_ptr_local)
                    // Release only when the header word is exactly 1.
                    .local_get(scratch_ptr_local)
                    .i32_const(1)
                    .binop(BinaryOp::I32Eq)
                    .if_else(
                        None,
                        |owned_| {
                            // Zero the header first so re-entrant releases of
                            // this allocation do nothing.
                            owned_
                                .local_get(scratch_size_local)
                                .i32_const(0)
                                .instr(Store {
                                    memory: memory_id,
                                    kind: StoreKind::I32 { atomic: false },
                                    arg: MemArg {
                                        align: 4,
                                        offset: ARRAY_HEADER_OFFSET,
                                    },
                                })
                                // Load the variant tag (first i32 of payload).
                                .local_get(scratch_size_local)
                                .i32_const(ARRAY_DATA_OFFSET as i32)
                                .binop(BinaryOp::I32Add)
                                .instr(Load {
                                    memory: memory_id,
                                    kind: LoadKind::I32 { atomic: false },
                                    arg: MemArg {
                                        align: 4,
                                        offset: 0,
                                    },
                                })
                                .local_set(scratch_count_local);

                            // One guarded block per variant: release its fields
                            // when the runtime tag matches the variant index.
                            for (variant_index, field_plans) in variants.iter().enumerate() {
                                let variant_index = variant_index as i32;
                                owned_
                                    .local_get(scratch_count_local)
                                    .i32_const(variant_index)
                                    .binop(BinaryOp::I32Eq)
                                    .if_else(
                                        None,
                                        |variant_block| {
                                            for (offset, inline, plan) in field_plans {
                                                if *inline {
                                                    // Inline field: pass the
                                                    // field's address.
                                                    variant_block
                                                        .local_get(scratch_size_local)
                                                        .i32_const(ARRAY_DATA_OFFSET as i32)
                                                        .binop(BinaryOp::I32Add)
                                                        .i32_const(*offset as i32)
                                                        .binop(BinaryOp::I32Add)
                                                        .local_set(nested_value_local);
                                                } else {
                                                    // Out-of-line field: load
                                                    // the stored value/pointer.
                                                    variant_block
                                                        .local_get(scratch_size_local)
                                                        .i32_const(ARRAY_DATA_OFFSET as i32)
                                                        .binop(BinaryOp::I32Add)
                                                        .i32_const(*offset as i32)
                                                        .binop(BinaryOp::I32Add)
                                                        .instr(Load {
                                                            memory: memory_id,
                                                            kind: LoadKind::I32 { atomic: false },
                                                            arg: MemArg {
                                                                align: 4,
                                                                offset: 0,
                                                            },
                                                        })
                                                        .local_set(nested_value_local);
                                                }
                                                // Inline fields live inside this
                                                // payload, so their "container"
                                                // must not be freed separately.
                                                emit_release_plan_with_free(
                                                    variant_block,
                                                    memory_id,
                                                    nested_value_local,
                                                    plan,
                                                    pointer_drop_functions,
                                                    scratch_ptr_local,
                                                    scratch_count_local,
                                                    scratch_size_local,
                                                    scratch_value_local,
                                                    scratch_i64_local,
                                                    !*inline,
                                                    free_id,
                                                );
                                            }
                                        },
                                        |_else_| {},
                                    );
                            }

                            // Finally free the enum's own allocation
                            // (header + tag + payload bytes).
                            if free_container {
                                owned_
                                    .local_get(enum_local)
                                    .i32_const((ARRAY_DATA_OFFSET + payload_bytes) as i32)
                                    .call(free_id);
                            }
                        },
                        |_already_released| {},
                    );
            },
            |_else_| {},
        );
}
389
/// Emits wasm that releases a heap-allocated aggregate (struct/tuple) value
/// held in `aggregate_local`.
///
/// Same layout assumptions as the enum helper: header word at
/// `ARRAY_HEADER_OFFSET`, field payload starting at `ARRAY_DATA_OFFSET`.
/// Nothing is emitted for a null pointer, and the release runs only when the
/// header word equals exactly 1 (presumably "uniquely owned" — TODO(review)
/// confirm). The header is zeroed before recursing so re-entrant releases
/// become no-ops.
pub(crate) fn emit_aggregate_release_with_free(
    body: &mut walrus::InstrSeqBuilder,
    memory_id: MemoryId,
    aggregate_local: LocalId,
    payload_bytes: u32,
    fields: &[(u32, bool, ReleasePlan)],
    pointer_drop_functions: &BTreeMap<String, FunctionId>,
    scratch_ptr_local: LocalId,
    scratch_count_local: LocalId,
    scratch_size_local: LocalId,
    scratch_value_local: LocalId,
    scratch_i64_local: LocalId,
    free_container: bool,
    free_id: FunctionId,
) {
    // Scratch local for nested field values, distinct from the container
    // pointer copy in `scratch_size_local` when that aliases the argument.
    // NOTE(review): as in the enum helper, when `nested_value_local ==
    // scratch_size_local` the loop below clobbers the saved container pointer
    // after the first field — verify call sites make this unreachable for
    // multi-field aggregates.
    let nested_value_local =
        nested_release_value_local(aggregate_local, scratch_size_local, scratch_value_local);
    // Guard: skip everything for a null aggregate pointer.
    body.local_get(aggregate_local)
        .i32_const(0)
        .binop(BinaryOp::I32Ne)
        .if_else(
            None,
            |then_| {
                // Copy the pointer and load the header word.
                then_
                    .local_get(aggregate_local)
                    .local_set(scratch_size_local)
                    .local_get(scratch_size_local)
                    .instr(Load {
                        memory: memory_id,
                        kind: LoadKind::I32 { atomic: false },
                        arg: MemArg {
                            align: 4,
                            offset: ARRAY_HEADER_OFFSET,
                        },
                    })
                    .local_set(scratch_ptr_local)
                    // Release only when the header word is exactly 1.
                    .local_get(scratch_ptr_local)
                    .i32_const(1)
                    .binop(BinaryOp::I32Eq)
                    .if_else(
                        None,
                        |owned_| {
                            // Zero the header first so re-entrant releases of
                            // this allocation do nothing.
                            owned_
                                .local_get(scratch_size_local)
                                .i32_const(0)
                                .instr(Store {
                                    memory: memory_id,
                                    kind: StoreKind::I32 { atomic: false },
                                    arg: MemArg {
                                        align: 4,
                                        offset: ARRAY_HEADER_OFFSET,
                                    },
                                });
                            // Special case: a single out-of-line field lets us
                            // load the field value first, free this container
                            // immediately, and only then release the field —
                            // the container is not touched afterwards, so the
                            // early free is safe.
                            if free_container && fields.len() == 1 {
                                let (offset, inline, plan) = &fields[0];
                                if !*inline {
                                    owned_
                                        .local_get(scratch_size_local)
                                        .i32_const(ARRAY_DATA_OFFSET as i32)
                                        .binop(BinaryOp::I32Add)
                                        .i32_const(*offset as i32)
                                        .binop(BinaryOp::I32Add)
                                        .instr(Load {
                                            memory: memory_id,
                                            kind: LoadKind::I32 { atomic: false },
                                            arg: MemArg {
                                                align: 4,
                                                offset: 0,
                                            },
                                        })
                                        .local_set(nested_value_local)
                                        .local_get(aggregate_local)
                                        .i32_const((ARRAY_DATA_OFFSET + payload_bytes) as i32)
                                        .call(free_id);
                                    // Out-of-line field owns its allocation:
                                    // free_container = true for the recursion.
                                    emit_release_plan_with_free(
                                        owned_,
                                        memory_id,
                                        nested_value_local,
                                        plan,
                                        pointer_drop_functions,
                                        scratch_ptr_local,
                                        scratch_count_local,
                                        scratch_size_local,
                                        scratch_value_local,
                                        scratch_i64_local,
                                        true,
                                        free_id,
                                    );
                                    // Exits only the `owned_` closure; the
                                    // container was already freed above.
                                    return;
                                }
                            }
                            // General path: release each recorded field.
                            for (offset, inline, plan) in fields {
                                if *inline {
                                    // Inline field: pass the field's address.
                                    owned_
                                        .local_get(scratch_size_local)
                                        .i32_const(ARRAY_DATA_OFFSET as i32)
                                        .binop(BinaryOp::I32Add)
                                        .i32_const(*offset as i32)
                                        .binop(BinaryOp::I32Add)
                                        .local_set(nested_value_local);
                                } else {
                                    // Out-of-line field: load the stored
                                    // value/pointer.
                                    owned_
                                        .local_get(scratch_size_local)
                                        .i32_const(ARRAY_DATA_OFFSET as i32)
                                        .binop(BinaryOp::I32Add)
                                        .i32_const(*offset as i32)
                                        .binop(BinaryOp::I32Add)
                                        .instr(Load {
                                            memory: memory_id,
                                            kind: LoadKind::I32 { atomic: false },
                                            arg: MemArg {
                                                align: 4,
                                                offset: 0,
                                            },
                                        })
                                        .local_set(nested_value_local);
                                }
                                // Inline fields live inside this payload, so
                                // their "container" must not be freed.
                                emit_release_plan_with_free(
                                    owned_,
                                    memory_id,
                                    nested_value_local,
                                    plan,
                                    pointer_drop_functions,
                                    scratch_ptr_local,
                                    scratch_count_local,
                                    scratch_size_local,
                                    scratch_value_local,
                                    scratch_i64_local,
                                    !*inline,
                                    free_id,
                                );
                            }
                            // Free the aggregate's own allocation last.
                            if free_container {
                                owned_
                                    .local_get(aggregate_local)
                                    .i32_const((ARRAY_DATA_OFFSET + payload_bytes) as i32)
                                    .call(free_id);
                            }
                        },
                        |_already_released| {},
                    );
            },
            |_else_| {},
        );
}