// ruka_codegen_wasm/codegen/wasm/aggregate.rs

1use ruka_types::Ty;
2use std::collections::BTreeMap;
3use walrus::ir::{BinaryOp, Load, LoadKind, MemArg, Store, StoreKind, UnaryOp};
4use walrus::{LocalId, MemoryId, ValType};
5
6use crate::{
7    ARRAY_CAP_OFFSET, ARRAY_DATA_OFFSET, ARRAY_HEADER_OFFSET, ARRAY_LEN_OFFSET, ARRAY_SLOT_BYTES,
8    ENUM_PAYLOAD_OFFSET, LowerCtx, LowerError, alloc_site_parts, emit_copy_bytes, int32_from_usize,
9    local_ty, runtime_local_index, runtime_tracked_alloc_function,
10};
11
/// Layout of a single field within an aggregate's payload region.
#[derive(Clone, Copy)]
pub(crate) struct FieldLayout {
    /// Byte offset of the field from the start of the payload.
    pub(crate) offset: u32,
    /// Field size in bytes.
    pub(crate) size: u32,
    /// Required alignment in bytes.
    pub(crate) align: u32,
    /// Wasm value type used when loading/storing the field.
    pub(crate) valtype: ValType,
    /// True when the field stores a nested aggregate inline (accessed by
    /// address and copied byte-wise) rather than a scalar value.
    pub(crate) inline_aggregate: bool,
}
20
/// Computed layout for a struct, tuple, array, or enum payload.
#[derive(Clone)]
struct AggregateLayout {
    // Total payload size in bytes (excludes the backend header space).
    payload_bytes: u32,
    // Per-field layouts in declaration order; empty for enums.
    fields: Vec<FieldLayout>,
}
26
/// Field layouts for one enum variant's payload. Offsets already include the
/// shared `ENUM_PAYLOAD_OFFSET` header region.
#[derive(Clone)]
struct EnumVariantLayout {
    fields: Vec<FieldLayout>,
}
31
/// Layout for an enum: per-variant field layouts plus the payload size of the
/// largest variant (including the `ENUM_PAYLOAD_OFFSET` header region).
#[derive(Clone)]
struct EnumLayout {
    variants: BTreeMap<String, EnumVariantLayout>,
    payload_bytes: u32,
}
37
38pub(crate) fn is_inline_aggregate_ty(ty: &Ty) -> bool {
39    matches!(ty, Ty::Tuple(_) | Ty::Struct { .. })
40}
41
/// Collection storage layout used by the backend runtime model.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum CollectionLayout {
    /// Fixed-length array stored as a header plus 8-byte slots.
    InlineArray,
    /// Slice stored as a packed pair of pointer-sized words.
    SlicePacked,
}
48
49/// Resolve the backend layout used for collection-like values.
50pub(crate) fn collection_layout(ty: &Ty) -> Result<CollectionLayout, LowerError> {
51    match ty {
52        Ty::Array { .. } => Ok(CollectionLayout::InlineArray),
53        Ty::Slice(_) => Ok(CollectionLayout::SlicePacked),
54        Ty::RefRo(inner) | Ty::RefMut(inner) if matches!(inner.as_ref(), Ty::Slice(_)) => {
55            Ok(CollectionLayout::SlicePacked)
56        }
57        Ty::RefRo(inner) | Ty::RefMut(inner) => collection_layout(inner),
58        _ => Err(LowerError::UnsupportedInstruction(
59            "unsupported collection layout type",
60        )),
61    }
62}
63
64/// Return total aggregate payload bytes (excluding backend header space).
65pub(crate) fn aggregate_payload_bytes(
66    aggregate_ty: &Ty,
67    structs: &[ruka_mir::MirStructDecl],
68    enums: &[ruka_mir::MirEnumDecl],
69) -> Result<u32, LowerError> {
70    Ok(aggregate_layout(aggregate_ty, structs, enums)?.payload_bytes)
71}
72
73/// Return the aggregate type reachable through a direct local or reference local.
74pub(crate) fn root_aggregate_ty(ty: &Ty) -> Result<&Ty, LowerError> {
75    match ty {
76        Ty::Struct { .. } | Ty::Tuple(_) => Ok(ty),
77        Ty::RefRo(inner) | Ty::RefMut(inner) => match inner.as_ref() {
78            Ty::Struct { .. } | Ty::Tuple(_) => Ok(inner.as_ref()),
79            _ => Err(LowerError::UnsupportedInstruction(
80                "unsupported field base type",
81            )),
82        },
83        _ => Err(LowerError::UnsupportedInstruction(
84            "unsupported field base type",
85        )),
86    }
87}
88
89/// Return the type of one aggregate field.
90pub(crate) fn aggregate_field_ty(
91    base_ty: &Ty,
92    field: &str,
93    structs: &[ruka_mir::MirStructDecl],
94    enums: &[ruka_mir::MirEnumDecl],
95) -> Result<Ty, LowerError> {
96    match base_ty {
97        Ty::Tuple(items) => {
98            let index = field
99                .parse::<usize>()
100                .map_err(|_| LowerError::UnsupportedInstruction("tuple field must be numeric"))?;
101            items
102                .get(index)
103                .cloned()
104                .ok_or(LowerError::UnsupportedInstruction(
105                    "tuple field index out of bounds",
106                ))
107        }
108        Ty::Struct { name, .. } => {
109            let decl = structs.iter().find(|decl| decl.name == *name).ok_or(
110                LowerError::UnsupportedInstruction("missing struct declaration"),
111            )?;
112            let field_ty = decl
113                .fields
114                .iter()
115                .find(|decl_field| decl_field.name == field)
116                .map(|decl_field| &decl_field.ty)
117                .ok_or(LowerError::UnsupportedInstruction("unknown struct field"))?;
118            mir_type_expr_to_ty(field_ty, enums, &BTreeMap::new())
119        }
120        _ => Err(LowerError::UnsupportedInstruction(
121            "unsupported aggregate field access",
122        )),
123    }
124}
125
126/// Return one struct field index in declaration order.
127pub(crate) fn struct_field_index_by_name(
128    structs: &[ruka_mir::MirStructDecl],
129    struct_name: &str,
130    field: &str,
131) -> Result<usize, LowerError> {
132    let decl = structs.iter().find(|decl| decl.name == struct_name).ok_or(
133        LowerError::UnsupportedInstruction("missing struct declaration"),
134    )?;
135    decl.fields
136        .iter()
137        .position(|decl_field| decl_field.name == field)
138        .ok_or(LowerError::UnsupportedInstruction("unknown struct field"))
139}
140
141/// Convert a concrete MIR field type back into checker type form.
142fn mir_type_expr_to_ty(
143    ty: &ruka_mir::MirTypeExpr,
144    enums: &[ruka_mir::MirEnumDecl],
145    bindings: &BTreeMap<String, Ty>,
146) -> Result<Ty, LowerError> {
147    match ty {
148        ruka_mir::MirTypeExpr::Named(name) => match name.as_str() {
149            "Unit" => Ok(Ty::Unit),
150            "u8" => Ok(Ty::U8),
151            "u16" => Ok(Ty::U16),
152            "u32" => Ok(Ty::U32),
153            "u64" => Ok(Ty::U64),
154            "i8" => Ok(Ty::I8),
155            "i16" => Ok(Ty::I16),
156            "i32" => Ok(Ty::I32),
157            "i64" => Ok(Ty::I64),
158            "f32" => Ok(Ty::F32),
159            "f64" => Ok(Ty::F64),
160            "String" => Ok(Ty::String),
161            "Bool" => Ok(Ty::Bool),
162            other => {
163                if let Some(actual) = bindings.get(other) {
164                    Ok(actual.clone())
165                } else if enums.iter().any(|decl| decl.name == other) {
166                    Ok(Ty::Enum {
167                        name: other.to_owned(),
168                        args: Vec::new(),
169                    })
170                } else {
171                    Ok(Ty::Struct {
172                        name: other.to_owned(),
173                        args: Vec::new(),
174                    })
175                }
176            }
177        },
178        ruka_mir::MirTypeExpr::Pointer { item } => Ok(Ty::Pointer(Box::new(mir_type_expr_to_ty(
179            item, enums, bindings,
180        )?))),
181        ruka_mir::MirTypeExpr::Array { item, len } => Ok(Ty::Array {
182            item: Box::new(mir_type_expr_to_ty(item, enums, bindings)?),
183            len: *len,
184        }),
185        ruka_mir::MirTypeExpr::Slice { item } => Ok(Ty::Slice(Box::new(mir_type_expr_to_ty(
186            item, enums, bindings,
187        )?))),
188        ruka_mir::MirTypeExpr::Tuple(items) => Ok(Ty::Tuple(
189            items
190                .iter()
191                .map(|item| mir_type_expr_to_ty(item, enums, bindings))
192                .collect::<Result<Vec<_>, _>>()?,
193        )),
194        ruka_mir::MirTypeExpr::Apply { callee, args } => {
195            if enums.iter().any(|decl| decl.name == *callee) {
196                Ok(Ty::Enum {
197                    name: callee.clone(),
198                    args: args
199                        .iter()
200                        .map(|item| mir_type_expr_to_ty(item, enums, bindings))
201                        .collect::<Result<Vec<_>, _>>()?,
202                })
203            } else {
204                Ok(Ty::Struct {
205                    name: callee.clone(),
206                    args: args
207                        .iter()
208                        .map(|item| mir_type_expr_to_ty(item, enums, bindings))
209                        .collect::<Result<Vec<_>, _>>()?,
210                })
211            }
212        }
213    }
214}
215
/// Load the current struct pointer used as the root for a field assignment.
///
/// The pointer is left in `dst_local`. A direct struct local already holds
/// the pointer; a reference local either holds the pointer directly (for
/// passthrough place locals) or holds the address of an 8-byte slot whose
/// i64 content is the pointer (narrowed to i32 after loading).
pub(crate) fn emit_struct_base_ptr(
    body: &mut walrus::InstrSeqBuilder,
    ctx: &LowerCtx<'_>,
    base: ruka_mir::MirLocalId,
    dst_local: LocalId,
) -> Result<(), LowerError> {
    let base_ty = local_ty(ctx.local_tys, base.as_u32(), "field base ty")?;
    match base_ty {
        Ty::Struct { .. } => {
            // The local itself is the struct pointer.
            let base_local = runtime_local_index(ctx.local_indices, base.as_u32(), "field base")?;
            body.local_get(base_local).local_set(dst_local);
            Ok(())
        }
        Ty::RefRo(inner) | Ty::RefMut(inner) if matches!(inner.as_ref(), Ty::Struct { .. }) => {
            let base_local = runtime_local_index(ctx.local_indices, base.as_u32(), "field base")?;
            if crate::is_passthrough_place_local(ctx, base) {
                // Passthrough references carry the struct pointer directly.
                body.local_get(base_local).local_set(dst_local);
            } else {
                // Dereference: load the i64 slot and narrow to an i32 pointer.
                body.local_get(base_local)
                    .instr(Load {
                        memory: ctx.memory_id,
                        kind: LoadKind::I64 { atomic: false },
                        arg: MemArg {
                            align: 8,
                            offset: 0,
                        },
                    })
                    .unop(UnaryOp::I32WrapI64)
                    .local_set(dst_local);
            }
            Ok(())
        }
        _ => Err(LowerError::UnsupportedInstruction(
            "unsupported field base type",
        )),
    }
}
254
255/// Load the current aggregate pointer used as the root for a field access or assignment.
256pub(crate) fn emit_aggregate_base_ptr(
257    body: &mut walrus::InstrSeqBuilder,
258    ctx: &LowerCtx<'_>,
259    base: ruka_mir::MirLocalId,
260    dst_local: LocalId,
261) -> Result<(), LowerError> {
262    let base_ty = local_ty(ctx.local_tys, base.as_u32(), "field base ty")?;
263    if is_tuple_base_ty(base_ty) {
264        emit_tuple_base_ptr(body, ctx, base, dst_local)
265    } else {
266        emit_struct_base_ptr(body, ctx, base, dst_local)
267    }
268}
269
270/// Read one aggregate field and leave the resulting pointer-like value on the stack.
271pub(crate) fn emit_read_aggregate_field_ptr(
272    body: &mut walrus::InstrSeqBuilder,
273    ctx: &LowerCtx<'_>,
274    base_ptr_local: LocalId,
275    base_ty: &Ty,
276    field: &str,
277) -> Result<(), LowerError> {
278    let field = aggregate_field_layout(base_ty, field, ctx.structs, ctx.enums)?;
279    if field.inline_aggregate {
280        body.local_get(base_ptr_local)
281            .i32_const(ARRAY_DATA_OFFSET as i32)
282            .binop(BinaryOp::I32Add)
283            .i32_const(int32_from_usize(
284                field.offset as usize,
285                "aggregate field offset",
286            )?)
287            .binop(BinaryOp::I32Add)
288            .unop(UnaryOp::I64ExtendUI32);
289        return Ok(());
290    }
291    body.local_get(base_ptr_local)
292        .i32_const(ARRAY_DATA_OFFSET as i32)
293        .binop(BinaryOp::I32Add)
294        .i32_const(int32_from_usize(
295            field.offset as usize,
296            "aggregate field offset",
297        )?)
298        .binop(BinaryOp::I32Add)
299        .instr(match (field.valtype, field.size) {
300            (ValType::I64, _) => Load {
301                memory: ctx.memory_id,
302                kind: LoadKind::I64 { atomic: false },
303                arg: MemArg {
304                    align: 8,
305                    offset: 0,
306                },
307            },
308            (ValType::I32, 1) => Load {
309                memory: ctx.memory_id,
310                kind: LoadKind::I32_8 {
311                    kind: walrus::ir::ExtendedLoad::ZeroExtend,
312                },
313                arg: MemArg {
314                    align: 1,
315                    offset: 0,
316                },
317            },
318            (ValType::I32, _) => Load {
319                memory: ctx.memory_id,
320                kind: LoadKind::I32 { atomic: false },
321                arg: MemArg {
322                    align: 4,
323                    offset: 0,
324                },
325            },
326            _ => {
327                return Err(LowerError::UnsupportedInstruction(
328                    "unsupported aggregate field load kind",
329                ));
330            }
331        });
332    if field.valtype == ValType::I32 {
333        body.unop(UnaryOp::I64ExtendUI32);
334    }
335    Ok(())
336}
337
338/// Write one aggregate field from a runtime local.
339pub(crate) fn emit_write_aggregate_field(
340    body: &mut walrus::InstrSeqBuilder,
341    ctx: &LowerCtx<'_>,
342    base_ptr_local: LocalId,
343    base_ty: &Ty,
344    field: &str,
345    src_local: LocalId,
346    src_ty: ValType,
347) -> Result<(), LowerError> {
348    let field = aggregate_field_layout(base_ty, field, ctx.structs, ctx.enums)?;
349    if field.inline_aggregate {
350        if src_ty != ValType::I32 {
351            return Err(LowerError::UnsupportedInstruction(
352                "inline aggregate field source must be i32 pointer",
353            ));
354        }
355        body.local_get(base_ptr_local)
356            .i32_const(ARRAY_DATA_OFFSET as i32)
357            .binop(BinaryOp::I32Add)
358            .i32_const(int32_from_usize(
359                field.offset as usize,
360                "aggregate field offset",
361            )?)
362            .binop(BinaryOp::I32Add)
363            .local_set(ctx.scratch_i32_local_c);
364        emit_copy_bytes(
365            body,
366            ctx.memory_id,
367            src_local,
368            ctx.scratch_i32_local_c,
369            field.size,
370            ctx.scratch_i32_local,
371            ctx.scratch_i32_local_b,
372        )?;
373        return Ok(());
374    }
375    body.local_get(base_ptr_local)
376        .i32_const(ARRAY_DATA_OFFSET as i32)
377        .binop(BinaryOp::I32Add)
378        .i32_const(int32_from_usize(
379            field.offset as usize,
380            "aggregate field offset",
381        )?)
382        .binop(BinaryOp::I32Add)
383        .local_get(src_local);
384    match (src_ty, field.valtype, field.size) {
385        (ValType::I64, ValType::I64, _) => {
386            body.instr(Store {
387                memory: ctx.memory_id,
388                kind: StoreKind::I64 { atomic: false },
389                arg: MemArg {
390                    align: 8,
391                    offset: 0,
392                },
393            });
394        }
395        (ValType::I32, ValType::I64, _) => {
396            body.unop(UnaryOp::I64ExtendUI32).instr(Store {
397                memory: ctx.memory_id,
398                kind: StoreKind::I64 { atomic: false },
399                arg: MemArg {
400                    align: 8,
401                    offset: 0,
402                },
403            });
404        }
405        (ValType::I64, ValType::I32, 1) => {
406            body.unop(UnaryOp::I32WrapI64).instr(Store {
407                memory: ctx.memory_id,
408                kind: StoreKind::I32_8 { atomic: false },
409                arg: MemArg {
410                    align: 1,
411                    offset: 0,
412                },
413            });
414        }
415        (ValType::I32, ValType::I32, 1) => {
416            body.instr(Store {
417                memory: ctx.memory_id,
418                kind: StoreKind::I32_8 { atomic: false },
419                arg: MemArg {
420                    align: 1,
421                    offset: 0,
422                },
423            });
424        }
425        (ValType::I64, ValType::I32, _) => {
426            body.unop(UnaryOp::I32WrapI64).instr(Store {
427                memory: ctx.memory_id,
428                kind: StoreKind::I32 { atomic: false },
429                arg: MemArg {
430                    align: 4,
431                    offset: 0,
432                },
433            });
434        }
435        (ValType::I32, ValType::I32, _) => {
436            body.instr(Store {
437                memory: ctx.memory_id,
438                kind: StoreKind::I32 { atomic: false },
439                arg: MemArg {
440                    align: 4,
441                    offset: 0,
442                },
443            });
444        }
445        _ => {
446            return Err(LowerError::UnsupportedInstruction(
447                "unsupported aggregate field store kind",
448            ));
449        }
450    }
451    Ok(())
452}
453
454/// Return one aggregate field index by tuple index or struct declaration order.
455fn aggregate_field_index(
456    base_ty: &Ty,
457    field: &str,
458    structs: &[ruka_mir::MirStructDecl],
459) -> Result<usize, LowerError> {
460    match base_ty {
461        Ty::Tuple(items) => {
462            let index = field
463                .parse::<usize>()
464                .map_err(|_| LowerError::UnsupportedInstruction("tuple field must be numeric"))?;
465            if index >= items.len() {
466                return Err(LowerError::UnsupportedInstruction(
467                    "tuple field index out of bounds",
468                ));
469            }
470            Ok(index)
471        }
472        Ty::Struct { name, .. } => struct_field_index_by_name(structs, name, field),
473        _ => Err(LowerError::UnsupportedInstruction(
474            "unsupported aggregate field access",
475        )),
476    }
477}
478
479fn aggregate_field_layout(
480    base_ty: &Ty,
481    field: &str,
482    structs: &[ruka_mir::MirStructDecl],
483    enums: &[ruka_mir::MirEnumDecl],
484) -> Result<FieldLayout, LowerError> {
485    let index = aggregate_field_index(base_ty, field, structs)?;
486    let layout = aggregate_layout(base_ty, structs, enums)?;
487    layout
488        .fields
489        .get(index)
490        .copied()
491        .ok_or(LowerError::UnsupportedInstruction(
492            "aggregate field index out of bounds",
493        ))
494}
495
496/// Return one aggregate field byte offset in payload layout order.
497pub(crate) fn aggregate_field_offset(
498    base_ty: &Ty,
499    field: &str,
500    structs: &[ruka_mir::MirStructDecl],
501    enums: &[ruka_mir::MirEnumDecl],
502) -> Result<u32, LowerError> {
503    Ok(aggregate_field_layout(base_ty, field, structs, enums)?.offset)
504}
505
506fn aggregate_layout(
507    aggregate_ty: &Ty,
508    structs: &[ruka_mir::MirStructDecl],
509    enums: &[ruka_mir::MirEnumDecl],
510) -> Result<AggregateLayout, LowerError> {
511    if matches!(aggregate_ty, Ty::Enum { .. }) {
512        return Ok(AggregateLayout {
513            payload_bytes: enum_layout(aggregate_ty, structs, enums)?.payload_bytes,
514            fields: Vec::new(),
515        });
516    }
517    if let Ty::Array { len, .. } = aggregate_ty {
518        let mut fields = Vec::<FieldLayout>::with_capacity(*len);
519        let mut offset = 0_u32;
520        for _ in 0..*len {
521            fields.push(FieldLayout {
522                offset,
523                size: 8,
524                align: 8,
525                valtype: ValType::I64,
526                inline_aggregate: false,
527            });
528            offset = offset.saturating_add(8);
529        }
530        return Ok(AggregateLayout {
531            payload_bytes: offset,
532            fields,
533        });
534    }
535    let field_tys = aggregate_field_types(aggregate_ty, structs, enums)?;
536    layout_from_field_tys(&field_tys, structs, enums)
537}
538
/// Round `offset` up to the next multiple of `align`.
///
/// `align` must be 0, 1, or a power of two (layout code only produces such
/// values). Uses saturating addition so offsets near `u32::MAX` clamp instead
/// of wrapping — the original `offset + mask` would silently overflow in
/// release builds, matching the saturating style used elsewhere in layout
/// computation.
fn align_to(offset: u32, align: u32) -> u32 {
    if align <= 1 {
        return offset;
    }
    debug_assert!(align.is_power_of_two(), "alignment must be a power of two");
    let mask = align - 1;
    offset.saturating_add(mask) & !mask
}
546
/// Return `(size, align, valtype)` for a value stored as a scalar slot.
fn scalar_layout(ty: &Ty) -> (u32, u32, ValType) {
    match ty {
        Ty::U8 | Ty::I8 | Ty::Bool => (1, 1, ValType::I32),
        Ty::U16 | Ty::I16 => (2, 2, ValType::I32),
        Ty::U32 | Ty::I32 | Ty::F32 => (4, 4, ValType::I32),
        Ty::U64 | Ty::I64 | Ty::F64 => (8, 8, ValType::I64),
        // Unit occupies no payload bytes.
        Ty::Unit => (0, 1, ValType::I32),
        // Pointer-like values: a 4-byte i32 address.
        Ty::String | Ty::Pointer(_) | Ty::Option(_) | Ty::RefRo(_) | Ty::RefMut(_) => {
            (4, 4, ValType::I32)
        }
        // Aggregates reaching this arm are represented as a 4-byte pointer;
        // note `value_layout` intercepts tuples/structs before calling here,
        // so in practice this arm covers arrays, slices, and enums.
        Ty::Array { .. } | Ty::Slice(_) | Ty::Tuple(_) | Ty::Struct { .. } | Ty::Enum { .. } => {
            (4, 4, ValType::I32)
        }
    }
}
562
563fn aggregate_field_types(
564    aggregate_ty: &Ty,
565    structs: &[ruka_mir::MirStructDecl],
566    enums: &[ruka_mir::MirEnumDecl],
567) -> Result<Vec<Ty>, LowerError> {
568    match aggregate_ty {
569        Ty::Slice(_) => Ok(vec![
570            Ty::Pointer(Box::new(Ty::Unit)),
571            Ty::Pointer(Box::new(Ty::Unit)),
572        ]),
573        Ty::Tuple(items) => Ok(items.clone()),
574        Ty::Struct { name, .. } => {
575            let decl = structs.iter().find(|decl| decl.name == *name).ok_or(
576                LowerError::UnsupportedInstruction("missing struct declaration"),
577            )?;
578            decl.fields
579                .iter()
580                .map(|field| mir_type_expr_to_ty(&field.ty, enums, &BTreeMap::new()))
581                .collect::<Result<Vec<_>, _>>()
582        }
583        _ => Err(LowerError::UnsupportedInstruction(
584            "unsupported aggregate layout type",
585        )),
586    }
587}
588
589fn layout_from_field_tys(
590    field_tys: &[Ty],
591    structs: &[ruka_mir::MirStructDecl],
592    enums: &[ruka_mir::MirEnumDecl],
593) -> Result<AggregateLayout, LowerError> {
594    let mut fields = Vec::<FieldLayout>::with_capacity(field_tys.len());
595    let mut offset = 0_u32;
596    for field_ty in field_tys {
597        let (size, align, valtype, inline_aggregate) = value_layout(field_ty, structs, enums)?;
598        offset = align_to(offset, align);
599        fields.push(FieldLayout {
600            offset,
601            size,
602            align,
603            valtype,
604            inline_aggregate,
605        });
606        offset = offset.saturating_add(size);
607    }
608    Ok(AggregateLayout {
609        payload_bytes: offset,
610        fields,
611    })
612}
613
614fn value_layout(
615    ty: &Ty,
616    structs: &[ruka_mir::MirStructDecl],
617    enums: &[ruka_mir::MirEnumDecl],
618) -> Result<(u32, u32, ValType, bool), LowerError> {
619    if !is_inline_aggregate_ty(ty) {
620        let (size, align, valtype) = scalar_layout(ty);
621        return Ok((size, align, valtype, false));
622    }
623    let payload_bytes = aggregate_payload_bytes(ty, structs, enums)?;
624    Ok((ARRAY_DATA_OFFSET + payload_bytes, 4, ValType::I32, true))
625}
626
/// Compute per-variant field layouts and the overall payload size of an enum.
///
/// `Ty::Option(T)` is first lowered to the nominal `Option` enum with `T` as
/// its single type argument. Each variant's payload is laid out after the
/// shared `ENUM_PAYLOAD_OFFSET` header region, and `payload_bytes` is the
/// maximum over all variants so any variant fits the same allocation.
fn enum_layout(
    enum_ty: &Ty,
    structs: &[ruka_mir::MirStructDecl],
    enums: &[ruka_mir::MirEnumDecl],
) -> Result<EnumLayout, LowerError> {
    if let Ty::Option(item) = enum_ty {
        let lowered = Ty::Enum {
            name: "Option".to_owned(),
            args: vec![(**item).clone()],
        };
        return enum_layout(&lowered, structs, enums);
    }
    let Ty::Enum { name, args } = enum_ty else {
        return Err(LowerError::UnsupportedInstruction("expected enum type"));
    };
    let decl =
        enums
            .iter()
            .find(|decl| decl.name == *name)
            .ok_or(LowerError::UnsupportedInstruction(
                "missing enum declaration",
            ))?;
    // Map declared type parameters onto the concrete type arguments in use,
    // so generic payload fields resolve to concrete types below.
    let bindings = decl
        .type_params
        .iter()
        .cloned()
        .zip(args.iter().cloned())
        .collect::<BTreeMap<_, _>>();
    let mut variants = BTreeMap::<String, EnumVariantLayout>::new();
    let mut max_payload_bytes = ENUM_PAYLOAD_OFFSET;
    for variant in &decl.variants {
        let payload_types = variant
            .payload
            .iter()
            .map(|item| mir_type_expr_to_ty(item, enums, &bindings))
            .collect::<Result<Vec<_>, _>>()?;
        let payload_layout = layout_from_field_tys(&payload_types, structs, enums)?;
        // Shift every field past the shared ENUM_PAYLOAD_OFFSET header region.
        let mut fields = Vec::<FieldLayout>::with_capacity(payload_layout.fields.len());
        for field in payload_layout.fields {
            let offset = ENUM_PAYLOAD_OFFSET.saturating_add(field.offset);
            fields.push(FieldLayout {
                offset,
                size: field.size,
                align: field.align,
                valtype: field.valtype,
                inline_aggregate: field.inline_aggregate,
            });
        }
        let payload_bytes = ENUM_PAYLOAD_OFFSET.saturating_add(payload_layout.payload_bytes);
        max_payload_bytes = max_payload_bytes.max(payload_bytes);
        let _ = variants.insert(variant.name.clone(), EnumVariantLayout { fields });
    }
    Ok(EnumLayout {
        variants,
        payload_bytes: max_payload_bytes,
    })
}
684
685pub(crate) fn enum_payload_bytes(
686    enum_ty: &Ty,
687    structs: &[ruka_mir::MirStructDecl],
688    enums: &[ruka_mir::MirEnumDecl],
689) -> Result<u32, LowerError> {
690    Ok(enum_layout(enum_ty, structs, enums)?.payload_bytes)
691}
692
693pub(crate) fn enum_field_layout(
694    enum_ty: &Ty,
695    variant: &str,
696    index: usize,
697    structs: &[ruka_mir::MirStructDecl],
698    enums: &[ruka_mir::MirEnumDecl],
699) -> Result<FieldLayout, LowerError> {
700    let layout = enum_layout(enum_ty, structs, enums)?;
701    let variant_layout = layout
702        .variants
703        .get(variant)
704        .ok_or(LowerError::UnsupportedInstruction("unknown enum variant"))?;
705    variant_layout
706        .fields
707        .get(index)
708        .copied()
709        .ok_or(LowerError::UnsupportedInstruction(
710            "enum payload index out of bounds",
711        ))
712}
713
714/// Return whether a local type can act as a tuple field base.
715pub(crate) fn is_tuple_base_ty(ty: &Ty) -> bool {
716    match ty {
717        Ty::Tuple(_) => true,
718        Ty::RefRo(inner) | Ty::RefMut(inner) => matches!(inner.as_ref(), Ty::Tuple(_)),
719        _ => false,
720    }
721}
722
/// Load the current tuple pointer used as the root for a tuple field access.
///
/// Mirrors `emit_struct_base_ptr` for tuple bases: the pointer lands in
/// `dst_local`. A direct tuple local already holds the pointer; a reference
/// local either holds it directly (passthrough place locals) or holds the
/// address of an 8-byte slot whose i64 content is the pointer.
pub(crate) fn emit_tuple_base_ptr(
    body: &mut walrus::InstrSeqBuilder,
    ctx: &LowerCtx<'_>,
    base: ruka_mir::MirLocalId,
    dst_local: LocalId,
) -> Result<(), LowerError> {
    let base_ty = local_ty(ctx.local_tys, base.as_u32(), "tuple field base ty")?;
    match base_ty {
        Ty::Tuple(_) => {
            // The local itself is the tuple pointer.
            let base_local =
                runtime_local_index(ctx.local_indices, base.as_u32(), "tuple field base")?;
            body.local_get(base_local).local_set(dst_local);
            Ok(())
        }
        Ty::RefRo(inner) | Ty::RefMut(inner) if matches!(inner.as_ref(), Ty::Tuple(_)) => {
            let base_local =
                runtime_local_index(ctx.local_indices, base.as_u32(), "tuple field base")?;
            if crate::is_passthrough_place_local(ctx, base) {
                // Passthrough references carry the tuple pointer directly.
                body.local_get(base_local).local_set(dst_local);
            } else {
                // Dereference: load the i64 slot and narrow to an i32 pointer.
                body.local_get(base_local)
                    .instr(Load {
                        memory: ctx.memory_id,
                        kind: LoadKind::I64 { atomic: false },
                        arg: MemArg {
                            align: 8,
                            offset: 0,
                        },
                    })
                    .unop(UnaryOp::I32WrapI64)
                    .local_set(dst_local);
            }
            Ok(())
        }
        _ => Err(LowerError::UnsupportedInstruction(
            "unsupported tuple field base type",
        )),
    }
}
763
/// Allocate heap bytes and write array header with len/cap.
///
/// Emits: `size = len * ARRAY_SLOT_BYTES + ARRAY_DATA_OFFSET`, calls the
/// tracked allocator with the allocation-site id (kind/file/line/column),
/// stores the resulting pointer in `dst_local`, then initializes the header
/// words (header flag, length, capacity = length).
pub(crate) fn emit_array_alloc(
    body: &mut walrus::InstrSeqBuilder,
    runtime: &super::linker::RuntimeFunctions,
    source_pos: Option<ruka_mir::MirSourcePos>,
    fallback_line: i32,
    memory_id: MemoryId,
    len_local: LocalId,
    dst_local: LocalId,
    scratch_i32_local: LocalId,
) -> Result<(), LowerError> {
    let alloc = runtime_tracked_alloc_function(runtime)?;
    // Allocation-site metadata for the tracked allocator.
    let (kind_id, file_id, line, column) =
        alloc_site_parts(crate::ALLOC_SITE_ARRAY_NEW, source_pos, fallback_line);
    body.local_get(len_local)
        .i32_const(ARRAY_SLOT_BYTES)
        .binop(BinaryOp::I32Mul)
        .i32_const(ARRAY_DATA_OFFSET as i32)
        .binop(BinaryOp::I32Add)
        .local_set(scratch_i32_local)
        .local_get(scratch_i32_local)
        .i32_const(kind_id)
        .i32_const(file_id)
        .i32_const(line)
        .i32_const(column)
        .call(alloc.function_id)
        .local_set(dst_local)
        // Header word set to constant 1.
        // NOTE(review): the meaning of this flag is not visible here —
        // presumably a liveness/kind marker; confirm against the runtime.
        .local_get(dst_local)
        .i32_const(1)
        .instr(Store {
            memory: memory_id,
            kind: StoreKind::I32 { atomic: false },
            arg: MemArg {
                align: 4,
                offset: ARRAY_HEADER_OFFSET,
            },
        })
        // Length word.
        .local_get(dst_local)
        .local_get(len_local)
        .instr(Store {
            memory: memory_id,
            kind: StoreKind::I32 { atomic: false },
            arg: MemArg {
                align: 4,
                offset: ARRAY_LEN_OFFSET,
            },
        })
        // Capacity word: freshly allocated arrays start with cap == len.
        .local_get(dst_local)
        .local_get(len_local)
        .instr(Store {
            memory: memory_id,
            kind: StoreKind::I32 { atomic: false },
            arg: MemArg {
                align: 4,
                offset: ARRAY_CAP_OFFSET,
            },
        });
    Ok(())
}
823
/// Store a runtime local in an array slot with widening when required.
///
/// Slot address: `array + ARRAY_DATA_OFFSET + index * ARRAY_SLOT_BYTES`.
/// Every slot is written as a full i64.
pub(crate) fn emit_array_store(
    body: &mut walrus::InstrSeqBuilder,
    memory_id: MemoryId,
    array_local: LocalId,
    index_local: LocalId,
    value_local: LocalId,
    value_ty: ValType,
) {
    body.local_get(array_local)
        .i32_const(ARRAY_DATA_OFFSET as i32)
        .binop(BinaryOp::I32Add)
        .local_get(index_local)
        .i32_const(ARRAY_SLOT_BYTES)
        .binop(BinaryOp::I32Mul)
        .binop(BinaryOp::I32Add)
        .local_get(value_local);
    match value_ty {
        ValType::I64 => {}
        ValType::I32 => {
            // Zero-extend 32-bit values to fill the 64-bit slot.
            body.unop(UnaryOp::I64ExtendUI32);
        }
        // NOTE(review): any other value type (f32/f64/v128/ref) falls through
        // without conversion and would leave a non-i64 operand for the I64
        // store below, producing invalid wasm — confirm callers never pass
        // such types.
        _ => {}
    }
    body.instr(Store {
        memory: memory_id,
        kind: StoreKind::I64 { atomic: false },
        arg: MemArg {
            align: 8,
            offset: 0,
        },
    });
}