//! `ruka_codegen_wasm/lower_instr_aggregate.rs` — lowering of aggregate MIR
//! instructions (arrays, tuples, slices, structs, enums) to WebAssembly.

1use super::*;
2
3pub(crate) fn lower_aggregate_instr(
4    instr: &ruka_mir::MirInstr,
5    body: &mut walrus::InstrSeqBuilder,
6    ctx: &LowerCtx<'_>,
7) -> Result<bool, LowerError> {
8    match instr {
9        ruka_mir::MirInstr::MakeArray {
10            items,
11            dst,
12            source_pos,
13        } => {
14            let dst_local = runtime_local_index(ctx.local_indices, dst.as_u32(), "array dst")?;
15            body.i32_const(int32_from_usize(items.len(), "array length")?)
16                .local_set(ctx.scratch_i32_local);
17            aggregate::emit_array_alloc(
18                body,
19                ctx.runtime,
20                *source_pos,
21                ctx.function_line,
22                ctx.memory_id,
23                ctx.scratch_i32_local,
24                dst_local,
25                ctx.scratch_i32_local_b,
26            )?;
27            for (index, item) in items.iter().enumerate() {
28                let src =
29                    runtime_local_index(ctx.local_indices, item.local.as_u32(), "array item")?;
30                let mut value_local = src;
31                if item.is_owned_copy() {
32                    let ownership = local_heap_ownership(
33                        ctx.local_heap_ownership,
34                        item.local.as_u32(),
35                        "array item ownership",
36                    )?;
37                    if ownership.uses_heap_ops() {
38                        let item_mir_ty =
39                            local_ty(ctx.local_tys, item.local.as_u32(), "array item mir type")?;
40                        emit_clone_for_type(
41                            body,
42                            ctx.runtime,
43                            ctx.memory_id,
44                            *source_pos,
45                            ctx.function_line,
46                            src,
47                            item_mir_ty,
48                            ctx.structs,
49                            ctx.enums,
50                            ctx.scratch_i32_local,
51                            ctx.scratch_i32_local_b,
52                            ctx.scratch_i32_local_c,
53                            ctx.scratch_i32_local_d,
54                            ctx.scratch_i32_local_e,
55                            ctx.scratch_i64_local,
56                            ctx.scratch_i32_local_c,
57                        )?;
58                        value_local = ctx.scratch_i32_local_c;
59                    }
60                }
61                body.i32_const(int32_from_usize(index, "array index")?)
62                    .local_set(ctx.scratch_i32_local_b);
63                let item_ty = runtime_local_valtype(
64                    ctx.local_runtime_types,
65                    item.local.as_u32(),
66                    "array item valtype",
67                )?;
68                aggregate::emit_array_store(
69                    body,
70                    ctx.memory_id,
71                    dst_local,
72                    ctx.scratch_i32_local_b,
73                    value_local,
74                    item_ty,
75                );
76                if item.is_owned_move() {
77                    let ownership = local_heap_ownership(
78                        ctx.local_heap_ownership,
79                        item.local.as_u32(),
80                        "array item owned move",
81                    )?;
82                    if ownership.uses_heap_ops() {
83                        body.i32_const(0).local_set(src);
84                    }
85                }
86            }
87            Ok(true)
88        }
89        ruka_mir::MirInstr::MakeTuple {
90            items,
91            dst,
92            source_pos: _,
93        } => {
94            let dst_local = runtime_local_index(ctx.local_indices, dst.as_u32(), "tuple dst")?;
95            let tuple_ty = local_ty(ctx.local_tys, dst.as_u32(), "tuple dst ty")?;
96            if !is_shadow_stack_local(ctx, *dst) {
97                return Err(LowerError::UnsupportedInstruction(
98                    "tuple destination must be shadow-stack local",
99                ));
100            }
101            for (index, item) in items.iter().enumerate() {
102                let src =
103                    runtime_local_index(ctx.local_indices, item.local.as_u32(), "tuple item")?;
104                let mut value_local = src;
105                if item.is_owned_copy() {
106                    let ownership = local_heap_ownership(
107                        ctx.local_heap_ownership,
108                        item.local.as_u32(),
109                        "tuple item ownership",
110                    )?;
111                    if ownership.uses_heap_ops() {
112                        let item_mir_ty =
113                            local_ty(ctx.local_tys, item.local.as_u32(), "tuple item mir type")?;
114                        emit_clone_for_type(
115                            body,
116                            ctx.runtime,
117                            ctx.memory_id,
118                            None,
119                            ctx.function_line,
120                            src,
121                            item_mir_ty,
122                            ctx.structs,
123                            ctx.enums,
124                            ctx.scratch_i32_local,
125                            ctx.scratch_i32_local_b,
126                            ctx.scratch_i32_local_c,
127                            ctx.scratch_i32_local_d,
128                            ctx.scratch_i32_local_e,
129                            ctx.scratch_i64_local,
130                            ctx.scratch_i32_local_c,
131                        )?;
132                        value_local = ctx.scratch_i32_local_c;
133                    }
134                }
135                let item_ty = runtime_local_valtype(
136                    ctx.local_runtime_types,
137                    item.local.as_u32(),
138                    "tuple item valtype",
139                )?;
140                let field_name = index.to_string();
141                aggregate::emit_write_aggregate_field(
142                    body,
143                    ctx,
144                    dst_local,
145                    tuple_ty,
146                    field_name.as_str(),
147                    value_local,
148                    item_ty,
149                )?;
150                if item.is_owned_move() {
151                    let ownership = local_heap_ownership(
152                        ctx.local_heap_ownership,
153                        item.local.as_u32(),
154                        "tuple item owned move",
155                    )?;
156                    if ownership.uses_heap_ops() {
157                        body.i32_const(0).local_set(src);
158                    }
159                }
160            }
161            Ok(true)
162        }
163        ruka_mir::MirInstr::CollectionLen {
164            collection: collection_id,
165            dst,
166        } => {
167            let collection =
168                runtime_local_index(ctx.local_indices, collection_id.as_u32(), "collection len")?;
169            let dst = runtime_local_index(ctx.local_indices, dst.as_u32(), "collection len dst")?;
170            match aggregate::collection_layout(local_ty(
171                ctx.local_tys,
172                collection_id.as_u32(),
173                "collection len type",
174            )?)? {
175                aggregate::CollectionLayout::InlineArray => {
176                    body.local_get(collection)
177                        .i32_const(int32_from_u32(ARRAY_LEN_OFFSET, "array len offset")?)
178                        .binop(BinaryOp::I32Add)
179                        .instr(Load {
180                            memory: ctx.memory_id,
181                            kind: LoadKind::I32 { atomic: false },
182                            arg: MemArg {
183                                align: 4,
184                                offset: 0,
185                            },
186                        })
187                        .unop(UnaryOp::I64ExtendUI32)
188                        .local_set(dst);
189                }
190                aggregate::CollectionLayout::SlicePacked => {
191                    body.local_get(collection)
192                        .i32_const((ARRAY_DATA_OFFSET + 4) as i32)
193                        .binop(BinaryOp::I32Add)
194                        .instr(Load {
195                            memory: ctx.memory_id,
196                            kind: LoadKind::I32 { atomic: false },
197                            arg: MemArg {
198                                align: 4,
199                                offset: 0,
200                            },
201                        })
202                        .unop(UnaryOp::I64ExtendUI32)
203                        .local_set(dst);
204                }
205            }
206            Ok(true)
207        }
208        ruka_mir::MirInstr::IndexBorrowRo {
209            collection: collection_id,
210            index,
211            dst,
212        }
213        | ruka_mir::MirInstr::IndexBorrowMut {
214            collection: collection_id,
215            index,
216            dst,
217        } => {
218            let collection = runtime_local_index(
219                ctx.local_indices,
220                collection_id.as_u32(),
221                "index collection",
222            )?;
223            let index = runtime_local_index(ctx.local_indices, index.as_u32(), "index value")?;
224            let dst = runtime_local_index(ctx.local_indices, dst.as_u32(), "index dst")?;
225            match aggregate::collection_layout(local_ty(
226                ctx.local_tys,
227                collection_id.as_u32(),
228                "index collection ty",
229            )?)? {
230                aggregate::CollectionLayout::InlineArray => {
231                    body.local_get(collection)
232                        .i32_const(ARRAY_DATA_OFFSET as i32)
233                        .binop(BinaryOp::I32Add);
234                }
235                aggregate::CollectionLayout::SlicePacked => {
236                    body.local_get(collection)
237                        .i32_const(ARRAY_DATA_OFFSET as i32)
238                        .binop(BinaryOp::I32Add)
239                        .instr(Load {
240                            memory: ctx.memory_id,
241                            kind: LoadKind::I32 { atomic: false },
242                            arg: MemArg {
243                                align: 4,
244                                offset: 0,
245                            },
246                        });
247                }
248            }
249            body.local_get(index)
250                .unop(UnaryOp::I32WrapI64)
251                .local_set(ctx.scratch_i32_local);
252            body.local_get(ctx.scratch_i32_local)
253                .i32_const(ARRAY_SLOT_BYTES)
254                .binop(BinaryOp::I32Mul)
255                .binop(BinaryOp::I32Add)
256                .local_set(dst);
257            Ok(true)
258        }
259        ruka_mir::MirInstr::SliceBorrowRo {
260            collection: collection_id,
261            start,
262            end,
263            dst,
264        }
265        | ruka_mir::MirInstr::SliceBorrowMut {
266            collection: collection_id,
267            start,
268            end,
269            dst,
270        } => {
271            let collection = runtime_local_index(
272                ctx.local_indices,
273                collection_id.as_u32(),
274                "slice collection",
275            )?;
276            let dst_local = runtime_local_index(ctx.local_indices, dst.as_u32(), "slice dst")?;
277            let layout = aggregate::collection_layout(local_ty(
278                ctx.local_tys,
279                collection_id.as_u32(),
280                "slice ty",
281            )?)?;
282
283            match layout {
284                aggregate::CollectionLayout::InlineArray => {
285                    body.local_get(collection)
286                        .i32_const(ARRAY_DATA_OFFSET as i32)
287                        .binop(BinaryOp::I32Add)
288                        .local_set(ctx.scratch_i32_local);
289                }
290                aggregate::CollectionLayout::SlicePacked => {
291                    body.local_get(collection)
292                        .i32_const(ARRAY_DATA_OFFSET as i32)
293                        .binop(BinaryOp::I32Add)
294                        .instr(Load {
295                            memory: ctx.memory_id,
296                            kind: LoadKind::I32 { atomic: false },
297                            arg: MemArg {
298                                align: 4,
299                                offset: 0,
300                            },
301                        })
302                        .local_set(ctx.scratch_i32_local);
303                }
304            }
305
306            if let Some(start) = start {
307                let start_local =
308                    runtime_local_index(ctx.local_indices, start.as_u32(), "slice start value")?;
309                body.local_get(start_local)
310                    .unop(UnaryOp::I32WrapI64)
311                    .local_set(ctx.scratch_i32_local_b);
312            } else {
313                body.i32_const(0).local_set(ctx.scratch_i32_local_b);
314            }
315
316            if let Some(end) = end {
317                let end_local =
318                    runtime_local_index(ctx.local_indices, end.as_u32(), "slice end value")?;
319                body.local_get(end_local)
320                    .unop(UnaryOp::I32WrapI64)
321                    .local_set(ctx.scratch_i32_local_c);
322            } else {
323                match layout {
324                    aggregate::CollectionLayout::InlineArray => {
325                        body.local_get(collection)
326                            .instr(Load {
327                                memory: ctx.memory_id,
328                                kind: LoadKind::I32 { atomic: false },
329                                arg: MemArg {
330                                    align: 4,
331                                    offset: ARRAY_LEN_OFFSET,
332                                },
333                            })
334                            .local_set(ctx.scratch_i32_local_c);
335                    }
336                    aggregate::CollectionLayout::SlicePacked => {
337                        body.local_get(collection)
338                            .i32_const((ARRAY_DATA_OFFSET + 4) as i32)
339                            .binop(BinaryOp::I32Add)
340                            .instr(Load {
341                                memory: ctx.memory_id,
342                                kind: LoadKind::I32 { atomic: false },
343                                arg: MemArg {
344                                    align: 4,
345                                    offset: 0,
346                                },
347                            })
348                            .local_set(ctx.scratch_i32_local_c);
349                    }
350                }
351            }
352
353            match local_ty(ctx.local_tys, dst.as_u32(), "slice dst ty")? {
354                Ty::Slice(_) => {}
355                Ty::RefRo(inner) | Ty::RefMut(inner) if matches!(inner.as_ref(), Ty::Slice(_)) => {}
356                Ty::RefRo(_) | Ty::RefMut(_) => {
357                    return Err(LowerError::UnsupportedInstruction(
358                        "slice borrow destination must be slice-like",
359                    ));
360                }
361                _ => {
362                    return Err(LowerError::UnsupportedInstruction(
363                        "slice borrow destination must be slice-like",
364                    ));
365                }
366            }
367            if !is_shadow_stack_local(ctx, *dst) {
368                return Err(LowerError::UnsupportedInstruction(
369                    "slice destination must be shadow-stack local",
370                ));
371            }
372            body.local_get(ctx.scratch_i32_local_c)
373                .local_get(ctx.scratch_i32_local_b)
374                .binop(BinaryOp::I32Sub)
375                .local_set(ctx.scratch_i32_local_c)
376                .local_get(dst_local)
377                .i32_const(ARRAY_DATA_OFFSET as i32)
378                .binop(BinaryOp::I32Add)
379                .local_get(ctx.scratch_i32_local)
380                .local_get(ctx.scratch_i32_local_b)
381                .i32_const(ARRAY_SLOT_BYTES)
382                .binop(BinaryOp::I32Mul)
383                .binop(BinaryOp::I32Add)
384                .instr(Store {
385                    memory: ctx.memory_id,
386                    kind: StoreKind::I32 { atomic: false },
387                    arg: MemArg {
388                        align: 4,
389                        offset: 0,
390                    },
391                })
392                .local_get(dst_local)
393                .i32_const((ARRAY_DATA_OFFSET + 4) as i32)
394                .binop(BinaryOp::I32Add)
395                .local_get(ctx.scratch_i32_local_c)
396                .instr(Store {
397                    memory: ctx.memory_id,
398                    kind: StoreKind::I32 { atomic: false },
399                    arg: MemArg {
400                        align: 4,
401                        offset: 0,
402                    },
403                });
404            Ok(true)
405        }
406        ruka_mir::MirInstr::MakeStruct {
407            name: _,
408            fields,
409            dst,
410            source_pos: _,
411        } => {
412            let dst_local = runtime_local_index(ctx.local_indices, dst.as_u32(), "struct dst")?;
413            let struct_ty = local_ty(ctx.local_tys, dst.as_u32(), "struct dst ty")?;
414            if !is_shadow_stack_local(ctx, *dst) {
415                return Err(LowerError::UnsupportedInstruction(
416                    "struct destination must be shadow-stack local",
417                ));
418            }
419            for (field_name, local_id) in fields {
420                let value_local =
421                    runtime_local_index(ctx.local_indices, local_id.as_u32(), "struct field")?;
422                let value_ty = runtime_local_valtype(
423                    ctx.local_runtime_types,
424                    local_id.as_u32(),
425                    "struct field ty",
426                )?;
427                aggregate::emit_write_aggregate_field(
428                    body,
429                    ctx,
430                    dst_local,
431                    struct_ty,
432                    field_name,
433                    value_local,
434                    value_ty,
435                )?;
436            }
437            Ok(true)
438        }
439        ruka_mir::MirInstr::MakeEnum {
440            enum_name,
441            variant,
442            payload,
443            dst,
444            source_pos,
445        } => {
446            let dst_local = runtime_local_index(ctx.local_indices, dst.as_u32(), "enum dst")?;
447            let enum_ty = local_ty(ctx.local_tys, dst.as_u32(), "enum dst ty")?;
448            let payload_bytes = aggregate::enum_payload_bytes(enum_ty, ctx.structs, ctx.enums)?;
449            let alloc = runtime_tracked_alloc_function(ctx.runtime)?;
450            let (kind_id, file_id, line, column) =
451                alloc_site_parts(ALLOC_SITE_ENUM_NEW, *source_pos, ctx.function_line);
452            body.i32_const(int32_from_u32(
453                ARRAY_DATA_OFFSET + payload_bytes,
454                "enum alloc bytes",
455            )?)
456            .i32_const(kind_id)
457            .i32_const(file_id)
458            .i32_const(line)
459            .i32_const(column)
460            .call(alloc.function_id)
461            .local_set(dst_local);
462            body.local_get(dst_local).i32_const(1).instr(Store {
463                memory: ctx.memory_id,
464                kind: StoreKind::I32 { atomic: false },
465                arg: MemArg {
466                    align: 4,
467                    offset: ARRAY_HEADER_OFFSET,
468                },
469            });
470
471            let variant_tag = int32_from_u32(
472                enum_variant_index(ctx.enums, enum_name, variant)?,
473                "enum variant tag",
474            )?;
475            body.local_get(dst_local)
476                .i32_const(ARRAY_DATA_OFFSET as i32)
477                .binop(BinaryOp::I32Add)
478                .i32_const(variant_tag)
479                .instr(Store {
480                    memory: ctx.memory_id,
481                    kind: StoreKind::I32 { atomic: false },
482                    arg: MemArg {
483                        align: 4,
484                        offset: ENUM_TAG_OFFSET,
485                    },
486                });
487
488            for (index, item) in payload.iter().enumerate() {
489                let item_local =
490                    runtime_local_index(ctx.local_indices, item.local.as_u32(), "enum payload")?;
491                let mut payload_local = item_local;
492                if item.is_owned_copy() {
493                    let ownership = local_heap_ownership(
494                        ctx.local_heap_ownership,
495                        item.local.as_u32(),
496                        "enum payload ownership",
497                    )?;
498                    if ownership.uses_heap_ops() {
499                        let item_mir_ty =
500                            local_ty(ctx.local_tys, item.local.as_u32(), "enum payload mir type")?;
501                        emit_clone_for_type(
502                            body,
503                            ctx.runtime,
504                            ctx.memory_id,
505                            *source_pos,
506                            ctx.function_line,
507                            item_local,
508                            item_mir_ty,
509                            ctx.structs,
510                            ctx.enums,
511                            ctx.scratch_i32_local,
512                            ctx.scratch_i32_local_b,
513                            ctx.scratch_i32_local_c,
514                            ctx.scratch_i32_local_d,
515                            ctx.scratch_i32_local_e,
516                            ctx.scratch_i64_local,
517                            ctx.scratch_i32_local_c,
518                        )?;
519                        payload_local = ctx.scratch_i32_local_c;
520                    }
521                }
522                let field_layout =
523                    aggregate::enum_field_layout(enum_ty, variant, index, ctx.structs, ctx.enums)?;
524                body.local_get(dst_local)
525                    .i32_const(ARRAY_DATA_OFFSET as i32)
526                    .binop(BinaryOp::I32Add)
527                    .i32_const(int32_from_u32(field_layout.offset, "enum payload field")?)
528                    .binop(BinaryOp::I32Add)
529                    .local_set(ctx.scratch_i32_local_c);
530                if field_layout.inline_aggregate {
531                    let item_ty = runtime_local_valtype(
532                        ctx.local_runtime_types,
533                        item.local.as_u32(),
534                        "enum payload ty",
535                    )?;
536                    if item_ty != ValType::I32 {
537                        return Err(LowerError::UnsupportedInstruction(
538                            "inline enum payload must use i32 pointer value",
539                        ));
540                    }
541                    emit_copy_bytes(
542                        body,
543                        ctx.memory_id,
544                        payload_local,
545                        ctx.scratch_i32_local_c,
546                        field_layout.size,
547                        ctx.scratch_i32_local,
548                        ctx.scratch_i32_local_b,
549                    )?;
550                    continue;
551                }
552                let item_ty = runtime_local_valtype(
553                    ctx.local_runtime_types,
554                    item.local.as_u32(),
555                    "enum payload ty",
556                )?;
557                body.local_get(ctx.scratch_i32_local_c)
558                    .local_get(payload_local);
559                match (item_ty, field_layout.valtype, field_layout.size) {
560                    (ValType::I64, ValType::I64, _) => {
561                        body.instr(Store {
562                            memory: ctx.memory_id,
563                            kind: StoreKind::I64 { atomic: false },
564                            arg: MemArg {
565                                align: field_layout.align,
566                                offset: 0,
567                            },
568                        });
569                    }
570                    (ValType::I32, ValType::I64, _) => {
571                        body.unop(UnaryOp::I64ExtendUI32).instr(Store {
572                            memory: ctx.memory_id,
573                            kind: StoreKind::I64 { atomic: false },
574                            arg: MemArg {
575                                align: field_layout.align,
576                                offset: 0,
577                            },
578                        });
579                    }
580                    (ValType::I64, ValType::I32, 1) => {
581                        body.unop(UnaryOp::I32WrapI64).instr(Store {
582                            memory: ctx.memory_id,
583                            kind: StoreKind::I32_8 { atomic: false },
584                            arg: MemArg {
585                                align: 1,
586                                offset: 0,
587                            },
588                        });
589                    }
590                    (ValType::I32, ValType::I32, 1) => {
591                        body.instr(Store {
592                            memory: ctx.memory_id,
593                            kind: StoreKind::I32_8 { atomic: false },
594                            arg: MemArg {
595                                align: 1,
596                                offset: 0,
597                            },
598                        });
599                    }
600                    (ValType::I64, ValType::I32, _) => {
601                        body.unop(UnaryOp::I32WrapI64).instr(Store {
602                            memory: ctx.memory_id,
603                            kind: StoreKind::I32 { atomic: false },
604                            arg: MemArg {
605                                align: field_layout.align,
606                                offset: 0,
607                            },
608                        });
609                    }
610                    (ValType::I32, ValType::I32, _) => {
611                        body.instr(Store {
612                            memory: ctx.memory_id,
613                            kind: StoreKind::I32 { atomic: false },
614                            arg: MemArg {
615                                align: field_layout.align,
616                                offset: 0,
617                            },
618                        });
619                    }
620                    _ => {
621                        return Err(LowerError::UnsupportedInstruction(
622                            "unsupported enum payload value type",
623                        ));
624                    }
625                }
626                if item.is_owned_move() {
627                    let ownership = local_heap_ownership(
628                        ctx.local_heap_ownership,
629                        item.local.as_u32(),
630                        "enum payload owned move",
631                    )?;
632                    if ownership.uses_heap_ops() {
633                        body.i32_const(0).local_set(item_local);
634                    }
635                }
636            }
637            Ok(true)
638        }
639        ruka_mir::MirInstr::EnumIsVariant {
640            value,
641            enum_name,
642            variant,
643            dst,
644            ..
645        } => {
646            let value_local =
647                runtime_local_index(ctx.local_indices, value.as_u32(), "enum is variant value")?;
648            let dst_local =
649                runtime_local_index(ctx.local_indices, dst.as_u32(), "enum is variant dst")?;
650            let variant_tag = int32_from_u32(
651                enum_variant_index(ctx.enums, enum_name, variant)?,
652                "enum variant tag",
653            )?;
654            body.local_get(value_local)
655                .i32_const(ARRAY_DATA_OFFSET as i32)
656                .binop(BinaryOp::I32Add)
657                .instr(Load {
658                    memory: ctx.memory_id,
659                    kind: LoadKind::I32 { atomic: false },
660                    arg: MemArg {
661                        align: 4,
662                        offset: ENUM_TAG_OFFSET,
663                    },
664                })
665                .i32_const(variant_tag)
666                .binop(BinaryOp::I32Eq)
667                .local_set(dst_local);
668            Ok(true)
669        }
670        ruka_mir::MirInstr::EnumGetField {
671            value,
672            variant,
673            index,
674            dst,
675            ..
676        } => {
677            let value_local =
678                runtime_local_index(ctx.local_indices, value.as_u32(), "enum get field value")?;
            // Remainder of the enum-payload field read: `value`, `variant`, `index`,
            // `dst`, and `value_local` are bound by the match pattern above this chunk.
            let dst_local =
                runtime_local_index(ctx.local_indices, dst.as_u32(), "enum get field dst")?;
            let enum_ty = local_ty(ctx.local_tys, value.as_u32(), "enum get field value ty")?;
            let field_layout =
                aggregate::enum_field_layout(enum_ty, variant, *index, ctx.structs, ctx.enums)?;
            let dst_ty = runtime_local_valtype(
                ctx.local_runtime_types,
                dst.as_u32(),
                "enum get field dst ty",
            )?;
            // Field address = enum value pointer + data header + payload field offset.
            // NOTE(review): ARRAY_DATA_OFFSET is reused here as the header size for enum
            // heap objects — presumably one shared header layout across aggregates;
            // confirm against the runtime's object layout.
            body.local_get(value_local)
                .i32_const(ARRAY_DATA_OFFSET as i32)
                .binop(BinaryOp::I32Add)
                .i32_const(int32_from_u32(field_layout.offset, "enum payload field")?)
                .binop(BinaryOp::I32Add);
            if field_layout.inline_aggregate {
                // Inline aggregates are handed out by pointer: the computed address IS
                // the result, so the destination must be an i32 (pointer-sized) local.
                if dst_ty != ValType::I32 {
                    return Err(LowerError::UnsupportedInstruction(
                        "inline enum payload destination must be i32 pointer",
                    ));
                }
                body.local_set(dst_local);
                return Ok(true);
            }
            // Scalar payload: pick the load matching the field's wasm valtype and size.
            body.instr(match (field_layout.valtype, field_layout.size) {
                // 64-bit field: plain i64 load at the field's natural alignment.
                (ValType::I64, _) => Load {
                    memory: ctx.memory_id,
                    kind: LoadKind::I64 { atomic: false },
                    arg: MemArg {
                        align: field_layout.align,
                        offset: 0,
                    },
                },
                // Single-byte field (e.g. bool / u8): zero-extending 8-bit load.
                (ValType::I32, 1) => Load {
                    memory: ctx.memory_id,
                    kind: LoadKind::I32_8 {
                        kind: walrus::ir::ExtendedLoad::ZeroExtend,
                    },
                    arg: MemArg {
                        align: 1,
                        offset: 0,
                    },
                },
                // Any other i32-typed field: full 32-bit load.
                (ValType::I32, _) => Load {
                    memory: ctx.memory_id,
                    kind: LoadKind::I32 { atomic: false },
                    arg: MemArg {
                        align: field_layout.align,
                        offset: 0,
                    },
                },
                _ => {
                    return Err(LowerError::UnsupportedInstruction(
                        "unsupported enum get field load kind",
                    ));
                }
            });
            // Coerce the loaded value to the destination local's runtime type.
            match dst_ty {
                // NOTE(review): an i64 destination with an i32-typed field load would
                // leave an i32 on the stack here — presumably layout guarantees the
                // valtypes agree in that case; confirm.
                ValType::I64 => {}
                ValType::I32 => {
                    // Only wrap when the load actually produced an i64.
                    if field_layout.valtype == ValType::I64 {
                        body.unop(UnaryOp::I32WrapI64);
                    }
                }
                _ => {
                    return Err(LowerError::UnsupportedInstruction(
                        "unsupported enum get field destination type",
                    ));
                }
            }
            body.local_set(dst_local);
            Ok(true)
        }
        ruka_mir::MirInstr::ReadField {
            base: base_id,
            field,
            dst,
        } => {
            // Read a single named field of an aggregate local `base_id` into `dst`.
            let base_ty = local_ty(ctx.local_tys, base_id.as_u32(), "field base ty")?;
            let aggregate_ty = aggregate::root_aggregate_ty(base_ty)?;
            let dst_local = runtime_local_index(ctx.local_indices, dst.as_u32(), "field dst")?;
            let dst_ty =
                runtime_local_valtype(ctx.local_runtime_types, dst.as_u32(), "field dst ty")?;
            // Materialize the aggregate base pointer into the scratch local, then let
            // the helper emit the field load, leaving the value on the wasm stack.
            aggregate::emit_aggregate_base_ptr(body, ctx, *base_id, ctx.scratch_i32_local)?;
            aggregate::emit_read_aggregate_field_ptr(
                body,
                ctx,
                ctx.scratch_i32_local,
                aggregate_ty,
                field,
            )?;
            match dst_ty {
                ValType::I64 => {}
                ValType::I32 => {
                    // NOTE(review): wraps unconditionally, i.e. assumes
                    // emit_read_aggregate_field_ptr always leaves an i64 on the stack
                    // (the enum payload arm guards its wrap on field_layout.valtype) —
                    // confirm against the helper's contract.
                    body.unop(UnaryOp::I32WrapI64);
                }
                _ => {
                    return Err(LowerError::UnsupportedInstruction(
                        "unsupported field destination type",
                    ));
                }
            }
            body.local_set(dst_local);
            Ok(true)
        }
784        ruka_mir::MirInstr::FieldBorrowRo {
785            base: base_id,
786            field,
787            dst,
788        }
789        | ruka_mir::MirInstr::FieldBorrowMut {
790            base: base_id,
791            field,
792            dst,
793        } => {
794            let base_ty = local_ty(ctx.local_tys, base_id.as_u32(), "field borrow base ty")?;
795            let aggregate_ty = aggregate::root_aggregate_ty(base_ty)?;
796            let field_offset =
797                aggregate::aggregate_field_offset(aggregate_ty, field, ctx.structs, ctx.enums)?;
798            let dst_local =
799                runtime_local_index(ctx.local_indices, dst.as_u32(), "field borrow dst")?;
800            aggregate::emit_aggregate_base_ptr(body, ctx, *base_id, ctx.scratch_i32_local)?;
801            body.local_get(ctx.scratch_i32_local)
802                .i32_const(ARRAY_DATA_OFFSET as i32)
803                .binop(BinaryOp::I32Add)
804                .i32_const(field_offset as i32)
805                .binop(BinaryOp::I32Add)
806                .local_set(dst_local);
807            Ok(true)
808        }
809        ruka_mir::MirInstr::AssignFieldPath { base, fields, src } => {
810            let src_local = runtime_local_index(ctx.local_indices, src.as_u32(), "field src")?;
811            let src_ty =
812                runtime_local_valtype(ctx.local_runtime_types, src.as_u32(), "field src ty")?;
813            if fields.is_empty() {
814                return Err(LowerError::UnsupportedInstruction("empty field path"));
815            }
816
817            let mut current_ty = aggregate::root_aggregate_ty(local_ty(
818                ctx.local_tys,
819                base.as_u32(),
820                "field base ty",
821            )?)?
822            .clone();
823            aggregate::emit_aggregate_base_ptr(body, ctx, *base, ctx.scratch_i32_local)?;
824
825            for field_name in &fields[..fields.len() - 1] {
826                let next_ty =
827                    aggregate::aggregate_field_ty(&current_ty, field_name, ctx.structs, ctx.enums)?;
828                let field_offset = aggregate::aggregate_field_offset(
829                    &current_ty,
830                    field_name,
831                    ctx.structs,
832                    ctx.enums,
833                )?;
834                body.local_get(ctx.scratch_i32_local)
835                    .i32_const(ARRAY_DATA_OFFSET as i32)
836                    .binop(BinaryOp::I32Add)
837                    .i32_const(int32_from_u32(field_offset, "field path offset")?)
838                    .binop(BinaryOp::I32Add)
839                    .local_set(ctx.scratch_i32_local);
840                current_ty = next_ty;
841            }
842
843            aggregate::emit_write_aggregate_field(
844                body,
845                ctx,
846                ctx.scratch_i32_local,
847                &current_ty,
848                fields
849                    .last()
850                    .ok_or(LowerError::UnsupportedInstruction("empty field path"))?,
851                src_local,
852                src_ty,
853            )?;
854            Ok(true)
855        }
856        _ => Ok(false),
857    }
858}