use super::*;
use crate::translate::TargetEnvironment;
use crate::{TRAP_INTERNAL_ASSERT, func_environ::FuncEnvironment};
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder};
use cranelift_frontend::FunctionBuilder;
use smallvec::SmallVec;
use wasmtime_environ::{
    GcTypeLayouts, ModuleInternedTypeIndex, PtrSize, TypeIndex, VMGcKind, WasmHeapTopType,
    WasmHeapType, WasmRefType, WasmResult, WasmStorageType, WasmValType, drc::DrcTypeLayouts,
};

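/// A GC compiler for the deferred reference-counting (DRC) collector: it
/// emits inline barriers that maintain each object's reference count and the
/// over-approximated set of stack roots.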
#[derive(Default)]
pub struct DrcCompiler {
    layouts: DrcTypeLayouts,
}

impl DrcCompiler {
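    /// Generate code to load the given GC reference's ref count.
    ///
    /// Assumes that the given `gc_ref` is a non-null, non-i31 GC reference.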
    fn load_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
    ) -> ir::Value {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset,
                access_size: u8::try_from(ir::types::I64.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .load(ir::types::I64, ir::MemFlags::trusted(), pointer, 0)
    }

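    /// Generate code to store a new ref count for the given GC reference.
    ///
    /// Assumes that the given `gc_ref` is a non-null, non-i31 GC reference.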
    fn store_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        new_ref_count: ir::Value,
    ) {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset,
                access_size: u8::try_from(ir::types::I64.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), new_ref_count, pointer, 0);
    }

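    /// Generate code to increment or decrement the given GC reference's ref
    /// count by `delta`, returning the new ref count.
    ///
    /// Assumes that the given `gc_ref` is a non-null, non-i31 GC reference.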
    fn mutate_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        delta: i64,
    ) -> ir::Value {
        debug_assert!(delta == -1 || delta == 1);
        let old_ref_count = self.load_ref_count(func_env, builder, gc_ref);
        let new_ref_count = builder.ins().iadd_imm(old_ref_count, delta);
        self.store_ref_count(func_env, builder, gc_ref, new_ref_count);
        new_ref_count
    }

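    /// Push `gc_ref` onto the over-approximated-stack-roots list, so that the
    /// collector will treat it as a root.
    ///
    /// `reserved` must be the current value of `gc_ref`'s reserved header
    /// bits, and `gc_ref` must not already be in the list.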
    fn push_onto_over_approximated_stack_roots(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        reserved: ir::Value,
    ) {
        debug_assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32);
        debug_assert_eq!(builder.func.dfg.value_type(reserved), ir::types::I32);

        let head = self.load_over_approximated_stack_roots_head(func_env, builder);

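        // Load the list's current first element; it becomes `gc_ref`'s next
        // element once `gc_ref` is pushed onto the front of the list.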
        let next = builder
            .ins()
            .load(ir::types::I32, ir::MemFlags::trusted(), head, 0);

        self.set_next_over_approximated_stack_root(func_env, builder, gc_ref, next);
        self.set_in_over_approximated_stack_roots_bit(func_env, builder, gc_ref, reserved);

        self.mutate_ref_count(func_env, builder, gc_ref, 1);
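
        // The list holds a strong reference to the object (accounted for by
        // the ref count increment above), so finally publish `gc_ref` as the
        // new head of the list.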
        builder
            .ins()
            .store(ir::MemFlags::trusted(), gc_ref, head, 0);
    }

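    /// Load a pointer to the head of the over-approximated-stack-roots list
    /// out of the GC heap data pointer in the `vmctx`.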
    fn load_over_approximated_stack_roots_head(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
    ) -> ir::Value {
        let ptr_ty = func_env.pointer_type();
        let vmctx = func_env.vmctx(&mut builder.func);
        let vmctx = builder.ins().global_value(ptr_ty, vmctx);
        builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted().with_readonly(),
            vmctx,
            i32::from(func_env.offsets.ptr.vmctx_gc_heap_data()),
        )
    }

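    /// Set the link in the given GC reference's DRC header that points to the
    /// next element in the over-approximated-stack-roots list.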
    fn set_next_over_approximated_stack_root(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        next: ir::Value,
    ) {
        debug_assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32);
        debug_assert_eq!(builder.func.dfg.value_type(next), ir::types::I32);
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env
                    .offsets
                    .vm_drc_header_next_over_approximated_stack_root(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        builder.ins().store(ir::MemFlags::trusted(), next, ptr, 0);
    }

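    /// Set the header bit that records that this object is a member of the
    /// over-approximated-stack-roots list.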
    fn set_in_over_approximated_stack_roots_bit(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        old_reserved_bits: ir::Value,
    ) {
        let in_set_bit = builder.ins().iconst(
            ir::types::I32,
            i64::from(wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT),
        );
        let new_reserved = builder.ins().bor(old_reserved_bits, in_set_bit);
        self.set_reserved_bits(func_env, builder, gc_ref, new_reserved);
    }

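    /// Overwrite the reserved bits in the given GC reference's header.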
    fn set_reserved_bits(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        new_reserved: ir::Value,
    ) {
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env.offsets.vm_gc_header_reserved_bits(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), new_reserved, ptr, 0);
    }

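    /// Write `val` into an uninitialized field or element of a GC object,
    /// dispatching on the field's storage type.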
    fn init_field(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        field_addr: ir::Value,
        ty: WasmStorageType,
        val: ir::Value,
    ) -> WasmResult<()> {
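        // Data inside GC objects is always stored little-endian, regardless
        // of the host's native endianness.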
        let flags = ir::MemFlags::trusted().with_endianness(ir::Endianness::Little);

        match ty {
            WasmStorageType::Val(WasmValType::Ref(r))
                if r.heap_type.top() == WasmHeapTopType::Func =>
            {
                write_func_ref_at_addr(func_env, builder, r, flags, field_addr, val)?;
            }
            WasmStorageType::Val(WasmValType::Ref(r)) => {
                self.translate_init_gc_reference(func_env, builder, r, field_addr, val, flags)?;
            }
            WasmStorageType::I8 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore8(flags, val, field_addr, 0);
            }
            WasmStorageType::I16 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore16(flags, val, field_addr, 0);
            }
            WasmStorageType::Val(_) => {
                let size_of_access = wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&ty);
                assert_eq!(builder.func.dfg.value_type(val).bytes(), size_of_access);
                builder.ins().store(flags, val, field_addr, 0);
            }
        }

        Ok(())
    }

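    /// Write to an uninitialized GC reference field, initializing it with
    /// `*dst = new_val`.
    ///
    /// This does not need a full write barrier: there is no old value being
    /// overwritten whose ref count would need to be decremented.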
    fn translate_init_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

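        // Special case for references to the uninhabited bottom type: the
        // value must be null, so either eagerly store null (if the type is
        // nullable) or trap (we are in unreachable code).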
        if let WasmHeapType::None = ty.heap_type {
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

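        // Special case for `i31ref`s: they never need barriers, so just store
        // the value directly.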
        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

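        // The initialization barrier built below has the following shape: if
        // the new value is null or an `i31`, there is no ref count to
        // maintain; otherwise, increment the new value's ref count before
        // storing it into the field.
        //
        //     current_block:
        //         ...
        //         brif new_val_is_null_or_i31, continue_block, inc_ref_block
        //
        //     inc_ref_block:
        //         new_val.ref_count += 1
        //         jump continue_block
        //
        //     continue_block:
        //         *dst = new_val
        //         ...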
        let current_block = builder.current_block().unwrap();
        let inc_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(continue_block, inc_ref_block);

        log::trace!("DRC initialization barrier: check if the value is null or i31");
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            continue_block,
            &[],
            inc_ref_block,
            &[],
        );

        builder.switch_to_block(inc_ref_block);
        builder.seal_block(inc_ref_block);
        log::trace!("DRC initialization barrier: increment the ref count of the initial value");
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!(
            "DRC initialization barrier: finally, store into {dst:?} to initialize the field"
        );
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;

        Ok(())
    }
}

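/// Emit code to call the `gc_alloc_raw` builtin, allocating a new, raw GC
/// object of the given kind and type with the given size (in bytes) and
/// alignment, and declare the resulting reference as needing inclusion in
/// stack maps.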
fn emit_gc_raw_alloc(
    func_env: &mut FuncEnvironment<'_>,
    builder: &mut FunctionBuilder<'_>,
    kind: VMGcKind,
    ty: ModuleInternedTypeIndex,
    size: ir::Value,
    align: u32,
) -> ir::Value {
    let gc_alloc_raw_builtin = func_env.builtin_functions.gc_alloc_raw(builder.func);
    let vmctx = func_env.vmctx_val(&mut builder.cursor());

    let kind = builder
        .ins()
        .iconst(ir::types::I32, i64::from(kind.as_u32()));

    let ty = builder.ins().iconst(ir::types::I32, i64::from(ty.as_u32()));

    assert!(align.is_power_of_two());
    let align = builder.ins().iconst(ir::types::I32, i64::from(align));

    let call_inst = builder
        .ins()
        .call(gc_alloc_raw_builtin, &[vmctx, kind, ty, size, align]);

    let gc_ref = builder.func.dfg.first_result(call_inst);
    builder.declare_value_needs_stack_map(gc_ref);
    gc_ref
}

impl GcCompiler for DrcCompiler {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

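    /// Allocate an array, write its length into the object, and initialize
    /// its elements.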
    fn alloc_array(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        array_type_index: TypeIndex,
        init: super::ArrayInit<'_>,
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[array_type_index].unwrap_module_type_index();
        let ptr_ty = func_env.pointer_type();

        let len_offset = gc_compiler(func_env)?.layouts().array_length_field_offset();
        let array_layout = func_env.array_layout(interned_type_index).clone();
        let base_size = array_layout.base_size;
        let align = array_layout.align;
        let len_to_elems_delta = base_size.checked_sub(len_offset).unwrap();

        let len = init.len(&mut builder.cursor());
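        // Compute the total allocation size from the layout's static base
        // size and the array's dynamic length, then allocate the raw object.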
        let size = emit_array_size(func_env, builder, &array_layout, len);

        let array_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::ArrayRef,
            interned_type_index,
            size,
            align,
        );

        let base = func_env.get_gc_heap_base(builder);
        let extended_array_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), array_ref);
        let object_addr = builder.ins().iadd(base, extended_array_ref);
        let len_addr = builder.ins().iadd_imm(object_addr, i64::from(len_offset));
        let len = init.len(&mut builder.cursor());
        builder
            .ins()
            .store(ir::MemFlags::trusted(), len, len_addr, 0);

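        // Compute the address of the first element from the address of the
        // length field, then initialize the elements themselves.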
        let len_to_elems_delta = builder.ins().iconst(ptr_ty, i64::from(len_to_elems_delta));
        let elems_addr = builder.ins().iadd(len_addr, len_to_elems_delta);
        init.initialize(
            func_env,
            builder,
            interned_type_index,
            base_size,
            size,
            elems_addr,
            |func_env, builder, elem_ty, elem_addr, val| {
                self.init_field(func_env, builder, elem_addr, elem_ty, val)
            },
        )?;
        Ok(array_ref)
    }

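    /// Allocate a struct and initialize its fields in their defined order.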
    fn alloc_struct(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        struct_type_index: TypeIndex,
        field_vals: &[ir::Value],
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[struct_type_index].unwrap_module_type_index();

        let struct_layout = func_env.struct_layout(interned_type_index);

        let struct_size = struct_layout.size;
        let struct_align = struct_layout.align;
        let field_offsets: SmallVec<[_; 8]> = struct_layout.fields.iter().copied().collect();
        assert_eq!(field_vals.len(), field_offsets.len());

        let struct_size_val = builder.ins().iconst(ir::types::I32, i64::from(struct_size));

        let struct_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::StructRef,
            interned_type_index,
            struct_size_val,
            struct_align,
        );

        let base = func_env.get_gc_heap_base(builder);
        let extended_struct_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), struct_ref);
        let raw_ptr_to_struct = builder.ins().iadd(base, extended_struct_ref);
        initialize_struct_fields(
            func_env,
            builder,
            interned_type_index,
            raw_ptr_to_struct,
            field_vals,
            |func_env, builder, ty, field_addr, val| {
                self.init_field(func_env, builder, field_addr, ty, val)
            },
        )?;

        Ok(struct_ref)
    }

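    /// Read a GC reference out of `src`, applying the DRC read barrier that
    /// inserts the loaded object into the over-approximated-stack-roots list.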
    fn translate_read_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        src: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<ir::Value> {
        log::trace!("translate_read_gc_reference({ty:?}, {src:?}, {flags:?})");

        assert!(ty.is_vmgcref_type());

        let (reference_type, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        if let WasmHeapType::None = ty.heap_type {
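            // Special case for references to the uninhabited bottom type: the
            // only possible value is null.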
            let null = builder.ins().iconst(reference_type, 0);

            if flags.trap_code().is_some() {
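                // Even though we already know the loaded value must be null,
                // a trapping load still has to be emitted for its potential
                // side effect of trapping; its result is unused.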
                let _ = builder.ins().load(reference_type, flags, src, 0);
            }

            if !ty.nullable {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }

            return Ok(null);
        };

        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_load_gc_ref(builder, ty.heap_type, src, flags);
        }

        let current_block = builder.current_block().unwrap();
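        // The read barrier built below has the following shape: if the loaded
        // reference is non-null and not an `i31`, check the header bit that
        // records membership in the over-approximated-stack-roots list; if
        // the bit is unset, push the object onto that list so the collector
        // treats it as live.
        //
        //     current_block:
        //         ...
        //         let gc_ref = *src
        //         brif gc_ref_is_null_or_i31, continue_block, non_null_gc_ref_block
        //
        //     non_null_gc_ref_block:
        //         let in_set = gc_ref.header.reserved & IN_OVER_APPROX_LIST_BIT
        //         brif in_set, continue_block, insert_block
        //
        //     insert_block:
        //         push_onto_over_approximated_stack_roots(gc_ref)
        //         jump continue_block
        //
        //     continue_block:
        //         ...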
        let non_null_gc_ref_block = builder.create_block();
        let insert_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.insert_block_after(non_null_gc_ref_block, current_block);
        builder.insert_block_after(insert_block, non_null_gc_ref_block);
        builder.insert_block_after(continue_block, insert_block);

        log::trace!("DRC read barrier: load the gc reference and check for null or i31");
        let gc_ref = unbarriered_load_gc_ref(builder, ty.heap_type, src, flags)?;
        let gc_ref_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, gc_ref);
        builder.ins().brif(
            gc_ref_is_null_or_i31,
            continue_block,
            &[],
            non_null_gc_ref_block,
            &[],
        );

        builder.switch_to_block(non_null_gc_ref_block);
        builder.seal_block(non_null_gc_ref_block);
        log::trace!(
            "DRC read barrier: check whether this object is already in the \
             over-approximated-stack-roots list"
        );
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env.offsets.vm_gc_header_reserved_bits(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        let reserved = builder
            .ins()
            .load(ir::types::I32, ir::MemFlags::trusted(), ptr, 0);
        let in_set_bit = builder.ins().iconst(
            ir::types::I32,
            i64::from(wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT),
        );
        let in_set = builder.ins().band(reserved, in_set_bit);
        builder
            .ins()
            .brif(in_set, continue_block, &[], insert_block, &[]);

        builder.switch_to_block(insert_block);
        builder.seal_block(insert_block);
        log::trace!(
            "DRC read barrier: push the object onto the over-approximated-stack-roots list"
        );
        self.push_onto_over_approximated_stack_roots(func_env, builder, gc_ref, reserved);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("translate_read_gc_reference(..) -> {gc_ref:?}");
        Ok(gc_ref)
    }

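    /// Write a GC reference into `dst`, applying the full DRC write barrier:
    /// increment the new value's ref count, then decrement (and possibly
    /// drop) the overwritten value's ref count.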
    fn translate_write_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        assert!(ty.is_vmgcref_type());

        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        if let WasmHeapType::None = ty.heap_type {
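            // Special case for references to the uninhabited bottom type:
            // either eagerly store null (if the type is nullable) or trap (we
            // are in unreachable code).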
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

        let current_block = builder.current_block().unwrap();
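        // The write barrier built below has the following shape: first
        // increment the new value's ref count (if it is non-null and not an
        // `i31`), then store it into the field, then decrement the old
        // value's ref count (again, only if non-null and non-i31), dropping
        // the old object if its ref count reaches zero.
        //
        //     current_block:
        //         let old_val = *dst
        //         brif new_val_is_null_or_i31, check_old_val_block, inc_ref_block
        //
        //     inc_ref_block:
        //         new_val.ref_count += 1
        //         jump check_old_val_block
        //
        //     check_old_val_block:
        //         *dst = new_val
        //         brif old_val_is_null_or_i31, continue_block, dec_ref_block
        //
        //     dec_ref_block:
        //         let new_ref_count = old_val.ref_count - 1
        //         brif new_ref_count == 0, drop_old_val_block, store_dec_ref_block
        //
        //     drop_old_val_block:
        //         drop_gc_ref(old_val)
        //         jump continue_block
        //
        //     store_dec_ref_block:
        //         old_val.ref_count = new_ref_count
        //         jump continue_block
        //
        //     continue_block:
        //         ...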
        let inc_ref_block = builder.create_block();
        let check_old_val_block = builder.create_block();
        let dec_ref_block = builder.create_block();
        let drop_old_val_block = builder.create_block();
        let store_dec_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.set_cold_block(drop_old_val_block);

        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(check_old_val_block, inc_ref_block);
        builder.insert_block_after(dec_ref_block, check_old_val_block);
        builder.insert_block_after(drop_old_val_block, dec_ref_block);
        builder.insert_block_after(store_dec_ref_block, drop_old_val_block);
        builder.insert_block_after(continue_block, store_dec_ref_block);

        log::trace!("DRC write barrier: load old ref; check if new ref is null or i31");
        let old_val = unbarriered_load_gc_ref(builder, ty.heap_type, dst, flags)?;
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            check_old_val_block,
            &[],
            inc_ref_block,
            &[],
        );

        builder.switch_to_block(inc_ref_block);
        log::trace!("DRC write barrier: increment new ref's ref count");
        builder.seal_block(inc_ref_block);
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(check_old_val_block, &[]);

        builder.switch_to_block(check_old_val_block);
        builder.seal_block(check_old_val_block);
        log::trace!("DRC write barrier: store new ref into field; check if old ref is null or i31");
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;
        let old_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, old_val);
        builder.ins().brif(
            old_val_is_null_or_i31,
            continue_block,
            &[],
            dec_ref_block,
            &[],
        );

        builder.switch_to_block(dec_ref_block);
        builder.seal_block(dec_ref_block);
        log::trace!(
            "DRC write barrier: decrement old ref's ref count and check for zero ref count"
        );
        let ref_count = self.load_ref_count(func_env, builder, old_val);
        let new_ref_count = builder.ins().iadd_imm(ref_count, -1);
        let old_val_needs_drop = builder.ins().icmp_imm(IntCC::Equal, new_ref_count, 0);
        builder.ins().brif(
            old_val_needs_drop,
            drop_old_val_block,
            &[],
            store_dec_ref_block,
            &[],
        );

        builder.switch_to_block(drop_old_val_block);
        builder.seal_block(drop_old_val_block);
        log::trace!("DRC write barrier: drop old ref with a ref count of zero");
        let drop_gc_ref_libcall = func_env.builtin_functions.drop_gc_ref(builder.func);
        let vmctx = func_env.vmctx_val(&mut builder.cursor());
        builder.ins().call(drop_gc_ref_libcall, &[vmctx, old_val]);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(store_dec_ref_block);
        builder.seal_block(store_dec_ref_block);
        log::trace!("DRC write barrier: store decremented ref count into old ref");
        self.store_ref_count(func_env, builder, old_val, new_ref_count);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("DRC write barrier: finished");
        Ok(())
    }
}