use super::*;
use crate::gc::gc_compiler;
use crate::translate::TargetEnvironment;
use crate::{func_environ::FuncEnvironment, gc::GcCompiler, TRAP_INTERNAL_ASSERT};
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder};
use cranelift_frontend::FunctionBuilder;
use smallvec::SmallVec;
use wasmtime_environ::{
    drc::DrcTypeLayouts, GcTypeLayouts, ModuleInternedTypeIndex, PtrSize, TypeIndex, VMGcKind,
    WasmHeapTopType, WasmHeapType, WasmRefType, WasmResult, WasmStorageType, WasmValType,
};

#[derive(Default)]
pub struct DrcCompiler {
    layouts: DrcTypeLayouts,
}

impl DrcCompiler {
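    /// Load the ref count field out of the given GC reference's DRC header.
    ///
    /// Callers are expected to pass a non-null, non-i31 `gc_ref`; the barriers
    /// below branch around these ref-count helpers for null and i31 values.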
    fn load_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
    ) -> ir::Value {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            Offset::Static(offset),
            BoundsCheck::Access(ir::types::I64.bytes()),
        );
        builder
            .ins()
            .load(ir::types::I64, ir::MemFlags::trusted(), pointer, 0)
    }

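    /// Store a new ref count into the given GC reference's DRC header.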
    fn store_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        new_ref_count: ir::Value,
    ) {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            Offset::Static(offset),
            BoundsCheck::Access(ir::types::I64.bytes()),
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), new_ref_count, pointer, 0);
    }

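    /// Adjust the given GC reference's ref count by `delta` (which must be
    /// `-1` or `1`), returning the new ref count.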
    fn mutate_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        delta: i64,
    ) -> ir::Value {
        debug_assert!(delta == -1 || delta == 1);
        let old_ref_count = self.load_ref_count(func_env, builder, gc_ref);
        let new_ref_count = builder.ins().iadd_imm(old_ref_count, delta);
        self.store_ref_count(func_env, builder, gc_ref, new_ref_count);
        new_ref_count
    }

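    /// Load the GC heap's activations-table pointer out of the vmctx, along
    /// with the table's bump region `next` and `end` pointers, returning
    /// `(activations_table, next, end)`.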
    fn load_bump_region(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
    ) -> (ir::Value, ir::Value, ir::Value) {
        let ptr_ty = func_env.pointer_type();
        let vmctx = func_env.vmctx(&mut builder.func);
        let vmctx = builder.ins().global_value(ptr_ty, vmctx);
        let activations_table = builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted().with_readonly(),
            vmctx,
            i32::from(func_env.offsets.ptr.vmctx_gc_heap_data()),
        );
        let next = builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted(),
            activations_table,
            i32::try_from(func_env.offsets.vm_gc_ref_activation_table_next()).unwrap(),
        );
        let end = builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted(),
            activations_table,
            i32::try_from(func_env.offsets.vm_gc_ref_activation_table_end()).unwrap(),
        );
        (activations_table, next, end)
    }

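    /// Initialize a newly-allocated struct field or array element at
    /// `field_addr` with `val`, emitting the appropriate store for the
    /// storage type (including the GC-reference initialization barrier and
    /// the funcref write path where required).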
    fn init_field(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        field_addr: ir::Value,
        ty: WasmStorageType,
        val: ir::Value,
    ) -> WasmResult<()> {
        let flags = ir::MemFlags::trusted().with_endianness(ir::Endianness::Little);

        match ty {
            WasmStorageType::Val(WasmValType::Ref(r))
                if r.heap_type.top() == WasmHeapTopType::Func =>
            {
                write_func_ref_at_addr(func_env, builder, r, flags, field_addr, val)?;
            }
            WasmStorageType::Val(WasmValType::Ref(r)) => {
                self.translate_init_gc_reference(func_env, builder, r, field_addr, val, flags)?;
            }
            WasmStorageType::I8 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore8(flags, val, field_addr, 0);
            }
            WasmStorageType::I16 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore16(flags, val, field_addr, 0);
            }
            WasmStorageType::Val(_) => {
                let size_of_access = wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&ty);
                assert_eq!(builder.func.dfg.value_type(val).bytes(), size_of_access);
                builder.ins().store(flags, val, field_addr, 0);
            }
        }

        Ok(())
    }

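    /// Write a GC reference into an uninitialized slot, e.g. a just-allocated
    /// struct field or array element.
    ///
    /// This is cheaper than a full write barrier: because the destination
    /// holds no previous value, there is no old reference whose count must be
    /// decremented and potentially dropped.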
    fn translate_init_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        if let WasmHeapType::None = ty.heap_type {
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

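        // The initialization barrier built below is roughly equivalent to the
        // following pseudo-CLIF:
        //
        // current_block:
        //     ...
        //     let new_val_is_null_or_i31 = ...
        //     brif new_val_is_null_or_i31, continue_block, inc_ref_block
        //
        // inc_ref_block:
        //     let ref_count = load new_val.ref_count
        //     let new_ref_count = iadd_imm ref_count, 1
        //     store new_val.ref_count, new_ref_count
        //     jump continue_block
        //
        // continue_block:
        //     store dst, new_val
        //     ...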
        let current_block = builder.current_block().unwrap();
        let inc_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(continue_block, inc_ref_block);

        log::trace!("DRC initialization barrier: check if the value is null or i31");
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            continue_block,
            &[],
            inc_ref_block,
            &[],
        );

        builder.switch_to_block(inc_ref_block);
        builder.seal_block(inc_ref_block);
        log::trace!("DRC initialization barrier: increment the ref count of the initial value");
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!(
            "DRC initialization barrier: finally, store into {dst:?} to initialize the field"
        );
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;

        Ok(())
    }
}

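/// Emit a call to the `gc_alloc_raw` builtin: allocate a raw GC object of the
/// given kind and type with the given size (in bytes) and alignment, returning
/// the new 32-bit GC reference, which is declared as needing a stack map.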
fn emit_gc_raw_alloc(
    func_env: &mut FuncEnvironment<'_>,
    builder: &mut FunctionBuilder<'_>,
    kind: VMGcKind,
    ty: ModuleInternedTypeIndex,
    size: ir::Value,
    align: u32,
) -> ir::Value {
    let gc_alloc_raw_builtin = func_env.builtin_functions.gc_alloc_raw(builder.func);
    let vmctx = func_env.vmctx_val(&mut builder.cursor());

    let kind = builder
        .ins()
        .iconst(ir::types::I32, i64::from(kind.as_u32()));

    let ty = builder.ins().iconst(ir::types::I32, i64::from(ty.as_u32()));

    assert!(align.is_power_of_two());
    let align = builder.ins().iconst(ir::types::I32, i64::from(align));

    let call_inst = builder
        .ins()
        .call(gc_alloc_raw_builtin, &[vmctx, kind, ty, size, align]);

    let gc_ref = builder.func.dfg.first_result(call_inst);
    let gc_ref = builder.ins().ireduce(ir::types::I32, gc_ref);
    builder.declare_value_needs_stack_map(gc_ref);
    gc_ref
}

impl GcCompiler for DrcCompiler {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

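    // Allocating an array computes the total object size from the array's
    // layout and requested length, calls out to the `gc_alloc_raw` builtin,
    // stores the length field, and then initializes the elements in place via
    // `init_field`.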
    fn alloc_array(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        array_type_index: TypeIndex,
        init: super::ArrayInit<'_>,
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[array_type_index].unwrap_module_type_index();
        let ptr_ty = func_env.pointer_type();

        let len_offset = gc_compiler(func_env)?.layouts().array_length_field_offset();
        let array_layout = func_env.array_layout(interned_type_index).clone();
        let base_size = array_layout.base_size;
        let align = array_layout.align;
        let len_to_elems_delta = base_size.checked_sub(len_offset).unwrap();

        let size = emit_array_size(func_env, builder, &array_layout, init);

        let array_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::ArrayRef,
            interned_type_index,
            size,
            align,
        );

        let base = func_env.get_gc_heap_base(builder);
        let extended_array_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), array_ref);
        let object_addr = builder.ins().iadd(base, extended_array_ref);
        let len_addr = builder.ins().iadd_imm(object_addr, i64::from(len_offset));
        let len = init.len(&mut builder.cursor());
        builder
            .ins()
            .store(ir::MemFlags::trusted(), len, len_addr, 0);

        let len_to_elems_delta = builder.ins().iconst(ptr_ty, i64::from(len_to_elems_delta));
        let elems_addr = builder.ins().iadd(len_addr, len_to_elems_delta);
        init.initialize(
            func_env,
            builder,
            interned_type_index,
            base_size,
            size,
            elems_addr,
            |func_env, builder, elem_ty, elem_addr, val| {
                self.init_field(func_env, builder, elem_addr, elem_ty, val)
            },
        )?;
        Ok(array_ref)
    }

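    // Allocating a struct is similar, except that its size is known
    // statically from the layout; the asserts check that the size fits within
    // `VMGcKind::UNUSED_MASK`.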
    fn alloc_struct(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        struct_type_index: TypeIndex,
        field_vals: &[ir::Value],
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[struct_type_index].unwrap_module_type_index();

        let struct_layout = func_env.struct_layout(interned_type_index);

        let struct_size = struct_layout.size;
        let struct_align = struct_layout.align;
        let field_offsets: SmallVec<[_; 8]> = struct_layout.fields.iter().copied().collect();
        assert_eq!(field_vals.len(), field_offsets.len());

        assert_eq!(VMGcKind::MASK & struct_size, 0);
        assert_eq!(VMGcKind::UNUSED_MASK & struct_size, struct_size);
        let struct_size_val = builder.ins().iconst(ir::types::I32, i64::from(struct_size));

        let struct_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::StructRef,
            interned_type_index,
            struct_size_val,
            struct_align,
        );

        let base = func_env.get_gc_heap_base(builder);
        let extended_struct_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), struct_ref);
        let raw_ptr_to_struct = builder.ins().iadd(base, extended_struct_ref);
        initialize_struct_fields(
            func_env,
            builder,
            interned_type_index,
            raw_ptr_to_struct,
            field_vals,
            |func_env, builder, ty, field_addr, val| {
                self.init_field(func_env, builder, field_addr, ty, val)
            },
        )?;

        Ok(struct_ref)
    }

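    // The DRC read barrier: a GC reference loaded out of the heap has its ref
    // count incremented and is appended to the activations table's bump
    // region, keeping it alive while it is on the Wasm stack. If the bump
    // region is full, the barrier calls out to the `gc` builtin instead.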
    fn translate_read_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        src: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<ir::Value> {
        log::trace!("translate_read_gc_reference({ty:?}, {src:?}, {flags:?})");

        assert!(ty.is_vmgcref_type());

        let (reference_type, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        if let WasmHeapType::None = ty.heap_type {
            let null = builder.ins().iconst(reference_type, 0);

            if flags.trap_code().is_some() {
                let _ = builder.ins().load(reference_type, flags, src, 0);
            }

            if !ty.nullable {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }

            return Ok(null);
        };

        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_load_gc_ref(builder, ty.heap_type, src, flags);
        }

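        // The read barrier built below is roughly equivalent to the following
        // pseudo-CLIF:
        //
        // current_block:
        //     ...
        //     let gc_ref = load src
        //     let gc_ref_is_null_or_i31 = ...
        //     brif gc_ref_is_null_or_i31, continue_block, non_null_gc_ref_block
        //
        // non_null_gc_ref_block:
        //     let (activations_table, next, end) = load bump region
        //     let bump_region_is_full = icmp eq next, end
        //     brif bump_region_is_full, gc_block, no_gc_block
        //
        // no_gc_block:
        //     let ref_count = load gc_ref.ref_count
        //     let new_ref_count = iadd_imm ref_count, 1
        //     store gc_ref.ref_count, new_ref_count
        //     store next, gc_ref
        //     let new_next = iadd_imm next, size_of(reference_type)
        //     store activations_table.next, new_next
        //     jump continue_block
        //
        // gc_block (cold):
        //     call gc(vmctx, gc_ref)
        //     jump continue_block
        //
        // continue_block:
        //     ...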
        let current_block = builder.current_block().unwrap();
        let non_null_gc_ref_block = builder.create_block();
        let gc_block = builder.create_block();
        let no_gc_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.set_cold_block(gc_block);
        builder.ensure_inserted_block();
        builder.insert_block_after(non_null_gc_ref_block, current_block);
        builder.insert_block_after(no_gc_block, non_null_gc_ref_block);
        builder.insert_block_after(gc_block, no_gc_block);
        builder.insert_block_after(continue_block, gc_block);

        log::trace!("DRC read barrier: load the gc reference and check for null or i31");
        let gc_ref = unbarriered_load_gc_ref(builder, ty.heap_type, src, flags)?;
        let gc_ref_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, gc_ref);
        builder.ins().brif(
            gc_ref_is_null_or_i31,
            continue_block,
            &[],
            non_null_gc_ref_block,
            &[],
        );

        builder.switch_to_block(non_null_gc_ref_block);
        builder.seal_block(non_null_gc_ref_block);
        log::trace!("DRC read barrier: load bump region and check capacity");
        let (activations_table, next, end) = self.load_bump_region(func_env, builder);
        let bump_region_is_full = builder.ins().icmp(IntCC::Equal, next, end);
        builder
            .ins()
            .brif(bump_region_is_full, gc_block, &[], no_gc_block, &[]);

        builder.switch_to_block(no_gc_block);
        builder.seal_block(no_gc_block);
        log::trace!("DRC read barrier: increment ref count and inline insert into bump region");
        self.mutate_ref_count(func_env, builder, gc_ref, 1);
        builder
            .ins()
            .store(ir::MemFlags::trusted(), gc_ref, next, 0);
        let new_next = builder
            .ins()
            .iadd_imm(next, i64::from(reference_type.bytes()));
        builder.ins().store(
            ir::MemFlags::trusted(),
            new_next,
            activations_table,
            i32::try_from(func_env.offsets.vm_gc_ref_activation_table_next()).unwrap(),
        );
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(gc_block);
        builder.seal_block(gc_block);
        log::trace!("DRC read barrier: slow path for when the bump region is full; do a gc");
        let gc_libcall = func_env.builtin_functions.gc(builder.func);
        let vmctx = func_env.vmctx_val(&mut builder.cursor());
        builder.ins().call(gc_libcall, &[vmctx, gc_ref]);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("translate_read_gc_reference(..) -> {gc_ref:?}");
        Ok(gc_ref)
    }

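    // The DRC write barrier: increment the new value's ref count, store it,
    // then decrement the old value's ref count, calling out to the
    // `drop_gc_ref` builtin if the old count reaches zero. Null and i31
    // references skip the count updates on both sides.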
    fn translate_write_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        assert!(ty.is_vmgcref_type());

        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        if let WasmHeapType::None = ty.heap_type {
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

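        // The write barrier built below is roughly equivalent to the
        // following pseudo-CLIF:
        //
        // current_block:
        //     ...
        //     let old_val = load dst
        //     let new_val_is_null_or_i31 = ...
        //     brif new_val_is_null_or_i31, check_old_val_block, inc_ref_block
        //
        // inc_ref_block:
        //     let ref_count = load new_val.ref_count
        //     let new_ref_count = iadd_imm ref_count, 1
        //     store new_val.ref_count, new_ref_count
        //     jump check_old_val_block
        //
        // check_old_val_block:
        //     store dst, new_val
        //     let old_val_is_null_or_i31 = ...
        //     brif old_val_is_null_or_i31, continue_block, dec_ref_block
        //
        // dec_ref_block:
        //     let ref_count = load old_val.ref_count
        //     let new_ref_count = iadd_imm ref_count, -1
        //     let old_val_needs_drop = icmp_imm eq new_ref_count, 0
        //     brif old_val_needs_drop, drop_old_val_block, store_dec_ref_block
        //
        // drop_old_val_block (cold):
        //     call drop_gc_ref(vmctx, old_val)
        //     jump continue_block
        //
        // store_dec_ref_block:
        //     store old_val.ref_count, new_ref_count
        //     jump continue_block
        //
        // continue_block:
        //     ...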
        let current_block = builder.current_block().unwrap();
        let inc_ref_block = builder.create_block();
        let check_old_val_block = builder.create_block();
        let dec_ref_block = builder.create_block();
        let drop_old_val_block = builder.create_block();
        let store_dec_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.set_cold_block(drop_old_val_block);

        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(check_old_val_block, inc_ref_block);
        builder.insert_block_after(dec_ref_block, check_old_val_block);
        builder.insert_block_after(drop_old_val_block, dec_ref_block);
        builder.insert_block_after(store_dec_ref_block, drop_old_val_block);
        builder.insert_block_after(continue_block, store_dec_ref_block);

        log::trace!("DRC write barrier: load old ref; check if new ref is null or i31");
        let old_val = unbarriered_load_gc_ref(builder, ty.heap_type, dst, flags)?;
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            check_old_val_block,
            &[],
            inc_ref_block,
            &[],
        );

        builder.switch_to_block(inc_ref_block);
        log::trace!("DRC write barrier: increment new ref's ref count");
        builder.seal_block(inc_ref_block);
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(check_old_val_block, &[]);

        builder.switch_to_block(check_old_val_block);
        builder.seal_block(check_old_val_block);
        log::trace!("DRC write barrier: store new ref into field; check if old ref is null or i31");
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;
        let old_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, old_val);
        builder.ins().brif(
            old_val_is_null_or_i31,
            continue_block,
            &[],
            dec_ref_block,
            &[],
        );

        builder.switch_to_block(dec_ref_block);
        builder.seal_block(dec_ref_block);
        log::trace!(
            "DRC write barrier: decrement old ref's ref count and check for zero ref count"
        );
        let ref_count = self.load_ref_count(func_env, builder, old_val);
        let new_ref_count = builder.ins().iadd_imm(ref_count, -1);
        let old_val_needs_drop = builder.ins().icmp_imm(IntCC::Equal, new_ref_count, 0);
        builder.ins().brif(
            old_val_needs_drop,
            drop_old_val_block,
            &[],
            store_dec_ref_block,
            &[],
        );

        builder.switch_to_block(drop_old_val_block);
        builder.seal_block(drop_old_val_block);
        log::trace!("DRC write barrier: drop old ref with a ref count of zero");
        let drop_gc_ref_libcall = func_env.builtin_functions.drop_gc_ref(builder.func);
        let vmctx = func_env.vmctx_val(&mut builder.cursor());
        builder.ins().call(drop_gc_ref_libcall, &[vmctx, old_val]);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(store_dec_ref_block);
        builder.seal_block(store_dec_ref_block);
        log::trace!("DRC write barrier: store decremented ref count into old ref");
        self.store_ref_count(func_env, builder, old_val, new_ref_count);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("DRC write barrier: finished");
        Ok(())
    }
}