1use super::*;
5use crate::translate::TargetEnvironment;
6use crate::{func_environ::FuncEnvironment, TRAP_INTERNAL_ASSERT};
7use cranelift_codegen::ir::condcodes::IntCC;
8use cranelift_codegen::ir::{self, InstBuilder};
9use cranelift_frontend::FunctionBuilder;
10use smallvec::SmallVec;
11use wasmtime_environ::{
12 drc::DrcTypeLayouts, GcTypeLayouts, ModuleInternedTypeIndex, PtrSize, TypeIndex, VMGcKind,
13 WasmHeapTopType, WasmHeapType, WasmRefType, WasmResult, WasmStorageType, WasmValType,
14};
15
/// A GC compiler for the deferred reference-counting (DRC) collector.
///
/// Emits inline read/write/initialization barriers that maintain each
/// object's reference count (stored in its `VMDrcHeader`) and record
/// non-null, non-i31 references in the activations table's bump region.
#[derive(Default)]
pub struct DrcCompiler {
    /// Cached layout information for GC-managed types under the DRC
    /// collector.
    layouts: DrcTypeLayouts,
}
20
impl DrcCompiler {
    /// Load the reference count field out of `gc_ref`'s `VMDrcHeader`.
    ///
    /// Emits a bounds-checked address computation for the header field, then
    /// a trusted (non-trapping, non-aliasing-checked) 8-byte load.
    fn load_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
    ) -> ir::Value {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            Offset::Static(offset),
            // The ref count is an 8-byte field, so bounds-check an access of
            // `i64`'s width.
            BoundsCheck::Access(ir::types::I64.bytes()),
        );
        builder
            .ins()
            .load(ir::types::I64, ir::MemFlags::trusted(), pointer, 0)
    }

    /// Store `new_ref_count` into the reference count field of `gc_ref`'s
    /// `VMDrcHeader`.
    fn store_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        new_ref_count: ir::Value,
    ) {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            Offset::Static(offset),
            BoundsCheck::Access(ir::types::I64.bytes()),
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), new_ref_count, pointer, 0);
    }

    /// Add `delta` (which must be `1` or `-1`) to `gc_ref`'s reference count
    /// and return the new count.
    ///
    /// NOTE(review): this is a plain load/add/store, not an atomic RMW;
    /// presumably GC object headers are only mutated by the owning thread —
    /// confirm against the runtime's threading model.
    fn mutate_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        delta: i64,
    ) -> ir::Value {
        debug_assert!(delta == -1 || delta == 1);
        let old_ref_count = self.load_ref_count(func_env, builder, gc_ref);
        let new_ref_count = builder.ins().iadd_imm(old_ref_count, delta);
        self.store_ref_count(func_env, builder, gc_ref, new_ref_count);
        new_ref_count
    }

    /// Load the activations table's bump region out of the vmctx.
    ///
    /// Returns `(activations_table, next, end)`: the pointer to the table
    /// itself plus its current bump-region `next` and `end` pointers.
    fn load_bump_region(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
    ) -> (ir::Value, ir::Value, ir::Value) {
        let ptr_ty = func_env.pointer_type();
        let vmctx = func_env.vmctx(&mut builder.func);
        let vmctx = builder.ins().global_value(ptr_ty, vmctx);
        // The GC heap data pointer in the vmctx is loaded `readonly`: this
        // slot is treated as never changing for the duration of the function.
        let activations_table = builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted().with_readonly(),
            vmctx,
            i32::from(func_env.offsets.ptr.vmctx_gc_heap_data()),
        );
        // `next` and `end` are mutated at runtime (the read barrier below
        // advances `next`), so these loads are trusted but not readonly.
        let next = builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted(),
            activations_table,
            i32::try_from(func_env.offsets.vm_gc_ref_activation_table_next()).unwrap(),
        );
        let end = builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted(),
            activations_table,
            i32::try_from(func_env.offsets.vm_gc_ref_activation_table_end()).unwrap(),
        );
        (activations_table, next, end)
    }

    /// Initialize the field at `field_addr`, which has Wasm storage type
    /// `ty`, with the value `val`.
    ///
    /// All GC-heap stores use little-endian byte order regardless of host
    /// endianness.
    fn init_field(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        field_addr: ir::Value,
        ty: WasmStorageType,
        val: ir::Value,
    ) -> WasmResult<()> {
        let flags = ir::MemFlags::trusted().with_endianness(ir::Endianness::Little);

        match ty {
            // Function references go through the dedicated funcref writer
            // rather than the GC barrier.
            WasmStorageType::Val(WasmValType::Ref(r))
                if r.heap_type.top() == WasmHeapTopType::Func =>
            {
                write_func_ref_at_addr(func_env, builder, r, flags, field_addr, val)?;
            }
            // All other reference types go through the DRC initialization
            // barrier so reference counts stay accurate.
            WasmStorageType::Val(WasmValType::Ref(r)) => {
                self.translate_init_gc_reference(func_env, builder, r, field_addr, val, flags)?;
            }
            // Packed i8/i16 fields are represented as `i32` values and
            // written with narrowing stores.
            WasmStorageType::I8 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore8(flags, val, field_addr, 0);
            }
            WasmStorageType::I16 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore16(flags, val, field_addr, 0);
            }
            // Everything else is stored at its natural width; double-check
            // that the IR value's width matches the field's GC-heap size.
            WasmStorageType::Val(_) => {
                let size_of_access = wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&ty);
                assert_eq!(builder.func.dfg.value_type(val).bytes(), size_of_access);
                builder.ins().store(flags, val, field_addr, 0);
            }
        }

        Ok(())
    }

    /// Write a GC reference into an *uninitialized* slot at `dst`.
    ///
    /// Like a full write barrier, except there is no old value in the
    /// destination to decrement — only the new value's ref count needs
    /// maintaining.
    fn translate_init_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        // `(ref none)` values can only be null: a nullable slot is simply
        // initialized to null, while a non-nullable `(ref none)` cannot
        // exist, so that path is unreachable and unconditionally traps
        // (`trapz` on a constant zero always fires).
        if let WasmHeapType::None = ty.heap_type {
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        // i31 refs are unboxed values, not heap objects, so they have no
        // ref count to maintain — just store them.
        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

        // Block layout for the barrier:
        //
        //     current_block:
        //         ...
        //         brif is_null_or_i31(new_val), continue_block, inc_ref_block
        //     inc_ref_block:
        //         new_val.ref_count += 1
        //         jump continue_block
        //     continue_block:
        //         store new_val into dst
        let current_block = builder.current_block().unwrap();
        let inc_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(continue_block, inc_ref_block);

        // Null and i31 references have no ref count; skip the increment.
        log::trace!("DRC initialization barrier: check if the value is null or i31");
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            continue_block,
            &[],
            inc_ref_block,
            &[],
        );

        builder.switch_to_block(inc_ref_block);
        builder.seal_block(inc_ref_block);
        log::trace!("DRC initialization barrier: increment the ref count of the initial value");
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(continue_block, &[]);

        // Finally, the raw (unbarriered) store of the new value.
        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!(
            "DRC initialization barrier: finally, store into {dst:?} to initialize the field"
        );
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;

        Ok(())
    }
}
260
261fn emit_gc_raw_alloc(
263 func_env: &mut FuncEnvironment<'_>,
264 builder: &mut FunctionBuilder<'_>,
265 kind: VMGcKind,
266 ty: ModuleInternedTypeIndex,
267 size: ir::Value,
268 align: u32,
269) -> ir::Value {
270 let gc_alloc_raw_builtin = func_env.builtin_functions.gc_alloc_raw(builder.func);
271 let vmctx = func_env.vmctx_val(&mut builder.cursor());
272
273 let kind = builder
274 .ins()
275 .iconst(ir::types::I32, i64::from(kind.as_u32()));
276
277 let ty = builder.ins().iconst(ir::types::I32, i64::from(ty.as_u32()));
278
279 assert!(align.is_power_of_two());
280 let align = builder.ins().iconst(ir::types::I32, i64::from(align));
281
282 let call_inst = builder
283 .ins()
284 .call(gc_alloc_raw_builtin, &[vmctx, kind, ty, size, align]);
285
286 let gc_ref = builder.func.dfg.first_result(call_inst);
287 builder.declare_value_needs_stack_map(gc_ref);
288 gc_ref
289}
290
impl GcCompiler for DrcCompiler {
    /// Return the DRC collector's type layouts.
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

    /// Allocate and initialize a new array of type `array_type_index`.
    ///
    /// Computes the array's total size from its length and layout, calls the
    /// `gc_alloc_raw` builtin, writes the length field, then initializes
    /// each element via `init_field`.
    fn alloc_array(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        array_type_index: TypeIndex,
        init: super::ArrayInit<'_>,
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[array_type_index].unwrap_module_type_index();
        let ptr_ty = func_env.pointer_type();

        let len_offset = gc_compiler(func_env)?.layouts().array_length_field_offset();
        let array_layout = func_env.array_layout(interned_type_index).clone();
        let base_size = array_layout.base_size;
        let align = array_layout.align;
        // Distance from the length field to the first element; the length
        // field must precede the elements for this subtraction to hold.
        let len_to_elems_delta = base_size.checked_sub(len_offset).unwrap();

        // First, compute the array's total size from its length and layout.
        let len = init.len(&mut builder.cursor());
        let size = emit_array_size(func_env, builder, &array_layout, len);

        // Second, allocate the raw (uninitialized) array object.
        let array_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::ArrayRef,
            interned_type_index,
            size,
            align,
        );

        // Third, convert the GC reference into a raw address and write the
        // array's length into its length field.
        let base = func_env.get_gc_heap_base(builder);
        let extended_array_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), array_ref);
        let object_addr = builder.ins().iadd(base, extended_array_ref);
        let len_addr = builder.ins().iadd_imm(object_addr, i64::from(len_offset));
        // NOTE(review): `init.len` is invoked a second time here, shadowing
        // the `len` computed above; presumably it re-materializes the same
        // length value with no other side effects — confirm.
        let len = init.len(&mut builder.cursor());
        builder
            .ins()
            .store(ir::MemFlags::trusted(), len, len_addr, 0);

        // Finally, initialize each element, which live `len_to_elems_delta`
        // bytes past the length field.
        let len_to_elems_delta = builder.ins().iconst(ptr_ty, i64::from(len_to_elems_delta));
        let elems_addr = builder.ins().iadd(len_addr, len_to_elems_delta);
        init.initialize(
            func_env,
            builder,
            interned_type_index,
            base_size,
            size,
            elems_addr,
            |func_env, builder, elem_ty, elem_addr, val| {
                self.init_field(func_env, builder, elem_addr, elem_ty, val)
            },
        )?;
        Ok(array_ref)
    }

    /// Allocate and initialize a new struct of type `struct_type_index` with
    /// the given field values.
    fn alloc_struct(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        struct_type_index: TypeIndex,
        field_vals: &[ir::Value],
    ) -> WasmResult<ir::Value> {
        // The struct layout is keyed by the interned (module-level) type
        // index, not the function-level `TypeIndex`.
        let interned_type_index =
            func_env.module.types[struct_type_index].unwrap_module_type_index();

        let struct_layout = func_env.struct_layout(interned_type_index);

        // Copy the bits of layout we need out of the borrow of `func_env`
        // before we start emitting code with `builder`.
        let struct_size = struct_layout.size;
        let struct_align = struct_layout.align;
        let field_offsets: SmallVec<[_; 8]> = struct_layout.fields.iter().copied().collect();
        assert_eq!(field_vals.len(), field_offsets.len());

        // The struct size must fit within the header bits not occupied by
        // the `VMGcKind`.
        assert_eq!(VMGcKind::MASK & struct_size, 0);
        assert_eq!(VMGcKind::UNUSED_MASK & struct_size, struct_size);
        let struct_size_val = builder.ins().iconst(ir::types::I32, i64::from(struct_size));

        // Allocate the raw (uninitialized) struct object.
        let struct_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::StructRef,
            interned_type_index,
            struct_size_val,
            struct_align,
        );

        // Convert the GC reference into a raw address and initialize each
        // field in place.
        let base = func_env.get_gc_heap_base(builder);
        let extended_struct_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), struct_ref);
        let raw_ptr_to_struct = builder.ins().iadd(base, extended_struct_ref);
        initialize_struct_fields(
            func_env,
            builder,
            interned_type_index,
            raw_ptr_to_struct,
            field_vals,
            |func_env, builder, ty, field_addr, val| {
                self.init_field(func_env, builder, field_addr, ty, val)
            },
        )?;

        Ok(struct_ref)
    }

    /// DRC read barrier for loading a GC reference out of the heap.
    ///
    /// After the raw load, a non-null, non-i31 reference has its ref count
    /// incremented and is appended to the activations table's bump region,
    /// calling out to the `gc` builtin when the region is full.
    fn translate_read_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        src: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<ir::Value> {
        log::trace!("translate_read_gc_reference({ty:?}, {src:?}, {flags:?})");

        assert!(ty.is_vmgcref_type());

        let (reference_type, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        // `(ref none)` references can only be null, so the result is a
        // constant null and no barrier is needed.
        if let WasmHeapType::None = ty.heap_type {
            let null = builder.ins().iconst(reference_type, 0);

            // If the access can trap, the load itself must still happen so
            // that its trap is preserved; the loaded value is unused since a
            // none ref can only be null.
            if flags.trap_code().is_some() {
                let _ = builder.ins().load(reference_type, flags, src, 0);
            }

            // A non-nullable `(ref none)` value cannot exist; `trapz` on a
            // constant zero always traps.
            if !ty.nullable {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }

            return Ok(null);
        };

        // i31 refs are unboxed and need no barrier.
        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_load_gc_ref(builder, ty.heap_type, src, flags);
        }

        // Block layout for the barrier:
        //
        //     current_block:
        //         gc_ref = load src
        //         brif is_null_or_i31(gc_ref), continue_block, non_null_gc_ref_block
        //     non_null_gc_ref_block:
        //         brif bump_region_is_full, gc_block, no_gc_block
        //     no_gc_block:
        //         gc_ref.ref_count += 1
        //         *next = gc_ref
        //         next += sizeof(reference)
        //         jump continue_block
        //     gc_block (cold):
        //         call gc(gc_ref)
        //         jump continue_block
        //     continue_block:
        //         return gc_ref
        let current_block = builder.current_block().unwrap();
        let non_null_gc_ref_block = builder.create_block();
        let gc_block = builder.create_block();
        let no_gc_block = builder.create_block();
        let continue_block = builder.create_block();

        // The bump-region-full slow path is expected to be rare.
        builder.set_cold_block(gc_block);
        builder.ensure_inserted_block();
        builder.insert_block_after(non_null_gc_ref_block, current_block);
        builder.insert_block_after(no_gc_block, non_null_gc_ref_block);
        builder.insert_block_after(gc_block, no_gc_block);
        builder.insert_block_after(continue_block, gc_block);

        // Null and i31 refs have no ref count and need no table entry.
        log::trace!("DRC read barrier: load the gc reference and check for null or i31");
        let gc_ref = unbarriered_load_gc_ref(builder, ty.heap_type, src, flags)?;
        let gc_ref_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, gc_ref);
        builder.ins().brif(
            gc_ref_is_null_or_i31,
            continue_block,
            &[],
            non_null_gc_ref_block,
            &[],
        );

        builder.switch_to_block(non_null_gc_ref_block);
        builder.seal_block(non_null_gc_ref_block);
        log::trace!("DRC read barrier: load bump region and check capacity");
        let (activations_table, next, end) = self.load_bump_region(func_env, builder);
        // `next == end` means the bump region has no space left.
        let bump_region_is_full = builder.ins().icmp(IntCC::Equal, next, end);
        builder
            .ins()
            .brif(bump_region_is_full, gc_block, &[], no_gc_block, &[]);

        // Fast path: bump the ref count, append the reference at `next`, and
        // advance `next` by one reference's width.
        builder.switch_to_block(no_gc_block);
        builder.seal_block(no_gc_block);
        log::trace!("DRC read barrier: increment ref count and inline insert into bump region");
        self.mutate_ref_count(func_env, builder, gc_ref, 1);
        builder
            .ins()
            .store(ir::MemFlags::trusted(), gc_ref, next, 0);
        let new_next = builder
            .ins()
            .iadd_imm(next, i64::from(reference_type.bytes()));
        builder.ins().store(
            ir::MemFlags::trusted(),
            new_next,
            activations_table,
            i32::try_from(func_env.offsets.vm_gc_ref_activation_table_next()).unwrap(),
        );
        builder.ins().jump(continue_block, &[]);

        // Slow path: the bump region is full, call the `gc` builtin with
        // this reference.
        builder.switch_to_block(gc_block);
        builder.seal_block(gc_block);
        log::trace!("DRC read barrier: slow path for when the bump region is full; do a gc");
        let gc_libcall = func_env.builtin_functions.gc(builder.func);
        let vmctx = func_env.vmctx_val(&mut builder.cursor());
        builder.ins().call(gc_libcall, &[vmctx, gc_ref]);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("translate_read_gc_reference(..) -> {gc_ref:?}");
        Ok(gc_ref)
    }

    /// DRC write barrier for overwriting a GC reference in the heap.
    ///
    /// Loads the old value, increments the new value's ref count (when it is
    /// neither null nor i31), stores the new value, then decrements the old
    /// value's ref count, calling the `drop_gc_ref` builtin when the count
    /// reaches zero. Incrementing the new value *before* decrementing the
    /// old one keeps a self-assignment (`new_val == old_val`) from ever
    /// dropping the object.
    fn translate_write_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        assert!(ty.is_vmgcref_type());

        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        // `(ref none)` values can only be null: a nullable slot is simply
        // overwritten with null, while a non-nullable `(ref none)` cannot
        // exist, so that path unconditionally traps (`trapz` on a constant
        // zero always fires).
        if let WasmHeapType::None = ty.heap_type {
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        // i31 refs are unboxed and need no barrier.
        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

        // Block layout for the barrier:
        //
        //     current_block:
        //         old_val = load dst
        //         brif is_null_or_i31(new_val), check_old_val_block, inc_ref_block
        //     inc_ref_block:
        //         new_val.ref_count += 1
        //         jump check_old_val_block
        //     check_old_val_block:
        //         store new_val into dst
        //         brif is_null_or_i31(old_val), continue_block, dec_ref_block
        //     dec_ref_block:
        //         brif old_val.ref_count - 1 == 0, drop_old_val_block, store_dec_ref_block
        //     drop_old_val_block (cold):
        //         call drop_gc_ref(old_val)
        //         jump continue_block
        //     store_dec_ref_block:
        //         store decremented count into old_val's header
        //         jump continue_block
        //     continue_block:
        let current_block = builder.current_block().unwrap();
        let inc_ref_block = builder.create_block();
        let check_old_val_block = builder.create_block();
        let dec_ref_block = builder.create_block();
        let drop_old_val_block = builder.create_block();
        let store_dec_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        // Dropping a dead object is expected to be the rare case.
        builder.set_cold_block(drop_old_val_block);

        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(check_old_val_block, inc_ref_block);
        builder.insert_block_after(dec_ref_block, check_old_val_block);
        builder.insert_block_after(drop_old_val_block, dec_ref_block);
        builder.insert_block_after(store_dec_ref_block, drop_old_val_block);
        builder.insert_block_after(continue_block, store_dec_ref_block);

        // Null and i31 refs have no ref count; skip the increment for them.
        log::trace!("DRC write barrier: load old ref; check if new ref is null or i31");
        let old_val = unbarriered_load_gc_ref(builder, ty.heap_type, dst, flags)?;
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            check_old_val_block,
            &[],
            inc_ref_block,
            &[],
        );

        builder.switch_to_block(inc_ref_block);
        log::trace!("DRC write barrier: increment new ref's ref count");
        builder.seal_block(inc_ref_block);
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(check_old_val_block, &[]);

        // Store the new value, then decide whether the old value's ref
        // count needs decrementing (null/i31 old values have none).
        builder.switch_to_block(check_old_val_block);
        builder.seal_block(check_old_val_block);
        log::trace!("DRC write barrier: store new ref into field; check if old ref is null or i31");
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;
        let old_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, old_val);
        builder.ins().brif(
            old_val_is_null_or_i31,
            continue_block,
            &[],
            dec_ref_block,
            &[],
        );

        // Compute (but don't yet store) the decremented count so the
        // zero-count check can pick between dropping and storing.
        builder.switch_to_block(dec_ref_block);
        builder.seal_block(dec_ref_block);
        log::trace!(
            "DRC write barrier: decrement old ref's ref count and check for zero ref count"
        );
        let ref_count = self.load_ref_count(func_env, builder, old_val);
        let new_ref_count = builder.ins().iadd_imm(ref_count, -1);
        let old_val_needs_drop = builder.ins().icmp_imm(IntCC::Equal, new_ref_count, 0);
        builder.ins().brif(
            old_val_needs_drop,
            drop_old_val_block,
            &[],
            store_dec_ref_block,
            &[],
        );

        // Count hit zero: the runtime takes over dropping the object.
        builder.switch_to_block(drop_old_val_block);
        builder.seal_block(drop_old_val_block);
        log::trace!("DRC write barrier: drop old ref with a ref count of zero");
        let drop_gc_ref_libcall = func_env.builtin_functions.drop_gc_ref(builder.func);
        let vmctx = func_env.vmctx_val(&mut builder.cursor());
        builder.ins().call(drop_gc_ref_libcall, &[vmctx, old_val]);
        builder.ins().jump(continue_block, &[]);

        // Count still positive: just write back the decremented count.
        builder.switch_to_block(store_dec_ref_block);
        builder.seal_block(store_dec_ref_block);
        log::trace!("DRC write barrier: store decremented ref count into old ref");
        self.store_ref_count(func_env, builder, old_val, new_ref_count);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("DRC write barrier: finished");
        Ok(())
    }
}