wasmtime_cranelift/gc/enabled/null.rs

use super::*;
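
// The `NullCompiler` is the `GcCompiler` implementation for the null
// collector, which never collects anything: objects are bump-allocated
// inline out of the GC heap, and reads and writes of GC references need no
// barriers.
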
use crate::gc::gc_compiler;
use crate::{func_environ::FuncEnvironment, gc::GcCompiler};
use cranelift_codegen::ir::{self, InstBuilder};
use cranelift_frontend::FunctionBuilder;
use wasmtime_environ::VMSharedTypeIndex;
use wasmtime_environ::{
    null::NullTypeLayouts, GcTypeLayouts, ModuleInternedTypeIndex, PtrSize, TypeIndex, VMGcKind,
    WasmRefType, WasmResult,
};

#[derive(Default)]
pub struct NullCompiler {
    layouts: NullTypeLayouts,
}

impl NullCompiler {
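    /// Emit an inline bump allocation of `size` bytes with alignment `align`,
    /// without calling out to the host.
    ///
    /// Both `size` and `align` must be `i32` values, and `align` is expected
    /// to be a power of two. The size must fit in the unused bits of a
    /// `VMGcKind` header word; otherwise the generated code traps.
    ///
    /// Returns the new object's GC reference (its index into the GC heap) and
    /// the raw pointer to the start of the object.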
    fn emit_inline_alloc(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        kind: VMGcKind,
        ty: Option<ModuleInternedTypeIndex>,
        size: ir::Value,
        align: ir::Value,
    ) -> (ir::Value, ir::Value) {
        assert_eq!(builder.func.dfg.value_type(size), ir::types::I32);
        assert_eq!(builder.func.dfg.value_type(align), ir::types::I32);

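        // The object's size is stored in the unused bits of its `VMGcKind`
        // header word, so trap if the size overlaps the kind's mask bits.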
        let mask = builder
            .ins()
            .iconst(ir::types::I32, i64::from(VMGcKind::MASK));
        let masked = builder.ins().band(size, mask);
        func_env.trapnz(builder, masked, crate::TRAP_ALLOCATION_TOO_LARGE);

        let pointer_type = func_env.pointer_type();
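        // Load the current bump index out of the GC heap data that the vmctx
        // points to. `next` is an `i32` index into the GC heap, not a raw
        // pointer.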
        let vmctx = func_env.vmctx_val(&mut builder.cursor());
        let ptr_to_next = builder.ins().load(
            pointer_type,
            ir::MemFlags::trusted().with_readonly(),
            vmctx,
            i32::from(func_env.offsets.ptr.vmctx_gc_heap_data()),
        );
        let next = builder
            .ins()
            .load(ir::types::I32, ir::MemFlags::trusted(), ptr_to_next, 0);

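        // Round the bump index up to the requested alignment:
        //
        //     aligned = (next + align - 1) & !(align - 1)
        //
        // trapping if the addition overflows. Since `align` is a power of
        // two, `align - 1` itself cannot wrap.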
        let minus_one = builder.ins().iconst(ir::types::I32, -1);
        let align_minus_one = builder.ins().iadd(align, minus_one);
        let next_plus_align_minus_one = func_env.uadd_overflow_trap(
            builder,
            next,
            align_minus_one,
            crate::TRAP_ALLOCATION_TOO_LARGE,
        );
        let not_align_minus_one = builder.ins().bnot(align_minus_one);
        let aligned = builder
            .ins()
            .band(next_plus_align_minus_one, not_align_minus_one);

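        // Check that the end of the new object stays within the GC heap's
        // bounds, trapping if it does not.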
        let end_of_object =
            func_env.uadd_overflow_trap(builder, aligned, size, crate::TRAP_ALLOCATION_TOO_LARGE);
        let uext_end_of_object = uextend_i32_to_pointer_type(builder, pointer_type, end_of_object);
        let (base, bound) = func_env.get_gc_heap_base_bound(builder);
        let is_in_bounds = builder.ins().icmp(
            ir::condcodes::IntCC::UnsignedLessThanOrEqual,
            uext_end_of_object,
            bound,
        );
        func_env.trapz(builder, is_in_bounds, crate::TRAP_ALLOCATION_TOO_LARGE);

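        // Write the object's header (the combined kind-and-size word and the
        // type index), advance the bump index past the new object, and hand
        // back the new GC reference and object pointer.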
        let uext_aligned = uextend_i32_to_pointer_type(builder, pointer_type, aligned);
        let ptr_to_object = builder.ins().iadd(base, uext_aligned);
        let kind = builder
            .ins()
            .iconst(ir::types::I32, i64::from(kind.as_u32()));
        let kind_and_size = builder.ins().bor(kind, size);
        let ty = match ty {
            Some(ty) => func_env.module_interned_to_shared_ty(&mut builder.cursor(), ty),
            None => builder.ins().iconst(
                func_env.vmshared_type_index_ty(),
                i64::from(VMSharedTypeIndex::reserved_value().as_bits()),
            ),
        };
        builder.ins().store(
            ir::MemFlags::trusted(),
            kind_and_size,
            ptr_to_object,
            i32::try_from(wasmtime_environ::VM_GC_HEADER_KIND_OFFSET).unwrap(),
        );
        builder.ins().store(
            ir::MemFlags::trusted(),
            ty,
            ptr_to_object,
            i32::try_from(wasmtime_environ::VM_GC_HEADER_TYPE_INDEX_OFFSET).unwrap(),
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), end_of_object, ptr_to_next, 0);

        (aligned, ptr_to_object)
    }
}

impl GcCompiler for NullCompiler {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

    fn alloc_array(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        array_type_index: TypeIndex,
        init: super::ArrayInit<'_>,
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[array_type_index].unwrap_module_type_index();
        let ptr_ty = func_env.pointer_type();

        let len_offset = gc_compiler(func_env)?.layouts().array_length_field_offset();
        let array_layout = func_env.array_layout(interned_type_index).clone();
        let base_size = array_layout.base_size;
        let align = array_layout.align;
        let len_to_elems_delta = base_size.checked_sub(len_offset).unwrap();

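        // Compute the overall size of the array object from its layout and
        // the requested element count.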
        let size = emit_array_size(func_env, builder, &array_layout, init);

        assert!(align.is_power_of_two());
        let align = builder.ins().iconst(ir::types::I32, i64::from(align));
        let (gc_ref, ptr_to_object) = self.emit_inline_alloc(
            func_env,
            builder,
            VMGcKind::ArrayRef,
            Some(interned_type_index),
            size,
            align,
        );

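        // Store the array's length into its length field.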
        let len_addr = builder.ins().iadd_imm(ptr_to_object, i64::from(len_offset));
        let len = init.len(&mut builder.cursor());
        builder
            .ins()
            .store(ir::MemFlags::trusted(), len, len_addr, 0);

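        // Compute the address of the first element, then initialize the
        // elements themselves.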
        let len_to_elems_delta = builder.ins().iconst(ptr_ty, i64::from(len_to_elems_delta));
        let elems_addr = builder.ins().iadd(len_addr, len_to_elems_delta);
        init.initialize(
            func_env,
            builder,
            interned_type_index,
            base_size,
            size,
            elems_addr,
            |func_env, builder, elem_ty, elem_addr, val| {
                write_field_at_addr(func_env, builder, elem_ty, elem_addr, val)
            },
        )?;

        Ok(gc_ref)
    }

    fn alloc_struct(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        struct_type_index: TypeIndex,
        field_vals: &[ir::Value],
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[struct_type_index].unwrap_module_type_index();
        let struct_layout = func_env.struct_layout(interned_type_index);

        let struct_size = struct_layout.size;
        let struct_align = struct_layout.align;

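        // The struct's size is stored in the unused bits of the `VMGcKind`
        // header word by `emit_inline_alloc`, so it must not overlap the
        // kind's mask bits.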
        assert_eq!(VMGcKind::MASK & struct_size, 0);
        assert_eq!(VMGcKind::UNUSED_MASK & struct_size, struct_size);
        let struct_size_val = builder.ins().iconst(ir::types::I32, i64::from(struct_size));

        let align = builder
            .ins()
            .iconst(ir::types::I32, i64::from(struct_align));

        let (struct_ref, raw_struct_pointer) = self.emit_inline_alloc(
            func_env,
            builder,
            VMGcKind::StructRef,
            Some(interned_type_index),
            struct_size_val,
            align,
        );

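        // Initialize the struct's fields via the raw pointer returned by the
        // inline allocation.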
        initialize_struct_fields(
            func_env,
            builder,
            interned_type_index,
            raw_struct_pointer,
            field_vals,
            |func_env, builder, ty, field_addr, val| {
                write_field_at_addr(func_env, builder, ty, field_addr, val)
            },
        )?;

        Ok(struct_ref)
    }

    fn translate_read_gc_reference(
        &mut self,
        _func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        _ty: WasmRefType,
        src: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<ir::Value> {
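        // The null collector needs no read barrier: reading a GC reference is
        // a plain `i32` load.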
        Ok(builder.ins().load(ir::types::I32, flags, src, 0))
    }

    fn translate_write_gc_reference(
        &mut self,
        _func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
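        // Likewise, no write barrier is needed: store the new reference
        // directly.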
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)
    }
}